pyconvexity 0.1.1__py3-none-any.whl → 0.1.3__py3-none-any.whl

This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.


Files changed (35)
  1. pyconvexity/__init__.py +30 -6
  2. pyconvexity/_version.py +1 -1
  3. pyconvexity/data/README.md +101 -0
  4. pyconvexity/data/__init__.py +18 -0
  5. pyconvexity/data/__pycache__/__init__.cpython-313.pyc +0 -0
  6. pyconvexity/data/loaders/__init__.py +3 -0
  7. pyconvexity/data/loaders/__pycache__/__init__.cpython-313.pyc +0 -0
  8. pyconvexity/data/loaders/__pycache__/cache.cpython-313.pyc +0 -0
  9. pyconvexity/data/loaders/cache.py +212 -0
  10. pyconvexity/data/sources/__init__.py +5 -0
  11. pyconvexity/data/sources/__pycache__/__init__.cpython-313.pyc +0 -0
  12. pyconvexity/data/sources/__pycache__/gem.cpython-313.pyc +0 -0
  13. pyconvexity/data/sources/gem.py +412 -0
  14. pyconvexity/io/__init__.py +32 -0
  15. pyconvexity/io/excel_exporter.py +991 -0
  16. pyconvexity/io/excel_importer.py +1112 -0
  17. pyconvexity/io/netcdf_exporter.py +192 -0
  18. pyconvexity/io/netcdf_importer.py +599 -0
  19. pyconvexity/models/__init__.py +7 -0
  20. pyconvexity/models/attributes.py +3 -1
  21. pyconvexity/models/components.py +3 -0
  22. pyconvexity/models/scenarios.py +177 -0
  23. pyconvexity/solvers/__init__.py +29 -0
  24. pyconvexity/solvers/pypsa/__init__.py +24 -0
  25. pyconvexity/solvers/pypsa/api.py +398 -0
  26. pyconvexity/solvers/pypsa/batch_loader.py +311 -0
  27. pyconvexity/solvers/pypsa/builder.py +656 -0
  28. pyconvexity/solvers/pypsa/constraints.py +321 -0
  29. pyconvexity/solvers/pypsa/solver.py +1255 -0
  30. pyconvexity/solvers/pypsa/storage.py +2207 -0
  31. {pyconvexity-0.1.1.dist-info → pyconvexity-0.1.3.dist-info}/METADATA +5 -2
  32. pyconvexity-0.1.3.dist-info/RECORD +45 -0
  33. pyconvexity-0.1.1.dist-info/RECORD +0 -20
  34. {pyconvexity-0.1.1.dist-info → pyconvexity-0.1.3.dist-info}/WHEEL +0 -0
  35. {pyconvexity-0.1.1.dist-info → pyconvexity-0.1.3.dist-info}/top_level.txt +0 -0
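
The headline addition is the new pyconvexity.io package, including the Excel exporter diffed below. As a rough orientation, a minimal usage sketch based on the signatures visible in the diff — the database file, network ID, and output path are hypothetical placeholders, and the callback illustrates the progress/cancellation contract the exporter assumes (a (percent, message) callable that is also invoked as (None, None) purely as a cancellation check and may raise KeyboardInterrupt to abort):

    from pyconvexity.io.excel_exporter import ExcelModelExporter

    def on_progress(percent, message):
        # Invoked with (None, None) as a pure cancellation check;
        # raising KeyboardInterrupt here aborts the export.
        if percent is not None:
            print(f"{percent:>3}% {message}")

    exporter = ExcelModelExporter()
    result = exporter.export_model_to_excel(
        db_path="model.db",            # hypothetical database file
        network_id=1,                  # hypothetical network ID
        output_path="model.xlsx",      # hypothetical output workbook
        scenario_id=None,              # None -> master scenario
        progress_callback=on_progress,
    )
    print(result["message"], result["stats"])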
pyconvexity/io/excel_exporter.py
@@ -0,0 +1,991 @@
+"""
+Excel exporter for PyConvexity energy system models.
+Exports complete network models to Excel workbooks with multiple sheets.
+"""
+
+import logging
+import sqlite3
+from typing import Dict, Any, Optional, List
+from pathlib import Path
+import pandas as pd
+from datetime import datetime
+import json
+
+# Import functions directly from pyconvexity
+from pyconvexity.core.database import open_connection
+from pyconvexity.core.errors import AttributeNotFound
+from pyconvexity.models import (
+    list_components_by_type, list_carriers, get_network_info,
+    get_network_time_periods, get_attribute, list_component_attributes,
+    get_network_config
+)
+from pyconvexity.validation import list_validation_rules
+
+logger = logging.getLogger(__name__)
+
+class ExcelModelExporter:
+    """Export entire network model to Excel workbook"""
+
+    def __init__(self):
+        self.logger = logging.getLogger(__name__)
+
+    def export_model_to_excel(
+        self,
+        db_path: str,
+        network_id: int,
+        output_path: str,
+        scenario_id: Optional[int] = None,
+        progress_callback: Optional[callable] = None
+    ) -> Dict[str, Any]:
+        """
+        Export complete network model to Excel workbook
+
+        Args:
+            db_path: Database path
+            network_id: Network ID to export
+            output_path: Excel file output path
+            scenario_id: Scenario ID (defaults to master scenario)
+            progress_callback: Optional callback for progress updates
+
+        Returns:
+            Export statistics and metadata
+        """
+
+        try:
+            if progress_callback:
+                progress_callback(0, "Starting Excel export...")
+
+            # Connect to database
+            conn = open_connection(db_path)
+
+            if progress_callback:
+                progress_callback(5, "Loading network information...")
+
+            # Get network information
+            network_info = get_network_info(conn, network_id)
+
+            # Get master scenario if no scenario specified
+            if scenario_id is None:
+                cursor = conn.execute(
+                    "SELECT id FROM scenarios WHERE network_id = ? AND is_master = TRUE",
+                    (network_id,)
+                )
+                scenario_result = cursor.fetchone()
+                if scenario_result:
+                    scenario_id = scenario_result[0]
+                else:
+                    raise ValueError("No master scenario found for network")
+
+            if progress_callback:
+                progress_callback(10, "Loading carriers...")
+
+            # Get carriers
+            carriers = list_carriers(conn, network_id)
+
+            if progress_callback:
+                progress_callback(15, "Loading components...")
+
+            # Get all component types
+            component_types = ['BUS', 'GENERATOR', 'LOAD', 'LINE', 'LINK', 'STORAGE_UNIT', 'STORE', 'CONSTRAINT']
+
+            # Load components by type
+            components_by_type = {}
+            for comp_type in component_types:
+                components = list_components_by_type(conn, network_id, comp_type)
+                components_by_type[comp_type] = components
+
+            if progress_callback:
+                progress_callback(25, "Processing component attributes...")
+
+            # Process components and their attributes
+            processed_components = {}
+            timeseries_data = {}
+
+            for comp_type, components in components_by_type.items():
+                processed_components[comp_type] = []
+                timeseries_data[comp_type] = {}
+
+                for component in components:
+                    # Check for cancellation during processing
+                    if progress_callback:
+                        try:
+                            progress_callback(None, None)  # Check for cancellation
+                        except KeyboardInterrupt:
+                            self.logger.info("Excel export cancelled by user")
+                            raise
+
+                    # Get component attributes (all possible attributes for this component type)
+                    attributes = self._get_component_attributes(conn, component.id, scenario_id, comp_type)
+
+                    # Process component data
+                    processed_component = self._process_component_for_excel(
+                        component, attributes, carriers, components_by_type
+                    )
+                    processed_components[comp_type].append(processed_component)
+
+                    # Extract timeseries data
+                    for attr_name, attr_data in attributes.items():
+                        if isinstance(attr_data, dict) and 'Timeseries' in attr_data:
+                            if comp_type not in timeseries_data:
+                                timeseries_data[comp_type] = {}
+                            if attr_name not in timeseries_data[comp_type]:
+                                timeseries_data[comp_type][attr_name] = {}
+                            # Store the timeseries points
+                            timeseries_data[comp_type][attr_name][component.name] = attr_data['points']
+
+            if progress_callback:
+                progress_callback(50, "Creating Excel workbook...")
+
+            # Check for cancellation before starting Excel creation
+            if progress_callback:
+                try:
+                    progress_callback(None, None)  # Check for cancellation
+                except KeyboardInterrupt:
+                    self.logger.info("Excel export cancelled before workbook creation")
+                    raise
+
+            # Get scenario information if scenario_id is provided
+            scenario_info = None
+            if scenario_id is not None:
+                scenario_info = self._get_scenario_info(conn, scenario_id)
+
+            # Create Excel workbook
+            with pd.ExcelWriter(output_path, engine='openpyxl') as writer:
+                # Create overview sheet
+                self._create_overview_sheet(writer, network_info, processed_components, scenario_info)
+
+                # Create component sheets
+                for comp_type in component_types:
+                    if processed_components[comp_type]:
+                        # Check for cancellation during sheet creation
+                        if progress_callback:
+                            try:
+                                progress_callback(None, None)  # Check for cancellation
+                            except KeyboardInterrupt:
+                                self.logger.info(f"Excel export cancelled during {comp_type} sheet creation")
+                                raise
+
+                        self._create_component_sheet(writer, conn, comp_type, processed_components[comp_type])
+
+                        # Create timeseries sheet if there's timeseries data
+                        if comp_type in timeseries_data and timeseries_data[comp_type]:
+                            self._create_timeseries_sheet(
+                                writer, comp_type, timeseries_data[comp_type], network_id, conn
+                            )
+
+                # Create carriers sheet
+                self._create_carriers_sheet(writer, carriers)
+
+                # Create network config sheet
+                self._create_network_config_sheet(writer, network_id, conn)
+
+                # Create statistics sheet if solve results are available
+                self._create_statistics_sheet(writer, network_id, scenario_id, conn)
+
+                # Create per-year statistics sheet if available
+                self._create_per_year_statistics_sheet(writer, network_id, scenario_id, conn)
+
+            if progress_callback:
+                progress_callback(100, "Excel export completed")
+
+            # Calculate statistics
+            stats = self._calculate_export_stats(processed_components, timeseries_data)
+
+            return {
+                "success": True,
+                "message": f"Network exported to Excel: {output_path}",
+                "output_path": output_path,
+                "stats": stats
+            }
+
+        except Exception as e:
+            self.logger.error(f"Excel export failed: {e}", exc_info=True)
+            if progress_callback:
+                progress_callback(None, f"Export failed: {str(e)}")
+            raise
+
+    def _get_component_attributes(self, conn, component_id: int, scenario_id: int, component_type: str) -> Dict[str, Any]:
+        """Get all possible attributes for a component type, with values where set"""
+        attributes = {}
+
+        # Get ALL possible attribute names for this component type from validation rules
+        validation_rules = list_validation_rules(conn, component_type)
+
+        for rule in validation_rules:
+            attr_name = rule.attribute_name
+            try:
+                # Try to get the attribute value (may not exist)
+                attr_value = get_attribute(conn, component_id, attr_name, scenario_id)
+
+                if attr_value.variant == "Static":
+                    # Extract static value
+                    static_value = attr_value.static_value
+                    if static_value.data_type() == "float":
+                        attributes[attr_name] = static_value.as_f64()
+                    elif static_value.data_type() == "int":
+                        attributes[attr_name] = int(static_value.as_f64())
+                    elif static_value.data_type() == "boolean":
+                        attributes[attr_name] = static_value.data["Boolean"]
+                    elif static_value.data_type() == "string":
+                        attributes[attr_name] = static_value.data["String"]
+                    else:
+                        attributes[attr_name] = static_value.data
+
+                elif attr_value.variant == "Timeseries":
+                    # Store timeseries points for later processing
+                    attributes[attr_name] = {
+                        'Timeseries': True,
+                        'points': attr_value.timeseries_value
+                    }
+
+            except AttributeNotFound:
+                # Attribute not set - always use empty string for blank Excel cell
+                attributes[attr_name] = ""
+
+            except Exception as e:
+                self.logger.warning(f"Failed to load attribute {attr_name} for component {component_id}: {e}")
+                # Still include the attribute with empty value
+                attributes[attr_name] = ""
+                continue
+
+        return attributes
+
+    def _process_component_for_excel(self, component, attributes: Dict, carriers: List, components_by_type: Dict) -> Dict[str, Any]:
+        """Process a component for Excel export"""
+        processed = {
+            'name': component.name,
+            'type': component.component_type.lower(),
+        }
+
+        # Add carrier name
+        if component.carrier_id:
+            carrier = next((c for c in carriers if c['id'] == component.carrier_id), None)
+            carrier_name = carrier['name'] if carrier else 'CARRIER_NOT_FOUND'
+            processed['carrier'] = carrier_name
+            self.logger.info(f"Component '{component.name}' has carrier_id={component.carrier_id}, resolved to carrier: {carrier_name}")
+        else:
+            processed['carrier'] = ''  # Use empty string for no carrier
+            self.logger.info(f"Component '{component.name}' has no carrier_id (carrier_id={component.carrier_id})")
+
+        # Add bus connections
+        if component.bus_id:
+            bus = next((b for b in components_by_type.get('BUS', []) if b.id == component.bus_id), None)
+            processed['bus'] = bus.name if bus else ''
+        else:
+            processed['bus'] = ''
+
+        if component.bus0_id:
+            bus0 = next((b for b in components_by_type.get('BUS', []) if b.id == component.bus0_id), None)
+            processed['bus0'] = bus0.name if bus0 else ''
+        else:
+            processed['bus0'] = ''
+
+        if component.bus1_id:
+            bus1 = next((b for b in components_by_type.get('BUS', []) if b.id == component.bus1_id), None)
+            processed['bus1'] = bus1.name if bus1 else ''
+        else:
+            processed['bus1'] = ''
+
+        # Add coordinates
+        processed['latitude'] = component.latitude if component.latitude is not None else ''
+        processed['longitude'] = component.longitude if component.longitude is not None else ''
+
+        # Add attributes
+        for attr_name, attr_value in attributes.items():
+            if isinstance(attr_value, dict) and 'Timeseries' in attr_value:
+                processed[attr_name] = '[timeseries]'
+            else:
+                # Special handling for carrier attribute - don't overwrite relationship carrier
+                if attr_name == 'carrier':
+                    if component.carrier_id is not None:
+                        self.logger.info(f"DEBUG: Skipping carrier attribute '{attr_value}' for '{component.name}' - using relationship carrier '{processed['carrier']}'")
+                        continue  # Skip the carrier attribute, keep the relationship carrier
+                    else:
+                        self.logger.info(f"DEBUG: Using carrier attribute '{attr_value}' for '{component.name}' (no relationship carrier)")
+
+                processed[attr_name] = attr_value
+
+        self.logger.info(f"DEBUG: Final processed data for '{component.name}': carrier='{processed.get('carrier', 'NOT_SET')}'")
+        return processed
+
+    def _filter_component_columns(self, conn, component_data: Dict[str, Any], component_type: str) -> Dict[str, Any]:
+        """Filter out unused columns based on component type, following DatabaseTable logic"""
+
+        filtered_data = {}
+
+        # Always include basic fields (name, carrier, latitude, longitude)
+        # Note: bus connections are NOT basic fields - they are component-type specific
+        # Note: "type" is NOT included - it's implicit based on the sheet/component type
+        # Note: CONSTRAINT components don't have carrier, latitude, or longitude - they are code-based rules
+        if component_type.upper() == 'CONSTRAINT':
+            basic_fields = ['name']  # Constraints only have name - no physical location or carrier
+        else:
+            basic_fields = ['name', 'carrier', 'latitude', 'longitude']
+
+        for field in basic_fields:
+            if field in component_data:
+                filtered_data[field] = component_data[field]
+                self.logger.info(f"Added basic field '{field}' = '{component_data[field]}' for component type {component_type}")
+                if field == 'carrier':
+                    self.logger.info(f"DEBUG: Setting carrier field to '{component_data[field]}' from component_data")
+
+        # Add bus connection columns based on component type - EXACT DatabaseTable logic
+        component_type_lower = component_type.lower()
+        needs_bus_connection = component_type_lower in ['generator', 'load', 'storage_unit', 'store', 'unmet_load']
+        needs_two_bus_connections = component_type_lower in ['line', 'link']
+
+
+        if needs_bus_connection:
+            if 'bus' in component_data:
+                filtered_data['bus'] = component_data['bus']
+        elif needs_two_bus_connections:
+            if 'bus0' in component_data:
+                filtered_data['bus0'] = component_data['bus0']
+            if 'bus1' in component_data:
+                filtered_data['bus1'] = component_data['bus1']
+        else:
+            # Buses and other components don't get bus connection columns
+            self.logger.info(f"No bus connection columns for {component_type_lower}")
+
+        # Get validation rules to determine which attributes are input vs output
+        try:
+
+            # Add all other attributes that aren't filtered out
+            for key, value in component_data.items():
+                if key in filtered_data:
+                    continue  # Already handled
+
+                # Filter out unused attributes following DatabaseTable logic
+                should_exclude = False
+                exclude_reason = ""
+
+                # Note: Carrier attribute exclusion is now handled in _process_component_for_excel
+                # to prevent overwriting relationship carriers
+
+                # Remove location and carrier attributes for CONSTRAINT components (they don't have physical location or carriers)
+                if component_type.upper() == 'CONSTRAINT' and key in ['carrier', 'latitude', 'longitude']:
+                    should_exclude = True
+                    exclude_reason = f"constraint exclusion - constraints don't have {key}"
+
+                # Remove 'type' and 'unit' attributes for buses (not used in this application)
+                elif component_type.upper() == 'BUS' and key in ['type', 'unit']:
+                    should_exclude = True
+                    exclude_reason = f"bus-specific exclusion ({key})"
+
+                # Remove 'x' and 'y' coordinates for buses only - we use latitude/longitude instead
+                elif component_type.upper() == 'BUS' and key in ['x', 'y']:
+                    should_exclude = True
+                    exclude_reason = f"bus coordinate exclusion ({key})"
+
+                # Remove sub-network and slack generator attributes for buses
+                elif component_type.upper() == 'BUS' and key in ['sub_network', 'slack_generator']:
+                    should_exclude = True
+                    exclude_reason = f"bus network exclusion ({key})"
+
+                # CRITICAL: Remove bus connection columns for components that shouldn't have them
+                elif key in ['bus', 'bus0', 'bus1']:
+                    if key == 'bus' and not needs_bus_connection:
+                        should_exclude = True
+                        exclude_reason = f"bus connection not needed for {component_type_lower}"
+                    elif key in ['bus0', 'bus1'] and not needs_two_bus_connections:
+                        should_exclude = True
+                        exclude_reason = f"two-bus connection not needed for {component_type_lower}"
+
+
+                if should_exclude:
+                    self.logger.info(f"Excluded {key}: {exclude_reason}")
+                else:
+                    # Special handling for carrier attribute - don't overwrite relationship field
+                    if key == 'carrier' and 'carrier' in filtered_data:
+                        self.logger.info(f"Skipping carrier attribute '{value}' - keeping relationship carrier '{filtered_data['carrier']}'")
+                    else:
+                        filtered_data[key] = value
+                        self.logger.info(f"Added attribute: {key} = {value}")
+
+        except Exception as e:
+            self.logger.warning(f"Could not load validation rules for filtering: {e}")
+            # Fallback: include all attributes except the basic exclusions
+            for key, value in component_data.items():
+                if key in filtered_data:
+                    continue
+                if key == 'carrier':  # Skip carrier attribute
+                    continue
+                filtered_data[key] = value
+
+
+        return filtered_data
+
+    def _create_overview_sheet(self, writer, network_info: Dict, processed_components: Dict, scenario_info: Dict = None):
+        """Create overview sheet with network metadata"""
+        # Create key-value pairs as separate lists for two columns
+        keys = []
+        values = []
+
+        # Network information
+        keys.extend(['Name', 'Description', 'Time Start', 'Time End', 'Time Interval'])
+        values.extend([
+            network_info['name'],
+            network_info.get('description', ''),
+            network_info['time_start'],
+            network_info['time_end'],
+            network_info['time_interval']
+        ])
+
+        # Scenario information
+        if scenario_info:
+            keys.append('')
+            values.append('')
+            keys.extend(['Scenario Information', 'Scenario Name', 'Scenario Description', 'Is Master Scenario', 'Scenario Created'])
+            values.extend([
+                '',
+                scenario_info.get('name', 'Unknown'),
+                scenario_info.get('description', '') or 'No description',
+                'Yes' if scenario_info.get('is_master', False) else 'No',
+                scenario_info.get('created_at', '')
+            ])
+
+        # Empty row
+        keys.append('')
+        values.append('')
+
+        # Export information
+        keys.extend(['Export Information', 'Export Date', 'Export Version'])
+        values.extend(['', datetime.now().strftime('%Y-%m-%d %H:%M:%S'), self._get_app_version()])
+
+        # Create two-column DataFrame
+        df = pd.DataFrame({
+            'Property': keys,
+            'Value': values
+        })
+        df.to_excel(writer, sheet_name='Overview', index=False)
+
+    def _get_scenario_info(self, conn, scenario_id: int) -> Dict[str, Any]:
+        """Get scenario information from database"""
+        try:
+            cursor = conn.execute("""
+                SELECT id, network_id, name, description, is_master, created_at
+                FROM scenarios
+                WHERE id = ?
+            """, (scenario_id,))
+
+            row = cursor.fetchone()
+            if not row:
+                self.logger.warning(f"No scenario found with ID {scenario_id}")
+                return {}
+
+            return {
+                'id': row[0],
+                'network_id': row[1],
+                'name': row[2],
+                'description': row[3],
+                'is_master': bool(row[4]),
+                'created_at': row[5]
+            }
+
+        except Exception as e:
+            self.logger.warning(f"Failed to retrieve scenario info: {e}")
+            return {}
+
+    def _create_component_sheet(self, writer, conn, component_type: str, components: List[Dict]):
+        """Create a sheet for a specific component type"""
+        if not components:
+            return
+
+        # Apply column filtering to each component
+        filtered_components = []
+        for component in components:
+            filtered_component = self._filter_component_columns(conn, component, component_type)
+            filtered_components.append(filtered_component)
+
+        # Convert to DataFrame
+        df = pd.DataFrame(filtered_components)
+
+        # Reorder columns to put core fields first
+        core_columns = ['name', 'carrier', 'bus', 'bus0', 'bus1', 'latitude', 'longitude']
+        other_columns = []
+        for col in df.columns:
+            if col not in core_columns:
+                other_columns.append(col)
+        ordered_columns = []
+        for col in core_columns:
+            if col in df.columns:
+                ordered_columns.append(col)
+        ordered_columns.extend(other_columns)
+
+        df = df[ordered_columns]
+
+        # Write to Excel with proper pluralization
+        sheet_name_mapping = {
+            'BUS': 'Buses',
+            'GENERATOR': 'Generators',
+            'LOAD': 'Loads',
+            'LINE': 'Lines',
+            'LINK': 'Links',
+            'STORAGE_UNIT': 'Storage Units',
+            'STORE': 'Stores',
+            'CONSTRAINT': 'Constraints'
+        }
+        sheet_name = sheet_name_mapping.get(component_type, f"{component_type.title()}s")
+        df.to_excel(writer, sheet_name=sheet_name, index=False)
+
+    def _create_timeseries_sheet(self, writer, component_type: str, timeseries_data: Dict, network_id: int, conn):
+        """Create a timeseries sheet for a component type"""
+        # Get network time periods
+        time_periods = get_network_time_periods(conn, network_id)
+        if not time_periods:
+            self.logger.warning(f"No time periods found for network {network_id}, skipping timeseries sheet for {component_type}")
+            return
+
+        self.logger.info(f"Creating timeseries sheet for {component_type} with {len(time_periods)} time periods")
+        self.logger.info(f"First few time periods: {[(p.formatted_time, p.timestamp, p.period_index) for p in time_periods[:3]]}")
+
+        # Create DataFrame with human-readable timestamps
+        timestamps = [period.formatted_time for period in time_periods]  # Use formatted_time instead of timestamp
+        df_data = {'timestamp': timestamps}
+
+        # Add component columns for each attribute
+        for attr_name, component_data in timeseries_data.items():
+            for component_name, timeseries_points in component_data.items():
+                if isinstance(timeseries_points, list):
+                    # Remove repetitive logging
+                    # self.logger.info(f"Processing {component_name}_{attr_name} with {len(timeseries_points)} points")
+
+                    # Extract values from TimeseriesPoint objects
+                    values = [point.value for point in timeseries_points]
+                    # Pad or truncate to match time periods
+                    while len(values) < len(timestamps):
+                        values.append(0)
+                    values = values[:len(timestamps)]
+                    df_data[f"{component_name}_{attr_name}"] = values
+
+        df = pd.DataFrame(df_data)
+        sheet_name = f"{component_type.title()} Timeseries"
+        df.to_excel(writer, sheet_name=sheet_name, index=False)
+        self.logger.info(f"Created timeseries sheet '{sheet_name}' with {len(df)} rows and {len(df.columns)} columns")
+
+    def _create_carriers_sheet(self, writer, carriers: List[Dict]):
+        """Create carriers sheet"""
+        if not carriers:
+            return
+
+        df = pd.DataFrame(carriers)
+        df.to_excel(writer, sheet_name='Carriers', index=False)
+
+    def _create_network_config_sheet(self, writer, network_id: int, conn):
+        """Create network configuration sheet"""
+        try:
+            config = get_network_config(conn, network_id, None)  # Master scenario
+            if config:
+                config_data = []
+                for param_name, param_value in config.items():
+                    config_data.append({
+                        'Parameter': param_name,
+                        'Value': str(param_value),
+                        'Type': type(param_value).__name__,
+                        'Description': ''
+                    })
+
+                if config_data:
+                    df = pd.DataFrame(config_data)
+                    df.to_excel(writer, sheet_name='Network Config', index=False)
+        except Exception as e:
+            self.logger.warning(f"Could not create network config sheet: {e}")
+
+    def _calculate_export_stats(self, processed_components: Dict, timeseries_data: Dict) -> Dict[str, Any]:
+        """Calculate export statistics"""
+        total_components = sum(len(components) for components in processed_components.values())
+        total_timeseries = sum(
+            len(attr_data)
+            for comp_data in timeseries_data.values()
+            for attr_data in comp_data.values()
+        )
+
+        return {
+            'total_components': total_components,
+            'total_timeseries': total_timeseries,
+            'component_types': len(processed_components),
+            'components_by_type': {
+                comp_type: len(components)
+                for comp_type, components in processed_components.items()
+            }
+        }
+
+    def _get_solve_results(self, conn, network_id: int, scenario_id: int) -> Optional[Dict[str, Any]]:
+        """Get solve results from the database"""
+        try:
+            cursor = conn.execute("""
+                SELECT results_json, metadata_json, solver_name, solve_status,
+                       objective_value, solve_time_seconds, solved_at
+                FROM network_solve_results
+                WHERE network_id = ? AND scenario_id = ?
+            """, (network_id, scenario_id))
+
+            row = cursor.fetchone()
+            if not row:
+                self.logger.info(f"No solve results found for network {network_id}, scenario {scenario_id}")
+                return None
+
+            results_json_str, metadata_json_str, solver_name, solve_status, objective_value, solve_time, solved_at = row
+
+            # Parse the JSON results
+            if results_json_str:
+                results = json.loads(results_json_str)
+                # Add metadata from the table columns
+                results['solver_name'] = solver_name
+                results['solve_status'] = solve_status
+                results['objective_value'] = objective_value
+                results['solve_time_seconds'] = solve_time
+                results['solved_at'] = solved_at
+
+                if metadata_json_str:
+                    metadata = json.loads(metadata_json_str)
+                    results['metadata'] = metadata
+
+                return results
+
+            return None
+
+        except Exception as e:
+            self.logger.warning(f"Failed to retrieve solve results: {e}")
+            return None
+
+    def _get_solve_results_by_year(self, conn, network_id: int, scenario_id: int) -> Optional[Dict[int, Dict[str, Any]]]:
+        """Get per-year solve results from the database"""
+        try:
+            cursor = conn.execute("""
+                SELECT year, results_json, metadata_json
+                FROM network_solve_results_by_year
+                WHERE network_id = ? AND scenario_id = ?
+                ORDER BY year
+            """, (network_id, scenario_id))
+
+            rows = cursor.fetchall()
+            if not rows:
+                self.logger.info(f"No per-year solve results found for network {network_id}, scenario {scenario_id}")
+                return None
+
+            year_results = {}
+            for row in rows:
+                year, results_json_str, metadata_json_str = row
+
+                if results_json_str:
+                    year_data = json.loads(results_json_str)
+
+                    # Add metadata if available
+                    if metadata_json_str:
+                        metadata = json.loads(metadata_json_str)
+                        year_data['metadata'] = metadata
+
+                    year_results[year] = year_data
+
+            return year_results if year_results else None
+
+        except Exception as e:
+            self.logger.warning(f"Failed to retrieve per-year solve results: {e}")
+            return None
+
+    def _create_statistics_sheet(self, writer, network_id: int, scenario_id: int, conn):
+        """Create statistics sheet with full-run solve results (no per-year data)"""
+        try:
+            # Get solve results
+            solve_results = self._get_solve_results(conn, network_id, scenario_id)
+            if not solve_results:
+                self.logger.info("No solve results available, skipping statistics sheet")
+                return
+
+            # Prepare data for the statistics sheet
+            stats_data = []
+
+            # Section 1: Solve Summary
+            stats_data.extend([
+                ['SOLVE SUMMARY', ''],
+                ['Solver Name', solve_results.get('solver_name', 'Unknown')],
+                ['Solve Status', solve_results.get('solve_status', 'Unknown')],
+                ['Solve Time (seconds)', solve_results.get('solve_time_seconds', 0)],
+                ['Objective Value', solve_results.get('objective_value', 0)],
+                ['Solved At', solve_results.get('solved_at', '')],
+                ['', '']  # Empty row separator
+            ])
+
+            # Extract network statistics if available
+            network_stats = solve_results.get('network_statistics', {})
+
+            # Section 2: Core Network Statistics
+            core_summary = network_stats.get('core_summary', {})
+            if core_summary:
+                stats_data.extend([
+                    ['CORE NETWORK STATISTICS', ''],
+                    ['Total Generation (MWh)', core_summary.get('total_generation_mwh', 0)],
+                    ['Total Demand (MWh)', core_summary.get('total_demand_mwh', 0)],
+                    ['Total Cost', core_summary.get('total_cost', 0)],
+                    ['Load Factor', core_summary.get('load_factor', 0)],
+                    ['Unserved Energy (MWh)', core_summary.get('unserved_energy_mwh', 0)],
+                    ['', '']
+                ])
+
+            # Section 3: Custom Statistics
+            custom_stats = network_stats.get('custom_statistics', {})
+            if custom_stats:
+                # Emissions by Carrier
+                emissions = custom_stats.get('emissions_by_carrier', {})
+                if emissions:
+                    stats_data.extend([
+                        ['EMISSIONS BY CARRIER (tons CO2)', '']
+                    ])
+                    for carrier, value in emissions.items():
+                        if value > 0:  # Only show carriers with emissions
+                            stats_data.append([carrier, value])
+                    stats_data.extend([
+                        ['Total Emissions (tons CO2)', custom_stats.get('total_emissions_tons_co2', 0)],
+                        ['', '']
+                    ])
+
+                # Generation Dispatch by Carrier
+                dispatch = custom_stats.get('dispatch_by_carrier', {})
+                if dispatch:
+                    stats_data.extend([
+                        ['GENERATION DISPATCH BY CARRIER (MWh)', '']
+                    ])
+                    for carrier, value in dispatch.items():
+                        if value > 0:  # Only show carriers with generation
+                            stats_data.append([carrier, value])
+                    stats_data.append(['', ''])
+
+                # Power Capacity by Carrier (MW)
+                power_capacity = custom_stats.get('power_capacity_by_carrier', {})
+                if power_capacity:
+                    stats_data.extend([
+                        ['POWER CAPACITY BY CARRIER (MW)', '']
+                    ])
+                    for carrier, value in power_capacity.items():
+                        if value > 0:  # Only show carriers with capacity
+                            stats_data.append([carrier, value])
+                    stats_data.append(['', ''])
+
+                # Energy Capacity by Carrier (MWh)
+                energy_capacity = custom_stats.get('energy_capacity_by_carrier', {})
+                if energy_capacity:
+                    stats_data.extend([
+                        ['ENERGY CAPACITY BY CARRIER (MWh)', '']
+                    ])
+                    for carrier, value in energy_capacity.items():
+                        if value > 0:  # Only show carriers with capacity
+                            stats_data.append([carrier, value])
+                    stats_data.append(['', ''])
+
+                # Capital Costs by Carrier
+                capital_costs = custom_stats.get('capital_cost_by_carrier', {})
+                if capital_costs:
+                    stats_data.extend([
+                        ['CAPITAL COSTS BY CARRIER', '']
+                    ])
+                    for carrier, value in capital_costs.items():
+                        if value > 0:  # Only show carriers with costs
+                            stats_data.append([carrier, value])
+                    stats_data.extend([
+                        ['Total Capital Cost', custom_stats.get('total_capital_cost', 0)],
+                        ['', '']
+                    ])
+
+                # Operational Costs by Carrier
+                op_costs = custom_stats.get('operational_cost_by_carrier', {})
+                if op_costs:
+                    stats_data.extend([
+                        ['OPERATIONAL COSTS BY CARRIER', '']
+                    ])
+                    for carrier, value in op_costs.items():
+                        if value > 0:  # Only show carriers with costs
+                            stats_data.append([carrier, value])
+                    stats_data.extend([
+                        ['Total Operational Cost', custom_stats.get('total_operational_cost', 0)],
+                        ['', '']
+                    ])
+
+                # Total System Costs by Carrier
+                total_costs = custom_stats.get('total_system_cost_by_carrier', {})
+                if total_costs:
+                    stats_data.extend([
+                        ['TOTAL SYSTEM COSTS BY CARRIER', '']
+                    ])
+                    for carrier, value in total_costs.items():
+                        if value > 0:  # Only show carriers with costs
+                            stats_data.append([carrier, value])
+                    stats_data.extend([
+                        ['Total Currency Cost', custom_stats.get('total_currency_cost', 0)],
+                        ['Average Price per MWh', custom_stats.get('average_price_per_mwh', 0)],
+                        ['', '']
+                    ])
+
+                # Unmet Load Statistics
+                unmet_stats = custom_stats.get('unmet_load_statistics', {})
+                if unmet_stats:
+                    stats_data.extend([
+                        ['UNMET LOAD STATISTICS', ''],
+                        ['Unmet Load (MWh)', unmet_stats.get('unmet_load_mwh', 0)],
+                        ['Unmet Load Percentage', custom_stats.get('unmet_load_percentage', 0)],
+                        ['Max Unmet Load Hour (MW)', custom_stats.get('max_unmet_load_hour_mw', 0)],
+                        ['', '']
+                    ])
+
+            # Section 4: Component Storage Statistics
+            storage_stats = solve_results.get('component_storage_stats', {})
+            if storage_stats:
+                stats_data.extend([
+                    ['COMPONENT STORAGE STATISTICS', '']
+                ])
+                for key, value in storage_stats.items():
+                    # Convert snake_case to readable format
+                    readable_key = key.replace('_', ' ').title()
+                    stats_data.append([readable_key, value])
+                stats_data.append(['', ''])
+
+            # Section 5: Runtime Information
+            runtime_info = network_stats.get('runtime_info', {})
+            if runtime_info:
+                stats_data.extend([
+                    ['RUNTIME INFORMATION', '']
+                ])
+                for key, value in runtime_info.items():
+                    # Convert snake_case to readable format
+                    readable_key = key.replace('_', ' ').title()
+                    stats_data.append([readable_key, value])
+                stats_data.append(['', ''])
+
+            # Section 6: Solver Information
+            solver_info = network_stats.get('solver_info', {})
+            if solver_info:
+                stats_data.extend([
+                    ['SOLVER INFORMATION', '']
+                ])
+                for key, value in solver_info.items():
+                    # Convert snake_case to readable format
+                    readable_key = key.replace('_', ' ').title()
+                    stats_data.append([readable_key, value])
+                stats_data.append(['', ''])
+
+            # Create DataFrame and write to Excel (simple 2-column format)
+            if stats_data:
+                df = pd.DataFrame(stats_data, columns=['Parameter', 'Value'])
+                df.to_excel(writer, sheet_name='Statistics', index=False)
+                self.logger.info(f"Created Statistics sheet with {len(stats_data)} rows")
+
+        except Exception as e:
+            self.logger.warning(f"Failed to create statistics sheet: {e}")
+            # Don't fail the entire export if statistics sheet fails
+
+    def _create_per_year_statistics_sheet(self, writer, network_id: int, scenario_id: int, conn):
+        """Create per-year statistics sheet in tidy data format"""
+        try:
+            # Get per-year solve results
+            year_results = self._get_solve_results_by_year(conn, network_id, scenario_id)
+            if not year_results:
+                self.logger.info("No per-year solve results available, skipping per-year statistics sheet")
+                return
+
+            # Prepare tidy data: Variable, Year, Carrier, Value, Units
+            tidy_data = []
+
+            # Get sorted years
+            years = sorted(year_results.keys())
+
+            # Define the statistics we want to include with their units
+            stat_definitions = [
+                ('dispatch_by_carrier', 'Generation Dispatch', 'MWh'),
+                ('power_capacity_by_carrier', 'Power Capacity', 'MW'),
+                ('energy_capacity_by_carrier', 'Energy Capacity', 'MWh'),
+                ('capital_cost_by_carrier', 'Capital Cost', 'Currency'),
+                ('operational_cost_by_carrier', 'Operational Cost', 'Currency'),
+                ('total_system_cost_by_carrier', 'Total System Cost', 'Currency'),
+                ('emissions_by_carrier', 'Emissions', 'tons CO2')
+            ]
+
+            # Process each statistic type
+            for stat_key, stat_name, units in stat_definitions:
+                # Collect all carriers across all years for this statistic
+                all_carriers = set()
+                for year in years:
+                    year_data = year_results[year]
+                    if 'network_statistics' in year_data and 'custom_statistics' in year_data['network_statistics']:
+                        custom_stats = year_data['network_statistics']['custom_statistics']
+                        if stat_key in custom_stats:
+                            all_carriers.update(custom_stats[stat_key].keys())
+
+                # Add data rows for each carrier and year combination
+                for carrier in sorted(all_carriers):
+                    for year in years:
+                        year_data = year_results[year]
+                        value = 0.0
+
+                        if 'network_statistics' in year_data and 'custom_statistics' in year_data['network_statistics']:
+                            custom_stats = year_data['network_statistics']['custom_statistics']
+                            if stat_key in custom_stats and carrier in custom_stats[stat_key]:
+                                value = custom_stats[stat_key][carrier]
+
+                        # Only include rows with non-zero values to keep the data clean
+                        if value > 0:
+                            tidy_data.append([stat_name, year, carrier, value, units])
+
+            # Add core summary statistics (these don't have carriers)
+            core_stat_definitions = [
+                ('total_generation_mwh', 'Total Generation', 'MWh'),
+                ('total_demand_mwh', 'Total Demand', 'MWh'),
+                ('total_cost', 'Total Cost', 'Currency'),
+                ('load_factor', 'Load Factor', 'Ratio'),
+                ('unserved_energy_mwh', 'Unserved Energy', 'MWh'),
+                ('total_emissions_tons_co2', 'Total Emissions', 'tons CO2')
+            ]
+
+            for stat_key, stat_name, units in core_stat_definitions:
+                for year in years:
+                    year_data = year_results[year]
+                    value = 0.0
+
+                    # Check both core_summary and custom_statistics
+                    if 'network_statistics' in year_data:
+                        network_stats = year_data['network_statistics']
+
+                        # Try core_summary first
+                        if 'core_summary' in network_stats and stat_key in network_stats['core_summary']:
+                            value = network_stats['core_summary'][stat_key]
+                        # Try custom_statistics as fallback
+                        elif 'custom_statistics' in network_stats and stat_key in network_stats['custom_statistics']:
+                            value = network_stats['custom_statistics'][stat_key]
+
+                    # Include all core statistics (even zeros for completeness)
+                    tidy_data.append([stat_name, year, 'Total', value, units])
+
+            # Create DataFrame and write to Excel
+            if tidy_data:
+                df = pd.DataFrame(tidy_data, columns=['Variable', 'Year', 'Carrier', 'Value', 'Units'])
+                df.to_excel(writer, sheet_name='Per-Year Statistics', index=False)
+                self.logger.info(f"Created Per-Year Statistics sheet with {len(tidy_data)} rows")
+            else:
+                self.logger.info("No per-year statistics data to export")
+
+        except Exception as e:
+            self.logger.warning(f"Failed to create per-year statistics sheet: {e}")
+            # Don't fail the entire export if per-year statistics sheet fails
+
+    def _get_app_version(self) -> str:
+        """Get the application version."""
+        try:
+            # Try to read from package.json in the project root
+            import json
+            import os
+            from pathlib import Path
+
+            # Look for package.json in parent directories
+            current_dir = Path(__file__).parent
+            while current_dir != current_dir.parent:
+                package_json = current_dir / "package.json"
+                if package_json.exists():
+                    with open(package_json, "r") as f:
+                        package_data = json.load(f)
+                    return package_data.get("version", "1.0.0")
+                current_dir = current_dir.parent
+
+            # Fallback version
+            return "1.0.0"
+        except Exception as e:
+            self.logger.warning(f"Could not get app version: {e}")
+            return "1.0.0"