pyconvexity 0.4.0__py3-none-any.whl → 0.4.1__py3-none-any.whl

This diff compares publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (43)
  1. pyconvexity/__init__.py +87 -46
  2. pyconvexity/_version.py +1 -1
  3. pyconvexity/core/__init__.py +3 -5
  4. pyconvexity/core/database.py +111 -103
  5. pyconvexity/core/errors.py +16 -10
  6. pyconvexity/core/types.py +61 -54
  7. pyconvexity/data/__init__.py +0 -1
  8. pyconvexity/data/loaders/cache.py +65 -64
  9. pyconvexity/data/schema/01_core_schema.sql +134 -234
  10. pyconvexity/data/schema/02_data_metadata.sql +38 -168
  11. pyconvexity/data/schema/03_validation_data.sql +327 -264
  12. pyconvexity/data/sources/gem.py +169 -139
  13. pyconvexity/io/__init__.py +4 -10
  14. pyconvexity/io/excel_exporter.py +694 -480
  15. pyconvexity/io/excel_importer.py +817 -545
  16. pyconvexity/io/netcdf_exporter.py +66 -61
  17. pyconvexity/io/netcdf_importer.py +850 -619
  18. pyconvexity/models/__init__.py +109 -59
  19. pyconvexity/models/attributes.py +197 -178
  20. pyconvexity/models/carriers.py +70 -67
  21. pyconvexity/models/components.py +260 -236
  22. pyconvexity/models/network.py +202 -284
  23. pyconvexity/models/results.py +65 -55
  24. pyconvexity/models/scenarios.py +58 -88
  25. pyconvexity/solvers/__init__.py +5 -5
  26. pyconvexity/solvers/pypsa/__init__.py +3 -3
  27. pyconvexity/solvers/pypsa/api.py +150 -134
  28. pyconvexity/solvers/pypsa/batch_loader.py +165 -162
  29. pyconvexity/solvers/pypsa/builder.py +390 -291
  30. pyconvexity/solvers/pypsa/constraints.py +184 -162
  31. pyconvexity/solvers/pypsa/solver.py +968 -663
  32. pyconvexity/solvers/pypsa/storage.py +1377 -671
  33. pyconvexity/timeseries.py +63 -60
  34. pyconvexity/validation/__init__.py +14 -6
  35. pyconvexity/validation/rules.py +95 -84
  36. pyconvexity-0.4.1.dist-info/METADATA +46 -0
  37. pyconvexity-0.4.1.dist-info/RECORD +42 -0
  38. pyconvexity/data/schema/04_scenario_schema.sql +0 -122
  39. pyconvexity/data/schema/migrate_add_geometries.sql +0 -73
  40. pyconvexity-0.4.0.dist-info/METADATA +0 -138
  41. pyconvexity-0.4.0.dist-info/RECORD +0 -44
  42. {pyconvexity-0.4.0.dist-info → pyconvexity-0.4.1.dist-info}/WHEEL +0 -0
  43. {pyconvexity-0.4.0.dist-info → pyconvexity-0.4.1.dist-info}/top_level.txt +0 -0
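
The excel_exporter.py diff below illustrates the two themes of this release: the model-access API no longer takes an explicit network_id (each database now holds a single network, with the base network addressed as scenario_id=None), and the module was reformatted in Black style (double quotes, trailing commas, wrapped calls). A minimal caller-side sketch of the signature change, inferred from the diff below ("model.db" is a placeholder path, not from the package docs):

    from pyconvexity.core.database import open_connection
    from pyconvexity.models import get_network_info, list_carriers

    conn = open_connection("model.db")  # placeholder path

    # 0.4.0: accessors took an explicit network_id
    #   network_info = get_network_info(conn, network_id)
    #   carriers = list_carriers(conn, network_id)

    # 0.4.1: the connection itself identifies the single network
    network_info = get_network_info(conn)
    carriers = list_carriers(conn)  # items now expose .id/.name attributes, not dict keys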
pyconvexity/io/excel_exporter.py
@@ -15,97 +15,104 @@ import json
  from pyconvexity.core.database import open_connection
  from pyconvexity.core.errors import AttributeNotFound
  from pyconvexity.models import (
- list_components_by_type, list_carriers, get_network_info,
- get_network_time_periods, get_attribute, list_component_attributes,
- get_network_config
+ list_components_by_type,
+ list_carriers,
+ get_network_info,
+ get_network_time_periods,
+ get_attribute,
+ list_component_attributes,
+ get_network_config,
  )
  from pyconvexity.validation import list_validation_rules
  from pyconvexity.models.attributes import get_timeseries as get_timeseries_conn

  logger = logging.getLogger(__name__)

+
  class ExcelModelExporter:
  """Export entire network model to Excel workbook"""
-
+
  def __init__(self):
  self.logger = logging.getLogger(__name__)
-
+
  def export_model_to_excel(
  self,
  db_path: str,
- network_id: int,
  output_path: str,
  scenario_id: Optional[int] = None,
- progress_callback: Optional[callable] = None
+ progress_callback: Optional[callable] = None,
  ) -> Dict[str, Any]:
  """
  Export complete network model to Excel workbook
-
+
  Args:
  db_path: Database path
- network_id: Network ID to export
+
  output_path: Excel file output path
  scenario_id: Scenario ID (defaults to master scenario)
  progress_callback: Optional callback for progress updates
-
+
  Returns:
  Export statistics and metadata
  """
-
+
  try:
  if progress_callback:
  progress_callback(0, "Starting Excel export...")
-
+
  # Connect to database
  conn = open_connection(db_path)
-
+
  if progress_callback:
  progress_callback(5, "Loading network information...")
-
+
  # Get network information
- network_info = get_network_info(conn, network_id)
-
+ network_info = get_network_info(conn)
+
  # Get master scenario if no scenario specified
  if scenario_id is None:
- cursor = conn.execute(
- "SELECT id FROM scenarios WHERE network_id = ? AND is_master = TRUE",
- (network_id,)
- )
- scenario_result = cursor.fetchone()
- if scenario_result:
- scenario_id = scenario_result[0]
- else:
- raise ValueError("No master scenario found for network")
-
+ # Base network uses scenario_id = NULL, no need to query
+ # scenario_id remains None for base network
+ pass
+
  if progress_callback:
  progress_callback(10, "Loading carriers...")
-
+
  # Get carriers
- carriers = list_carriers(conn, network_id)
-
+ carriers = list_carriers(conn)
+
  if progress_callback:
  progress_callback(15, "Loading components...")
-
+
  # Get all component types
- component_types = ['BUS', 'GENERATOR', 'LOAD', 'LINE', 'LINK', 'STORAGE_UNIT', 'STORE', 'CONSTRAINT']
-
+ component_types = [
+ "BUS",
+ "GENERATOR",
+ "LOAD",
+ "LINE",
+ "LINK",
+ "STORAGE_UNIT",
+ "STORE",
+ "CONSTRAINT",
+ ]
+
  # Load components by type
  components_by_type = {}
  for comp_type in component_types:
- components = list_components_by_type(conn, network_id, comp_type)
+ components = list_components_by_type(conn, comp_type)
  components_by_type[comp_type] = components
-
+
  if progress_callback:
  progress_callback(25, "Processing component attributes...")
-
+
  # Process components and their attributes
  processed_components = {}
  timeseries_data = {}
-
+
  for comp_type, components in components_by_type.items():
  processed_components[comp_type] = []
  timeseries_data[comp_type] = {}
-
+
  for component in components:
  # Check for cancellation during processing
  if progress_callback:
@@ -114,35 +121,41 @@ class ExcelModelExporter:
  except KeyboardInterrupt:
  self.logger.info("Excel export cancelled by user")
  raise
-
+
  # Get component attributes (all possible attributes for this component type)
- attributes = self._get_component_attributes(conn, component.id, scenario_id, comp_type)
-
+ attributes = self._get_component_attributes(
+ conn, component.id, scenario_id, comp_type
+ )
+
  # Process component data
  processed_component = self._process_component_for_excel(
  component, attributes, carriers, components_by_type
  )
  processed_components[comp_type].append(processed_component)
-
+
  # Extract timeseries data
  for attr_name, attr_data in attributes.items():
- if isinstance(attr_data, dict) and 'Timeseries' in attr_data:
+ if isinstance(attr_data, dict) and "Timeseries" in attr_data:
  if comp_type not in timeseries_data:
  timeseries_data[comp_type] = {}
  if attr_name not in timeseries_data[comp_type]:
  timeseries_data[comp_type][attr_name] = {}
-
+
  # Handle both new efficient format and legacy format
- if 'values' in attr_data:
+ if "values" in attr_data:
  # New efficient format - store values directly
- timeseries_data[comp_type][attr_name][component.name] = attr_data['values']
- elif 'points' in attr_data:
+ timeseries_data[comp_type][attr_name][
+ component.name
+ ] = attr_data["values"]
+ elif "points" in attr_data:
  # Legacy format - store the timeseries points
- timeseries_data[comp_type][attr_name][component.name] = attr_data['points']
-
+ timeseries_data[comp_type][attr_name][
+ component.name
+ ] = attr_data["points"]
+
  if progress_callback:
  progress_callback(50, "Creating Excel workbook...")
-
+
  # Check for cancellation before starting Excel creation
  if progress_callback:
  try:
@@ -150,17 +163,19 @@ class ExcelModelExporter:
  except KeyboardInterrupt:
  self.logger.info("Excel export cancelled before workbook creation")
  raise
-
+
  # Get scenario information if scenario_id is provided
  scenario_info = None
  if scenario_id is not None:
  scenario_info = self._get_scenario_info(conn, scenario_id)
-
+
  # Create Excel workbook
- with pd.ExcelWriter(output_path, engine='openpyxl') as writer:
+ with pd.ExcelWriter(output_path, engine="openpyxl") as writer:
  # Create overview sheet
- self._create_overview_sheet(writer, network_info, processed_components, scenario_info)
-
+ self._create_overview_sheet(
+ writer, network_info, processed_components, scenario_info
+ )
+
  # Create component sheets
  for comp_type in component_types:
  if processed_components[comp_type]:
@@ -169,61 +184,67 @@ class ExcelModelExporter:
  try:
  progress_callback(None, None) # Check for cancellation
  except KeyboardInterrupt:
- self.logger.info(f"Excel export cancelled during {comp_type} sheet creation")
+ self.logger.info(
+ f"Excel export cancelled during {comp_type} sheet creation"
+ )
  raise
-
- self._create_component_sheet(writer, conn, comp_type, processed_components[comp_type])
-
+
+ self._create_component_sheet(
+ writer, conn, comp_type, processed_components[comp_type]
+ )
+
  # Create timeseries sheet if there's timeseries data
  if comp_type in timeseries_data and timeseries_data[comp_type]:
  self._create_timeseries_sheet(
- writer, comp_type, timeseries_data[comp_type], network_id, conn
+ writer, comp_type, timeseries_data[comp_type], conn
  )
-
+
  # Create carriers sheet
  self._create_carriers_sheet(writer, carriers)
-
+
  # Create network config sheet
- self._create_network_config_sheet(writer, network_id, conn)
-
+ self._create_network_config_sheet(writer, conn)
+
  # Create statistics sheet if solve results are available
- self._create_statistics_sheet(writer, network_id, scenario_id, conn)
-
+ self._create_statistics_sheet(writer, scenario_id, conn)
+
  # Create per-year statistics sheet if available
- self._create_per_year_statistics_sheet(writer, network_id, scenario_id, conn)
-
+ self._create_per_year_statistics_sheet(writer, scenario_id, conn)
+
  if progress_callback:
  progress_callback(100, "Excel export completed")
-
+
  # Calculate statistics
  stats = self._calculate_export_stats(processed_components, timeseries_data)
-
+
  return {
  "success": True,
  "message": f"Network exported to Excel: {output_path}",
  "output_path": output_path,
- "stats": stats
+ "stats": stats,
  }
-
+
  except Exception as e:
  self.logger.error(f"Excel export failed: {e}", exc_info=True)
  if progress_callback:
  progress_callback(None, f"Export failed: {str(e)}")
  raise
-
- def _get_component_attributes(self, conn, component_id: int, scenario_id: int, component_type: str) -> Dict[str, Any]:
+
+ def _get_component_attributes(
+ self, conn, component_id: int, scenario_id: int, component_type: str
+ ) -> Dict[str, Any]:
  """Get all possible attributes for a component type, with values where set"""
  attributes = {}
-
+
  # Get ALL possible attribute names for this component type from validation rules
  validation_rules = list_validation_rules(conn, component_type)
-
+
  for rule in validation_rules:
  attr_name = rule.attribute_name
  try:
  # Try to get the attribute value (may not exist)
  attr_value = get_attribute(conn, component_id, attr_name, scenario_id)
-
+
  if attr_value.variant == "Static":
  # Extract static value
  static_value = attr_value.static_value
@@ -237,294 +258,394 @@
  attributes[attr_name] = static_value.data["String"]
  else:
  attributes[attr_name] = static_value.data
-
+
  elif attr_value.variant == "Timeseries":
  # Use new efficient timeseries access
  try:
- timeseries = get_timeseries_conn(conn, component_id, attr_name, scenario_id)
+ timeseries = get_timeseries_conn(
+ conn, component_id, attr_name, scenario_id
+ )
  if timeseries and timeseries.values:
  attributes[attr_name] = {
- 'Timeseries': True,
- 'values': timeseries.values
+ "Timeseries": True,
+ "values": timeseries.values,
  }
  else:
  # Fallback to legacy method if new method fails
  attributes[attr_name] = {
- 'Timeseries': True,
- 'points': attr_value.timeseries_value
+ "Timeseries": True,
+ "points": attr_value.timeseries_value,
  }
  except Exception as ts_e:
- self.logger.warning(f"Failed to load timeseries {attr_name} for component {component_id}: {ts_e}")
+ self.logger.warning(
+ f"Failed to load timeseries {attr_name} for component {component_id}: {ts_e}"
+ )
  # Fallback to legacy method
  attributes[attr_name] = {
- 'Timeseries': True,
- 'points': attr_value.timeseries_value
+ "Timeseries": True,
+ "points": attr_value.timeseries_value,
  }
-
+
  except AttributeNotFound:
  # Attribute not set - always use empty string for blank Excel cell
  attributes[attr_name] = ""
-
+
  except Exception as e:
- self.logger.warning(f"Failed to load attribute {attr_name} for component {component_id}: {e}")
+ self.logger.warning(
+ f"Failed to load attribute {attr_name} for component {component_id}: {e}"
+ )
  # Still include the attribute with empty value
  attributes[attr_name] = ""
  continue
-
+
  return attributes
-
- def _process_component_for_excel(self, component, attributes: Dict, carriers: List, components_by_type: Dict) -> Dict[str, Any]:
+
+ def _process_component_for_excel(
+ self, component, attributes: Dict, carriers: List, components_by_type: Dict
+ ) -> Dict[str, Any]:
  """Process a component for Excel export"""
  processed = {
- 'name': component.name,
- 'type': component.component_type.lower(),
+ "name": component.name,
+ "type": component.component_type.lower(),
  }
-
+
  # Add carrier name
  if component.carrier_id:
- carrier = next((c for c in carriers if c['id'] == component.carrier_id), None)
- carrier_name = carrier['name'] if carrier else 'CARRIER_NOT_FOUND'
- processed['carrier'] = carrier_name
- self.logger.info(f"Component '{component.name}' has carrier_id={component.carrier_id}, resolved to carrier: {carrier_name}")
+ # Carriers are objects with attributes, not dictionaries
+ carrier = next((c for c in carriers if c.id == component.carrier_id), None)
+ carrier_name = carrier.name if carrier else "CARRIER_NOT_FOUND"
+ processed["carrier"] = carrier_name
+ self.logger.info(
+ f"Component '{component.name}' has carrier_id={component.carrier_id}, resolved to carrier: {carrier_name}"
+ )
  else:
- processed['carrier'] = '' # Use empty string for no carrier
- self.logger.info(f"Component '{component.name}' has no carrier_id (carrier_id={component.carrier_id})")
-
+ processed["carrier"] = "" # Use empty string for no carrier
+ self.logger.info(
+ f"Component '{component.name}' has no carrier_id (carrier_id={component.carrier_id})"
+ )
+
  # Add bus connections
  if component.bus_id:
- bus = next((b for b in components_by_type.get('BUS', []) if b.id == component.bus_id), None)
- processed['bus'] = bus.name if bus else ''
+ bus = next(
+ (
+ b
+ for b in components_by_type.get("BUS", [])
+ if b.id == component.bus_id
+ ),
+ None,
+ )
+ processed["bus"] = bus.name if bus else ""
  else:
- processed['bus'] = ''
-
+ processed["bus"] = ""
+
  if component.bus0_id:
- bus0 = next((b for b in components_by_type.get('BUS', []) if b.id == component.bus0_id), None)
- processed['bus0'] = bus0.name if bus0 else ''
+ bus0 = next(
+ (
+ b
+ for b in components_by_type.get("BUS", [])
+ if b.id == component.bus0_id
+ ),
+ None,
+ )
+ processed["bus0"] = bus0.name if bus0 else ""
  else:
- processed['bus0'] = ''
-
+ processed["bus0"] = ""
+
  if component.bus1_id:
- bus1 = next((b for b in components_by_type.get('BUS', []) if b.id == component.bus1_id), None)
- processed['bus1'] = bus1.name if bus1 else ''
+ bus1 = next(
+ (
+ b
+ for b in components_by_type.get("BUS", [])
+ if b.id == component.bus1_id
+ ),
+ None,
+ )
+ processed["bus1"] = bus1.name if bus1 else ""
  else:
- processed['bus1'] = ''
-
+ processed["bus1"] = ""
+
  # Add coordinates
- processed['latitude'] = component.latitude if component.latitude is not None else ''
- processed['longitude'] = component.longitude if component.longitude is not None else ''
-
+ processed["latitude"] = (
+ component.latitude if component.latitude is not None else ""
+ )
+ processed["longitude"] = (
+ component.longitude if component.longitude is not None else ""
+ )
+
  # Add attributes
  for attr_name, attr_value in attributes.items():
- if isinstance(attr_value, dict) and 'Timeseries' in attr_value:
- processed[attr_name] = '[timeseries]'
+ if isinstance(attr_value, dict) and "Timeseries" in attr_value:
+ processed[attr_name] = "[timeseries]"
  else:
  # Special handling for carrier attribute - don't overwrite relationship carrier
- if attr_name == 'carrier':
+ if attr_name == "carrier":
  if component.carrier_id is not None:
- self.logger.info(f"DEBUG: Skipping carrier attribute '{attr_value}' for '{component.name}' - using relationship carrier '{processed['carrier']}'")
+ self.logger.info(
+ f"DEBUG: Skipping carrier attribute '{attr_value}' for '{component.name}' - using relationship carrier '{processed['carrier']}'"
+ )
  continue # Skip the carrier attribute, keep the relationship carrier
  else:
- self.logger.info(f"DEBUG: Using carrier attribute '{attr_value}' for '{component.name}' (no relationship carrier)")
-
+ self.logger.info(
+ f"DEBUG: Using carrier attribute '{attr_value}' for '{component.name}' (no relationship carrier)"
+ )
+
  processed[attr_name] = attr_value
-
- self.logger.info(f"DEBUG: Final processed data for '{component.name}': carrier='{processed.get('carrier', 'NOT_SET')}'")
+
+ self.logger.info(
+ f"DEBUG: Final processed data for '{component.name}': carrier='{processed.get('carrier', 'NOT_SET')}'"
+ )
  return processed
-
- def _filter_component_columns(self, conn, component_data: Dict[str, Any], component_type: str) -> Dict[str, Any]:
+
+ def _filter_component_columns(
+ self, conn, component_data: Dict[str, Any], component_type: str
+ ) -> Dict[str, Any]:
  """Filter out unused columns based on component type, following DatabaseTable logic"""
-
+
  filtered_data = {}
-
+
  # Always include basic fields (name, carrier, latitude, longitude)
  # Note: bus connections are NOT basic fields - they are component-type specific
  # Note: "type" is NOT included - it's implicit based on the sheet/component type
  # Note: CONSTRAINT components don't have carrier, latitude, or longitude - they are code-based rules
- if component_type.upper() == 'CONSTRAINT':
- basic_fields = ['name'] # Constraints only have name - no physical location or carrier
+ if component_type.upper() == "CONSTRAINT":
+ basic_fields = [
+ "name"
+ ] # Constraints only have name - no physical location or carrier
  else:
- basic_fields = ['name', 'carrier', 'latitude', 'longitude']
-
+ basic_fields = ["name", "carrier", "latitude", "longitude"]
+
  for field in basic_fields:
  if field in component_data:
  filtered_data[field] = component_data[field]
- self.logger.info(f"Added basic field '{field}' = '{component_data[field]}' for component type {component_type}")
- if field == 'carrier':
- self.logger.info(f"DEBUG: Setting carrier field to '{component_data[field]}' from component_data")
-
+ self.logger.info(
+ f"Added basic field '{field}' = '{component_data[field]}' for component type {component_type}"
+ )
+ if field == "carrier":
+ self.logger.info(
+ f"DEBUG: Setting carrier field to '{component_data[field]}' from component_data"
+ )
+
  # Add bus connection columns based on component type - EXACT DatabaseTable logic
  component_type_lower = component_type.lower()
- needs_bus_connection = component_type_lower in ['generator', 'load', 'storage_unit', 'store', 'unmet_load']
- needs_two_bus_connections = component_type_lower in ['line', 'link']
-
+ needs_bus_connection = component_type_lower in [
+ "generator",
+ "load",
+ "storage_unit",
+ "store",
+ "unmet_load",
+ ]
+ needs_two_bus_connections = component_type_lower in ["line", "link"]

  if needs_bus_connection:
- if 'bus' in component_data:
- filtered_data['bus'] = component_data['bus']
+ if "bus" in component_data:
+ filtered_data["bus"] = component_data["bus"]
  elif needs_two_bus_connections:
- if 'bus0' in component_data:
- filtered_data['bus0'] = component_data['bus0']
- if 'bus1' in component_data:
- filtered_data['bus1'] = component_data['bus1']
+ if "bus0" in component_data:
+ filtered_data["bus0"] = component_data["bus0"]
+ if "bus1" in component_data:
+ filtered_data["bus1"] = component_data["bus1"]
  else:
  # Buses and other components don't get bus connection columns
  self.logger.info(f"No bus connection columns for {component_type_lower}")
-
+
  # Get validation rules to determine which attributes are input vs output
  try:
-
+
  # Add all other attributes that aren't filtered out
  for key, value in component_data.items():
  if key in filtered_data:
  continue # Already handled
-
+
  # Filter out unused attributes following DatabaseTable logic
  should_exclude = False
  exclude_reason = ""
-
+
  # Note: Carrier attribute exclusion is now handled in _process_component_for_excel
  # to prevent overwriting relationship carriers
-
+
  # Remove location and carrier attributes for CONSTRAINT components (they don't have physical location or carriers)
- if component_type.upper() == 'CONSTRAINT' and key in ['carrier', 'latitude', 'longitude']:
+ if component_type.upper() == "CONSTRAINT" and key in [
+ "carrier",
+ "latitude",
+ "longitude",
+ ]:
  should_exclude = True
- exclude_reason = f"constraint exclusion - constraints don't have {key}"
-
+ exclude_reason = (
+ f"constraint exclusion - constraints don't have {key}"
+ )
+
  # Remove 'type' and 'unit' attributes for buses (not used in this application)
- elif component_type.upper() == 'BUS' and key in ['type', 'unit']:
+ elif component_type.upper() == "BUS" and key in ["type", "unit"]:
  should_exclude = True
  exclude_reason = f"bus-specific exclusion ({key})"
-
+
  # Remove 'x' and 'y' coordinates for buses only - we use latitude/longitude instead
- elif component_type.upper() == 'BUS' and key in ['x', 'y']:
+ elif component_type.upper() == "BUS" and key in ["x", "y"]:
  should_exclude = True
  exclude_reason = f"bus coordinate exclusion ({key})"
-
+
  # Remove sub-network and slack generator attributes for buses
- elif component_type.upper() == 'BUS' and key in ['sub_network', 'slack_generator']:
+ elif component_type.upper() == "BUS" and key in [
+ "sub_network",
+ "slack_generator",
+ ]:
  should_exclude = True
  exclude_reason = f"bus network exclusion ({key})"
-
+
  # CRITICAL: Remove bus connection columns for components that shouldn't have them
- elif key in ['bus', 'bus0', 'bus1']:
- if key == 'bus' and not needs_bus_connection:
+ elif key in ["bus", "bus0", "bus1"]:
+ if key == "bus" and not needs_bus_connection:
  should_exclude = True
- exclude_reason = f"bus connection not needed for {component_type_lower}"
- elif key in ['bus0', 'bus1'] and not needs_two_bus_connections:
+ exclude_reason = (
+ f"bus connection not needed for {component_type_lower}"
+ )
+ elif key in ["bus0", "bus1"] and not needs_two_bus_connections:
  should_exclude = True
- exclude_reason = f"two-bus connection not needed for {component_type_lower}"
-
-
+ exclude_reason = (
+ f"two-bus connection not needed for {component_type_lower}"
+ )
+
  if should_exclude:
  self.logger.info(f"Excluded {key}: {exclude_reason}")
  else:
  # Special handling for carrier attribute - don't overwrite relationship field
- if key == 'carrier' and 'carrier' in filtered_data:
- self.logger.info(f"Skipping carrier attribute '{value}' - keeping relationship carrier '{filtered_data['carrier']}'")
+ if key == "carrier" and "carrier" in filtered_data:
+ self.logger.info(
+ f"Skipping carrier attribute '{value}' - keeping relationship carrier '{filtered_data['carrier']}'"
+ )
  else:
  filtered_data[key] = value
  self.logger.info(f"Added attribute: {key} = {value}")
-
+
  except Exception as e:
  self.logger.warning(f"Could not load validation rules for filtering: {e}")
  # Fallback: include all attributes except the basic exclusions
  for key, value in component_data.items():
  if key in filtered_data:
  continue
- if key == 'carrier': # Skip carrier attribute
+ if key == "carrier": # Skip carrier attribute
  continue
  filtered_data[key] = value
-
-
+
  return filtered_data
-
- def _create_overview_sheet(self, writer, network_info: Dict, processed_components: Dict, scenario_info: Dict = None):
+
+ def _create_overview_sheet(
+ self,
+ writer,
+ network_info: Dict,
+ processed_components: Dict,
+ scenario_info: Dict = None,
+ ):
  """Create overview sheet with network metadata"""
  # Create key-value pairs as separate lists for two columns
  keys = []
  values = []
-
+
  # Network information
- keys.extend(['Name', 'Description', 'Time Start', 'Time End', 'Time Interval'])
- values.extend([
- network_info['name'],
- network_info.get('description', ''),
- network_info['time_start'],
- network_info['time_end'],
- network_info['time_interval']
- ])
-
+ keys.extend(["Name", "Description", "Time Start", "Time End", "Time Interval"])
+ values.extend(
+ [
+ network_info["name"],
+ network_info.get("description", ""),
+ network_info["time_start"],
+ network_info["time_end"],
+ network_info["time_interval"],
+ ]
+ )
+
  # Scenario information
  if scenario_info:
- keys.append('')
- values.append('')
- keys.extend(['Scenario Information', 'Scenario Name', 'Scenario Description', 'Is Master Scenario', 'Scenario Created'])
- values.extend([
- '',
- scenario_info.get('name', 'Unknown'),
- scenario_info.get('description', '') or 'No description',
- 'Yes' if scenario_info.get('is_master', False) else 'No',
- scenario_info.get('created_at', '')
- ])
-
+ keys.append("")
+ values.append("")
+ keys.extend(
+ [
+ "Scenario Information",
+ "Scenario Name",
+ "Scenario Description",
+ "Is Master Scenario",
+ "Scenario Created",
+ ]
+ )
+ values.extend(
+ [
+ "",
+ scenario_info.get("name", "Unknown"),
+ scenario_info.get("description", "") or "No description",
+ "Yes" if scenario_info.get("is_master", False) else "No",
+ scenario_info.get("created_at", ""),
+ ]
+ )
+
  # Empty row
- keys.append('')
- values.append('')
-
+ keys.append("")
+ values.append("")
+
  # Export information
- keys.extend(['Export Information', 'Export Date', 'Export Version'])
- values.extend(['', datetime.now().strftime('%Y-%m-%d %H:%M:%S'), self._get_app_version()])
-
+ keys.extend(["Export Information", "Export Date", "Export Version"])
+ values.extend(
+ ["", datetime.now().strftime("%Y-%m-%d %H:%M:%S"), self._get_app_version()]
+ )
+
  # Create two-column DataFrame
- df = pd.DataFrame({
- 'Property': keys,
- 'Value': values
- })
- df.to_excel(writer, sheet_name='Overview', index=False)
-
+ df = pd.DataFrame({"Property": keys, "Value": values})
+ df.to_excel(writer, sheet_name="Overview", index=False)
+
  def _get_scenario_info(self, conn, scenario_id: int) -> Dict[str, Any]:
  """Get scenario information from database"""
  try:
- cursor = conn.execute("""
- SELECT id, network_id, name, description, is_master, created_at
+ cursor = conn.execute(
+ """
+ SELECT id, name, description, created_at
  FROM scenarios
  WHERE id = ?
- """, (scenario_id,))
-
+ """,
+ (scenario_id,),
+ )
+
  row = cursor.fetchone()
  if not row:
  self.logger.warning(f"No scenario found with ID {scenario_id}")
  return {}
-
+
  return {
- 'id': row[0],
- 'network_id': row[1],
- 'name': row[2],
- 'description': row[3],
- 'is_master': bool(row[4]),
- 'created_at': row[5]
+ "id": row[0],
+ "name": row[1],
+ "description": row[2],
+ "created_at": row[3],
  }
-
+
  except Exception as e:
  self.logger.warning(f"Failed to retrieve scenario info: {e}")
  return {}
-
- def _create_component_sheet(self, writer, conn, component_type: str, components: List[Dict]):
+
+ def _create_component_sheet(
+ self, writer, conn, component_type: str, components: List[Dict]
+ ):
  """Create a sheet for a specific component type"""
  if not components:
  return
-
+
  # Apply column filtering to each component
  filtered_components = []
  for component in components:
- filtered_component = self._filter_component_columns(conn, component, component_type)
+ filtered_component = self._filter_component_columns(
+ conn, component, component_type
+ )
  filtered_components.append(filtered_component)
-
+
  # Convert to DataFrame
  df = pd.DataFrame(filtered_components)
-
+
  # Reorder columns to put core fields first
- core_columns = ['name', 'carrier', 'bus', 'bus0', 'bus1', 'latitude', 'longitude']
+ core_columns = [
+ "name",
+ "carrier",
+ "bus",
+ "bus0",
+ "bus1",
+ "latitude",
+ "longitude",
+ ]
  other_columns = []
  for col in df.columns:
  if col not in core_columns:
@@ -534,459 +655,552 @@ class ExcelModelExporter:
  if col in df.columns:
  ordered_columns.append(col)
  ordered_columns.extend(other_columns)
-
+
  df = df[ordered_columns]
-
+
  # Write to Excel with proper pluralization
  sheet_name_mapping = {
- 'BUS': 'Buses',
- 'GENERATOR': 'Generators',
- 'LOAD': 'Loads',
- 'LINE': 'Lines',
- 'LINK': 'Links',
- 'STORAGE_UNIT': 'Storage Units',
- 'STORE': 'Stores',
- 'CONSTRAINT': 'Constraints'
+ "BUS": "Buses",
+ "GENERATOR": "Generators",
+ "LOAD": "Loads",
+ "LINE": "Lines",
+ "LINK": "Links",
+ "STORAGE_UNIT": "Storage Units",
+ "STORE": "Stores",
+ "CONSTRAINT": "Constraints",
  }
- sheet_name = sheet_name_mapping.get(component_type, f"{component_type.title()}s")
+ sheet_name = sheet_name_mapping.get(
+ component_type, f"{component_type.title()}s"
+ )
  df.to_excel(writer, sheet_name=sheet_name, index=False)
-
- def _create_timeseries_sheet(self, writer, component_type: str, timeseries_data: Dict, network_id: int, conn):
+
+ def _create_timeseries_sheet(
+ self, writer, component_type: str, timeseries_data: Dict, conn
+ ):
  """Create a timeseries sheet for a component type"""
  # Get network time periods
- time_periods = get_network_time_periods(conn, network_id)
+ time_periods = get_network_time_periods(conn)
  if not time_periods:
- self.logger.warning(f"No time periods found for network {network_id}, skipping timeseries sheet for {component_type}")
+ self.logger.warning(
+ f"No time periods found, skipping timeseries sheet for {component_type}"
+ )
  return
-
- self.logger.info(f"Creating timeseries sheet for {component_type} with {len(time_periods)} time periods")
- self.logger.info(f"First few time periods: {[(p.formatted_time, p.timestamp, p.period_index) for p in time_periods[:3]]}")
-
+
+ self.logger.info(
+ f"Creating timeseries sheet for {component_type} with {len(time_periods)} time periods"
+ )
+ self.logger.info(
+ f"First few time periods: {[(p.formatted_time, p.timestamp, p.period_index) for p in time_periods[:3]]}"
+ )
+
  # Create DataFrame with human-readable timestamps
- timestamps = [period.formatted_time for period in time_periods] # Use formatted_time instead of timestamp
- df_data = {'timestamp': timestamps}
-
+ timestamps = [
+ period.formatted_time for period in time_periods
+ ] # Use formatted_time instead of timestamp
+ df_data = {"timestamp": timestamps}
+
  # Add component columns for each attribute
  for attr_name, component_data in timeseries_data.items():
  for component_name, timeseries_data_item in component_data.items():
  if isinstance(timeseries_data_item, list):
  # Handle efficient format (list of values)
  values = timeseries_data_item
-
+
  # Pad or truncate to match time periods
  while len(values) < len(timestamps):
  values.append(0)
- values = values[:len(timestamps)]
+ values = values[: len(timestamps)]
  df_data[f"{component_name}_{attr_name}"] = values
-
+
  df = pd.DataFrame(df_data)
  sheet_name = f"{component_type.title()} Timeseries"
  df.to_excel(writer, sheet_name=sheet_name, index=False)
- self.logger.info(f"Created timeseries sheet '{sheet_name}' with {len(df)} rows and {len(df.columns)} columns")
-
+ self.logger.info(
+ f"Created timeseries sheet '{sheet_name}' with {len(df)} rows and {len(df.columns)} columns"
+ )
+
  def _create_carriers_sheet(self, writer, carriers: List[Dict]):
  """Create carriers sheet"""
  if not carriers:
  return
-
+
  df = pd.DataFrame(carriers)
- df.to_excel(writer, sheet_name='Carriers', index=False)
-
- def _create_network_config_sheet(self, writer, network_id: int, conn):
+ df.to_excel(writer, sheet_name="Carriers", index=False)
+
+ def _create_network_config_sheet(self, writer, conn):
  """Create network configuration sheet"""
  try:
- config = get_network_config(conn, network_id, None) # Master scenario
+ config = get_network_config(conn, None) # Master scenario
  if config:
  config_data = []
  for param_name, param_value in config.items():
- config_data.append({
- 'Parameter': param_name,
- 'Value': str(param_value),
- 'Type': type(param_value).__name__,
- 'Description': ''
- })
-
+ config_data.append(
+ {
+ "Parameter": param_name,
+ "Value": str(param_value),
+ "Type": type(param_value).__name__,
+ "Description": "",
+ }
+ )
+
  if config_data:
  df = pd.DataFrame(config_data)
- df.to_excel(writer, sheet_name='Network Config', index=False)
+ df.to_excel(writer, sheet_name="Network Config", index=False)
  except Exception as e:
  self.logger.warning(f"Could not create network config sheet: {e}")
-
- def _calculate_export_stats(self, processed_components: Dict, timeseries_data: Dict) -> Dict[str, Any]:
+
+ def _calculate_export_stats(
+ self, processed_components: Dict, timeseries_data: Dict
+ ) -> Dict[str, Any]:
  """Calculate export statistics"""
- total_components = sum(len(components) for components in processed_components.values())
+ total_components = sum(
+ len(components) for components in processed_components.values()
+ )
  total_timeseries = sum(
- len(attr_data)
- for comp_data in timeseries_data.values()
+ len(attr_data)
+ for comp_data in timeseries_data.values()
  for attr_data in comp_data.values()
  )
-
+
  return {
- 'total_components': total_components,
- 'total_timeseries': total_timeseries,
- 'component_types': len(processed_components),
- 'components_by_type': {
- comp_type: len(components)
+ "total_components": total_components,
+ "total_timeseries": total_timeseries,
+ "component_types": len(processed_components),
+ "components_by_type": {
+ comp_type: len(components)
  for comp_type, components in processed_components.items()
- }
- }
+ },
+ }

- def _get_solve_results(self, conn, network_id: int, scenario_id: int) -> Optional[Dict[str, Any]]:
+ def _get_solve_results(
+ self, conn, scenario_id: Optional[int]
+ ) -> Optional[Dict[str, Any]]:
  """Get solve results from the database"""
  try:
- cursor = conn.execute("""
+ cursor = conn.execute(
+ """
  SELECT results_json, metadata_json, solver_name, solve_status,
  objective_value, solve_time_seconds, solved_at
  FROM network_solve_results
- WHERE network_id = ? AND scenario_id = ?
- """, (network_id, scenario_id))
-
+ WHERE scenario_id = ? OR (scenario_id IS NULL AND ? IS NULL)
+ """,
+ (scenario_id, scenario_id),
+ )
+
  row = cursor.fetchone()
  if not row:
- self.logger.info(f"No solve results found for network {network_id}, scenario {scenario_id}")
+ self.logger.info(f"No solve results found for scenario {scenario_id}")
  return None
-
- results_json_str, metadata_json_str, solver_name, solve_status, objective_value, solve_time, solved_at = row
-
+
+ (
+ results_json_str,
+ metadata_json_str,
+ solver_name,
+ solve_status,
+ objective_value,
+ solve_time,
+ solved_at,
+ ) = row
+
  # Parse the JSON results
  if results_json_str:
  results = json.loads(results_json_str)
  # Add metadata from the table columns
- results['solver_name'] = solver_name
- results['solve_status'] = solve_status
- results['objective_value'] = objective_value
- results['solve_time_seconds'] = solve_time
- results['solved_at'] = solved_at
-
+ results["solver_name"] = solver_name
+ results["solve_status"] = solve_status
+ results["objective_value"] = objective_value
+ results["solve_time_seconds"] = solve_time
+ results["solved_at"] = solved_at
+
  if metadata_json_str:
  metadata = json.loads(metadata_json_str)
- results['metadata'] = metadata
-
+ results["metadata"] = metadata
+
  return results
-
+
  return None
-
+
  except Exception as e:
  self.logger.warning(f"Failed to retrieve solve results: {e}")
  return None
-
- def _get_solve_results_by_year(self, conn, network_id: int, scenario_id: int) -> Optional[Dict[int, Dict[str, Any]]]:
+
+ def _get_solve_results_by_year(
+ self, conn, scenario_id: Optional[int]
+ ) -> Optional[Dict[int, Dict[str, Any]]]:
  """Get per-year solve results from the database"""
  try:
- cursor = conn.execute("""
+ cursor = conn.execute(
+ """
  SELECT year, results_json, metadata_json
  FROM network_solve_results_by_year
- WHERE network_id = ? AND scenario_id = ?
+ WHERE scenario_id = ? OR (scenario_id IS NULL AND ? IS NULL)
  ORDER BY year
- """, (network_id, scenario_id))
-
+ """,
+ (scenario_id, scenario_id),
+ )
+
  rows = cursor.fetchall()
  if not rows:
- self.logger.info(f"No per-year solve results found for network {network_id}, scenario {scenario_id}")
+ self.logger.info(
+ f"No per-year solve results found for scenario {scenario_id}"
+ )
  return None
-
+
  year_results = {}
  for row in rows:
  year, results_json_str, metadata_json_str = row
-
+
  if results_json_str:
  year_data = json.loads(results_json_str)
-
+
  # Add metadata if available
  if metadata_json_str:
  metadata = json.loads(metadata_json_str)
- year_data['metadata'] = metadata
-
+ year_data["metadata"] = metadata
+
  year_results[year] = year_data
-
+
  return year_results if year_results else None
-
+
  except Exception as e:
  self.logger.warning(f"Failed to retrieve per-year solve results: {e}")
  return None
-
- def _create_statistics_sheet(self, writer, network_id: int, scenario_id: int, conn):
+
+ def _create_statistics_sheet(self, writer, scenario_id: int, conn):
  """Create statistics sheet with full-run solve results (no per-year data)"""
  try:
  # Get solve results
- solve_results = self._get_solve_results(conn, network_id, scenario_id)
+ solve_results = self._get_solve_results(conn, scenario_id)
  if not solve_results:
- self.logger.info("No solve results available, skipping statistics sheet")
+ self.logger.info(
+ "No solve results available, skipping statistics sheet"
+ )
  return
-
+
  # Prepare data for the statistics sheet
  stats_data = []
-
+
  # Section 1: Solve Summary
- stats_data.extend([
- ['SOLVE SUMMARY', ''],
- ['Solver Name', solve_results.get('solver_name', 'Unknown')],
- ['Solve Status', solve_results.get('solve_status', 'Unknown')],
- ['Solve Time (seconds)', solve_results.get('solve_time_seconds', 0)],
- ['Objective Value', solve_results.get('objective_value', 0)],
- ['Solved At', solve_results.get('solved_at', '')],
- ['', ''] # Empty row separator
- ])
-
+ stats_data.extend(
+ [
+ ["SOLVE SUMMARY", ""],
+ ["Solver Name", solve_results.get("solver_name", "Unknown")],
+ ["Solve Status", solve_results.get("solve_status", "Unknown")],
+ [
+ "Solve Time (seconds)",
+ solve_results.get("solve_time_seconds", 0),
+ ],
+ ["Objective Value", solve_results.get("objective_value", 0)],
+ ["Solved At", solve_results.get("solved_at", "")],
+ ["", ""], # Empty row separator
+ ]
+ )
+
  # Extract network statistics if available
- network_stats = solve_results.get('network_statistics', {})
-
+ network_stats = solve_results.get("network_statistics", {})
+
  # Section 2: Core Network Statistics
- core_summary = network_stats.get('core_summary', {})
+ core_summary = network_stats.get("core_summary", {})
  if core_summary:
- stats_data.extend([
- ['CORE NETWORK STATISTICS', ''],
- ['Total Generation (MWh)', core_summary.get('total_generation_mwh', 0)],
- ['Total Demand (MWh)', core_summary.get('total_demand_mwh', 0)],
- ['Total Cost', core_summary.get('total_cost', 0)],
- ['Load Factor', core_summary.get('load_factor', 0)],
- ['Unserved Energy (MWh)', core_summary.get('unserved_energy_mwh', 0)],
- ['', '']
- ])
-
+ stats_data.extend(
+ [
+ ["CORE NETWORK STATISTICS", ""],
+ [
+ "Total Generation (MWh)",
+ core_summary.get("total_generation_mwh", 0),
+ ],
+ ["Total Demand (MWh)", core_summary.get("total_demand_mwh", 0)],
+ ["Total Cost", core_summary.get("total_cost", 0)],
+ ["Load Factor", core_summary.get("load_factor", 0)],
+ [
+ "Unserved Energy (MWh)",
+ core_summary.get("unserved_energy_mwh", 0),
+ ],
+ ["", ""],
+ ]
+ )
+
  # Section 3: Custom Statistics
- custom_stats = network_stats.get('custom_statistics', {})
+ custom_stats = network_stats.get("custom_statistics", {})
  if custom_stats:
  # Emissions by Carrier
- emissions = custom_stats.get('emissions_by_carrier', {})
+ emissions = custom_stats.get("emissions_by_carrier", {})
  if emissions:
- stats_data.extend([
- ['EMISSIONS BY CARRIER (tons CO2)', '']
- ])
+ stats_data.extend([["EMISSIONS BY CARRIER (tons CO2)", ""]])
  for carrier, value in emissions.items():
  if value > 0: # Only show carriers with emissions
  stats_data.append([carrier, value])
- stats_data.extend([
- ['Total Emissions (tons CO2)', custom_stats.get('total_emissions_tons_co2', 0)],
- ['', '']
- ])
-
+ stats_data.extend(
+ [
+ [
+ "Total Emissions (tons CO2)",
+ custom_stats.get("total_emissions_tons_co2", 0),
+ ],
+ ["", ""],
+ ]
+ )
+
  # Generation Dispatch by Carrier
- dispatch = custom_stats.get('dispatch_by_carrier', {})
+ dispatch = custom_stats.get("dispatch_by_carrier", {})
  if dispatch:
- stats_data.extend([
- ['GENERATION DISPATCH BY CARRIER (MWh)', '']
- ])
+ stats_data.extend([["GENERATION DISPATCH BY CARRIER (MWh)", ""]])
  for carrier, value in dispatch.items():
  if value > 0: # Only show carriers with generation
  stats_data.append([carrier, value])
- stats_data.append(['', ''])
-
+ stats_data.append(["", ""])
+
  # Power Capacity by Carrier (MW)
- power_capacity = custom_stats.get('power_capacity_by_carrier', {})
+ power_capacity = custom_stats.get("power_capacity_by_carrier", {})
  if power_capacity:
- stats_data.extend([
- ['POWER CAPACITY BY CARRIER (MW)', '']
- ])
+ stats_data.extend([["POWER CAPACITY BY CARRIER (MW)", ""]])
  for carrier, value in power_capacity.items():
  if value > 0: # Only show carriers with capacity
  stats_data.append([carrier, value])
- stats_data.append(['', ''])
-
+ stats_data.append(["", ""])
+
  # Energy Capacity by Carrier (MWh)
- energy_capacity = custom_stats.get('energy_capacity_by_carrier', {})
+ energy_capacity = custom_stats.get("energy_capacity_by_carrier", {})
  if energy_capacity:
- stats_data.extend([
- ['ENERGY CAPACITY BY CARRIER (MWh)', '']
- ])
+ stats_data.extend([["ENERGY CAPACITY BY CARRIER (MWh)", ""]])
  for carrier, value in energy_capacity.items():
  if value > 0: # Only show carriers with capacity
  stats_data.append([carrier, value])
- stats_data.append(['', ''])
-
+ stats_data.append(["", ""])
+
  # Capital Costs by Carrier
- capital_costs = custom_stats.get('capital_cost_by_carrier', {})
+ capital_costs = custom_stats.get("capital_cost_by_carrier", {})
  if capital_costs:
- stats_data.extend([
- ['CAPITAL COSTS BY CARRIER', '']
- ])
+ stats_data.extend([["CAPITAL COSTS BY CARRIER", ""]])
  for carrier, value in capital_costs.items():
  if value > 0: # Only show carriers with costs
  stats_data.append([carrier, value])
- stats_data.extend([
- ['Total Capital Cost', custom_stats.get('total_capital_cost', 0)],
- ['', '']
- ])
-
+ stats_data.extend(
+ [
+ [
+ "Total Capital Cost",
+ custom_stats.get("total_capital_cost", 0),
+ ],
+ ["", ""],
+ ]
+ )
+
  # Operational Costs by Carrier
- op_costs = custom_stats.get('operational_cost_by_carrier', {})
+ op_costs = custom_stats.get("operational_cost_by_carrier", {})
  if op_costs:
- stats_data.extend([
- ['OPERATIONAL COSTS BY CARRIER', '']
- ])
+ stats_data.extend([["OPERATIONAL COSTS BY CARRIER", ""]])
  for carrier, value in op_costs.items():
  if value > 0: # Only show carriers with costs
  stats_data.append([carrier, value])
- stats_data.extend([
- ['Total Operational Cost', custom_stats.get('total_operational_cost', 0)],
- ['', '']
- ])
-
+ stats_data.extend(
+ [
+ [
+ "Total Operational Cost",
+ custom_stats.get("total_operational_cost", 0),
+ ],
+ ["", ""],
+ ]
+ )
+
  # Total System Costs by Carrier
- total_costs = custom_stats.get('total_system_cost_by_carrier', {})
+ total_costs = custom_stats.get("total_system_cost_by_carrier", {})
  if total_costs:
- stats_data.extend([
- ['TOTAL SYSTEM COSTS BY CARRIER', '']
- ])
+ stats_data.extend([["TOTAL SYSTEM COSTS BY CARRIER", ""]])
  for carrier, value in total_costs.items():
  if value > 0: # Only show carriers with costs
  stats_data.append([carrier, value])
- stats_data.extend([
- ['Total Currency Cost', custom_stats.get('total_currency_cost', 0)],
- ['Average Price per MWh', custom_stats.get('average_price_per_mwh', 0)],
- ['', '']
- ])
-
+ stats_data.extend(
+ [
+ [
+ "Total Currency Cost",
+ custom_stats.get("total_currency_cost", 0),
+ ],
+ [
+ "Average Price per MWh",
+ custom_stats.get("average_price_per_mwh", 0),
+ ],
+ ["", ""],
+ ]
+ )
+
  # Unmet Load Statistics
- unmet_stats = custom_stats.get('unmet_load_statistics', {})
+ unmet_stats = custom_stats.get("unmet_load_statistics", {})
  if unmet_stats:
- stats_data.extend([
- ['UNMET LOAD STATISTICS', ''],
- ['Unmet Load (MWh)', unmet_stats.get('unmet_load_mwh', 0)],
- ['Unmet Load Percentage', custom_stats.get('unmet_load_percentage', 0)],
- ['Max Unmet Load Hour (MW)', custom_stats.get('max_unmet_load_hour_mw', 0)],
- ['', '']
- ])
-
+ stats_data.extend(
+ [
+ ["UNMET LOAD STATISTICS", ""],
+ ["Unmet Load (MWh)", unmet_stats.get("unmet_load_mwh", 0)],
+ [
+ "Unmet Load Percentage",
+ custom_stats.get("unmet_load_percentage", 0),
+ ],
+ [
+ "Max Unmet Load Hour (MW)",
+ custom_stats.get("max_unmet_load_hour_mw", 0),
+ ],
+ ["", ""],
+ ]
+ )
+
  # Section 4: Component Storage Statistics
- storage_stats = solve_results.get('component_storage_stats', {})
+ storage_stats = solve_results.get("component_storage_stats", {})
  if storage_stats:
- stats_data.extend([
- ['COMPONENT STORAGE STATISTICS', '']
- ])
+ stats_data.extend([["COMPONENT STORAGE STATISTICS", ""]])
  for key, value in storage_stats.items():
  # Convert snake_case to readable format
- readable_key = key.replace('_', ' ').title()
+ readable_key = key.replace("_", " ").title()
  stats_data.append([readable_key, value])
- stats_data.append(['', ''])
-
+ stats_data.append(["", ""])
+
  # Section 5: Runtime Information
- runtime_info = network_stats.get('runtime_info', {})
+ runtime_info = network_stats.get("runtime_info", {})
  if runtime_info:
- stats_data.extend([
- ['RUNTIME INFORMATION', '']
- ])
+ stats_data.extend([["RUNTIME INFORMATION", ""]])
  for key, value in runtime_info.items():
  # Convert snake_case to readable format
- readable_key = key.replace('_', ' ').title()
+ readable_key = key.replace("_", " ").title()
  stats_data.append([readable_key, value])
- stats_data.append(['', ''])
-
+ stats_data.append(["", ""])
+
  # Section 6: Solver Information
- solver_info = network_stats.get('solver_info', {})
+ solver_info = network_stats.get("solver_info", {})
  if solver_info:
- stats_data.extend([
- ['SOLVER INFORMATION', '']
- ])
+ stats_data.extend([["SOLVER INFORMATION", ""]])
  for key, value in solver_info.items():
  # Convert snake_case to readable format
- readable_key = key.replace('_', ' ').title()
+ readable_key = key.replace("_", " ").title()
  stats_data.append([readable_key, value])
- stats_data.append(['', ''])
-
+ stats_data.append(["", ""])
+
  # Create DataFrame and write to Excel (simple 2-column format)
  if stats_data:
- df = pd.DataFrame(stats_data, columns=['Parameter', 'Value'])
- df.to_excel(writer, sheet_name='Statistics', index=False)
- self.logger.info(f"Created Statistics sheet with {len(stats_data)} rows")
-
+ df = pd.DataFrame(stats_data, columns=["Parameter", "Value"])
+ df.to_excel(writer, sheet_name="Statistics", index=False)
+ self.logger.info(
+ f"Created Statistics sheet with {len(stats_data)} rows"
+ )
+
  except Exception as e:
  self.logger.warning(f"Failed to create statistics sheet: {e}")
  # Don't fail the entire export if statistics sheet fails
-
- def _create_per_year_statistics_sheet(self, writer, network_id: int, scenario_id: int, conn):
+
+ def _create_per_year_statistics_sheet(self, writer, scenario_id: int, conn):
  """Create per-year statistics sheet in tidy data format"""
  try:
  # Get per-year solve results
- year_results = self._get_solve_results_by_year(conn, network_id, scenario_id)
+ year_results = self._get_solve_results_by_year(conn, scenario_id)
  if not year_results:
- self.logger.info("No per-year solve results available, skipping per-year statistics sheet")
+ self.logger.info(
+ "No per-year solve results available, skipping per-year statistics sheet"
+ )
  return
-
+
  # Prepare tidy data: Variable, Year, Carrier, Value, Units
  tidy_data = []
-
+
  # Get sorted years
  years = sorted(year_results.keys())
-
+
  # Define the statistics we want to include with their units
  stat_definitions = [
- ('dispatch_by_carrier', 'Generation Dispatch', 'MWh'),
- ('power_capacity_by_carrier', 'Power Capacity', 'MW'),
- ('energy_capacity_by_carrier', 'Energy Capacity', 'MWh'),
- ('capital_cost_by_carrier', 'Capital Cost', 'Currency'),
- ('operational_cost_by_carrier', 'Operational Cost', 'Currency'),
- ('total_system_cost_by_carrier', 'Total System Cost', 'Currency'),
- ('emissions_by_carrier', 'Emissions', 'tons CO2')
+ ("dispatch_by_carrier", "Generation Dispatch", "MWh"),
+ ("power_capacity_by_carrier", "Power Capacity", "MW"),
+ ("energy_capacity_by_carrier", "Energy Capacity", "MWh"),
+ ("capital_cost_by_carrier", "Capital Cost", "Currency"),
+ ("operational_cost_by_carrier", "Operational Cost", "Currency"),
+ ("total_system_cost_by_carrier", "Total System Cost", "Currency"),
+ ("emissions_by_carrier", "Emissions", "tons CO2"),
  ]
-
+
  # Process each statistic type
  for stat_key, stat_name, units in stat_definitions:
  # Collect all carriers across all years for this statistic
  all_carriers = set()
  for year in years:
  year_data = year_results[year]
- if 'network_statistics' in year_data and 'custom_statistics' in year_data['network_statistics']:
- custom_stats = year_data['network_statistics']['custom_statistics']
+ if (
+ "network_statistics" in year_data
+ and "custom_statistics" in year_data["network_statistics"]
+ ):
+ custom_stats = year_data["network_statistics"][
+ "custom_statistics"
+ ]
  if stat_key in custom_stats:
  all_carriers.update(custom_stats[stat_key].keys())
-
+
  # Add data rows for each carrier and year combination
  for carrier in sorted(all_carriers):
  for year in years:
  year_data = year_results[year]
  value = 0.0
-
- if 'network_statistics' in year_data and 'custom_statistics' in year_data['network_statistics']:
- custom_stats = year_data['network_statistics']['custom_statistics']
- if stat_key in custom_stats and carrier in custom_stats[stat_key]:
+
+ if (
+ "network_statistics" in year_data
+ and "custom_statistics" in year_data["network_statistics"]
+ ):
+ custom_stats = year_data["network_statistics"][
+ "custom_statistics"
+ ]
+ if (
+ stat_key in custom_stats
+ and carrier in custom_stats[stat_key]
+ ):
  value = custom_stats[stat_key][carrier]
-
+
  # Only include rows with non-zero values to keep the data clean
  if value > 0:
  tidy_data.append([stat_name, year, carrier, value, units])
-
+
  # Add core summary statistics (these don't have carriers)
  core_stat_definitions = [
- ('total_generation_mwh', 'Total Generation', 'MWh'),
- ('total_demand_mwh', 'Total Demand', 'MWh'),
- ('total_cost', 'Total Cost', 'Currency'),
- ('load_factor', 'Load Factor', 'Ratio'),
- ('unserved_energy_mwh', 'Unserved Energy', 'MWh'),
- ('total_emissions_tons_co2', 'Total Emissions', 'tons CO2')
+ ("total_generation_mwh", "Total Generation", "MWh"),
+ ("total_demand_mwh", "Total Demand", "MWh"),
+ ("total_cost", "Total Cost", "Currency"),
+ ("load_factor", "Load Factor", "Ratio"),
+ ("unserved_energy_mwh", "Unserved Energy", "MWh"),
+ ("total_emissions_tons_co2", "Total Emissions", "tons CO2"),
  ]
-
+
  for stat_key, stat_name, units in core_stat_definitions:
  for year in years:
  year_data = year_results[year]
  value = 0.0
-
+
  # Check both core_summary and custom_statistics
- if 'network_statistics' in year_data:
- network_stats = year_data['network_statistics']
-
+ if "network_statistics" in year_data:
+ network_stats = year_data["network_statistics"]
+
  # Try core_summary first
- if 'core_summary' in network_stats and stat_key in network_stats['core_summary']:
- value = network_stats['core_summary'][stat_key]
+ if (
+ "core_summary" in network_stats
+ and stat_key in network_stats["core_summary"]
+ ):
+ value = network_stats["core_summary"][stat_key]
  # Try custom_statistics as fallback
- elif 'custom_statistics' in network_stats and stat_key in network_stats['custom_statistics']:
- value = network_stats['custom_statistics'][stat_key]
-
+ elif (
+ "custom_statistics" in network_stats
+ and stat_key in network_stats["custom_statistics"]
+ ):
+ value = network_stats["custom_statistics"][stat_key]
+
  # Include all core statistics (even zeros for completeness)
- tidy_data.append([stat_name, year, 'Total', value, units])
-
+ tidy_data.append([stat_name, year, "Total", value, units])
+
  # Create DataFrame and write to Excel
  if tidy_data:
- df = pd.DataFrame(tidy_data, columns=['Variable', 'Year', 'Carrier', 'Value', 'Units'])
- df.to_excel(writer, sheet_name='Per-Year Statistics', index=False)
- self.logger.info(f"Created Per-Year Statistics sheet with {len(tidy_data)} rows")
+ df = pd.DataFrame(
+ tidy_data, columns=["Variable", "Year", "Carrier", "Value", "Units"]
+ )
+ df.to_excel(writer, sheet_name="Per-Year Statistics", index=False)
+ self.logger.info(
+ f"Created Per-Year Statistics sheet with {len(tidy_data)} rows"
+ )
  else:
  self.logger.info("No per-year statistics data to export")
-
+
  except Exception as e:
  self.logger.warning(f"Failed to create per-year statistics sheet: {e}")
  # Don't fail the entire export if per-year statistics sheet fails
-
+
  def _get_app_version(self) -> str:
  """Get the application version."""
  try:
@@ -994,7 +1208,7 @@ class ExcelModelExporter:
  import json
  import os
  from pathlib import Path
-
+
  # Look for package.json in parent directories
  current_dir = Path(__file__).parent
  while current_dir != current_dir.parent:
@@ -1004,7 +1218,7 @@ class ExcelModelExporter:
  package_data = json.load(f)
  return package_data.get("version", "1.0.0")
  current_dir = current_dir.parent
-
+
  # Fallback version
  return "1.0.0"
  except Exception as e:
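
A note on the rewritten solve-results queries above: "scenario_id = ?" alone can never match the base network's rows, because in SQL a comparison with NULL is never true; the added "OR (scenario_id IS NULL AND ? IS NULL)" clause handles that case. A self-contained sketch of the same NULL-safe pattern (the table and column names mirror the diff; the rows are made up):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute(
        "CREATE TABLE network_solve_results (scenario_id INTEGER, results_json TEXT)"
    )
    conn.execute("INSERT INTO network_solve_results VALUES (NULL, '{}')")  # base network
    conn.execute("INSERT INTO network_solve_results VALUES (7, '{\"a\": 1}')")  # scenario 7

    def solve_results(scenario_id):
        # scenario_id = ? alone never matches the NULL row; the second
        # clause selects the base network when the parameter is None.
        return conn.execute(
            "SELECT results_json FROM network_solve_results "
            "WHERE scenario_id = ? OR (scenario_id IS NULL AND ? IS NULL)",
            (scenario_id, scenario_id),
        ).fetchone()

    print(solve_results(None))  # ('{}',) -> base-network row
    print(solve_results(7))     # ('{"a": 1}',) -> scenario row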