pyconvexity 0.3.8.post7__py3-none-any.whl → 0.4.1__py3-none-any.whl

This diff compares the contents of two publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only and reflects the packages exactly as they appear in their respective public registries.
Files changed (48)
  1. pyconvexity/__init__.py +87 -46
  2. pyconvexity/_version.py +1 -1
  3. pyconvexity/core/__init__.py +3 -5
  4. pyconvexity/core/database.py +111 -103
  5. pyconvexity/core/errors.py +16 -10
  6. pyconvexity/core/types.py +61 -54
  7. pyconvexity/data/__init__.py +0 -1
  8. pyconvexity/data/loaders/cache.py +65 -64
  9. pyconvexity/data/schema/01_core_schema.sql +134 -234
  10. pyconvexity/data/schema/02_data_metadata.sql +38 -168
  11. pyconvexity/data/schema/03_validation_data.sql +327 -264
  12. pyconvexity/data/sources/gem.py +169 -139
  13. pyconvexity/io/__init__.py +4 -10
  14. pyconvexity/io/excel_exporter.py +694 -480
  15. pyconvexity/io/excel_importer.py +817 -545
  16. pyconvexity/io/netcdf_exporter.py +66 -61
  17. pyconvexity/io/netcdf_importer.py +850 -619
  18. pyconvexity/models/__init__.py +109 -59
  19. pyconvexity/models/attributes.py +197 -178
  20. pyconvexity/models/carriers.py +70 -67
  21. pyconvexity/models/components.py +260 -236
  22. pyconvexity/models/network.py +202 -284
  23. pyconvexity/models/results.py +65 -55
  24. pyconvexity/models/scenarios.py +58 -88
  25. pyconvexity/solvers/__init__.py +5 -5
  26. pyconvexity/solvers/pypsa/__init__.py +3 -3
  27. pyconvexity/solvers/pypsa/api.py +150 -134
  28. pyconvexity/solvers/pypsa/batch_loader.py +165 -162
  29. pyconvexity/solvers/pypsa/builder.py +390 -291
  30. pyconvexity/solvers/pypsa/constraints.py +184 -162
  31. pyconvexity/solvers/pypsa/solver.py +968 -666
  32. pyconvexity/solvers/pypsa/storage.py +1377 -671
  33. pyconvexity/timeseries.py +63 -60
  34. pyconvexity/validation/__init__.py +14 -6
  35. pyconvexity/validation/rules.py +95 -84
  36. pyconvexity-0.4.1.dist-info/METADATA +46 -0
  37. pyconvexity-0.4.1.dist-info/RECORD +42 -0
  38. pyconvexity/data/__pycache__/__init__.cpython-313.pyc +0 -0
  39. pyconvexity/data/loaders/__pycache__/__init__.cpython-313.pyc +0 -0
  40. pyconvexity/data/loaders/__pycache__/cache.cpython-313.pyc +0 -0
  41. pyconvexity/data/schema/04_scenario_schema.sql +0 -122
  42. pyconvexity/data/schema/migrate_add_geometries.sql +0 -73
  43. pyconvexity/data/sources/__pycache__/__init__.cpython-313.pyc +0 -0
  44. pyconvexity/data/sources/__pycache__/gem.cpython-313.pyc +0 -0
  45. pyconvexity-0.3.8.post7.dist-info/METADATA +0 -138
  46. pyconvexity-0.3.8.post7.dist-info/RECORD +0 -49
  47. {pyconvexity-0.3.8.post7.dist-info → pyconvexity-0.4.1.dist-info}/WHEEL +0 -0
  48. {pyconvexity-0.3.8.post7.dist-info → pyconvexity-0.4.1.dist-info}/top_level.txt +0 -0
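Most of what follows is the rewrite of pyconvexity/io/excel_importer.py. Two themes run through the hunks: the 0.4.x layout keeps exactly one network per database file, so the network_id arguments disappear from the importer's methods (and from the network_time_periods insert, whose network_id column is gone), get_master_scenario_id drops out of the imports, and the whole module is mechanically reformatted (double quotes, wrapped calls). A minimal sketch of the resulting call-shape change; the file names below are illustrative, only the signature change is taken from the diff:

    importer = ExcelModelImporter()

    # 0.3.8.post7: the caller selected the target network explicitly
    importer.import_model_from_excel("grid.db", "model.xlsx", network_id=42)

    # 0.4.1: the database file itself identifies the network; the importer
    # creates one only if get_network_info(conn) raises (no network yet)
    importer.import_model_from_excel("grid.db", "model.xlsx")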
@@ -16,109 +16,137 @@ from pyconvexity.core.database import open_connection
 from pyconvexity.core.types import StaticValue, CreateNetworkRequest
 from pyconvexity.core.errors import AttributeNotFound, ValidationError
 from pyconvexity.models import (
-    list_components_by_type, create_component, update_component, create_network,
-    set_network_config, create_carrier, get_master_scenario_id, get_network_time_periods,
-    list_carriers, set_static_attribute, set_timeseries_attribute, get_bus_name_to_id_map,
-    get_network_info, delete_attribute
+    list_components_by_type,
+    create_component,
+    update_component,
+    create_network,
+    set_network_config,
+    create_carrier,
+    get_network_time_periods,
+    list_carriers,
+    set_static_attribute,
+    set_timeseries_attribute,
+    get_bus_name_to_id_map,
+    get_network_info,
+    delete_attribute,
 )
 from pyconvexity.validation import get_validation_rule
 from pyconvexity.timeseries import set_timeseries
-from pyconvexity.models.attributes import set_timeseries_attribute as set_timeseries_conn
+from pyconvexity.models.attributes import (
+    set_timeseries_attribute as set_timeseries_conn,
+)
 
 logger = logging.getLogger(__name__)
 
+
 class ExcelModelImporter:
     """Import network model from Excel workbook"""
-
+
     def __init__(self):
         self.logger = logging.getLogger(__name__)
-
+
     def import_model_from_excel(
         self,
         db_path: str,
         excel_path: str,
-        network_id: Optional[int] = None,
         network_name: Optional[str] = None,
         network_description: Optional[str] = None,
         scenario_id: Optional[int] = None,
-        progress_callback: Optional[callable] = None
+        progress_callback: Optional[callable] = None,
     ) -> Dict[str, Any]:
         """
         Import network model from Excel workbook
-
+
         Args:
             db_path: Database path
             excel_path: Excel file path
-            network_id: Network ID to import into (if updating existing)
+
             network_name: Name for new network (if creating new)
             network_description: Description for new network (if creating new)
             scenario_id: Scenario ID (defaults to master scenario)
             update_existing: Whether to update existing components
             add_new: Whether to add new components
             progress_callback: Optional callback for progress updates
-
+
         Returns:
             Import statistics and metadata
         """
-
+
         conn = None
         try:
             if progress_callback:
                 progress_callback(0, "Starting Excel import...")
-
+
             # Connect to database
             conn = open_connection(db_path)
-
-            # Determine if we're creating a new network or updating existing
-            create_new_network = network_id is None
-
+
+            # Single network per database - always update existing network metadata
+            # Check if network already exists
+            try:
+                existing_network = get_network_info(conn)
+                create_new_network = False
+            except Exception:
+                create_new_network = True
+
             if create_new_network:
                 if progress_callback:
                     progress_callback(3, "Reading Excel Overview sheet...")
-
+
                 # Read network configuration from Overview sheet
-                overview_df = pd.read_excel(excel_path, sheet_name='Overview')
+                overview_df = pd.read_excel(excel_path, sheet_name="Overview")
                 network_config = self._read_overview_sheet(overview_df)
-
+
                 self.logger.info(f"Network config from Overview: {network_config}")
-
+
                 # Extract network name from Excel if not provided
-                excel_network_name = network_config.get('name')
+                excel_network_name = network_config.get("name")
                 if excel_network_name:
                     final_network_name = excel_network_name
-                    self.logger.info(f"Using network name from Excel: '{final_network_name}'")
+                    self.logger.info(
+                        f"Using network name from Excel: '{final_network_name}'"
+                    )
                 elif network_name:
                     final_network_name = network_name
-                    self.logger.info(f"Using provided network name: '{final_network_name}'")
+                    self.logger.info(
+                        f"Using provided network name: '{final_network_name}'"
+                    )
                 else:
                     # Fallback to filename if no name in Excel or provided
                     final_network_name = Path(excel_path).stem
-                    self.logger.info(f"Using filename as network name: '{final_network_name}'")
-
+                    self.logger.info(
+                        f"Using filename as network name: '{final_network_name}'"
+                    )
+
                 # Extract description from Excel if not provided
-                excel_description = network_config.get('description')
+                excel_description = network_config.get("description")
                 if excel_description:
                     final_description = excel_description
-                    self.logger.info(f"Using description from Excel: '{final_description}'")
+                    self.logger.info(
+                        f"Using description from Excel: '{final_description}'"
+                    )
                 elif network_description:
                     final_description = network_description
-                    self.logger.info(f"Using provided description: '{final_description}'")
+                    self.logger.info(
+                        f"Using provided description: '{final_description}'"
+                    )
                 else:
                     final_description = f"Imported from {Path(excel_path).name}"
-                    self.logger.info(f"Using default description: '{final_description}'")
-
+                    self.logger.info(
+                        f"Using default description: '{final_description}'"
+                    )
+
                 if progress_callback:
                     progress_callback(5, f"Creating network '{final_network_name}'...")
-
+
                 # Create new network
                 network_request = CreateNetworkRequest(
                     name=final_network_name,
                     description=final_description,
-                    time_resolution=network_config.get('time_resolution', "H"),
-                    start_time=network_config.get('start_time'),
-                    end_time=network_config.get('end_time')
+                    time_resolution=network_config.get("time_resolution", "H"),
+                    start_time=network_config.get("start_time"),
+                    end_time=network_config.get("end_time"),
                 )
-
+
                 # Validate that we have the required time information
                 if not network_request.start_time or not network_request.end_time:
                     missing_fields = []
@@ -126,141 +154,166 @@ class ExcelModelImporter:
                         missing_fields.append("Time Start")
                     if not network_request.end_time:
                         missing_fields.append("Time End")
-
-                    self.logger.error(f"Missing required time information in Overview sheet: {missing_fields}")
+
+                    self.logger.error(
+                        f"Missing required time information in Overview sheet: {missing_fields}"
+                    )
                     self.logger.error(f"Available overview data: {network_config}")
-                    raise ValueError(f"Excel file is missing required time information: {', '.join(missing_fields)}. "
-                                     f"Please ensure the Overview sheet contains 'Time Start' and 'Time End' fields.")
-
-                self.logger.info(f"Creating network with: name='{network_request.name}', "
-                                 f"start_time='{network_request.start_time}', "
-                                 f"end_time='{network_request.end_time}', "
-                                 f"time_resolution='{network_request.time_resolution}'")
-
-                network_id = create_network(conn, network_request)
-
+                    raise ValueError(
+                        f"Excel file is missing required time information: {', '.join(missing_fields)}. "
+                        f"Please ensure the Overview sheet contains 'Time Start' and 'Time End' fields."
+                    )
+
+                self.logger.info(
+                    f"Creating network with: name='{network_request.name}', "
+                    f"start_time='{network_request.start_time}', "
+                    f"end_time='{network_request.end_time}', "
+                    f"time_resolution='{network_request.time_resolution}'"
+                )
+
+                create_network(conn, network_request)
+
                 # Generate time periods for the network
                 self._generate_time_periods(
-                    conn,
-                    network_id,
-                    network_request.start_time,
-                    network_request.end_time,
-                    network_request.time_resolution
+                    conn,
+                    network_request.start_time,
+                    network_request.end_time,
+                    network_request.time_resolution,
                 )
-
+
                 # Verify time periods were created
-                verification_periods = get_network_time_periods(conn, network_id)
-                self.logger.info(f"Network {network_id} now has {len(verification_periods)} time periods")
-
+                verification_periods = get_network_time_periods(conn)
+                self.logger.info(
+                    f"Network now has {len(verification_periods)} time periods"
+                )
+
                 conn.commit()
-
+
                 if progress_callback:
-                    progress_callback(5, f"Created network '{final_network_name}' (ID: {network_id})")
+                    progress_callback(5, f"Updated network '{final_network_name}'")
             else:
                 if progress_callback:
-                    progress_callback(3, f"Using existing network ID: {network_id}")
-
+                    progress_callback(3, f"Updating existing network")
+
                 # For existing networks, validate time axis compatibility
                 if progress_callback:
                     progress_callback(5, "Validating time axis compatibility...")
-
+
                 # Read network configuration from Overview sheet to compare
                 try:
-                    overview_df = pd.read_excel(excel_path, sheet_name='Overview')
+                    overview_df = pd.read_excel(excel_path, sheet_name="Overview")
                     excel_time_config = self._read_overview_sheet(overview_df)
                 except Exception as e:
                     self.logger.warning(f"Could not read Overview sheet: {e}")
-                    self.logger.warning("Skipping time axis validation - assuming Excel is compatible")
+                    self.logger.warning(
+                        "Skipping time axis validation - assuming Excel is compatible"
+                    )
                     excel_time_config = {}
-
+
                 # Validate time axis matches existing network
-                self._validate_time_axis_compatibility(conn, network_id, excel_time_config)
-
-                self.logger.info("Time axis validation passed - Excel matches existing network")
-
+                self._validate_time_axis_compatibility(conn, excel_time_config)
+
+                self.logger.info(
+                    "Time axis validation passed - Excel matches existing network"
+                )
+
             # Set import behavior based on whether this is a new or existing network
-            if network_id is None:
+            # Always add all components for single network per database
+            if True:
                 # New network: Always add all components from Excel
                 actual_update_existing = False  # No existing components to update
-                actual_add_new = True # Add everything from Excel
-                self.logger.info("Import mode: NEW NETWORK - Adding all components from Excel")
+                actual_add_new = True  # Add everything from Excel
+                self.logger.info(
+                    "Import mode: NEW NETWORK - Adding all components from Excel"
+                )
             else:
                 # Existing network: Always update existing and add new (user's requirement)
-                actual_update_existing = True # Update components that exist
-                actual_add_new = True # Add components that don't exist
-                self.logger.info("Import mode: EXISTING NETWORK - Update existing + add new components")
-
-            # Get master scenario if no scenario specified
-            if scenario_id is None:
-                scenario_id = get_master_scenario_id(conn, network_id)
-                if scenario_id is None:
-                    raise ValueError("No master scenario found for network")
-
+                actual_update_existing = True  # Update components that exist
+                actual_add_new = True  # Add components that don't exist
+                self.logger.info(
+                    "Import mode: EXISTING NETWORK - Update existing + add new components"
+                )
+
             if progress_callback:
                 progress_callback(8, "Reading Excel file...")
-
+
             # Read Excel file
             excel_data = self._read_excel_file(excel_path)
-
+
             if progress_callback:
                 progress_callback(18, "Processing carriers...")
-
+
             # Import carriers first
-            carriers_df = excel_data.get('Carriers', pd.DataFrame())
-            carriers_imported = self._import_carriers(conn, network_id, carriers_df)
-
+            carriers_df = excel_data.get("Carriers", pd.DataFrame())
+            carriers_imported = self._import_carriers(conn, carriers_df)
+
             if progress_callback:
                 progress_callback(28, "Processing components...")
-
+
             # Import components by type
-            component_types = ['Buses', 'Generators', 'Loads', 'Lines', 'Links', 'Storage Units', 'Stores', 'Constraints']
+            component_types = [
+                "Buses",
+                "Generators",
+                "Loads",
+                "Lines",
+                "Links",
+                "Storage Units",
+                "Stores",
+                "Constraints",
+            ]
             components_imported = {}
-
+
             for sheet_name in component_types:
                 if sheet_name in excel_data:
                     comp_type = self._get_component_type_from_sheet(sheet_name)
-                    self.logger.info(f"Processing sheet '{sheet_name}' as component type '{comp_type}' with {len(excel_data[sheet_name])} rows")
+                    self.logger.info(
+                        f"Processing sheet '{sheet_name}' as component type '{comp_type}' with {len(excel_data[sheet_name])} rows"
+                    )
                     components_imported[comp_type] = self._import_components(
-                        conn, network_id, comp_type, excel_data[sheet_name],
-                        scenario_id, actual_update_existing, actual_add_new
+                        conn,
+                        comp_type,
+                        excel_data[sheet_name],
+                        scenario_id,
+                        actual_update_existing,
+                        actual_add_new,
                     )
-
+
             if progress_callback:
                 progress_callback(78, "Processing timeseries data...")
-
+
             # Import timeseries data
             timeseries_imported = self._import_timeseries_data(
-                conn, network_id, excel_data, scenario_id
+                conn, excel_data, scenario_id
             )
-
+
             if progress_callback:
                 progress_callback(93, "Processing network configuration...")
-
+
             # Import network configuration
-            network_config_df = excel_data.get('Network Config', pd.DataFrame())
-            config_imported = self._import_network_config(
-                conn, network_id, network_config_df
-            )
-
+            network_config_df = excel_data.get("Network Config", pd.DataFrame())
+            config_imported = self._import_network_config(conn, network_config_df)
+
             conn.commit()
-
+
             if progress_callback:
                 progress_callback(100, "Excel import completed")
-
+
             # Calculate statistics
             stats = self._calculate_import_stats(
-                carriers_imported, components_imported, timeseries_imported, config_imported
+                carriers_imported,
+                components_imported,
+                timeseries_imported,
+                config_imported,
             )
-            stats['network_id'] = network_id
-            stats['created_new_network'] = create_new_network
-
+            # network_id no longer needed in stats
+            stats["created_new_network"] = False  # Single network per database
+
             return {
                 "success": True,
-                "message": f"Network {'created' if create_new_network else 'updated'} from Excel: {excel_path}",
-                "network_id": network_id,
-                "stats": stats
+                "message": f"Network updated from Excel: {excel_path}",
+                "stats": stats,
             }
-
+
         except Exception as e:
             self.logger.error(f"Excel import failed: {e}", exc_info=True)
             if progress_callback:
@@ -273,54 +326,65 @@ class ExcelModelImporter:
                 conn.close()
             except Exception as e:
                 self.logger.warning(f"Failed to close database connection: {e}")
-
-    def _generate_time_periods(self, conn, network_id: int, start_time: str, end_time: str, time_resolution: str) -> None:
+
+    def _generate_time_periods(
+        self, conn, start_time: str, end_time: str, time_resolution: str
+    ) -> None:
         """Generate and insert time periods for the network"""
         import pandas as pd
         from datetime import datetime
-
+
         try:
             # Parse start and end times
             start_dt = pd.to_datetime(start_time)
             end_dt = pd.to_datetime(end_time)
-
+
             # Convert time_resolution to pandas frequency string
-            if time_resolution == 'H':
-                freq_str = 'H'
-            elif time_resolution == 'D':
-                freq_str = 'D'
-            elif time_resolution.endswith('H'):
+            if time_resolution == "H":
+                freq_str = "H"
+            elif time_resolution == "D":
+                freq_str = "D"
+            elif time_resolution.endswith("H"):
                 hours = int(time_resolution[:-1])
-                freq_str = f'{hours}H'
-            elif time_resolution.endswith('min'):
+                freq_str = f"{hours}H"
+            elif time_resolution.endswith("min"):
                 minutes = int(time_resolution[:-3])
-                freq_str = f'{minutes}min'
+                freq_str = f"{minutes}min"
             else:
-                self.logger.warning(f"Unknown time resolution '{time_resolution}', defaulting to hourly")
-                freq_str = 'H'
-
+                self.logger.warning(
+                    f"Unknown time resolution '{time_resolution}', defaulting to hourly"
+                )
+                freq_str = "H"
+
             # Generate timestamps
-            timestamps = pd.date_range(start=start_dt, end=end_dt, freq=freq_str, inclusive='both')
-
-            self.logger.info(f"Generating {len(timestamps)} time periods from {start_time} to {end_time} at {time_resolution} resolution")
-
+            timestamps = pd.date_range(
+                start=start_dt, end=end_dt, freq=freq_str, inclusive="both"
+            )
+
+            self.logger.info(
+                f"Generating {len(timestamps)} time periods from {start_time} to {end_time} at {time_resolution} resolution"
+            )
+
             # Insert optimized time periods metadata
             period_count = len(timestamps)
             start_timestamp = int(timestamps[0].timestamp())
-
+
             # Calculate interval in seconds
             if len(timestamps) > 1:
                 interval_seconds = int((timestamps[1] - timestamps[0]).total_seconds())
             else:
                 interval_seconds = 3600  # Default to hourly
-
-            conn.execute("""
-                INSERT INTO network_time_periods (network_id, period_count, start_timestamp, interval_seconds)
-                VALUES (?, ?, ?, ?)
-            """, (network_id, period_count, start_timestamp, interval_seconds))
-
-            self.logger.info(f"Successfully created {len(timestamps)} time periods for network {network_id}")
-
+
+            conn.execute(
+                """
+                INSERT INTO network_time_periods (period_count, start_timestamp, interval_seconds)
+                VALUES (?, ?, ?)
+                """,
+                (period_count, start_timestamp, interval_seconds),
+            )
+
+            self.logger.info(f"Successfully created {len(timestamps)} time periods")
+
         except Exception as e:
            self.logger.error(f"Failed to generate time periods: {e}")
            raise
@@ -328,24 +392,26 @@
     def _read_overview_sheet(self, overview_df: pd.DataFrame) -> Dict[str, Any]:
         """Extract network configuration from Overview sheet"""
         config = {}
-
+
         if overview_df.empty:
             self.logger.warning("Overview sheet is empty")
             return config
-
-        self.logger.info(f"Overview sheet has {len(overview_df)} rows and columns: {list(overview_df.columns)}")
+
+        self.logger.info(
+            f"Overview sheet has {len(overview_df)} rows and columns: {list(overview_df.columns)}"
+        )
         self.logger.info(f"First few rows of overview sheet:\n{overview_df.head()}")
-
+
         # Convert to a simple key-value lookup
         overview_data = {}
-
+
         # Handle both old single-column format and new two-column format
-        if 'Property' in overview_df.columns and 'Value' in overview_df.columns:
+        if "Property" in overview_df.columns and "Value" in overview_df.columns:
             # New two-column format
             for _, row in overview_df.iterrows():
-                key = str(row['Property']).strip() if pd.notna(row['Property']) else ""
-                value = str(row['Value']).strip() if pd.notna(row['Value']) else ""
-                if key and value and value != 'nan':
+                key = str(row["Property"]).strip() if pd.notna(row["Property"]) else ""
+                value = str(row["Value"]).strip() if pd.notna(row["Value"]) else ""
+                if key and value and value != "nan":
                     overview_data[key] = value
                     self.logger.debug(f"Parsed overview data: '{key}' = '{value}'")
         elif len(overview_df.columns) >= 2:
@@ -353,212 +419,229 @@ class ExcelModelImporter:
             for i, row in overview_df.iterrows():
                 key = str(row.iloc[0]).strip() if pd.notna(row.iloc[0]) else ""
                 value = str(row.iloc[1]).strip() if pd.notna(row.iloc[1]) else ""
-                if key and value and value != 'nan':
+                if key and value and value != "nan":
                     overview_data[key] = value
                     self.logger.debug(f"Parsed overview data: '{key}' = '{value}'")
         else:
-            self.logger.error(f"Overview sheet format not recognized. Columns: {list(overview_df.columns)}")
+            self.logger.error(
+                f"Overview sheet format not recognized. Columns: {list(overview_df.columns)}"
+            )
             return config
-
+
         self.logger.info(f"Parsed overview data: {overview_data}")
-
+
         # Extract network configuration
-        if 'Name' in overview_data:
-            config['name'] = overview_data['Name']
-        if 'Description' in overview_data:
-            config['description'] = overview_data['Description']
-        if 'Time Start' in overview_data:
-            config['start_time'] = overview_data['Time Start']
+        if "Name" in overview_data:
+            config["name"] = overview_data["Name"]
+        if "Description" in overview_data:
+            config["description"] = overview_data["Description"]
+        if "Time Start" in overview_data:
+            config["start_time"] = overview_data["Time Start"]
             self.logger.info(f"Found Time Start: {config['start_time']}")
-        if 'Time End' in overview_data:
-            config['end_time'] = overview_data['Time End']
+        if "Time End" in overview_data:
+            config["end_time"] = overview_data["Time End"]
             self.logger.info(f"Found Time End: {config['end_time']}")
-        if 'Time Interval' in overview_data:
+        if "Time Interval" in overview_data:
             # Convert time interval format to our format
-            interval = overview_data['Time Interval'].strip()
+            interval = overview_data["Time Interval"].strip()
             self.logger.info(f"Found Time Interval: '{interval}'")
-
-            if interval == 'P1D':
-                config['time_resolution'] = 'D'  # Daily
-            elif interval == 'PT1H' or interval == 'h' or interval == 'H':
-                config['time_resolution'] = 'H'  # Hourly
-            elif interval.startswith('PT') and interval.endswith('H'):
+
+            if interval == "P1D":
+                config["time_resolution"] = "D"  # Daily
+            elif interval == "PT1H" or interval == "h" or interval == "H":
+                config["time_resolution"] = "H"  # Hourly
+            elif interval.startswith("PT") and interval.endswith("H"):
                 # Extract hours (e.g., 'PT3H' -> '3H')
                 hours = interval[2:-1]
-                config['time_resolution'] = f'{hours}H'
-            elif interval.endswith('h') or interval.endswith('H'):
+                config["time_resolution"] = f"{hours}H"
+            elif interval.endswith("h") or interval.endswith("H"):
                 # Handle simple formats like '2h', '3H'
                 if interval[:-1].isdigit():
                     hours = interval[:-1]
-                    config['time_resolution'] = f'{hours}H'
+                    config["time_resolution"] = f"{hours}H"
                 else:
-                    config['time_resolution'] = 'H'  # Default to hourly
+                    config["time_resolution"] = "H"  # Default to hourly
             else:
-                self.logger.warning(f"Unknown time interval format '{interval}', defaulting to hourly")
-                config['time_resolution'] = 'H'  # Default to hourly
-
+                self.logger.warning(
+                    f"Unknown time interval format '{interval}', defaulting to hourly"
+                )
+                config["time_resolution"] = "H"  # Default to hourly
+
         self.logger.info(f"Final network config from Overview sheet: {config}")
         return config
-
+
     def _read_excel_file(self, excel_path: str) -> Dict[str, pd.DataFrame]:
         """Read Excel file and return dictionary of DataFrames by sheet name"""
         excel_data = {}
-
+
         try:
             # Read all sheets
             excel_file = pd.ExcelFile(excel_path)
-
+
             self.logger.info(f"Excel file contains sheets: {excel_file.sheet_names}")
-
+
             for sheet_name in excel_file.sheet_names:
-                if sheet_name == 'Overview':
+                if sheet_name == "Overview":
                     continue  # Skip overview sheet
-
+
                 df = pd.read_excel(excel_path, sheet_name=sheet_name)
                 if not df.empty:
                     excel_data[sheet_name] = df
                     self.logger.info(f"Loaded sheet '{sheet_name}' with {len(df)} rows")
                 else:
                     self.logger.info(f"Skipped empty sheet '{sheet_name}'")
-
+
         except Exception as e:
             raise ValueError(f"Failed to read Excel file: {e}")
-
+
         return excel_data
-
+
     def _get_component_type_from_sheet(self, sheet_name: str) -> str:
         """Convert sheet name to component type"""
         mapping = {
-            'Buses': 'BUS',
-            'Generators': 'GENERATOR',
-            'Loads': 'LOAD',
-            'Lines': 'LINE',
-            'Links': 'LINK',
-            'Storage Units': 'STORAGE_UNIT',
-            'Stores': 'STORE',
-            'Constraints': 'CONSTRAINT'
+            "Buses": "BUS",
+            "Generators": "GENERATOR",
+            "Loads": "LOAD",
+            "Lines": "LINE",
+            "Links": "LINK",
+            "Storage Units": "STORAGE_UNIT",
+            "Stores": "STORE",
+            "Constraints": "CONSTRAINT",
         }
         return mapping.get(sheet_name, sheet_name.upper())
-
-    def _import_carriers(self, conn, network_id: int, carriers_df: pd.DataFrame) -> Dict[str, Any]:
+
+    def _import_carriers(self, conn, carriers_df: pd.DataFrame) -> Dict[str, Any]:
         """Import carriers from Excel data"""
-        imported = {'created': 0, 'updated': 0, 'errors': 0}
-
+        imported = {"created": 0, "updated": 0, "errors": 0}
+
         if carriers_df.empty:
             return imported
-
+
         # Get existing carriers
-        existing_carriers = list_carriers(conn, network_id)
-        existing_names = {carrier['name'] for carrier in existing_carriers}
-
+        existing_carriers = list_carriers(conn)
+        existing_names = {carrier.name for carrier in existing_carriers}
+
         for _, row in carriers_df.iterrows():
             try:
-                carrier_name = str(row.get('name', '')).strip()
+                carrier_name = str(row.get("name", "")).strip()
                 if not carrier_name:
                     continue
-
+
                 # Check if carrier exists
                 if carrier_name in existing_names:
-                    imported['updated'] += 1
+                    imported["updated"] += 1
                 else:
                     # Create new carrier
                     create_carrier(
                         conn,
-                        network_id,
-                        carrier_name,
-                        co2_emissions=row.get('co2_emissions', 0.0),
-                        color=row.get('color', '#ffffff'),
-                        nice_name=row.get('nice_name', carrier_name)
+                        carrier_name,
+                        co2_emissions=row.get("co2_emissions", 0.0),
+                        color=row.get("color", "#ffffff"),
+                        nice_name=row.get("nice_name", carrier_name),
                     )
-                    imported['created'] += 1
-
+                    imported["created"] += 1
+
             except Exception as e:
                 self.logger.error(f"Failed to import carrier {carrier_name}: {e}")
-                imported['errors'] += 1
-
+                imported["errors"] += 1
+
         return imported
-
+
     def _import_components(
-        self,
-        conn,
-        network_id: int,
-        component_type: str,
+        self,
+        conn,
+        component_type: str,
         components_df: pd.DataFrame,
         scenario_id: int,
         update_existing: bool,
-        add_new: bool
+        add_new: bool,
     ) -> Dict[str, Any]:
         """Import components of a specific type"""
-        imported = {'created': 0, 'updated': 0, 'errors': 0}
-
+        imported = {"created": 0, "updated": 0, "errors": 0}
+
         if components_df.empty:
             return imported
-
+
         # Get existing components of this type
-        existing_components = list_components_by_type(conn, network_id, component_type)
+        existing_components = list_components_by_type(conn, component_type)
         existing_names = {comp.name for comp in existing_components}
-
+
         # Get carriers and buses for foreign key resolution
-        carriers = list_carriers(conn, network_id)
-        buses = list_components_by_type(conn, network_id, 'BUS')
-
-        carrier_name_to_id = {carrier['name']: carrier['id'] for carrier in carriers}
+        carriers = list_carriers(conn)
+        buses = list_components_by_type(conn, "BUS")
+
+        carrier_name_to_id = {carrier.name: carrier.id for carrier in carriers}
         bus_name_to_id = {bus.name: bus.id for bus in buses}
-
-
+
         for _, row in components_df.iterrows():
             try:
-                component_name = str(row.get('name', '')).strip()
+                component_name = str(row.get("name", "")).strip()
                 if not component_name:
                     continue
-
+
                 # Debug logging for CONSTRAINT components (reduced verbosity)
-                if component_type == 'CONSTRAINT':
+                if component_type == "CONSTRAINT":
                     self.logger.debug(f"Processing CONSTRAINT '{component_name}'")
-
+
                 # Resolve foreign keys
                 carrier_id = None
                 # CONSTRAINT components don't have carriers
-                if row.get('carrier') and component_type != 'CONSTRAINT':
-                    carrier_name = str(row['carrier']).strip()
+                if row.get("carrier") and component_type != "CONSTRAINT":
+                    carrier_name = str(row["carrier"]).strip()
                     carrier_id = carrier_name_to_id.get(carrier_name)
-                    self.logger.info(f"Component '{component_name}' has carrier '{carrier_name}', resolved to carrier_id: {carrier_id}")
+                    self.logger.info(
+                        f"Component '{component_name}' has carrier '{carrier_name}', resolved to carrier_id: {carrier_id}"
+                    )
                     if carrier_id is None:
-                        self.logger.warning(f"Carrier '{carrier_name}' not found for component '{component_name}'. Available carriers: {list(carrier_name_to_id.keys())}")
-                elif component_type == 'CONSTRAINT':
-                    self.logger.debug(f"CONSTRAINT '{component_name}' - skipping carrier resolution")
-
+                        self.logger.warning(
+                            f"Carrier '{carrier_name}' not found for component '{component_name}'. Available carriers: {list(carrier_name_to_id.keys())}"
+                        )
+                elif component_type == "CONSTRAINT":
+                    self.logger.debug(
+                        f"CONSTRAINT '{component_name}' - skipping carrier resolution"
+                    )
+
                 bus_id = None
                 # CONSTRAINT components don't connect to buses
-                if row.get('bus') and component_type != 'CONSTRAINT':
-                    bus_name = str(row['bus']).strip()
+                if row.get("bus") and component_type != "CONSTRAINT":
+                    bus_name = str(row["bus"]).strip()
                     bus_id = bus_name_to_id.get(bus_name)
                     if bus_id is None:
-                        self.logger.warning(f"Bus '{bus_name}' not found for component '{component_name}'. Available buses: {list(bus_name_to_id.keys())}")
-
+                        self.logger.warning(
+                            f"Bus '{bus_name}' not found for component '{component_name}'. Available buses: {list(bus_name_to_id.keys())}"
+                        )
+
                 bus0_id = None
-                if row.get('bus0') and component_type != 'CONSTRAINT':
-                    bus0_name = str(row['bus0']).strip()
+                if row.get("bus0") and component_type != "CONSTRAINT":
+                    bus0_name = str(row["bus0"]).strip()
                     bus0_id = bus_name_to_id.get(bus0_name)
                     if bus0_id is None:
-                        self.logger.warning(f"Bus0 '{bus0_name}' not found for component '{component_name}'. Available buses: {list(bus_name_to_id.keys())}")
-
+                        self.logger.warning(
+                            f"Bus0 '{bus0_name}' not found for component '{component_name}'. Available buses: {list(bus_name_to_id.keys())}"
+                        )
+
                 bus1_id = None
-                if row.get('bus1') and component_type != 'CONSTRAINT':
-                    bus1_name = str(row['bus1']).strip()
+                if row.get("bus1") and component_type != "CONSTRAINT":
+                    bus1_name = str(row["bus1"]).strip()
                     bus1_id = bus_name_to_id.get(bus1_name)
                     if bus1_id is None:
-                        self.logger.warning(f"Bus1 '{bus1_name}' not found for component '{component_name}'. Available buses: {list(bus_name_to_id.keys())}")
-
+                        self.logger.warning(
+                            f"Bus1 '{bus1_name}' not found for component '{component_name}'. Available buses: {list(bus_name_to_id.keys())}"
+                        )
+
                 # Check if component exists
                 if component_name in existing_names and update_existing:
                     # Update existing component
-                    existing_comp = next(c for c in existing_components if c.name == component_name)
-
+                    existing_comp = next(
+                        c for c in existing_components if c.name == component_name
+                    )
+
                     try:
                         # Update component using the proper function
                         # CONSTRAINT components must have carrier_id=None per database schema
-                        final_carrier_id = None if component_type == 'CONSTRAINT' else carrier_id
+                        final_carrier_id = (
+                            None if component_type == "CONSTRAINT" else carrier_id
+                        )
                         update_component(
                             conn,
                             existing_comp.id,
@@ -566,41 +649,52 @@ class ExcelModelImporter:
                             bus_id=bus_id,
                             bus0_id=bus0_id,
                             bus1_id=bus1_id,
-                            latitude=row.get('latitude'),
-                            longitude=row.get('longitude')
+                            latitude=row.get("latitude"),
+                            longitude=row.get("longitude"),
                         )
-
+
                         # Update attributes
-                        self._update_component_attributes(conn, existing_comp.id, row, scenario_id)
-                        imported['updated'] += 1
-
+                        self._update_component_attributes(
+                            conn, existing_comp.id, row, scenario_id
+                        )
+                        imported["updated"] += 1
+
                     except Exception as e:
-                        self.logger.error(f"Failed to update component '{component_name}': {e}")
-                        imported['errors'] += 1
+                        self.logger.error(
+                            f"Failed to update component '{component_name}': {e}"
+                        )
+                        imported["errors"] += 1
                         continue
-
+
                 elif component_name not in existing_names and add_new:
                     # Create new component using the proper function
                     # CONSTRAINT components must have carrier_id=None per database schema
-                    final_carrier_id = None if component_type == 'CONSTRAINT' else carrier_id
-
+                    final_carrier_id = (
+                        None if component_type == "CONSTRAINT" else carrier_id
+                    )
+
                     # Handle latitude/longitude - CONSTRAINT components don't have location
-                    if component_type == 'CONSTRAINT':
+                    if component_type == "CONSTRAINT":
                         lat_val = None
                         lon_val = None
-                        self.logger.debug(f"CONSTRAINT '{component_name}' - setting latitude/longitude to None")
+                        self.logger.debug(
+                            f"CONSTRAINT '{component_name}' - setting latitude/longitude to None"
+                        )
                     else:
                         # Clean empty strings for other component types
-                        lat_val = row.get('latitude')
-                        lon_val = row.get('longitude')
-                        if lat_val == '' or (isinstance(lat_val, str) and lat_val.strip() == ''):
+                        lat_val = row.get("latitude")
+                        lon_val = row.get("longitude")
+                        if lat_val == "" or (
+                            isinstance(lat_val, str) and lat_val.strip() == ""
+                        ):
                            lat_val = None
-                        if lon_val == '' or (isinstance(lon_val, str) and lon_val.strip() == ''):
+                        if lon_val == "" or (
+                            isinstance(lon_val, str) and lon_val.strip() == ""
+                        ):
                            lon_val = None
-
+
                    component_id = create_component(
                        conn,
-                        network_id,
                        component_type,
                        component_name,
                        longitude=lon_val,
@@ -608,193 +702,290 @@ class ExcelModelImporter:
                         carrier_id=final_carrier_id,
                         bus_id=bus_id,
                         bus0_id=bus0_id,
-                        bus1_id=bus1_id
+                        bus1_id=bus1_id,
                     )
-
+
                     # Set attributes
                     self._set_component_attributes(conn, component_id, row, scenario_id)
-                    imported['created'] += 1
-
+                    imported["created"] += 1
+
             except Exception as e:
-                self.logger.error(f"Failed to import component '{component_name}' of type '{component_type}': {e}")
-                self.logger.error(f"Component data: name='{component_name}', carrier_id={carrier_id}, bus_id={bus_id}, bus0_id={bus0_id}, bus1_id={bus1_id}")
-                imported['errors'] += 1
-
+                self.logger.error(
+                    f"Failed to import component '{component_name}' of type '{component_type}': {e}"
+                )
+                self.logger.error(
+                    f"Component data: name='{component_name}', carrier_id={carrier_id}, bus_id={bus_id}, bus0_id={bus0_id}, bus1_id={bus1_id}"
+                )
+                imported["errors"] += 1
+
         return imported
-
-    def _update_component_attributes(self, conn, component_id: int, row: pd.Series, scenario_id: int):
+
+    def _update_component_attributes(
+        self, conn, component_id: int, row: pd.Series, scenario_id: int
+    ):
         """Update attributes for an existing component"""
         # Get validation rules for this component type
         cursor = conn.execute(
-            "SELECT component_type FROM components WHERE id = ?",
-            (component_id,)
+            "SELECT component_type FROM components WHERE id = ?", (component_id,)
         )
         component_type = cursor.fetchone()[0]
-
+
         # Process each column as potential attribute
         for column, value in row.items():
-            if column in ['name', 'carrier', 'bus', 'bus0', 'bus1', 'latitude', 'longitude', 'type']:
+            if column in [
+                "name",
+                "carrier",
+                "bus",
+                "bus0",
+                "bus1",
+                "latitude",
+                "longitude",
+                "type",
+            ]:
                 continue  # Skip basic fields
-
-            if value == '[timeseries]':
+
+            if value == "[timeseries]":
                 continue  # Skip timeseries markers
-
+
             # Check if this is a valid attribute
             validation_rule = get_validation_rule(conn, component_type, column)
             if validation_rule:
                 # Handle blank cells (empty strings or NaN) - these should unset the attribute
-                if pd.isna(value) or value == '':
+                if pd.isna(value) or value == "":
                     try:
                         delete_attribute(conn, component_id, column, scenario_id)
-                        self.logger.debug(f"Unset attribute '{column}' for component {component_id} due to blank cell")
+                        self.logger.debug(
+                            f"Unset attribute '{column}' for component {component_id} due to blank cell"
+                        )
                     except Exception as e:
                         # Attribute might not exist, which is fine
-                        self.logger.debug(f"Could not unset attribute '{column}' for component {component_id}: {e}")
+                        self.logger.debug(
+                            f"Could not unset attribute '{column}' for component {component_id}: {e}"
+                        )
                 else:
                     # Set the attribute with the provided value
-                    self._set_single_attribute(conn, component_id, column, value, validation_rule, scenario_id)
-
-    def _set_component_attributes(self, conn, component_id: int, row: pd.Series, scenario_id: int):
+                    self._set_single_attribute(
+                        conn, component_id, column, value, validation_rule, scenario_id
+                    )
+
+    def _set_component_attributes(
+        self, conn, component_id: int, row: pd.Series, scenario_id: int
+    ):
         """Set attributes for a new component"""
         # Get validation rules for this component type
         cursor = conn.execute(
-            "SELECT component_type FROM components WHERE id = ?",
-            (component_id,)
+            "SELECT component_type FROM components WHERE id = ?", (component_id,)
        )
         component_type = cursor.fetchone()[0]
-
+
         # Process each column as potential attribute
         for column, value in row.items():
-            if column in ['name', 'carrier', 'bus', 'bus0', 'bus1', 'latitude', 'longitude', 'type']:
+            if column in [
+                "name",
+                "carrier",
+                "bus",
+                "bus0",
+                "bus1",
+                "latitude",
+                "longitude",
+                "type",
+            ]:
                 continue  # Skip basic fields
-
-            if value == '[timeseries]':
+
+            if value == "[timeseries]":
                 continue  # Skip timeseries markers
-
+
             # Check if this is a valid attribute
             validation_rule = get_validation_rule(conn, component_type, column)
             if validation_rule:
                 # For new components, only set attributes that have actual values
                 # Blank cells (empty strings or NaN) are left unset (which is the default state)
-                if not (pd.isna(value) or value == ''):
+                if not (pd.isna(value) or value == ""):
                     # Set the attribute with the provided value
-                    self._set_single_attribute(conn, component_id, column, value, validation_rule, scenario_id)
-
-    def _set_single_attribute(self, conn, component_id: int, attr_name: str, value: Any, validation_rule: Dict, scenario_id: int):
+                    self._set_single_attribute(
+                        conn, component_id, column, value, validation_rule, scenario_id
+                    )
+
+    def _set_single_attribute(
+        self,
+        conn,
+        component_id: int,
+        attr_name: str,
+        value: Any,
+        validation_rule: Dict,
+        scenario_id: int,
+    ):
         """Set a single attribute with proper type conversion"""
-        data_type = validation_rule.data_type if hasattr(validation_rule, 'data_type') else validation_rule.get('data_type', 'string')
-
+        data_type = (
+            validation_rule.data_type
+            if hasattr(validation_rule, "data_type")
+            else validation_rule.get("data_type", "string")
+        )
+
         try:
-            if data_type == 'float':
+            if data_type == "float":
                 static_value = StaticValue(float(value))
-                set_static_attribute(conn, component_id, attr_name, static_value, scenario_id)
-            elif data_type == 'int':
+                set_static_attribute(
+                    conn, component_id, attr_name, static_value, scenario_id
+                )
+            elif data_type == "int":
                 static_value = StaticValue(int(value))
-                set_static_attribute(conn, component_id, attr_name, static_value, scenario_id)
-            elif data_type == 'boolean':
-                bool_value = str(value).lower() in ['true', '1', 'yes']
+                set_static_attribute(
+                    conn, component_id, attr_name, static_value, scenario_id
+                )
+            elif data_type == "boolean":
+                bool_value = str(value).lower() in ["true", "1", "yes"]
                 static_value = StaticValue(bool_value)
-                set_static_attribute(conn, component_id, attr_name, static_value, scenario_id)
+                set_static_attribute(
+                    conn, component_id, attr_name, static_value, scenario_id
+                )
             else:  # string
                 static_value = StaticValue(str(value))
-                set_static_attribute(conn, component_id, attr_name, static_value, scenario_id)
+                set_static_attribute(
+                    conn, component_id, attr_name, static_value, scenario_id
+                )
         except (AttributeNotFound, ValidationError):
             # Skip missing attributes or validation errors silently (same as PyPSA solver)
             pass
         except Exception as e:
-            self.logger.warning(f"Failed to set attribute {attr_name} for component {component_id}: {e}")
-
-    def _import_timeseries_data(self, conn, network_id: int, excel_data: Dict, scenario_id: int) -> Dict[str, Any]:
+            self.logger.warning(
+                f"Failed to set attribute {attr_name} for component {component_id}: {e}"
+            )
+
+    def _import_timeseries_data(
+        self, conn, excel_data: Dict, scenario_id: int
+    ) -> Dict[str, Any]:
         """Import timeseries data from Excel sheets"""
-        imported = {'attributes': 0, 'errors': 0}
-
+        imported = {"attributes": 0, "errors": 0}
+
         # Get network time periods for timestamp mapping
-        network_time_periods = get_network_time_periods(conn, network_id)
-        time_period_map = {period.formatted_time: period for period in network_time_periods}
-
+        network_time_periods = get_network_time_periods(conn)
+        time_period_map = {
+            period.formatted_time: period for period in network_time_periods
+        }
+
         expected_length = len(network_time_periods)
-        self.logger.info(f"TIMESERIES DEBUG: Network has {expected_length} time periods for timeseries import")
+        self.logger.info(
+            f"TIMESERIES DEBUG: Network has {expected_length} time periods for timeseries import"
+        )
         if network_time_periods:
-            self.logger.info(f"TIMESERIES DEBUG: Time period range: {network_time_periods[0].formatted_time} to {network_time_periods[-1].formatted_time}")
+            self.logger.info(
+                f"TIMESERIES DEBUG: Time period range: {network_time_periods[0].formatted_time} to {network_time_periods[-1].formatted_time}"
+            )
         else:
-            self.logger.error("TIMESERIES DEBUG: NO TIME PERIODS FOUND! Timeseries import will fail.")
+            self.logger.error(
+                "TIMESERIES DEBUG: NO TIME PERIODS FOUND! Timeseries import will fail."
+            )
             return imported
-
+
         # Look for timeseries sheets
         for sheet_name, df in excel_data.items():
-            if 'Timeseries' in sheet_name and not df.empty:
-                self.logger.info(f"TIMESERIES DEBUG: Processing sheet '{sheet_name}' with {len(df)} rows")
-                component_type = self._get_component_type_from_sheet(sheet_name.replace(' Timeseries', ''))
-
+            if "Timeseries" in sheet_name and not df.empty:
+                self.logger.info(
+                    f"TIMESERIES DEBUG: Processing sheet '{sheet_name}' with {len(df)} rows"
+                )
+                component_type = self._get_component_type_from_sheet(
+                    sheet_name.replace(" Timeseries", "")
+                )
+
                 # Get timestamps
-                timestamps = df.get('timestamp', [])
+                timestamps = df.get("timestamp", [])
                 if timestamps.empty:
-                    self.logger.warning(f"TIMESERIES DEBUG: No timestamp column found in {sheet_name}")
+                    self.logger.warning(
+                        f"TIMESERIES DEBUG: No timestamp column found in {sheet_name}"
+                    )
                     continue
-
+
                 excel_ts_length = len(timestamps)
-                self.logger.info(f"TIMESERIES DEBUG: Sheet '{sheet_name}' has {excel_ts_length} timestamps (expected: {expected_length})")
+                self.logger.info(
+                    f"TIMESERIES DEBUG: Sheet '{sheet_name}' has {excel_ts_length} timestamps (expected: {expected_length})"
+                )
                 if excel_ts_length != expected_length:
-                    self.logger.warning(f"TIMESERIES DEBUG: LENGTH MISMATCH in sheet '{sheet_name}': Excel has {excel_ts_length}, network expects {expected_length} (difference: {excel_ts_length - expected_length})")
-
+                    self.logger.warning(
+                        f"TIMESERIES DEBUG: LENGTH MISMATCH in sheet '{sheet_name}': Excel has {excel_ts_length}, network expects {expected_length} (difference: {excel_ts_length - expected_length})"
+                    )
+
                 # Log timestamp range for debugging
                 if len(timestamps) > 0:
                     first_ts = str(timestamps.iloc[0]).strip()
                     last_ts = str(timestamps.iloc[-1]).strip()
-                    self.logger.info(f"TIMESERIES DEBUG: Sheet timestamp range: '{first_ts}' to '{last_ts}'")
-
+                    self.logger.info(
+                        f"TIMESERIES DEBUG: Sheet timestamp range: '{first_ts}' to '{last_ts}'"
+                    )
+
                 # Process each column (except timestamp)
                 for column in df.columns:
-                    if column == 'timestamp':
+                    if column == "timestamp":
                         continue
-
+
                     # Parse component name and attribute from column name
                     # Format: "Component Name_attribute_name"
                     # We need to find the last underscore that separates component name from attribute
-                    if '_' in column:
+                    if "_" in column:
                         # Find all components of this type to match against
-                        components = list_components_by_type(conn, network_id, component_type)
+                        components = list_components_by_type(conn, component_type)
                         component_names = [c.name for c in components]
-
+
                         # Try to find the component name by matching against known components
                         component_name = None
                         attr_name = None
-
+
                         for comp_name in component_names:
                             # Check if column starts with component name + underscore
                             prefix = f"{comp_name}_"
                             if column.startswith(prefix):
                                 component_name = comp_name
-                                attr_name = column[len(prefix):]
+                                attr_name = column[len(prefix) :]
                                 break
-
+
                         if component_name and attr_name:
                             # Find component by name
-                            component = next((c for c in components if c.name == component_name), None)
-
+                            component = next(
+                                (c for c in components if c.name == component_name),
+                                None,
+                            )
+
                             if component:
                                 # Create timeseries data using efficient array format
                                 timeseries_values = []
                                 filled_missing_values = 0
-
+
                                 # Debug: Show first few timestamps for comparison
                                 if len(timestamps) > 0 and len(network_time_periods) > 0:
                                     excel_first = str(timestamps.iloc[0]).strip()
-                                    excel_last = str(timestamps.iloc[-1]).strip() if len(timestamps) > 1 else excel_first
+                                    excel_last = (
+                                        str(timestamps.iloc[-1]).strip()
+                                        if len(timestamps) > 1
+                                        else excel_first
+                                    )
                                     network_first = network_time_periods[0].formatted_time
-                                    network_last = network_time_periods[-1].formatted_time if len(network_time_periods) > 1 else network_first
-
-                                    self.logger.info(f"TIMESERIES DEBUG: Timestamp comparison for '{component_name}.{attr_name}':")
-                                    self.logger.info(f"  Excel range: '{excel_first}' to '{excel_last}' ({len(timestamps)} periods)")
-                                    self.logger.info(f"  Network range: '{network_first}' to '{network_last}' ({len(network_time_periods)} periods)")
-
+                                    network_last = (
+                                        network_time_periods[-1].formatted_time
+                                        if len(network_time_periods) > 1
+                                        else network_first
+                                    )
+
+                                    self.logger.info(
+                                        f"TIMESERIES DEBUG: Timestamp comparison for '{component_name}.{attr_name}':"
+                                    )
+                                    self.logger.info(
+                                        f"  Excel range: '{excel_first}' to '{excel_last}' ({len(timestamps)} periods)"
+                                    )
+                                    self.logger.info(
+                                        f"  Network range: '{network_first}' to '{network_last}' ({len(network_time_periods)} periods)"
+                                    )
+
                                 # Take the first N values from Excel where N = expected network periods
                                 # This puts responsibility on user to format Excel correctly
-                                max_periods = min(len(timestamps), len(network_time_periods), len(df[column]))
-
+                                max_periods = min(
+                                    len(timestamps),
+                                    len(network_time_periods),
+                                    len(df[column]),
+                                )
+
                                 for i in range(max_periods):
                                     value = df[column].iloc[i]
-
+
                                     # Handle missing values - use 0.0 as default
                                     if pd.isna(value):
                                         actual_value = 0.0
@@ -805,251 +996,320 @@ class ExcelModelImporter:
  except (ValueError, TypeError):
  actual_value = 0.0
  filled_missing_values += 1
-
+
  timeseries_values.append(actual_value)
-
+
  final_ts_length = len(timeseries_values)
- self.logger.info(f"TIMESERIES DEBUG: Component '{component_name}.{attr_name}': "
- f"Excel rows={excel_ts_length}, "
- f"Network periods={expected_length}, "
- f"Used={max_periods}, "
- f"Filled missing={filled_missing_values}, "
- f"Final length={final_ts_length}")
-
+ self.logger.info(
+ f"TIMESERIES DEBUG: Component '{component_name}.{attr_name}': "
+ f"Excel rows={excel_ts_length}, "
+ f"Network periods={expected_length}, "
+ f"Used={max_periods}, "
+ f"Filled missing={filled_missing_values}, "
+ f"Final length={final_ts_length}"
+ )
+
  if filled_missing_values > 0:
- self.logger.warning(f"TIMESERIES DEBUG: Filled {filled_missing_values} missing/invalid values with 0.0 for '{component_name}.{attr_name}'")
-
+ self.logger.warning(
+ f"TIMESERIES DEBUG: Filled {filled_missing_values} missing/invalid values with 0.0 for '{component_name}.{attr_name}'"
+ )
+
  if excel_ts_length != expected_length:
- self.logger.warning(f"TIMESERIES DEBUG: LENGTH MISMATCH for '{component_name}.{attr_name}': "
- f"Excel has {excel_ts_length} rows, network expects {expected_length} periods")
-
+ self.logger.warning(
+ f"TIMESERIES DEBUG: LENGTH MISMATCH for '{component_name}.{attr_name}': "
+ f"Excel has {excel_ts_length} rows, network expects {expected_length} periods"
+ )
+
  if final_ts_length != expected_length:
- self.logger.warning(f"TIMESERIES DEBUG: FINAL LENGTH MISMATCH for '{component_name}.{attr_name}': "
- f"Expected {expected_length}, got {final_ts_length} (difference: {final_ts_length - expected_length})")
-
+ self.logger.warning(
+ f"TIMESERIES DEBUG: FINAL LENGTH MISMATCH for '{component_name}.{attr_name}': "
+ f"Expected {expected_length}, got {final_ts_length} (difference: {final_ts_length - expected_length})"
+ )
+
  if timeseries_values:
  try:
  # Use new efficient timeseries API
  set_timeseries_conn(
- conn, component.id, attr_name, timeseries_values, scenario_id
+ conn,
+ component.id,
+ attr_name,
+ timeseries_values,
+ scenario_id,
+ )
+ imported["attributes"] += 1
+ self.logger.info(
+ f"TIMESERIES DEBUG: Successfully imported {final_ts_length} points for '{component_name}.{attr_name}'"
  )
- imported['attributes'] += 1
- self.logger.info(f"TIMESERIES DEBUG: Successfully imported {final_ts_length} points for '{component_name}.{attr_name}'")
  except Exception as e:
- self.logger.error(f"TIMESERIES DEBUG: Failed to set timeseries attribute {attr_name} for {component_name}: {e}")
- imported['errors'] += 1
+ self.logger.error(
+ f"TIMESERIES DEBUG: Failed to set timeseries attribute {attr_name} for {component_name}: {e}"
+ )
+ imported["errors"] += 1
  else:
- self.logger.warning(f"TIMESERIES DEBUG: No valid timeseries data found for {component_name}.{attr_name}")
+ self.logger.warning(
+ f"TIMESERIES DEBUG: No valid timeseries data found for {component_name}.{attr_name}"
+ )
  else:
- self.logger.warning(f"TIMESERIES DEBUG: Component '{component_name}' not found for timeseries import")
+ self.logger.warning(
+ f"TIMESERIES DEBUG: Component '{component_name}' not found for timeseries import"
+ )
  else:
- self.logger.warning(f"TIMESERIES DEBUG: Could not parse column '{column}' into component and attribute names")
+ self.logger.warning(
+ f"TIMESERIES DEBUG: Could not parse column '{column}' into component and attribute names"
+ )
  else:
- self.logger.warning(f"TIMESERIES DEBUG: Column '{column}' does not contain underscore separator")
-
+ self.logger.warning(
+ f"TIMESERIES DEBUG: Column '{column}' does not contain underscore separator"
+ )
+
  return imported
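
The truncate-and-fill rule above is the crux of the timeseries import: the importer keeps at most min(len(timestamps), len(network_time_periods), len(df[column])) values and substitutes 0.0 for any cell pandas cannot read as a number. A minimal standalone sketch of that rule, with an illustrative helper name and arguments that are not part of the pyconvexity API:

    import pandas as pd

    def clip_and_fill(series: pd.Series, timestamps, network_periods):
        # Truncate to the shortest of the three axes, as the importer does.
        max_periods = min(len(timestamps), len(network_periods), len(series))
        values, filled = [], 0
        for i in range(max_periods):
            raw = series.iloc[i]
            if pd.isna(raw):
                values.append(0.0)  # missing cell -> default 0.0
                filled += 1
            else:
                try:
                    values.append(float(raw))
                except (ValueError, TypeError):
                    values.append(0.0)  # unparseable cell -> default 0.0
                    filled += 1
        return values, filled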
-
- def _import_network_config(self, conn, network_id: int, config_df: pd.DataFrame) -> Dict[str, Any]:
+
+ def _import_network_config(self, conn, config_df: pd.DataFrame) -> Dict[str, Any]:
  """Import network configuration from Excel"""
- imported = {'parameters': 0, 'errors': 0}
-
+ imported = {"parameters": 0, "errors": 0}
+
  # Handle case where config_df might be a list (when sheet doesn't exist)
  if not isinstance(config_df, pd.DataFrame):
- self.logger.info("No Network Config sheet found, using default configuration")
+ self.logger.info(
+ "No Network Config sheet found, using default configuration"
+ )
  # Set default network configuration
  default_config = {
- 'unmet_load_active': True,
- 'discount_rate': 0.01,
- 'solver_name': 'highs',
- 'currency': 'USD'
+ "unmet_load_active": True,
+ "discount_rate": 0.01,
+ "solver_name": "highs",
+ "currency": "USD",
  }
-
+
  for param_name, param_value in default_config.items():
  try:
  if isinstance(param_value, bool):
- param_type = 'boolean'
+ param_type = "boolean"
  elif isinstance(param_value, float):
- param_type = 'real'
+ param_type = "real"
  elif isinstance(param_value, int):
- param_type = 'integer'
+ param_type = "integer"
  else:
- param_type = 'string'
-
+ param_type = "string"
+
  set_network_config(
- conn,
- network_id,
- param_name,
- param_value,
- param_type,
+ conn,
+ param_name,
+ param_value,
+ param_type,
  scenario_id=None, # Network default
- description=f"Default {param_name} setting"
+ description=f"Default {param_name} setting",
  )
- imported['parameters'] += 1
- self.logger.info(f"Set default network config: {param_name} = {param_value}")
-
+ imported["parameters"] += 1
+ self.logger.info(
+ f"Set default network config: {param_name} = {param_value}"
+ )
+
  except Exception as e:
- self.logger.error(f"Failed to set default network config parameter {param_name}: {e}")
- imported['errors'] += 1
-
+ self.logger.error(
+ f"Failed to set default network config parameter {param_name}: {e}"
+ )
+ imported["errors"] += 1
+
  return imported
-
+
  if config_df.empty:
- self.logger.info("Network Config sheet is empty, using default configuration")
+ self.logger.info(
+ "Network Config sheet is empty, using default configuration"
+ )
  # Set default network configuration
  default_config = {
- 'unmet_load_active': True,
- 'discount_rate': 0.01,
- 'solver_name': 'default',
- 'currency': 'USD'
+ "unmet_load_active": True,
+ "discount_rate": 0.01,
+ "solver_name": "default",
+ "currency": "USD",
  }
-
+
  for param_name, param_value in default_config.items():
  try:
  if isinstance(param_value, bool):
- param_type = 'boolean'
+ param_type = "boolean"
  elif isinstance(param_value, float):
- param_type = 'real'
+ param_type = "real"
  elif isinstance(param_value, int):
- param_type = 'integer'
+ param_type = "integer"
  else:
- param_type = 'string'
-
+ param_type = "string"
+
  set_network_config(
- conn,
- network_id,
- param_name,
- param_value,
- param_type,
+ conn,
+ param_name,
+ param_value,
+ param_type,
  scenario_id=None, # Network default
- description=f"Default {param_name} setting"
+ description=f"Default {param_name} setting",
+ )
+ imported["parameters"] += 1
+ self.logger.info(
+ f"Set default network config: {param_name} = {param_value}"
  )
- imported['parameters'] += 1
- self.logger.info(f"Set default network config: {param_name} = {param_value}")
-
+
  except Exception as e:
- self.logger.error(f"Failed to set default network config parameter {param_name}: {e}")
- imported['errors'] += 1
-
+ self.logger.error(
+ f"Failed to set default network config parameter {param_name}: {e}"
+ )
+ imported["errors"] += 1
+
  return imported
-
+
  for _, row in config_df.iterrows():
  try:
- param_name = str(row.get('Parameter', '')).strip()
- param_value = row.get('Value', '')
- param_type = str(row.get('Type', 'string')).strip()
- param_description = str(row.get('Description', '')).strip()
-
+ param_name = str(row.get("Parameter", "")).strip()
+ param_value = row.get("Value", "")
+ param_type = str(row.get("Type", "string")).strip()
+ param_description = str(row.get("Description", "")).strip()
+
  if not param_name:
  continue
-
+
  # Validate parameter type and map Python types to database types
- valid_types = {'boolean', 'real', 'integer', 'string', 'json'}
-
+ valid_types = {"boolean", "real", "integer", "string", "json"}
+
  # Map Python type names to database type names
  type_mapping = {
- 'bool': 'boolean',
- 'float': 'real',
- 'int': 'integer',
- 'str': 'string'
+ "bool": "boolean",
+ "float": "real",
+ "int": "integer",
+ "str": "string",
  }
-
+
  # Convert Python type name to database type name if needed
  if param_type in type_mapping:
  param_type = type_mapping[param_type]
-
+
  if param_type not in valid_types:
- self.logger.error(f"Invalid parameter type '{param_type}' for parameter '{param_name}'. Must be one of {valid_types}")
- imported['errors'] += 1
+ self.logger.error(
+ f"Invalid parameter type '{param_type}' for parameter '{param_name}'. Must be one of {valid_types}"
+ )
+ imported["errors"] += 1
  continue
-
+
  # Convert value based on type
  try:
- if param_type == 'boolean':
+ if param_type == "boolean":
  # Handle various boolean representations
  if isinstance(param_value, bool):
  converted_value = param_value
  elif isinstance(param_value, str):
- converted_value = param_value.lower() in {'true', '1', 'yes', 'on'}
+ converted_value = param_value.lower() in {
+ "true",
+ "1",
+ "yes",
+ "on",
+ }
  elif isinstance(param_value, (int, float)):
  converted_value = bool(param_value)
  else:
  converted_value = False
- elif param_type == 'real':
+ elif param_type == "real":
  converted_value = float(param_value)
- elif param_type == 'integer':
- converted_value = int(float(param_value)) # Handle float strings like "1.0"
- elif param_type == 'json':
+ elif param_type == "integer":
+ converted_value = int(
+ float(param_value)
+ ) # Handle float strings like "1.0"
+ elif param_type == "json":
  if isinstance(param_value, str):
  import json
+
  converted_value = json.loads(param_value)
  else:
  converted_value = param_value
  else: # string
  converted_value = str(param_value)
  except (ValueError, TypeError, json.JSONDecodeError) as e:
- self.logger.error(f"Failed to convert parameter '{param_name}' value '{param_value}' to type '{param_type}': {e}")
- imported['errors'] += 1
+ self.logger.error(
+ f"Failed to convert parameter '{param_name}' value '{param_value}' to type '{param_type}': {e}"
+ )
+ imported["errors"] += 1
  continue
-
+
  # Use the proper set_network_config function from pyconvexity
  set_network_config(
- conn,
- network_id,
- param_name,
- converted_value,
- param_type,
+ conn,
+ param_name,
+ converted_value,
+ param_type,
  scenario_id=None, # Network default
- description=param_description if param_description else None
- )
- imported['parameters'] += 1
-
+ description=param_description if param_description else None,
+ )
+ imported["parameters"] += 1
+
  except Exception as e:
- self.logger.error(f"Failed to import network config parameter {param_name}: {e}")
- imported['errors'] += 1
-
+ self.logger.error(
+ f"Failed to import network config parameter {param_name}: {e}"
+ )
+ imported["errors"] += 1
+
  return imported
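
Each row of the Network Config sheet carries Parameter, Value, Type, and Description columns; Python type names are first aliased to database type names, then the value is coerced before being handed to set_network_config (which, in 0.4.1, no longer takes a network_id). Note that the except clause above references json.JSONDecodeError even though `import json` only runs inside the json branch, so a non-json conversion failure would raise NameError unless json is already imported at module scope. A self-contained sketch of the same coercion, with an illustrative helper name that imports json up front instead:

    import json

    TYPE_ALIASES = {"bool": "boolean", "float": "real", "int": "integer", "str": "string"}
    VALID_TYPES = {"boolean", "real", "integer", "string", "json"}

    def coerce_config_value(value, declared_type: str):
        # Mirrors the sheet-value conversion above; raises on invalid input
        # instead of logging and counting an error.
        declared_type = TYPE_ALIASES.get(declared_type, declared_type)
        if declared_type not in VALID_TYPES:
            raise ValueError(f"unsupported type {declared_type!r}")
        if declared_type == "boolean":
            if isinstance(value, bool):
                return value
            if isinstance(value, str):
                return value.lower() in {"true", "1", "yes", "on"}
            return bool(value) if isinstance(value, (int, float)) else False
        if declared_type == "real":
            return float(value)
        if declared_type == "integer":
            return int(float(value))  # tolerate float strings like "1.0"
        if declared_type == "json":
            return json.loads(value) if isinstance(value, str) else value
        return str(value)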
-
- def _validate_time_axis_compatibility(self, conn, network_id: int, excel_time_config: Dict[str, str]) -> None:
+
+ def _validate_time_axis_compatibility(
+ self, conn, excel_time_config: Dict[str, str]
+ ) -> None:
  """Validate that Excel time axis matches existing network time axis"""
  try:
  # Get existing network info
- existing_network = get_network_info(conn, network_id)
-
+ existing_network = get_network_info(conn)
+
  # Compare time axis parameters
- excel_start = excel_time_config.get('start_time', '').strip()
- excel_end = excel_time_config.get('end_time', '').strip()
- excel_interval = excel_time_config.get('time_resolution', '').strip()
-
- existing_start = existing_network.get('time_start', '').strip()
- existing_end = existing_network.get('time_end', '').strip()
- existing_interval = existing_network.get('time_interval', '').strip()
-
+ excel_start = excel_time_config.get("start_time", "").strip()
+ excel_end = excel_time_config.get("end_time", "").strip()
+ excel_interval = excel_time_config.get("time_resolution", "").strip()
+
+ existing_start = existing_network.get("time_start", "").strip()
+ existing_end = existing_network.get("time_end", "").strip()
+ existing_interval = existing_network.get("time_interval", "").strip()
+
  self.logger.info(f"TIME AXIS DEBUG: Validating time axis compatibility")
- self.logger.info(f"TIME AXIS DEBUG: Excel: {excel_start} to {excel_end}, interval: {excel_interval}")
- self.logger.info(f"TIME AXIS DEBUG: Network: {existing_start} to {existing_end}, interval: {existing_interval}")
-
+ self.logger.info(
+ f"TIME AXIS DEBUG: Excel: {excel_start} to {excel_end}, interval: {excel_interval}"
+ )
+ self.logger.info(
+ f"TIME AXIS DEBUG: Network: {existing_start} to {existing_end}, interval: {existing_interval}"
+ )
+
  # Skip validation if Excel doesn't have time information (allow partial updates)
  if not excel_start or not excel_end or not excel_interval:
- self.logger.warning("TIME AXIS DEBUG: Excel Overview sheet missing time axis information - skipping validation")
- self.logger.warning("TIME AXIS DEBUG: Assuming Excel data is compatible with existing network time axis")
+ self.logger.warning(
+ "TIME AXIS DEBUG: Excel Overview sheet missing time axis information - skipping validation"
+ )
+ self.logger.warning(
+ "TIME AXIS DEBUG: Assuming Excel data is compatible with existing network time axis"
+ )
  return
-
+
  # Normalize case and format for time interval comparison
  excel_interval_normalized = self._normalize_time_interval(excel_interval)
- existing_interval_normalized = self._normalize_time_interval(existing_interval)
-
- self.logger.info(f"TIME AXIS DEBUG: Normalized intervals - Excel: '{excel_interval_normalized}', Network: '{existing_interval_normalized}'")
-
+ existing_interval_normalized = self._normalize_time_interval(
+ existing_interval
+ )
+
+ self.logger.info(
+ f"TIME AXIS DEBUG: Normalized intervals - Excel: '{excel_interval_normalized}', Network: '{existing_interval_normalized}'"
+ )
+
  # Check if they match
- if (excel_start != existing_start or
- excel_end != existing_end or
- excel_interval_normalized != existing_interval_normalized):
-
+ if (
+ excel_start != existing_start
+ or excel_end != existing_end
+ or excel_interval_normalized != existing_interval_normalized
+ ):
+
  self.logger.error(f"TIME AXIS DEBUG: MISMATCH DETECTED!")
- self.logger.error(f"TIME AXIS DEBUG: Start times - Excel: '{excel_start}', Network: '{existing_start}' (match: {excel_start == existing_start})")
- self.logger.error(f"TIME AXIS DEBUG: End times - Excel: '{excel_end}', Network: '{existing_end}' (match: {excel_end == existing_end})")
- self.logger.error(f"TIME AXIS DEBUG: Intervals - Excel: '{excel_interval_normalized}', Network: '{existing_interval_normalized}' (match: {excel_interval_normalized == existing_interval_normalized})")
-
+ self.logger.error(
+ f"TIME AXIS DEBUG: Start times - Excel: '{excel_start}', Network: '{existing_start}' (match: {excel_start == existing_start})"
+ )
+ self.logger.error(
+ f"TIME AXIS DEBUG: End times - Excel: '{excel_end}', Network: '{existing_end}' (match: {excel_end == existing_end})"
+ )
+ self.logger.error(
+ f"TIME AXIS DEBUG: Intervals - Excel: '{excel_interval_normalized}', Network: '{existing_interval_normalized}' (match: {excel_interval_normalized == existing_interval_normalized})"
+ )
+
  raise ValueError(
  f"Time axis mismatch! "
  f"Excel has {excel_start} to {excel_end} ({excel_interval}), "
@@ -1057,53 +1317,65 @@ class ExcelModelImporter:
  f"Time axis must match exactly when importing into an existing network."
  )
  else:
- self.logger.info(f"TIME AXIS DEBUG: Time axis validation PASSED - Excel and network time axes match")
-
+ self.logger.info(
+ f"TIME AXIS DEBUG: Time axis validation PASSED - Excel and network time axes match"
+ )
+
  except Exception as e:
  if "Time axis mismatch" in str(e):
  raise # Re-raise validation errors
  else:
  # Log other errors but don't fail the import
  self.logger.error(f"Error during time axis validation: {e}")
- self.logger.warning("Continuing with import despite time axis validation error")
-
+ self.logger.warning(
+ "Continuing with import despite time axis validation error"
+ )
+
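
Stripped of its logging, the validation reduces to a three-way string comparison after interval normalization: the Excel Overview sheet supplies start_time, end_time, and time_resolution, while the network record supplies time_start, time_end, and time_interval. A compressed sketch of the same predicate, with an illustrative helper name:

    def axes_match(excel_cfg: dict, network_info: dict, normalize) -> bool:
        # All three fields must agree; intervals are compared after normalization.
        return (
            excel_cfg["start_time"] == network_info["time_start"]
            and excel_cfg["end_time"] == network_info["time_end"]
            and normalize(excel_cfg["time_resolution"])
            == normalize(network_info["time_interval"])
        )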
  def _normalize_time_interval(self, interval: str) -> str:
  """Normalize time interval format for comparison"""
  interval = interval.strip().upper()
-
+
  # Handle common variations
- if interval in ['H', '1H', 'PT1H', 'HOURLY']:
- return 'H'
- elif interval in ['D', '1D', 'P1D', 'DAILY']:
- return 'D'
- elif interval.endswith('H') and interval[:-1].isdigit():
+ if interval in ["H", "1H", "PT1H", "HOURLY"]:
+ return "H"
+ elif interval in ["D", "1D", "P1D", "DAILY"]:
+ return "D"
+ elif interval.endswith("H") and interval[:-1].isdigit():
  return interval # Already normalized (e.g., '2H', '3H')
- elif interval.startswith('PT') and interval.endswith('H'):
+ elif interval.startswith("PT") and interval.endswith("H"):
  # Convert PT3H -> 3H
  hours = interval[2:-1]
- return f'{hours}H'
-
+ return f"{hours}H"
+
  return interval
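
The branch order above matters: "PT1H" is caught by the hourly aliases before the generic PT...H rule fires, so it maps to "H" rather than "1H". A module-level copy of the same rules, for illustration only, with a few checked examples:

    def normalize_time_interval(interval: str) -> str:
        # Standalone restatement of the method above.
        interval = interval.strip().upper()
        if interval in ["H", "1H", "PT1H", "HOURLY"]:
            return "H"
        if interval in ["D", "1D", "P1D", "DAILY"]:
            return "D"
        if interval.endswith("H") and interval[:-1].isdigit():
            return interval  # already normalized, e.g. "2H", "3H"
        if interval.startswith("PT") and interval.endswith("H"):
            return f"{interval[2:-1]}H"  # "PT3H" -> "3H"
        return interval

    assert normalize_time_interval("hourly") == "H"
    assert normalize_time_interval("PT3H") == "3H"
    assert normalize_time_interval("p1d") == "D"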
-
- def _calculate_import_stats(self, carriers_imported: Dict, components_imported: Dict,
- timeseries_imported: Dict, config_imported: Dict) -> Dict[str, Any]:
+
+ def _calculate_import_stats(
+ self,
+ carriers_imported: Dict,
+ components_imported: Dict,
+ timeseries_imported: Dict,
+ config_imported: Dict,
+ ) -> Dict[str, Any]:
  """Calculate import statistics"""
- total_created = carriers_imported['created'] + sum(
- comp['created'] for comp in components_imported.values()
+ total_created = carriers_imported["created"] + sum(
+ comp["created"] for comp in components_imported.values()
  )
- total_updated = carriers_imported['updated'] + sum(
- comp['updated'] for comp in components_imported.values()
+ total_updated = carriers_imported["updated"] + sum(
+ comp["updated"] for comp in components_imported.values()
  )
- total_errors = carriers_imported['errors'] + sum(
- comp['errors'] for comp in components_imported.values()
- ) + timeseries_imported['errors'] + config_imported['errors']
-
+ total_errors = (
+ carriers_imported["errors"]
+ + sum(comp["errors"] for comp in components_imported.values())
+ + timeseries_imported["errors"]
+ + config_imported["errors"]
+ )
+
  return {
- 'total_created': total_created,
- 'total_updated': total_updated,
- 'total_errors': total_errors,
- 'carriers': carriers_imported,
- 'components': components_imported,
- 'timeseries': timeseries_imported,
- 'network_config': config_imported
+ "total_created": total_created,
+ "total_updated": total_updated,
+ "total_errors": total_errors,
+ "carriers": carriers_imported,
+ "components": components_imported,
+ "timeseries": timeseries_imported,
+ "network_config": config_imported,
  }
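
The roll-up simply sums the per-section counters, with created/updated tallied over carriers and per-type component dicts and errors additionally drawn from the timeseries and config steps. A toy example with illustrative inputs shows the shape of the dicts it expects:

    # Illustrative inputs only; the real dicts come from the import steps above.
    carriers = {"created": 2, "updated": 1, "errors": 0}
    components = {"BUS": {"created": 3, "updated": 0, "errors": 1}}
    timeseries = {"errors": 0}
    config = {"parameters": 4, "errors": 0}

    total_errors = (
        carriers["errors"]
        + sum(c["errors"] for c in components.values())
        + timeseries["errors"]
        + config["errors"]
    )
    assert total_errors == 1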