pyconvexity 0.1.1__py3-none-any.whl → 0.1.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (35)
  1. pyconvexity/__init__.py +30 -6
  2. pyconvexity/_version.py +1 -1
  3. pyconvexity/data/README.md +101 -0
  4. pyconvexity/data/__init__.py +18 -0
  5. pyconvexity/data/__pycache__/__init__.cpython-313.pyc +0 -0
  6. pyconvexity/data/loaders/__init__.py +3 -0
  7. pyconvexity/data/loaders/__pycache__/__init__.cpython-313.pyc +0 -0
  8. pyconvexity/data/loaders/__pycache__/cache.cpython-313.pyc +0 -0
  9. pyconvexity/data/loaders/cache.py +212 -0
  10. pyconvexity/data/sources/__init__.py +5 -0
  11. pyconvexity/data/sources/__pycache__/__init__.cpython-313.pyc +0 -0
  12. pyconvexity/data/sources/__pycache__/gem.cpython-313.pyc +0 -0
  13. pyconvexity/data/sources/gem.py +412 -0
  14. pyconvexity/io/__init__.py +32 -0
  15. pyconvexity/io/excel_exporter.py +991 -0
  16. pyconvexity/io/excel_importer.py +1112 -0
  17. pyconvexity/io/netcdf_exporter.py +192 -0
  18. pyconvexity/io/netcdf_importer.py +599 -0
  19. pyconvexity/models/__init__.py +7 -0
  20. pyconvexity/models/attributes.py +3 -1
  21. pyconvexity/models/components.py +3 -0
  22. pyconvexity/models/scenarios.py +177 -0
  23. pyconvexity/solvers/__init__.py +29 -0
  24. pyconvexity/solvers/pypsa/__init__.py +24 -0
  25. pyconvexity/solvers/pypsa/api.py +398 -0
  26. pyconvexity/solvers/pypsa/batch_loader.py +311 -0
  27. pyconvexity/solvers/pypsa/builder.py +656 -0
  28. pyconvexity/solvers/pypsa/constraints.py +321 -0
  29. pyconvexity/solvers/pypsa/solver.py +1255 -0
  30. pyconvexity/solvers/pypsa/storage.py +2207 -0
  31. {pyconvexity-0.1.1.dist-info → pyconvexity-0.1.3.dist-info}/METADATA +5 -2
  32. pyconvexity-0.1.3.dist-info/RECORD +45 -0
  33. pyconvexity-0.1.1.dist-info/RECORD +0 -20
  34. {pyconvexity-0.1.1.dist-info → pyconvexity-0.1.3.dist-info}/WHEEL +0 -0
  35. {pyconvexity-0.1.1.dist-info → pyconvexity-0.1.3.dist-info}/top_level.txt +0 -0
pyconvexity/io/excel_importer.py
@@ -0,0 +1,1112 @@
+ """
+ Excel importer for PyConvexity energy system models.
+ Imports network models from Excel workbooks with multiple sheets.
+ """
+
+ import logging
+ import pandas as pd
+ import numpy as np
+ from typing import Dict, Any, Optional, List, Tuple, Callable
+ from pathlib import Path
+ from datetime import datetime
+ import json
+
+ # Import functions directly from pyconvexity
+ from pyconvexity.core.database import open_connection
+ from pyconvexity.core.types import StaticValue, TimeseriesPoint, CreateNetworkRequest
+ from pyconvexity.core.errors import AttributeNotFound, ValidationError
+ from pyconvexity.models import (
+     list_components_by_type, create_component, update_component, create_network,
+     set_network_config, create_carrier, get_master_scenario_id, get_network_time_periods,
+     list_carriers, set_static_attribute, set_timeseries_attribute, get_bus_name_to_id_map,
+     get_network_info, delete_attribute
+ )
+ from pyconvexity.validation import get_validation_rule
+
+ logger = logging.getLogger(__name__)
+
+ class ExcelModelImporter:
+     """Import network model from Excel workbook"""
+
+     def __init__(self):
+         self.logger = logging.getLogger(__name__)
+
+     def import_model_from_excel(
+         self,
+         db_path: str,
+         excel_path: str,
+         network_id: Optional[int] = None,
+         network_name: Optional[str] = None,
+         network_description: Optional[str] = None,
+         scenario_id: Optional[int] = None,
+         progress_callback: Optional[Callable] = None
+     ) -> Dict[str, Any]:
+         """
+         Import network model from Excel workbook.
+
+         Args:
+             db_path: Database path
+             excel_path: Excel file path
+             network_id: Network ID to import into (if updating existing)
+             network_name: Name for new network (if creating new)
+             network_description: Description for new network (if creating new)
+             scenario_id: Scenario ID (defaults to master scenario)
+             progress_callback: Optional callback for progress updates
+
+         Returns:
+             Import statistics and metadata
+         """
+
+         conn = None
+         try:
+             if progress_callback:
+                 progress_callback(0, "Starting Excel import...")
+
+             # Connect to database
+             conn = open_connection(db_path)
+
+             # Determine if we're creating a new network or updating existing
+             create_new_network = network_id is None
+
+             if create_new_network:
+                 if progress_callback:
+                     progress_callback(3, "Reading Excel Overview sheet...")
+
+                 # Read network configuration from Overview sheet
+                 overview_df = pd.read_excel(excel_path, sheet_name='Overview')
+                 network_config = self._read_overview_sheet(overview_df)
+
+                 self.logger.info(f"Network config from Overview: {network_config}")
+
+                 # Extract network name from Excel if not provided
+                 excel_network_name = network_config.get('name')
+                 if excel_network_name:
+                     final_network_name = excel_network_name
+                     self.logger.info(f"Using network name from Excel: '{final_network_name}'")
+                 elif network_name:
+                     final_network_name = network_name
+                     self.logger.info(f"Using provided network name: '{final_network_name}'")
+                 else:
+                     # Fallback to filename if no name in Excel or provided
+                     final_network_name = Path(excel_path).stem
+                     self.logger.info(f"Using filename as network name: '{final_network_name}'")
+
+                 # Extract description from Excel if not provided
+                 excel_description = network_config.get('description')
+                 if excel_description:
+                     final_description = excel_description
+                     self.logger.info(f"Using description from Excel: '{final_description}'")
+                 elif network_description:
+                     final_description = network_description
+                     self.logger.info(f"Using provided description: '{final_description}'")
+                 else:
+                     final_description = f"Imported from {Path(excel_path).name}"
+                     self.logger.info(f"Using default description: '{final_description}'")
+
+                 if progress_callback:
+                     progress_callback(5, f"Creating network '{final_network_name}'...")
+
+                 # Create new network
+                 network_request = CreateNetworkRequest(
+                     name=final_network_name,
+                     description=final_description,
+                     time_resolution=network_config.get('time_resolution', "H"),
+                     start_time=network_config.get('start_time'),
+                     end_time=network_config.get('end_time')
+                 )
+
+                 # Validate that we have the required time information
+                 if not network_request.start_time or not network_request.end_time:
+                     missing_fields = []
+                     if not network_request.start_time:
+                         missing_fields.append("Time Start")
+                     if not network_request.end_time:
+                         missing_fields.append("Time End")
+
+                     self.logger.error(f"Missing required time information in Overview sheet: {missing_fields}")
+                     self.logger.error(f"Available overview data: {network_config}")
+                     raise ValueError(f"Excel file is missing required time information: {', '.join(missing_fields)}. "
+                                      f"Please ensure the Overview sheet contains 'Time Start' and 'Time End' fields.")
+
+                 self.logger.info(f"Creating network with: name='{network_request.name}', "
+                                  f"start_time='{network_request.start_time}', "
+                                  f"end_time='{network_request.end_time}', "
+                                  f"time_resolution='{network_request.time_resolution}'")
+
+                 network_id = create_network(conn, network_request)
+
+                 # Generate time periods for the network
+                 self._generate_time_periods(
+                     conn,
+                     network_id,
+                     network_request.start_time,
+                     network_request.end_time,
+                     network_request.time_resolution
+                 )
+
+                 # Verify time periods were created
+                 verification_periods = get_network_time_periods(conn, network_id)
+                 self.logger.info(f"Network {network_id} now has {len(verification_periods)} time periods")
+
+                 conn.commit()
+
+                 if progress_callback:
+                     progress_callback(5, f"Created network '{final_network_name}' (ID: {network_id})")
+             else:
+                 if progress_callback:
+                     progress_callback(3, f"Using existing network ID: {network_id}")
+
+                 # For existing networks, validate time axis compatibility
+                 if progress_callback:
+                     progress_callback(5, "Validating time axis compatibility...")
+
+                 # Read network configuration from Overview sheet to compare
+                 try:
+                     overview_df = pd.read_excel(excel_path, sheet_name='Overview')
+                     excel_time_config = self._read_overview_sheet(overview_df)
+                 except Exception as e:
+                     self.logger.warning(f"Could not read Overview sheet: {e}")
+                     self.logger.warning("Skipping time axis validation - assuming Excel is compatible")
+                     excel_time_config = {}
+
+                 # Validate time axis matches existing network
+                 self._validate_time_axis_compatibility(conn, network_id, excel_time_config)
+
+                 self.logger.info("Time axis validation passed - Excel matches existing network")
+
+             # Set import behavior based on whether this is a new or existing network.
+             # Note: network_id has already been assigned above for new networks, so
+             # branch on create_new_network rather than re-checking network_id is None
+             # (which would always be False here).
+             if create_new_network:
+                 # New network: Always add all components from Excel
+                 actual_update_existing = False  # No existing components to update
+                 actual_add_new = True  # Add everything from Excel
+                 self.logger.info("Import mode: NEW NETWORK - Adding all components from Excel")
+             else:
+                 # Existing network: Always update existing and add new (user's requirement)
+                 actual_update_existing = True  # Update components that exist
+                 actual_add_new = True  # Add components that don't exist
+                 self.logger.info("Import mode: EXISTING NETWORK - Update existing + add new components")
+
+             # Get master scenario if no scenario specified
+             if scenario_id is None:
+                 scenario_id = get_master_scenario_id(conn, network_id)
+                 if scenario_id is None:
+                     raise ValueError("No master scenario found for network")
+
+             if progress_callback:
+                 progress_callback(8, "Reading Excel file...")
+
+             # Read Excel file
+             excel_data = self._read_excel_file(excel_path)
+
+             if progress_callback:
+                 progress_callback(18, "Processing carriers...")
+
+             # Import carriers first
+             carriers_df = excel_data.get('Carriers', pd.DataFrame())
+             carriers_imported = self._import_carriers(conn, network_id, carriers_df)
+
+             if progress_callback:
+                 progress_callback(28, "Processing components...")
+
+             # Import components by type
+             component_types = ['Buses', 'Generators', 'Loads', 'Lines', 'Links', 'Storage Units', 'Stores', 'Constraints']
+             components_imported = {}
+
+             for sheet_name in component_types:
+                 if sheet_name in excel_data:
+                     comp_type = self._get_component_type_from_sheet(sheet_name)
+                     self.logger.info(f"Processing sheet '{sheet_name}' as component type '{comp_type}' with {len(excel_data[sheet_name])} rows")
+                     components_imported[comp_type] = self._import_components(
+                         conn, network_id, comp_type, excel_data[sheet_name],
+                         scenario_id, actual_update_existing, actual_add_new
+                     )
+
+             if progress_callback:
+                 progress_callback(78, "Processing timeseries data...")
+
+             # Import timeseries data
+             timeseries_imported = self._import_timeseries_data(
+                 conn, network_id, excel_data, scenario_id
+             )
+
+             if progress_callback:
+                 progress_callback(93, "Processing network configuration...")
+
+             # Import network configuration
+             network_config_df = excel_data.get('Network Config', pd.DataFrame())
+             config_imported = self._import_network_config(
+                 conn, network_id, network_config_df
+             )
+
+             conn.commit()
+
+             if progress_callback:
+                 progress_callback(100, "Excel import completed")
+
+             # Calculate statistics
+             stats = self._calculate_import_stats(
+                 carriers_imported, components_imported, timeseries_imported, config_imported
+             )
+             stats['network_id'] = network_id
+             stats['created_new_network'] = create_new_network
+
+             return {
+                 "success": True,
+                 "message": f"Network {'created' if create_new_network else 'updated'} from Excel: {excel_path}",
+                 "network_id": network_id,
+                 "stats": stats
+             }
+
+         except Exception as e:
+             self.logger.error(f"Excel import failed: {e}", exc_info=True)
+             if progress_callback:
+                 progress_callback(None, f"Import failed: {str(e)}")
+             raise
+         finally:
+             # Always close the connection, even on error
+             if conn is not None:
+                 try:
+                     conn.close()
+                 except Exception as e:
+                     self.logger.warning(f"Failed to close database connection: {e}")
+
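# Editorial sketch (annotation, not part of the release): how the entry point
# above might be called. The database path, workbook path, and callback below
# are hypothetical.
from pyconvexity.io.excel_importer import ExcelModelImporter

importer = ExcelModelImporter()
result = importer.import_model_from_excel(
    db_path="model.db",           # hypothetical SQLite database
    excel_path="network.xlsx",    # hypothetical workbook
    progress_callback=lambda pct, msg: print(pct, msg),
)
print(result["network_id"], result["stats"]["total_created"])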
+     def _generate_time_periods(self, conn, network_id: int, start_time: str, end_time: str, time_resolution: str) -> None:
+         """Generate and insert time periods for the network"""
+         try:
+             # Parse start and end times
+             start_dt = pd.to_datetime(start_time)
+             end_dt = pd.to_datetime(end_time)
+
+             # Convert time_resolution to pandas frequency string
+             if time_resolution == 'H':
+                 freq_str = 'H'
+             elif time_resolution == 'D':
+                 freq_str = 'D'
+             elif time_resolution.endswith('H'):
+                 hours = int(time_resolution[:-1])
+                 freq_str = f'{hours}H'
+             elif time_resolution.endswith('min'):
+                 minutes = int(time_resolution[:-3])
+                 freq_str = f'{minutes}min'
+             else:
+                 self.logger.warning(f"Unknown time resolution '{time_resolution}', defaulting to hourly")
+                 freq_str = 'H'
+
+             # Generate timestamps
+             timestamps = pd.date_range(start=start_dt, end=end_dt, freq=freq_str, inclusive='both')
+
+             self.logger.info(f"Generating {len(timestamps)} time periods from {start_time} to {end_time} at {time_resolution} resolution")
+
+             # Insert time periods into database
+             for period_index, timestamp in enumerate(timestamps):
+                 timestamp_str = timestamp.strftime('%Y-%m-%d %H:%M:%S')
+
+                 conn.execute("""
+                     INSERT INTO network_time_periods (network_id, timestamp, period_index)
+                     VALUES (?, ?, ?)
+                 """, (network_id, timestamp_str, period_index))
+
+             self.logger.info(f"Successfully created {len(timestamps)} time periods for network {network_id}")
+
+         except Exception as e:
+             self.logger.error(f"Failed to generate time periods: {e}")
+             raise
+
+     def _read_overview_sheet(self, overview_df: pd.DataFrame) -> Dict[str, Any]:
+         """Extract network configuration from Overview sheet"""
+         config = {}
+
+         if overview_df.empty:
+             self.logger.warning("Overview sheet is empty")
+             return config
+
+         self.logger.info(f"Overview sheet has {len(overview_df)} rows and columns: {list(overview_df.columns)}")
+         self.logger.info(f"First few rows of overview sheet:\n{overview_df.head()}")
+
+         # Convert to a simple key-value lookup
+         overview_data = {}
+
+         # Handle both old single-column format and new two-column format
+         if 'Property' in overview_df.columns and 'Value' in overview_df.columns:
+             # New two-column format
+             for _, row in overview_df.iterrows():
+                 key = str(row['Property']).strip() if pd.notna(row['Property']) else ""
+                 value = str(row['Value']).strip() if pd.notna(row['Value']) else ""
+                 if key and value and value != 'nan':
+                     overview_data[key] = value
+                     self.logger.debug(f"Parsed overview data: '{key}' = '{value}'")
+         elif len(overview_df.columns) >= 2:
+             # Old format - try to read from first two columns
+             for i, row in overview_df.iterrows():
+                 key = str(row.iloc[0]).strip() if pd.notna(row.iloc[0]) else ""
+                 value = str(row.iloc[1]).strip() if pd.notna(row.iloc[1]) else ""
+                 if key and value and value != 'nan':
+                     overview_data[key] = value
+                     self.logger.debug(f"Parsed overview data: '{key}' = '{value}'")
+         else:
+             self.logger.error(f"Overview sheet format not recognized. Columns: {list(overview_df.columns)}")
+             return config
+
+         self.logger.info(f"Parsed overview data: {overview_data}")
+
+         # Extract network configuration
+         if 'Name' in overview_data:
+             config['name'] = overview_data['Name']
+         if 'Description' in overview_data:
+             config['description'] = overview_data['Description']
+         if 'Time Start' in overview_data:
+             config['start_time'] = overview_data['Time Start']
+             self.logger.info(f"Found Time Start: {config['start_time']}")
+         if 'Time End' in overview_data:
+             config['end_time'] = overview_data['Time End']
+             self.logger.info(f"Found Time End: {config['end_time']}")
+         if 'Time Interval' in overview_data:
+             # Convert time interval format to our format
+             interval = overview_data['Time Interval'].strip()
+             self.logger.info(f"Found Time Interval: '{interval}'")
+
+             if interval == 'P1D':
+                 config['time_resolution'] = 'D'  # Daily
+             elif interval in ('PT1H', 'h', 'H'):
+                 config['time_resolution'] = 'H'  # Hourly
+             elif interval.startswith('PT') and interval.endswith('H'):
+                 # Extract hours (e.g., 'PT3H' -> '3H')
+                 hours = interval[2:-1]
+                 config['time_resolution'] = f'{hours}H'
+             elif interval.endswith('h') or interval.endswith('H'):
+                 # Handle simple formats like '2h', '3H'
+                 if interval[:-1].isdigit():
+                     hours = interval[:-1]
+                     config['time_resolution'] = f'{hours}H'
+                 else:
+                     config['time_resolution'] = 'H'  # Default to hourly
+             else:
+                 self.logger.warning(f"Unknown time interval format '{interval}', defaulting to hourly")
+                 config['time_resolution'] = 'H'  # Default to hourly
+
+         self.logger.info(f"Final network config from Overview sheet: {config}")
+         return config
+
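# Editorial sketch: the two-column Overview layout that _read_overview_sheet()
# parses. The property names come from the parser above; the concrete values
# are hypothetical.
import pandas as pd

overview_df = pd.DataFrame({
    "Property": ["Name", "Description", "Time Start", "Time End", "Time Interval"],
    "Value": ["Toy Grid", "Example system", "2030-01-01 00:00:00", "2030-12-31 23:00:00", "PT1H"],
})
# _read_overview_sheet(overview_df) would then yield:
# {'name': 'Toy Grid', 'description': 'Example system',
#  'start_time': '2030-01-01 00:00:00', 'end_time': '2030-12-31 23:00:00',
#  'time_resolution': 'H'}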
+     def _read_excel_file(self, excel_path: str) -> Dict[str, pd.DataFrame]:
+         """Read Excel file and return dictionary of DataFrames by sheet name"""
+         excel_data = {}
+
+         try:
+             # Read all sheets
+             excel_file = pd.ExcelFile(excel_path)
+
+             self.logger.info(f"Excel file contains sheets: {excel_file.sheet_names}")
+
+             for sheet_name in excel_file.sheet_names:
+                 if sheet_name == 'Overview':
+                     continue  # Skip overview sheet
+
+                 df = pd.read_excel(excel_path, sheet_name=sheet_name)
+                 if not df.empty:
+                     excel_data[sheet_name] = df
+                     self.logger.info(f"Loaded sheet '{sheet_name}' with {len(df)} rows")
+                 else:
+                     self.logger.info(f"Skipped empty sheet '{sheet_name}'")
+
+         except Exception as e:
+             raise ValueError(f"Failed to read Excel file: {e}")
+
+         return excel_data
+
+     def _get_component_type_from_sheet(self, sheet_name: str) -> str:
+         """Convert sheet name to component type"""
+         mapping = {
+             'Buses': 'BUS',
+             'Generators': 'GENERATOR',
+             'Loads': 'LOAD',
+             'Lines': 'LINE',
+             'Links': 'LINK',
+             'Storage Units': 'STORAGE_UNIT',
+             'Stores': 'STORE',
+             'Constraints': 'CONSTRAINT'
+         }
+         return mapping.get(sheet_name, sheet_name.upper())
+
+     def _import_carriers(self, conn, network_id: int, carriers_df: pd.DataFrame) -> Dict[str, Any]:
+         """Import carriers from Excel data"""
+         imported = {'created': 0, 'updated': 0, 'errors': 0}
+
+         if carriers_df.empty:
+             return imported
+
+         # Get existing carriers
+         existing_carriers = list_carriers(conn, network_id)
+         existing_names = {carrier['name'] for carrier in existing_carriers}
+
+         for _, row in carriers_df.iterrows():
+             try:
+                 carrier_name = str(row.get('name', '')).strip()
+                 if not carrier_name:
+                     continue
+
+                 # Check if carrier exists
+                 if carrier_name in existing_names:
+                     imported['updated'] += 1
+                 else:
+                     # Create new carrier
+                     create_carrier(
+                         conn,
+                         network_id,
+                         carrier_name,
+                         co2_emissions=row.get('co2_emissions', 0.0),
+                         color=row.get('color', '#ffffff'),
+                         nice_name=row.get('nice_name', carrier_name)
+                     )
+                     imported['created'] += 1
+
+             except Exception as e:
+                 self.logger.error(f"Failed to import carrier {carrier_name}: {e}")
+                 imported['errors'] += 1
+
+         return imported
+
473
+ def _import_components(
474
+ self,
475
+ conn,
476
+ network_id: int,
477
+ component_type: str,
478
+ components_df: pd.DataFrame,
479
+ scenario_id: int,
480
+ update_existing: bool,
481
+ add_new: bool
482
+ ) -> Dict[str, Any]:
483
+ """Import components of a specific type"""
484
+ imported = {'created': 0, 'updated': 0, 'errors': 0}
485
+
486
+ if components_df.empty:
487
+ return imported
488
+
489
+ # Get existing components of this type
490
+ existing_components = list_components_by_type(conn, network_id, component_type)
491
+ existing_names = {comp.name for comp in existing_components}
492
+
493
+ # Get carriers and buses for foreign key resolution
494
+ carriers = list_carriers(conn, network_id)
495
+ buses = list_components_by_type(conn, network_id, 'BUS')
496
+
497
+ carrier_name_to_id = {carrier['name']: carrier['id'] for carrier in carriers}
498
+ bus_name_to_id = {bus.name: bus.id for bus in buses}
499
+
500
+
501
+ for _, row in components_df.iterrows():
502
+ try:
503
+ component_name = str(row.get('name', '')).strip()
504
+ if not component_name:
505
+ continue
506
+
507
+ # Debug logging for CONSTRAINT components (reduced verbosity)
508
+ if component_type == 'CONSTRAINT':
509
+ self.logger.debug(f"Processing CONSTRAINT '{component_name}'")
510
+
511
+ # Resolve foreign keys
512
+ carrier_id = None
513
+ # CONSTRAINT components don't have carriers
514
+ if row.get('carrier') and component_type != 'CONSTRAINT':
515
+ carrier_name = str(row['carrier']).strip()
516
+ carrier_id = carrier_name_to_id.get(carrier_name)
517
+ self.logger.info(f"Component '{component_name}' has carrier '{carrier_name}', resolved to carrier_id: {carrier_id}")
518
+ if carrier_id is None:
519
+ self.logger.warning(f"Carrier '{carrier_name}' not found for component '{component_name}'. Available carriers: {list(carrier_name_to_id.keys())}")
520
+ elif component_type == 'CONSTRAINT':
521
+ self.logger.debug(f"CONSTRAINT '{component_name}' - skipping carrier resolution")
522
+
523
+ bus_id = None
524
+ # CONSTRAINT components don't connect to buses
525
+ if row.get('bus') and component_type != 'CONSTRAINT':
526
+ bus_name = str(row['bus']).strip()
527
+ bus_id = bus_name_to_id.get(bus_name)
528
+ if bus_id is None:
529
+ self.logger.warning(f"Bus '{bus_name}' not found for component '{component_name}'. Available buses: {list(bus_name_to_id.keys())}")
530
+
531
+ bus0_id = None
532
+ if row.get('bus0') and component_type != 'CONSTRAINT':
533
+ bus0_name = str(row['bus0']).strip()
534
+ bus0_id = bus_name_to_id.get(bus0_name)
535
+ if bus0_id is None:
536
+ self.logger.warning(f"Bus0 '{bus0_name}' not found for component '{component_name}'. Available buses: {list(bus_name_to_id.keys())}")
537
+
538
+ bus1_id = None
539
+ if row.get('bus1') and component_type != 'CONSTRAINT':
540
+ bus1_name = str(row['bus1']).strip()
541
+ bus1_id = bus_name_to_id.get(bus1_name)
542
+ if bus1_id is None:
543
+ self.logger.warning(f"Bus1 '{bus1_name}' not found for component '{component_name}'. Available buses: {list(bus_name_to_id.keys())}")
544
+
545
+ # Check if component exists
546
+ if component_name in existing_names and update_existing:
547
+ # Update existing component
548
+ existing_comp = next(c for c in existing_components if c.name == component_name)
549
+
550
+ try:
551
+ # Update component using the proper function
552
+ # CONSTRAINT components must have carrier_id=None per database schema
553
+ final_carrier_id = None if component_type == 'CONSTRAINT' else carrier_id
554
+ update_component(
555
+ conn,
556
+ existing_comp.id,
557
+ carrier_id=final_carrier_id,
558
+ bus_id=bus_id,
559
+ bus0_id=bus0_id,
560
+ bus1_id=bus1_id,
561
+ latitude=row.get('latitude'),
562
+ longitude=row.get('longitude')
563
+ )
564
+
565
+ # Update attributes
566
+ self._update_component_attributes(conn, existing_comp.id, row, scenario_id)
567
+ imported['updated'] += 1
568
+
569
+ except Exception as e:
570
+ self.logger.error(f"Failed to update component '{component_name}': {e}")
571
+ imported['errors'] += 1
572
+ continue
573
+
574
+ elif component_name not in existing_names and add_new:
575
+ # Create new component using the proper function
576
+ # CONSTRAINT components must have carrier_id=None per database schema
577
+ final_carrier_id = None if component_type == 'CONSTRAINT' else carrier_id
578
+
579
+ # Handle latitude/longitude - CONSTRAINT components don't have location
580
+ if component_type == 'CONSTRAINT':
581
+ lat_val = None
582
+ lon_val = None
583
+ self.logger.debug(f"CONSTRAINT '{component_name}' - setting latitude/longitude to None")
584
+ else:
585
+ # Clean empty strings for other component types
586
+ lat_val = row.get('latitude')
587
+ lon_val = row.get('longitude')
588
+ if lat_val == '' or (isinstance(lat_val, str) and lat_val.strip() == ''):
589
+ lat_val = None
590
+ if lon_val == '' or (isinstance(lon_val, str) and lon_val.strip() == ''):
591
+ lon_val = None
592
+
593
+ component_id = create_component(
594
+ conn,
595
+ network_id,
596
+ component_type,
597
+ component_name,
598
+ longitude=lon_val,
599
+ latitude=lat_val,
600
+ carrier_id=final_carrier_id,
601
+ bus_id=bus_id,
602
+ bus0_id=bus0_id,
603
+ bus1_id=bus1_id
604
+ )
605
+
606
+ # Set attributes
607
+ self._set_component_attributes(conn, component_id, row, scenario_id)
608
+ imported['created'] += 1
609
+
610
+ except Exception as e:
611
+ self.logger.error(f"Failed to import component '{component_name}' of type '{component_type}': {e}")
612
+ self.logger.error(f"Component data: name='{component_name}', carrier_id={carrier_id}, bus_id={bus_id}, bus0_id={bus0_id}, bus1_id={bus1_id}")
613
+ imported['errors'] += 1
614
+
615
+ return imported
616
+
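# Editorial sketch: one row of a component sheet as _import_components() reads
# it. Column names mirror the fields handled above; the generator, carrier, and
# bus names are hypothetical, and any extra columns are treated as attributes
# checked via get_validation_rule().
import pandas as pd

generators_df = pd.DataFrame([{
    "name": "gas_plant_1",   # component name (required)
    "carrier": "gas",        # resolved to carrier_id by name
    "bus": "bus_north",      # resolved to bus_id by name
    "latitude": 52.5,
    "longitude": 13.4,
    "p_nom": 400.0,          # attribute column (hypothetical)
    "marginal_cost": 45.0,   # attribute column (hypothetical)
}])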
617
+ def _update_component_attributes(self, conn, component_id: int, row: pd.Series, scenario_id: int):
618
+ """Update attributes for an existing component"""
619
+ # Get validation rules for this component type
620
+ cursor = conn.execute(
621
+ "SELECT component_type FROM components WHERE id = ?",
622
+ (component_id,)
623
+ )
624
+ component_type = cursor.fetchone()[0]
625
+
626
+ # Process each column as potential attribute
627
+ for column, value in row.items():
628
+ if column in ['name', 'carrier', 'bus', 'bus0', 'bus1', 'latitude', 'longitude', 'type']:
629
+ continue # Skip basic fields
630
+
631
+ if value == '[timeseries]':
632
+ continue # Skip timeseries markers
633
+
634
+ # Check if this is a valid attribute
635
+ validation_rule = get_validation_rule(conn, component_type, column)
636
+ if validation_rule:
637
+ # Handle blank cells (empty strings or NaN) - these should unset the attribute
638
+ if pd.isna(value) or value == '':
639
+ try:
640
+ delete_attribute(conn, component_id, column, scenario_id)
641
+ self.logger.debug(f"Unset attribute '{column}' for component {component_id} due to blank cell")
642
+ except Exception as e:
643
+ # Attribute might not exist, which is fine
644
+ self.logger.debug(f"Could not unset attribute '{column}' for component {component_id}: {e}")
645
+ else:
646
+ # Set the attribute with the provided value
647
+ self._set_single_attribute(conn, component_id, column, value, validation_rule, scenario_id)
648
+
+     def _set_component_attributes(self, conn, component_id: int, row: pd.Series, scenario_id: int):
+         """Set attributes for a new component"""
+         # Get validation rules for this component type
+         cursor = conn.execute(
+             "SELECT component_type FROM components WHERE id = ?",
+             (component_id,)
+         )
+         component_type = cursor.fetchone()[0]
+
+         # Process each column as potential attribute
+         for column, value in row.items():
+             if column in ['name', 'carrier', 'bus', 'bus0', 'bus1', 'latitude', 'longitude', 'type']:
+                 continue  # Skip basic fields
+
+             if value == '[timeseries]':
+                 continue  # Skip timeseries markers
+
+             # Check if this is a valid attribute
+             validation_rule = get_validation_rule(conn, component_type, column)
+             if validation_rule:
+                 # For new components, only set attributes that have actual values.
+                 # Blank cells (empty strings or NaN) are left unset (the default state).
+                 if not (pd.isna(value) or value == ''):
+                     # Set the attribute with the provided value
+                     self._set_single_attribute(conn, component_id, column, value, validation_rule, scenario_id)
+
+     def _set_single_attribute(self, conn, component_id: int, attr_name: str, value: Any, validation_rule: Dict, scenario_id: int):
+         """Set a single attribute with proper type conversion"""
+         data_type = validation_rule.data_type if hasattr(validation_rule, 'data_type') else validation_rule.get('data_type', 'string')
+
+         try:
+             if data_type == 'float':
+                 static_value = StaticValue(float(value))
+                 set_static_attribute(conn, component_id, attr_name, static_value, scenario_id)
+             elif data_type == 'int':
+                 static_value = StaticValue(int(value))
+                 set_static_attribute(conn, component_id, attr_name, static_value, scenario_id)
+             elif data_type == 'boolean':
+                 bool_value = str(value).lower() in ['true', '1', 'yes']
+                 static_value = StaticValue(bool_value)
+                 set_static_attribute(conn, component_id, attr_name, static_value, scenario_id)
+             else:  # string
+                 static_value = StaticValue(str(value))
+                 set_static_attribute(conn, component_id, attr_name, static_value, scenario_id)
+         except (AttributeNotFound, ValidationError):
+             # Skip missing attributes or validation errors silently (same as PyPSA solver)
+             pass
+         except Exception as e:
+             self.logger.warning(f"Failed to set attribute {attr_name} for component {component_id}: {e}")
+
+     def _import_timeseries_data(self, conn, network_id: int, excel_data: Dict, scenario_id: int) -> Dict[str, Any]:
+         """Import timeseries data from Excel sheets"""
+         imported = {'attributes': 0, 'errors': 0}
+
+         # Get network time periods for timestamp mapping
+         network_time_periods = get_network_time_periods(conn, network_id)
+         time_period_map = {period.formatted_time: period for period in network_time_periods}
+
+         expected_length = len(network_time_periods)
+         self.logger.info(f"TIMESERIES DEBUG: Network has {expected_length} time periods for timeseries import")
+         if network_time_periods:
+             self.logger.info(f"TIMESERIES DEBUG: Time period range: {network_time_periods[0].formatted_time} to {network_time_periods[-1].formatted_time}")
+         else:
+             self.logger.error("TIMESERIES DEBUG: NO TIME PERIODS FOUND! Timeseries import will fail.")
+             return imported
+
+         # Look for timeseries sheets
+         for sheet_name, df in excel_data.items():
+             if 'Timeseries' in sheet_name and not df.empty:
+                 self.logger.info(f"TIMESERIES DEBUG: Processing sheet '{sheet_name}' with {len(df)} rows")
+                 component_type = self._get_component_type_from_sheet(sheet_name.replace(' Timeseries', ''))
+
+                 # Get timestamps (df.get() would return a plain list with no
+                 # .empty attribute when the column is missing, so check columns)
+                 if 'timestamp' not in df.columns:
+                     self.logger.warning(f"TIMESERIES DEBUG: No timestamp column found in {sheet_name}")
+                     continue
+                 timestamps = df['timestamp']
+
+                 excel_ts_length = len(timestamps)
+                 self.logger.info(f"TIMESERIES DEBUG: Sheet '{sheet_name}' has {excel_ts_length} timestamps (expected: {expected_length})")
+                 if excel_ts_length != expected_length:
+                     self.logger.warning(f"TIMESERIES DEBUG: LENGTH MISMATCH in sheet '{sheet_name}': Excel has {excel_ts_length}, network expects {expected_length} (difference: {excel_ts_length - expected_length})")
+
+                 # Log timestamp range for debugging
+                 if len(timestamps) > 0:
+                     first_ts = str(timestamps.iloc[0]).strip()
+                     last_ts = str(timestamps.iloc[-1]).strip()
+                     self.logger.info(f"TIMESERIES DEBUG: Sheet timestamp range: '{first_ts}' to '{last_ts}'")
+
+                 # Fetch the components of this type once per sheet; the column
+                 # parsing below matches column prefixes against these names
+                 components = list_components_by_type(conn, network_id, component_type)
+                 component_names = [c.name for c in components]
+
+                 # Process each column (except timestamp)
+                 for column in df.columns:
+                     if column == 'timestamp':
+                         continue
+
+                     # Parse component name and attribute from column name
+                     # Format: "Component Name_attribute_name"
+                     # We need to find the underscore that separates component name from attribute
+                     if '_' in column:
+                         # Try to find the component name by matching against known components
+                         component_name = None
+                         attr_name = None
+
+                         for comp_name in component_names:
+                             # Check if column starts with component name + underscore
+                             prefix = f"{comp_name}_"
+                             if column.startswith(prefix):
+                                 component_name = comp_name
+                                 attr_name = column[len(prefix):]
+                                 break
+
+                         if component_name and attr_name:
+                             # Find component by name
+                             component = next((c for c in components if c.name == component_name), None)
+
+                             if component:
+                                 # Create timeseries data
+                                 timeseries_data = []
+                                 matched_timestamps = 0
+                                 fallback_timestamps = 0
+                                 skipped_timestamps = 0
+                                 filled_missing_values = 0
+
+                                 for i, (timestamp, value) in enumerate(zip(timestamps, df[column])):
+                                     # Convert timestamp string to match network time periods
+                                     timestamp_str = str(timestamp).strip()
+
+                                     # Handle missing values - use 0.0 as default
+                                     if pd.isna(value):
+                                         actual_value = 0.0
+                                         filled_missing_values += 1
+                                         self.logger.debug(f"Using default value 0.0 for missing data at timestamp {timestamp_str} in {component_name}.{attr_name}")
+                                     else:
+                                         try:
+                                             actual_value = float(value)
+                                         except (ValueError, TypeError):
+                                             actual_value = 0.0
+                                             filled_missing_values += 1
+                                             self.logger.debug(f"Using default value 0.0 for invalid data '{value}' at timestamp {timestamp_str} in {component_name}.{attr_name}")
+
+                                     # Try to find matching time period
+                                     if timestamp_str in time_period_map:
+                                         period = time_period_map[timestamp_str]
+                                         timeseries_point = TimeseriesPoint(
+                                             timestamp=period.timestamp,
+                                             value=actual_value,
+                                             period_index=period.period_index
+                                         )
+                                         matched_timestamps += 1
+                                     else:
+                                         # Fallback: try to parse timestamp and use index
+                                         try:
+                                             parsed_timestamp = pd.to_datetime(timestamp_str)
+                                             timeseries_point = TimeseriesPoint(
+                                                 timestamp=int(parsed_timestamp.timestamp()),
+                                                 value=actual_value,
+                                                 period_index=i
+                                             )
+                                             fallback_timestamps += 1
+                                         except Exception as e:
+                                             skipped_timestamps += 1
+                                             self.logger.warning(f"Failed to parse timestamp '{timestamp_str}' in {component_name}.{attr_name}: {e}")
+                                             continue
+
+                                     timeseries_data.append(timeseries_point)
+
+                                 final_ts_length = len(timeseries_data)
+                                 self.logger.info(f"TIMESERIES DEBUG: Component '{component_name}.{attr_name}': "
+                                                  f"Excel rows={excel_ts_length}, "
+                                                  f"Matched={matched_timestamps}, "
+                                                  f"Fallback={fallback_timestamps}, "
+                                                  f"Skipped={skipped_timestamps}, "
+                                                  f"Filled missing={filled_missing_values}, "
+                                                  f"Final length={final_ts_length}")
+
+                                 if filled_missing_values > 0:
+                                     self.logger.warning(f"TIMESERIES DEBUG: Filled {filled_missing_values} missing/invalid values with 0.0 for '{component_name}.{attr_name}'")
+
+                                 if final_ts_length != expected_length:
+                                     self.logger.warning(f"TIMESERIES DEBUG: FINAL LENGTH MISMATCH for '{component_name}.{attr_name}': "
+                                                         f"Expected {expected_length}, got {final_ts_length} (difference: {final_ts_length - expected_length})")
+
+                                 if timeseries_data:
+                                     try:
+                                         set_timeseries_attribute(
+                                             conn, component.id, attr_name, timeseries_data, scenario_id
+                                         )
+                                         imported['attributes'] += 1
+                                         self.logger.info(f"TIMESERIES DEBUG: Successfully imported {final_ts_length} points for '{component_name}.{attr_name}'")
+                                     except Exception as e:
+                                         self.logger.error(f"TIMESERIES DEBUG: Failed to set timeseries attribute {attr_name} for {component_name}: {e}")
+                                         imported['errors'] += 1
+                                 else:
+                                     self.logger.warning(f"TIMESERIES DEBUG: No valid timeseries data found for {component_name}.{attr_name}")
+                             else:
+                                 self.logger.warning(f"TIMESERIES DEBUG: Component '{component_name}' not found for timeseries import")
+                         else:
+                             self.logger.warning(f"TIMESERIES DEBUG: Could not parse column '{column}' into component and attribute names")
+                     else:
+                         self.logger.warning(f"TIMESERIES DEBUG: Column '{column}' does not contain underscore separator")
+
+         return imported
+
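# Editorial sketch: a "<Type> Timeseries" sheet as parsed above. Each data
# column is named "<component name>_<attribute>", and the prefix is matched
# against the components of that sheet's type. Names and values here are
# hypothetical.
import pandas as pd

gen_ts_df = pd.DataFrame({
    "timestamp": ["2030-01-01 00:00:00", "2030-01-01 01:00:00"],
    "wind_farm_1_p_max_pu": [0.62, 0.58],  # component "wind_farm_1", attribute "p_max_pu"
})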
+     def _import_network_config(self, conn, network_id: int, config_df: pd.DataFrame) -> Dict[str, Any]:
+         """Import network configuration from Excel"""
+         imported = {'parameters': 0, 'errors': 0}
+
+         # Fall back to defaults when the sheet is missing (not a DataFrame) or empty
+         if not isinstance(config_df, pd.DataFrame) or config_df.empty:
+             self.logger.info("No Network Config sheet found or sheet is empty, using default configuration")
+             # Set default network configuration
+             default_config = {
+                 'unmet_load_active': True,
+                 'discount_rate': 0.01,
+                 'solver_name': 'highs',
+                 'currency': 'USD'
+             }
+
+             for param_name, param_value in default_config.items():
+                 try:
+                     if isinstance(param_value, bool):
+                         param_type = 'boolean'
+                     elif isinstance(param_value, float):
+                         param_type = 'real'
+                     elif isinstance(param_value, int):
+                         param_type = 'integer'
+                     else:
+                         param_type = 'string'
+
+                     set_network_config(
+                         conn,
+                         network_id,
+                         param_name,
+                         param_value,
+                         param_type,
+                         scenario_id=None,  # Network default
+                         description=f"Default {param_name} setting"
+                     )
+                     imported['parameters'] += 1
+                     self.logger.info(f"Set default network config: {param_name} = {param_value}")
+
+                 except Exception as e:
+                     self.logger.error(f"Failed to set default network config parameter {param_name}: {e}")
+                     imported['errors'] += 1
+
+             return imported
+
+         for _, row in config_df.iterrows():
+             try:
+                 param_name = str(row.get('Parameter', '')).strip()
+                 param_value = row.get('Value', '')
+                 param_type = str(row.get('Type', 'string')).strip()
+                 param_description = str(row.get('Description', '')).strip()
+
+                 if not param_name:
+                     continue
+
+                 # Validate parameter type and map Python types to database types
+                 valid_types = {'boolean', 'real', 'integer', 'string', 'json'}
+
+                 # Map Python type names to database type names
+                 type_mapping = {
+                     'bool': 'boolean',
+                     'float': 'real',
+                     'int': 'integer',
+                     'str': 'string'
+                 }
+
+                 # Convert Python type name to database type name if needed
+                 if param_type in type_mapping:
+                     param_type = type_mapping[param_type]
+
+                 if param_type not in valid_types:
+                     self.logger.error(f"Invalid parameter type '{param_type}' for parameter '{param_name}'. Must be one of {valid_types}")
+                     imported['errors'] += 1
+                     continue
+
+                 # Convert value based on type
+                 try:
+                     if param_type == 'boolean':
+                         # Handle various boolean representations
+                         if isinstance(param_value, bool):
+                             converted_value = param_value
+                         elif isinstance(param_value, str):
+                             converted_value = param_value.lower() in {'true', '1', 'yes', 'on'}
+                         elif isinstance(param_value, (int, float)):
+                             converted_value = bool(param_value)
+                         else:
+                             converted_value = False
+                     elif param_type == 'real':
+                         converted_value = float(param_value)
+                     elif param_type == 'integer':
+                         converted_value = int(float(param_value))  # Handle float strings like "1.0"
+                     elif param_type == 'json':
+                         if isinstance(param_value, str):
+                             converted_value = json.loads(param_value)
+                         else:
+                             converted_value = param_value
+                     else:  # string
+                         converted_value = str(param_value)
+                 except (ValueError, TypeError, json.JSONDecodeError) as e:
+                     self.logger.error(f"Failed to convert parameter '{param_name}' value '{param_value}' to type '{param_type}': {e}")
+                     imported['errors'] += 1
+                     continue
+
+                 # Use the proper set_network_config function from pyconvexity
+                 set_network_config(
+                     conn,
+                     network_id,
+                     param_name,
+                     converted_value,
+                     param_type,
+                     scenario_id=None,  # Network default
+                     description=param_description if param_description else None
+                 )
+                 imported['parameters'] += 1
+
+             except Exception as e:
+                 self.logger.error(f"Failed to import network config parameter {param_name}: {e}")
+                 imported['errors'] += 1
+
+         return imported
+
+     def _validate_time_axis_compatibility(self, conn, network_id: int, excel_time_config: Dict[str, str]) -> None:
+         """Validate that Excel time axis matches existing network time axis"""
+         try:
+             # Get existing network info
+             existing_network = get_network_info(conn, network_id)
+
+             # Compare time axis parameters
+             excel_start = excel_time_config.get('start_time', '').strip()
+             excel_end = excel_time_config.get('end_time', '').strip()
+             excel_interval = excel_time_config.get('time_resolution', '').strip()
+
+             existing_start = existing_network.get('time_start', '').strip()
+             existing_end = existing_network.get('time_end', '').strip()
+             existing_interval = existing_network.get('time_interval', '').strip()
+
+             self.logger.info("TIME AXIS DEBUG: Validating time axis compatibility")
+             self.logger.info(f"TIME AXIS DEBUG: Excel: {excel_start} to {excel_end}, interval: {excel_interval}")
+             self.logger.info(f"TIME AXIS DEBUG: Network: {existing_start} to {existing_end}, interval: {existing_interval}")
+
+             # Skip validation if Excel doesn't have time information (allow partial updates)
+             if not excel_start or not excel_end or not excel_interval:
+                 self.logger.warning("TIME AXIS DEBUG: Excel Overview sheet missing time axis information - skipping validation")
+                 self.logger.warning("TIME AXIS DEBUG: Assuming Excel data is compatible with existing network time axis")
+                 return
+
+             # Normalize case and format for time interval comparison
+             excel_interval_normalized = self._normalize_time_interval(excel_interval)
+             existing_interval_normalized = self._normalize_time_interval(existing_interval)
+
+             self.logger.info(f"TIME AXIS DEBUG: Normalized intervals - Excel: '{excel_interval_normalized}', Network: '{existing_interval_normalized}'")
+
+             # Check if they match
+             if (excel_start != existing_start or
+                     excel_end != existing_end or
+                     excel_interval_normalized != existing_interval_normalized):
+
+                 self.logger.error("TIME AXIS DEBUG: MISMATCH DETECTED!")
+                 self.logger.error(f"TIME AXIS DEBUG: Start times - Excel: '{excel_start}', Network: '{existing_start}' (match: {excel_start == existing_start})")
+                 self.logger.error(f"TIME AXIS DEBUG: End times - Excel: '{excel_end}', Network: '{existing_end}' (match: {excel_end == existing_end})")
+                 self.logger.error(f"TIME AXIS DEBUG: Intervals - Excel: '{excel_interval_normalized}', Network: '{existing_interval_normalized}' (match: {excel_interval_normalized == existing_interval_normalized})")
+
+                 raise ValueError(
+                     f"Time axis mismatch! "
+                     f"Excel has {excel_start} to {excel_end} ({excel_interval}), "
+                     f"but existing network has {existing_start} to {existing_end} ({existing_interval}). "
+                     f"Time axis must match exactly when importing into an existing network."
+                 )
+             else:
+                 self.logger.info("TIME AXIS DEBUG: Time axis validation PASSED - Excel and network time axes match")
+
+         except Exception as e:
+             if "Time axis mismatch" in str(e):
+                 raise  # Re-raise validation errors
+             else:
+                 # Log other errors but don't fail the import
+                 self.logger.error(f"Error during time axis validation: {e}")
+                 self.logger.warning("Continuing with import despite time axis validation error")
+
+     def _normalize_time_interval(self, interval: str) -> str:
+         """Normalize time interval format for comparison"""
+         interval = interval.strip().upper()
+
+         # Handle common variations
+         if interval in ['H', '1H', 'PT1H', 'HOURLY']:
+             return 'H'
+         elif interval in ['D', '1D', 'P1D', 'DAILY']:
+             return 'D'
+         elif interval.endswith('H') and interval[:-1].isdigit():
+             return interval  # Already normalized (e.g., '2H', '3H')
+         elif interval.startswith('PT') and interval.endswith('H'):
+             # Convert PT3H -> 3H
+             hours = interval[2:-1]
+             return f'{hours}H'
+
+         return interval
+
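# Editorial sketch: spellings the normalization above maps to one canonical
# token (each line traced through the branches of _normalize_time_interval):
#
#     'PT1H', 'h', '1H', 'HOURLY' -> 'H'
#     'P1D', '1D', 'DAILY'        -> 'D'
#     'PT3H', '3h'                -> '3H'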
+     def _calculate_import_stats(self, carriers_imported: Dict, components_imported: Dict,
+                                 timeseries_imported: Dict, config_imported: Dict) -> Dict[str, Any]:
+         """Calculate import statistics"""
+         total_created = carriers_imported['created'] + sum(
+             comp['created'] for comp in components_imported.values()
+         )
+         total_updated = carriers_imported['updated'] + sum(
+             comp['updated'] for comp in components_imported.values()
+         )
+         total_errors = carriers_imported['errors'] + sum(
+             comp['errors'] for comp in components_imported.values()
+         ) + timeseries_imported['errors'] + config_imported['errors']
+
+         return {
+             'total_created': total_created,
+             'total_updated': total_updated,
+             'total_errors': total_errors,
+             'carriers': carriers_imported,
+             'components': components_imported,
+             'timeseries': timeseries_imported,
+             'network_config': config_imported
+         }