pyconvexity 0.1.2__py3-none-any.whl → 0.1.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (43)
  1. pyconvexity/__init__.py +57 -8
  2. pyconvexity/_version.py +1 -2
  3. pyconvexity/core/__init__.py +0 -2
  4. pyconvexity/core/database.py +158 -0
  5. pyconvexity/core/types.py +105 -18
  6. pyconvexity/data/README.md +101 -0
  7. pyconvexity/data/__init__.py +18 -0
  8. pyconvexity/data/__pycache__/__init__.cpython-313.pyc +0 -0
  9. pyconvexity/data/loaders/__init__.py +3 -0
  10. pyconvexity/data/loaders/__pycache__/__init__.cpython-313.pyc +0 -0
  11. pyconvexity/data/loaders/__pycache__/cache.cpython-313.pyc +0 -0
  12. pyconvexity/data/loaders/cache.py +212 -0
  13. pyconvexity/data/schema/01_core_schema.sql +12 -12
  14. pyconvexity/data/schema/02_data_metadata.sql +17 -321
  15. pyconvexity/data/sources/__init__.py +5 -0
  16. pyconvexity/data/sources/__pycache__/__init__.cpython-313.pyc +0 -0
  17. pyconvexity/data/sources/__pycache__/gem.cpython-313.pyc +0 -0
  18. pyconvexity/data/sources/gem.py +412 -0
  19. pyconvexity/io/__init__.py +32 -0
  20. pyconvexity/io/excel_exporter.py +1012 -0
  21. pyconvexity/io/excel_importer.py +1109 -0
  22. pyconvexity/io/netcdf_exporter.py +192 -0
  23. pyconvexity/io/netcdf_importer.py +1602 -0
  24. pyconvexity/models/__init__.py +7 -0
  25. pyconvexity/models/attributes.py +209 -72
  26. pyconvexity/models/components.py +3 -0
  27. pyconvexity/models/network.py +17 -15
  28. pyconvexity/models/scenarios.py +177 -0
  29. pyconvexity/solvers/__init__.py +29 -0
  30. pyconvexity/solvers/pypsa/__init__.py +24 -0
  31. pyconvexity/solvers/pypsa/api.py +421 -0
  32. pyconvexity/solvers/pypsa/batch_loader.py +304 -0
  33. pyconvexity/solvers/pypsa/builder.py +566 -0
  34. pyconvexity/solvers/pypsa/constraints.py +321 -0
  35. pyconvexity/solvers/pypsa/solver.py +1106 -0
  36. pyconvexity/solvers/pypsa/storage.py +1574 -0
  37. pyconvexity/timeseries.py +327 -0
  38. pyconvexity/validation/rules.py +2 -2
  39. {pyconvexity-0.1.2.dist-info → pyconvexity-0.1.4.dist-info}/METADATA +5 -2
  40. pyconvexity-0.1.4.dist-info/RECORD +46 -0
  41. pyconvexity-0.1.2.dist-info/RECORD +0 -20
  42. {pyconvexity-0.1.2.dist-info → pyconvexity-0.1.4.dist-info}/WHEEL +0 -0
  43. {pyconvexity-0.1.2.dist-info → pyconvexity-0.1.4.dist-info}/top_level.txt +0 -0
pyconvexity/io/excel_importer.py
@@ -0,0 +1,1109 @@
+ """
+ Excel importer for PyConvexity energy system models.
+ Imports network models from Excel workbooks with multiple sheets.
+ """
+
+ import logging
+ import pandas as pd
+ import numpy as np
+ from typing import Dict, Any, Optional, List, Tuple, Callable
+ from pathlib import Path
+ from datetime import datetime
+ import json
+
+ # Import functions directly from pyconvexity
+ from pyconvexity.core.database import open_connection
+ from pyconvexity.core.types import StaticValue, CreateNetworkRequest
+ from pyconvexity.core.errors import AttributeNotFound, ValidationError
+ from pyconvexity.models import (
+     list_components_by_type, create_component, update_component, create_network,
+     set_network_config, create_carrier, get_master_scenario_id, get_network_time_periods,
+     list_carriers, set_static_attribute, set_timeseries_attribute, get_bus_name_to_id_map,
+     get_network_info, delete_attribute
+ )
+ from pyconvexity.validation import get_validation_rule
+ from pyconvexity.timeseries import set_timeseries
+ from pyconvexity.models.attributes import set_timeseries_attribute as set_timeseries_conn
+
+ logger = logging.getLogger(__name__)
+
+ class ExcelModelImporter:
+     """Import network model from Excel workbook"""
+
+     def __init__(self):
+         self.logger = logging.getLogger(__name__)
+
+     def import_model_from_excel(
+         self,
+         db_path: str,
+         excel_path: str,
+         network_id: Optional[int] = None,
+         network_name: Optional[str] = None,
+         network_description: Optional[str] = None,
+         scenario_id: Optional[int] = None,
+         progress_callback: Optional[Callable] = None
+     ) -> Dict[str, Any]:
+         """
+         Import network model from Excel workbook
+
+         Args:
+             db_path: Database path
+             excel_path: Excel file path
+             network_id: Network ID to import into (if updating existing)
+             network_name: Name for new network (if creating new)
+             network_description: Description for new network (if creating new)
+             scenario_id: Scenario ID (defaults to master scenario)
+             progress_callback: Optional callback for progress updates
+
+         Returns:
+             Import statistics and metadata
+         """
+
+         conn = None
+         try:
+             if progress_callback:
+                 progress_callback(0, "Starting Excel import...")
+
+             # Connect to database
+             conn = open_connection(db_path)
+
+             # Determine if we're creating a new network or updating existing
+             create_new_network = network_id is None
+
+             if create_new_network:
+                 if progress_callback:
+                     progress_callback(3, "Reading Excel Overview sheet...")
+
+                 # Read network configuration from Overview sheet
+                 overview_df = pd.read_excel(excel_path, sheet_name='Overview')
+                 network_config = self._read_overview_sheet(overview_df)
+
+                 self.logger.info(f"Network config from Overview: {network_config}")
+
+                 # Extract network name from Excel if not provided
+                 excel_network_name = network_config.get('name')
+                 if excel_network_name:
+                     final_network_name = excel_network_name
+                     self.logger.info(f"Using network name from Excel: '{final_network_name}'")
+                 elif network_name:
+                     final_network_name = network_name
+                     self.logger.info(f"Using provided network name: '{final_network_name}'")
+                 else:
+                     # Fallback to filename if no name in Excel or provided
+                     final_network_name = Path(excel_path).stem
+                     self.logger.info(f"Using filename as network name: '{final_network_name}'")
+
+                 # Extract description from Excel if not provided
+                 excel_description = network_config.get('description')
+                 if excel_description:
+                     final_description = excel_description
+                     self.logger.info(f"Using description from Excel: '{final_description}'")
+                 elif network_description:
+                     final_description = network_description
+                     self.logger.info(f"Using provided description: '{final_description}'")
+                 else:
+                     final_description = f"Imported from {Path(excel_path).name}"
+                     self.logger.info(f"Using default description: '{final_description}'")
+
+                 if progress_callback:
+                     progress_callback(5, f"Creating network '{final_network_name}'...")
+
+                 # Create new network
+                 network_request = CreateNetworkRequest(
+                     name=final_network_name,
+                     description=final_description,
+                     time_resolution=network_config.get('time_resolution', "H"),
+                     start_time=network_config.get('start_time'),
+                     end_time=network_config.get('end_time')
+                 )
+
+                 # Validate that we have the required time information
+                 if not network_request.start_time or not network_request.end_time:
+                     missing_fields = []
+                     if not network_request.start_time:
+                         missing_fields.append("Time Start")
+                     if not network_request.end_time:
+                         missing_fields.append("Time End")
+
+                     self.logger.error(f"Missing required time information in Overview sheet: {missing_fields}")
+                     self.logger.error(f"Available overview data: {network_config}")
+                     raise ValueError(f"Excel file is missing required time information: {', '.join(missing_fields)}. "
+                                      f"Please ensure the Overview sheet contains 'Time Start' and 'Time End' fields.")
+
+                 self.logger.info(f"Creating network with: name='{network_request.name}', "
+                                  f"start_time='{network_request.start_time}', "
+                                  f"end_time='{network_request.end_time}', "
+                                  f"time_resolution='{network_request.time_resolution}'")
+
+                 network_id = create_network(conn, network_request)
+
+                 # Generate time periods for the network
+                 self._generate_time_periods(
+                     conn,
+                     network_id,
+                     network_request.start_time,
+                     network_request.end_time,
+                     network_request.time_resolution
+                 )
+
+                 # Verify time periods were created
+                 verification_periods = get_network_time_periods(conn, network_id)
+                 self.logger.info(f"Network {network_id} now has {len(verification_periods)} time periods")
+
+                 conn.commit()
+
+                 if progress_callback:
+                     progress_callback(5, f"Created network '{final_network_name}' (ID: {network_id})")
+             else:
+                 if progress_callback:
+                     progress_callback(3, f"Using existing network ID: {network_id}")
+
+                 # For existing networks, validate time axis compatibility
+                 if progress_callback:
+                     progress_callback(5, "Validating time axis compatibility...")
+
+                 # Read network configuration from Overview sheet to compare
+                 try:
+                     overview_df = pd.read_excel(excel_path, sheet_name='Overview')
+                     excel_time_config = self._read_overview_sheet(overview_df)
+                 except Exception as e:
+                     self.logger.warning(f"Could not read Overview sheet: {e}")
+                     self.logger.warning("Skipping time axis validation - assuming Excel is compatible")
+                     excel_time_config = {}
+
+                 # Validate time axis matches existing network
+                 self._validate_time_axis_compatibility(conn, network_id, excel_time_config)
+
+                 self.logger.info("Time axis validation passed - Excel matches existing network")
+
+             # Set import behavior based on whether this is a new or existing network
+             if create_new_network:
+                 # New network: always add all components from Excel
+                 actual_update_existing = False  # No existing components to update
+                 actual_add_new = True  # Add everything from Excel
+                 self.logger.info("Import mode: NEW NETWORK - Adding all components from Excel")
+             else:
+                 # Existing network: always update existing components and add new ones
+                 actual_update_existing = True  # Update components that exist
+                 actual_add_new = True  # Add components that don't exist
+                 self.logger.info("Import mode: EXISTING NETWORK - Update existing + add new components")
+
+             # Get master scenario if no scenario specified
+             if scenario_id is None:
+                 scenario_id = get_master_scenario_id(conn, network_id)
+                 if scenario_id is None:
+                     raise ValueError("No master scenario found for network")
+
+             if progress_callback:
+                 progress_callback(8, "Reading Excel file...")
+
+             # Read Excel file
+             excel_data = self._read_excel_file(excel_path)
+
+             if progress_callback:
+                 progress_callback(18, "Processing carriers...")
+
+             # Import carriers first
+             carriers_df = excel_data.get('Carriers', pd.DataFrame())
+             carriers_imported = self._import_carriers(conn, network_id, carriers_df)
+
+             if progress_callback:
+                 progress_callback(28, "Processing components...")
+
+             # Import components by type
+             component_types = ['Buses', 'Generators', 'Loads', 'Lines', 'Links', 'Storage Units', 'Stores', 'Constraints']
+             components_imported = {}
+
+             for sheet_name in component_types:
+                 if sheet_name in excel_data:
+                     comp_type = self._get_component_type_from_sheet(sheet_name)
+                     self.logger.info(f"Processing sheet '{sheet_name}' as component type '{comp_type}' with {len(excel_data[sheet_name])} rows")
+                     components_imported[comp_type] = self._import_components(
+                         conn, network_id, comp_type, excel_data[sheet_name],
+                         scenario_id, actual_update_existing, actual_add_new
+                     )
+
+             if progress_callback:
+                 progress_callback(78, "Processing timeseries data...")
+
+             # Import timeseries data
+             timeseries_imported = self._import_timeseries_data(
+                 conn, network_id, excel_data, scenario_id
+             )
+
+             if progress_callback:
+                 progress_callback(93, "Processing network configuration...")
+
+             # Import network configuration
+             network_config_df = excel_data.get('Network Config', pd.DataFrame())
+             config_imported = self._import_network_config(
+                 conn, network_id, network_config_df
+             )
+
+             conn.commit()
+
+             if progress_callback:
+                 progress_callback(100, "Excel import completed")
+
+             # Calculate statistics
+             stats = self._calculate_import_stats(
+                 carriers_imported, components_imported, timeseries_imported, config_imported
+             )
+             stats['network_id'] = network_id
+             stats['created_new_network'] = create_new_network
+
+             return {
+                 "success": True,
+                 "message": f"Network {'created' if create_new_network else 'updated'} from Excel: {excel_path}",
+                 "network_id": network_id,
+                 "stats": stats
+             }
+
+         except Exception as e:
+             self.logger.error(f"Excel import failed: {e}", exc_info=True)
+             if progress_callback:
+                 progress_callback(None, f"Import failed: {str(e)}")
+             raise
+         finally:
+             # Always close the connection, even on error
+             if conn is not None:
+                 try:
+                     conn.close()
+                 except Exception as e:
+                     self.logger.warning(f"Failed to close database connection: {e}")
+
+     def _generate_time_periods(self, conn, network_id: int, start_time: str, end_time: str, time_resolution: str) -> None:
+         """Generate and insert time periods for the network"""
+         try:
+             # Parse start and end times
+             start_dt = pd.to_datetime(start_time)
+             end_dt = pd.to_datetime(end_time)
+
+             # Convert time_resolution to pandas frequency string
+             if time_resolution == 'H':
+                 freq_str = 'H'
+             elif time_resolution == 'D':
+                 freq_str = 'D'
+             elif time_resolution.endswith('H'):
+                 hours = int(time_resolution[:-1])
+                 freq_str = f'{hours}H'
+             elif time_resolution.endswith('min'):
+                 minutes = int(time_resolution[:-3])
+                 freq_str = f'{minutes}min'
+             else:
+                 self.logger.warning(f"Unknown time resolution '{time_resolution}', defaulting to hourly")
+                 freq_str = 'H'
+
+             # Generate timestamps
+             timestamps = pd.date_range(start=start_dt, end=end_dt, freq=freq_str, inclusive='both')
+
+             self.logger.info(f"Generating {len(timestamps)} time periods from {start_time} to {end_time} at {time_resolution} resolution")
+
+             # Insert optimized time periods metadata
+             period_count = len(timestamps)
+             start_timestamp = int(timestamps[0].timestamp())
+
+             # Calculate interval in seconds
+             if len(timestamps) > 1:
+                 interval_seconds = int((timestamps[1] - timestamps[0]).total_seconds())
+             else:
+                 interval_seconds = 3600  # Default to hourly
+
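+             # Time periods are stored as compact metadata (count, start, interval)
+             # rather than one row per timestamp.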
+             conn.execute("""
+                 INSERT INTO network_time_periods (network_id, period_count, start_timestamp, interval_seconds)
+                 VALUES (?, ?, ?, ?)
+             """, (network_id, period_count, start_timestamp, interval_seconds))
+
+             self.logger.info(f"Successfully created {len(timestamps)} time periods for network {network_id}")
+
+         except Exception as e:
+             self.logger.error(f"Failed to generate time periods: {e}")
+             raise
+
+     def _read_overview_sheet(self, overview_df: pd.DataFrame) -> Dict[str, Any]:
+         """Extract network configuration from Overview sheet"""
+         config = {}
+
+         if overview_df.empty:
+             self.logger.warning("Overview sheet is empty")
+             return config
+
+         self.logger.info(f"Overview sheet has {len(overview_df)} rows and columns: {list(overview_df.columns)}")
+         self.logger.info(f"First few rows of overview sheet:\n{overview_df.head()}")
+
+         # Convert to a simple key-value lookup
+         overview_data = {}
+
+         # Handle both old single-column format and new two-column format
+         if 'Property' in overview_df.columns and 'Value' in overview_df.columns:
+             # New two-column format
+             for _, row in overview_df.iterrows():
+                 key = str(row['Property']).strip() if pd.notna(row['Property']) else ""
+                 value = str(row['Value']).strip() if pd.notna(row['Value']) else ""
+                 if key and value and value != 'nan':
+                     overview_data[key] = value
+                     self.logger.debug(f"Parsed overview data: '{key}' = '{value}'")
+         elif len(overview_df.columns) >= 2:
+             # Old format - try to read from first two columns
+             for _, row in overview_df.iterrows():
+                 key = str(row.iloc[0]).strip() if pd.notna(row.iloc[0]) else ""
+                 value = str(row.iloc[1]).strip() if pd.notna(row.iloc[1]) else ""
+                 if key and value and value != 'nan':
+                     overview_data[key] = value
+                     self.logger.debug(f"Parsed overview data: '{key}' = '{value}'")
+         else:
+             self.logger.error(f"Overview sheet format not recognized. Columns: {list(overview_df.columns)}")
+             return config
+
+         self.logger.info(f"Parsed overview data: {overview_data}")
+
+         # Extract network configuration
+         if 'Name' in overview_data:
+             config['name'] = overview_data['Name']
+         if 'Description' in overview_data:
+             config['description'] = overview_data['Description']
+         if 'Time Start' in overview_data:
+             config['start_time'] = overview_data['Time Start']
+             self.logger.info(f"Found Time Start: {config['start_time']}")
+         if 'Time End' in overview_data:
+             config['end_time'] = overview_data['Time End']
+             self.logger.info(f"Found Time End: {config['end_time']}")
+         if 'Time Interval' in overview_data:
+             # Convert time interval format to our format
+             interval = overview_data['Time Interval'].strip()
+             self.logger.info(f"Found Time Interval: '{interval}'")
+
+             if interval == 'P1D':
+                 config['time_resolution'] = 'D'  # Daily
+             elif interval in ('PT1H', 'h', 'H'):
+                 config['time_resolution'] = 'H'  # Hourly
+             elif interval.startswith('PT') and interval.endswith('H'):
+                 # Extract hours (e.g., 'PT3H' -> '3H')
+                 hours = interval[2:-1]
+                 config['time_resolution'] = f'{hours}H'
+             elif interval.endswith('h') or interval.endswith('H'):
+                 # Handle simple formats like '2h', '3H'
+                 if interval[:-1].isdigit():
+                     hours = interval[:-1]
+                     config['time_resolution'] = f'{hours}H'
+                 else:
+                     config['time_resolution'] = 'H'  # Default to hourly
+             else:
+                 self.logger.warning(f"Unknown time interval format '{interval}', defaulting to hourly")
+                 config['time_resolution'] = 'H'  # Default to hourly
+
+         self.logger.info(f"Final network config from Overview sheet: {config}")
+         return config
+
+     def _read_excel_file(self, excel_path: str) -> Dict[str, pd.DataFrame]:
+         """Read Excel file and return dictionary of DataFrames by sheet name"""
+         excel_data = {}
+
+         try:
+             # Read all sheets
+             excel_file = pd.ExcelFile(excel_path)
+
+             self.logger.info(f"Excel file contains sheets: {excel_file.sheet_names}")
+
+             for sheet_name in excel_file.sheet_names:
+                 if sheet_name == 'Overview':
+                     continue  # Skip overview sheet
+
+                 df = pd.read_excel(excel_path, sheet_name=sheet_name)
+                 if not df.empty:
+                     excel_data[sheet_name] = df
+                     self.logger.info(f"Loaded sheet '{sheet_name}' with {len(df)} rows")
+                 else:
+                     self.logger.info(f"Skipped empty sheet '{sheet_name}'")
+
+         except Exception as e:
+             raise ValueError(f"Failed to read Excel file: {e}")
+
+         return excel_data
+
+     def _get_component_type_from_sheet(self, sheet_name: str) -> str:
+         """Convert sheet name to component type"""
+         mapping = {
+             'Buses': 'BUS',
+             'Generators': 'GENERATOR',
+             'Loads': 'LOAD',
+             'Lines': 'LINE',
+             'Links': 'LINK',
+             'Storage Units': 'STORAGE_UNIT',
+             'Stores': 'STORE',
+             'Constraints': 'CONSTRAINT'
+         }
+         return mapping.get(sheet_name, sheet_name.upper())
+
+     def _import_carriers(self, conn, network_id: int, carriers_df: pd.DataFrame) -> Dict[str, Any]:
+         """Import carriers from Excel data"""
+         imported = {'created': 0, 'updated': 0, 'errors': 0}
+
+         if carriers_df.empty:
+             return imported
+
+         # Get existing carriers
+         existing_carriers = list_carriers(conn, network_id)
+         existing_names = {carrier['name'] for carrier in existing_carriers}
+
+         for _, row in carriers_df.iterrows():
+             try:
+                 carrier_name = str(row.get('name', '')).strip()
+                 if not carrier_name:
+                     continue
+
+                 # Check if carrier exists
+                 if carrier_name in existing_names:
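+                     # Existing carriers are counted as updated but left unchanged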
+                     imported['updated'] += 1
+                 else:
+                     # Create new carrier
+                     create_carrier(
+                         conn,
+                         network_id,
+                         carrier_name,
+                         co2_emissions=row.get('co2_emissions', 0.0),
+                         color=row.get('color', '#ffffff'),
+                         nice_name=row.get('nice_name', carrier_name)
+                     )
+                     imported['created'] += 1
+
+             except Exception as e:
+                 self.logger.error(f"Failed to import carrier {carrier_name}: {e}")
+                 imported['errors'] += 1
+
+         return imported
+
+     def _import_components(
+         self,
+         conn,
+         network_id: int,
+         component_type: str,
+         components_df: pd.DataFrame,
+         scenario_id: int,
+         update_existing: bool,
+         add_new: bool
+     ) -> Dict[str, Any]:
+         """Import components of a specific type"""
+         imported = {'created': 0, 'updated': 0, 'errors': 0}
+
+         if components_df.empty:
+             return imported
+
+         # Get existing components of this type
+         existing_components = list_components_by_type(conn, network_id, component_type)
+         existing_names = {comp.name for comp in existing_components}
+
+         # Get carriers and buses for foreign key resolution
+         carriers = list_carriers(conn, network_id)
+         buses = list_components_by_type(conn, network_id, 'BUS')
+
+         carrier_name_to_id = {carrier['name']: carrier['id'] for carrier in carriers}
+         bus_name_to_id = {bus.name: bus.id for bus in buses}
+
+         for _, row in components_df.iterrows():
+             try:
+                 component_name = str(row.get('name', '')).strip()
+                 if not component_name:
+                     continue
+
+                 # Debug logging for CONSTRAINT components (reduced verbosity)
+                 if component_type == 'CONSTRAINT':
+                     self.logger.debug(f"Processing CONSTRAINT '{component_name}'")
+
+                 # Resolve foreign keys
+                 carrier_id = None
+                 # CONSTRAINT components don't have carriers
+                 if row.get('carrier') and component_type != 'CONSTRAINT':
+                     carrier_name = str(row['carrier']).strip()
+                     carrier_id = carrier_name_to_id.get(carrier_name)
+                     self.logger.info(f"Component '{component_name}' has carrier '{carrier_name}', resolved to carrier_id: {carrier_id}")
+                     if carrier_id is None:
+                         self.logger.warning(f"Carrier '{carrier_name}' not found for component '{component_name}'. Available carriers: {list(carrier_name_to_id.keys())}")
+                 elif component_type == 'CONSTRAINT':
+                     self.logger.debug(f"CONSTRAINT '{component_name}' - skipping carrier resolution")
+
+                 bus_id = None
+                 # CONSTRAINT components don't connect to buses
+                 if row.get('bus') and component_type != 'CONSTRAINT':
+                     bus_name = str(row['bus']).strip()
+                     bus_id = bus_name_to_id.get(bus_name)
+                     if bus_id is None:
+                         self.logger.warning(f"Bus '{bus_name}' not found for component '{component_name}'. Available buses: {list(bus_name_to_id.keys())}")
+
+                 bus0_id = None
+                 if row.get('bus0') and component_type != 'CONSTRAINT':
+                     bus0_name = str(row['bus0']).strip()
+                     bus0_id = bus_name_to_id.get(bus0_name)
+                     if bus0_id is None:
+                         self.logger.warning(f"Bus0 '{bus0_name}' not found for component '{component_name}'. Available buses: {list(bus_name_to_id.keys())}")
+
+                 bus1_id = None
+                 if row.get('bus1') and component_type != 'CONSTRAINT':
+                     bus1_name = str(row['bus1']).strip()
+                     bus1_id = bus_name_to_id.get(bus1_name)
+                     if bus1_id is None:
+                         self.logger.warning(f"Bus1 '{bus1_name}' not found for component '{component_name}'. Available buses: {list(bus_name_to_id.keys())}")
+
+                 # Check if component exists
+                 if component_name in existing_names and update_existing:
+                     # Update existing component
+                     existing_comp = next(c for c in existing_components if c.name == component_name)
+
+                     try:
+                         # Update component using the proper function
+                         # CONSTRAINT components must have carrier_id=None per database schema
+                         final_carrier_id = None if component_type == 'CONSTRAINT' else carrier_id
+                         update_component(
+                             conn,
+                             existing_comp.id,
+                             carrier_id=final_carrier_id,
+                             bus_id=bus_id,
+                             bus0_id=bus0_id,
+                             bus1_id=bus1_id,
+                             latitude=row.get('latitude'),
+                             longitude=row.get('longitude')
+                         )
+
+                         # Update attributes
+                         self._update_component_attributes(conn, existing_comp.id, row, scenario_id)
+                         imported['updated'] += 1
+
+                     except Exception as e:
+                         self.logger.error(f"Failed to update component '{component_name}': {e}")
+                         imported['errors'] += 1
+                         continue
+
+                 elif component_name not in existing_names and add_new:
+                     # Create new component using the proper function
+                     # CONSTRAINT components must have carrier_id=None per database schema
+                     final_carrier_id = None if component_type == 'CONSTRAINT' else carrier_id
+
+                     # Handle latitude/longitude - CONSTRAINT components don't have location
+                     if component_type == 'CONSTRAINT':
+                         lat_val = None
+                         lon_val = None
+                         self.logger.debug(f"CONSTRAINT '{component_name}' - setting latitude/longitude to None")
+                     else:
+                         # Clean empty strings for other component types
+                         lat_val = row.get('latitude')
+                         lon_val = row.get('longitude')
+                         if lat_val == '' or (isinstance(lat_val, str) and lat_val.strip() == ''):
+                             lat_val = None
+                         if lon_val == '' or (isinstance(lon_val, str) and lon_val.strip() == ''):
+                             lon_val = None
+
+                     component_id = create_component(
+                         conn,
+                         network_id,
+                         component_type,
+                         component_name,
+                         longitude=lon_val,
+                         latitude=lat_val,
+                         carrier_id=final_carrier_id,
+                         bus_id=bus_id,
+                         bus0_id=bus0_id,
+                         bus1_id=bus1_id
+                     )
+
+                     # Set attributes
+                     self._set_component_attributes(conn, component_id, row, scenario_id)
+                     imported['created'] += 1
+
+             except Exception as e:
+                 self.logger.error(f"Failed to import component '{component_name}' of type '{component_type}': {e}")
+                 self.logger.error(f"Component data: name='{component_name}', carrier_id={carrier_id}, bus_id={bus_id}, bus0_id={bus0_id}, bus1_id={bus1_id}")
+                 imported['errors'] += 1
+
+         return imported
+
+     def _update_component_attributes(self, conn, component_id: int, row: pd.Series, scenario_id: int):
+         """Update attributes for an existing component"""
+         # Get validation rules for this component type
+         cursor = conn.execute(
+             "SELECT component_type FROM components WHERE id = ?",
+             (component_id,)
+         )
+         component_type = cursor.fetchone()[0]
+
+         # Process each column as potential attribute
+         for column, value in row.items():
+             if column in ['name', 'carrier', 'bus', 'bus0', 'bus1', 'latitude', 'longitude', 'type']:
+                 continue  # Skip basic fields
+
+             if value == '[timeseries]':
+                 continue  # Skip timeseries markers
+
+             # Check if this is a valid attribute
+             validation_rule = get_validation_rule(conn, component_type, column)
+             if validation_rule:
+                 # Handle blank cells (empty strings or NaN) - these should unset the attribute
+                 if pd.isna(value) or value == '':
+                     try:
+                         delete_attribute(conn, component_id, column, scenario_id)
+                         self.logger.debug(f"Unset attribute '{column}' for component {component_id} due to blank cell")
+                     except Exception as e:
+                         # Attribute might not exist, which is fine
+                         self.logger.debug(f"Could not unset attribute '{column}' for component {component_id}: {e}")
+                 else:
+                     # Set the attribute with the provided value
+                     self._set_single_attribute(conn, component_id, column, value, validation_rule, scenario_id)
+
+     def _set_component_attributes(self, conn, component_id: int, row: pd.Series, scenario_id: int):
+         """Set attributes for a new component"""
+         # Get validation rules for this component type
+         cursor = conn.execute(
+             "SELECT component_type FROM components WHERE id = ?",
+             (component_id,)
+         )
+         component_type = cursor.fetchone()[0]
+
+         # Process each column as potential attribute
+         for column, value in row.items():
+             if column in ['name', 'carrier', 'bus', 'bus0', 'bus1', 'latitude', 'longitude', 'type']:
+                 continue  # Skip basic fields
+
+             if value == '[timeseries]':
+                 continue  # Skip timeseries markers
+
+             # Check if this is a valid attribute
+             validation_rule = get_validation_rule(conn, component_type, column)
+             if validation_rule:
+                 # For new components, only set attributes that have actual values
+                 # Blank cells (empty strings or NaN) are left unset (which is the default state)
+                 if not (pd.isna(value) or value == ''):
+                     # Set the attribute with the provided value
+                     self._set_single_attribute(conn, component_id, column, value, validation_rule, scenario_id)
+
+     def _set_single_attribute(self, conn, component_id: int, attr_name: str, value: Any, validation_rule: Dict, scenario_id: int):
+         """Set a single attribute with proper type conversion"""
+         data_type = validation_rule.data_type if hasattr(validation_rule, 'data_type') else validation_rule.get('data_type', 'string')
+
+         try:
+             if data_type == 'float':
+                 static_value = StaticValue(float(value))
+                 set_static_attribute(conn, component_id, attr_name, static_value, scenario_id)
+             elif data_type == 'int':
+                 static_value = StaticValue(int(value))
+                 set_static_attribute(conn, component_id, attr_name, static_value, scenario_id)
+             elif data_type == 'boolean':
+                 bool_value = str(value).lower() in ['true', '1', 'yes']
+                 static_value = StaticValue(bool_value)
+                 set_static_attribute(conn, component_id, attr_name, static_value, scenario_id)
+             else:  # string
+                 static_value = StaticValue(str(value))
+                 set_static_attribute(conn, component_id, attr_name, static_value, scenario_id)
+         except (AttributeNotFound, ValidationError):
+             # Skip missing attributes or validation errors silently (same as PyPSA solver)
+             pass
+         except Exception as e:
+             self.logger.warning(f"Failed to set attribute {attr_name} for component {component_id}: {e}")
+
+     def _import_timeseries_data(self, conn, network_id: int, excel_data: Dict, scenario_id: int) -> Dict[str, Any]:
+         """Import timeseries data from Excel sheets"""
+         imported = {'attributes': 0, 'errors': 0}
+
+         # Get network time periods for timestamp mapping
+         network_time_periods = get_network_time_periods(conn, network_id)
+         time_period_map = {period.formatted_time: period for period in network_time_periods}
+
+         expected_length = len(network_time_periods)
+         self.logger.info(f"TIMESERIES DEBUG: Network has {expected_length} time periods for timeseries import")
+         if network_time_periods:
+             self.logger.info(f"TIMESERIES DEBUG: Time period range: {network_time_periods[0].formatted_time} to {network_time_periods[-1].formatted_time}")
+         else:
+             self.logger.error("TIMESERIES DEBUG: NO TIME PERIODS FOUND! Timeseries import will fail.")
+             return imported
+
+         # Look for timeseries sheets
+         for sheet_name, df in excel_data.items():
+             if 'Timeseries' in sheet_name and not df.empty:
+                 self.logger.info(f"TIMESERIES DEBUG: Processing sheet '{sheet_name}' with {len(df)} rows")
+                 component_type = self._get_component_type_from_sheet(sheet_name.replace(' Timeseries', ''))
+
+                 # Get timestamps
+                 if 'timestamp' not in df.columns:
+                     self.logger.warning(f"TIMESERIES DEBUG: No timestamp column found in {sheet_name}")
+                     continue
+                 timestamps = df['timestamp']
+
+                 excel_ts_length = len(timestamps)
+                 self.logger.info(f"TIMESERIES DEBUG: Sheet '{sheet_name}' has {excel_ts_length} timestamps (expected: {expected_length})")
+                 if excel_ts_length != expected_length:
+                     self.logger.warning(f"TIMESERIES DEBUG: LENGTH MISMATCH in sheet '{sheet_name}': Excel has {excel_ts_length}, network expects {expected_length} (difference: {excel_ts_length - expected_length})")
+
+                 # Log timestamp range for debugging
+                 if len(timestamps) > 0:
+                     first_ts = str(timestamps.iloc[0]).strip()
+                     last_ts = str(timestamps.iloc[-1]).strip()
+                     self.logger.info(f"TIMESERIES DEBUG: Sheet timestamp range: '{first_ts}' to '{last_ts}'")
+
+                 # Process each column (except timestamp)
+                 for column in df.columns:
+                     if column == 'timestamp':
+                         continue
+
+                     # Parse component name and attribute from column name.
+                     # Format: "<Component Name>_<attribute_name>"; since both parts
+                     # may contain underscores, match the prefix against known
+                     # component names instead of splitting on an underscore.
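+                     # e.g. "Gas Turbine 1_p_max_pu" -> component "Gas Turbine 1",
+                     # attribute "p_max_pu" (names here are illustrative)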
+                     if '_' in column:
+                         # Find all components of this type to match against
+                         components = list_components_by_type(conn, network_id, component_type)
+                         component_names = [c.name for c in components]
+
+                         # Try to find the component name by matching against known components
+                         component_name = None
+                         attr_name = None
+
+                         for comp_name in component_names:
+                             # Check if column starts with component name + underscore
+                             prefix = f"{comp_name}_"
+                             if column.startswith(prefix):
+                                 component_name = comp_name
+                                 attr_name = column[len(prefix):]
+                                 break
+
+                         if component_name and attr_name:
+                             # Find component by name
+                             component = next((c for c in components if c.name == component_name), None)
+
+                             if component:
+                                 # Create timeseries data using efficient array format
+                                 timeseries_values = []
+                                 filled_missing_values = 0
+
+                                 # Debug: Show first few timestamps for comparison
+                                 if len(timestamps) > 0 and len(network_time_periods) > 0:
+                                     excel_first = str(timestamps.iloc[0]).strip()
+                                     excel_last = str(timestamps.iloc[-1]).strip() if len(timestamps) > 1 else excel_first
+                                     network_first = network_time_periods[0].formatted_time
+                                     network_last = network_time_periods[-1].formatted_time if len(network_time_periods) > 1 else network_first
+
+                                     self.logger.info(f"TIMESERIES DEBUG: Timestamp comparison for '{component_name}.{attr_name}':")
+                                     self.logger.info(f"  Excel range: '{excel_first}' to '{excel_last}' ({len(timestamps)} periods)")
+                                     self.logger.info(f"  Network range: '{network_first}' to '{network_last}' ({len(network_time_periods)} periods)")
+
+                                 # Take the first N values from Excel where N = expected network periods
+                                 # This puts responsibility on user to format Excel correctly
+                                 max_periods = min(len(timestamps), len(network_time_periods), len(df[column]))
+
+                                 for i in range(max_periods):
+                                     value = df[column].iloc[i]
+
+                                     # Handle missing values - use 0.0 as default
+                                     if pd.isna(value):
+                                         actual_value = 0.0
+                                         filled_missing_values += 1
+                                     else:
+                                         try:
+                                             actual_value = float(value)
+                                         except (ValueError, TypeError):
+                                             actual_value = 0.0
+                                             filled_missing_values += 1
+
+                                     timeseries_values.append(actual_value)
+
+                                 final_ts_length = len(timeseries_values)
+                                 self.logger.info(f"TIMESERIES DEBUG: Component '{component_name}.{attr_name}': "
+                                                  f"Excel rows={excel_ts_length}, "
+                                                  f"Network periods={expected_length}, "
+                                                  f"Used={max_periods}, "
+                                                  f"Filled missing={filled_missing_values}, "
+                                                  f"Final length={final_ts_length}")
+
+                                 if filled_missing_values > 0:
+                                     self.logger.warning(f"TIMESERIES DEBUG: Filled {filled_missing_values} missing/invalid values with 0.0 for '{component_name}.{attr_name}'")
+
+                                 if excel_ts_length != expected_length:
+                                     self.logger.warning(f"TIMESERIES DEBUG: LENGTH MISMATCH for '{component_name}.{attr_name}': "
+                                                         f"Excel has {excel_ts_length} rows, network expects {expected_length} periods")
+
+                                 if final_ts_length != expected_length:
+                                     self.logger.warning(f"TIMESERIES DEBUG: FINAL LENGTH MISMATCH for '{component_name}.{attr_name}': "
+                                                         f"Expected {expected_length}, got {final_ts_length} (difference: {final_ts_length - expected_length})")
+
+                                 if timeseries_values:
+                                     try:
+                                         # Use new efficient timeseries API
+                                         set_timeseries_conn(
+                                             conn, component.id, attr_name, timeseries_values, scenario_id
+                                         )
+                                         imported['attributes'] += 1
+                                         self.logger.info(f"TIMESERIES DEBUG: Successfully imported {final_ts_length} points for '{component_name}.{attr_name}'")
+                                     except Exception as e:
+                                         self.logger.error(f"TIMESERIES DEBUG: Failed to set timeseries attribute {attr_name} for {component_name}: {e}")
+                                         imported['errors'] += 1
+                                 else:
+                                     self.logger.warning(f"TIMESERIES DEBUG: No valid timeseries data found for {component_name}.{attr_name}")
+                             else:
+                                 self.logger.warning(f"TIMESERIES DEBUG: Component '{component_name}' not found for timeseries import")
+                         else:
+                             self.logger.warning(f"TIMESERIES DEBUG: Could not parse column '{column}' into component and attribute names")
+                     else:
+                         self.logger.warning(f"TIMESERIES DEBUG: Column '{column}' does not contain underscore separator")
+
+         return imported
+
+     def _import_network_config(self, conn, network_id: int, config_df: pd.DataFrame) -> Dict[str, Any]:
+         """Import network configuration from Excel"""
+         imported = {'parameters': 0, 'errors': 0}
+
+         # Defensive: handle the case where config_df is not a DataFrame
+         if not isinstance(config_df, pd.DataFrame):
+             self.logger.info("No Network Config sheet found, using default configuration")
+             # Set default network configuration
+             default_config = {
+                 'unmet_load_active': True,
+                 'discount_rate': 0.01,
+                 'solver_name': 'highs',
+                 'currency': 'USD'
+             }
+
+             for param_name, param_value in default_config.items():
+                 try:
+                     if isinstance(param_value, bool):
+                         param_type = 'boolean'
+                     elif isinstance(param_value, float):
+                         param_type = 'real'
+                     elif isinstance(param_value, int):
+                         param_type = 'integer'
+                     else:
+                         param_type = 'string'
+
+                     set_network_config(
+                         conn,
+                         network_id,
+                         param_name,
+                         param_value,
+                         param_type,
+                         scenario_id=None,  # Network default
+                         description=f"Default {param_name} setting"
+                     )
+                     imported['parameters'] += 1
+                     self.logger.info(f"Set default network config: {param_name} = {param_value}")
+
+                 except Exception as e:
+                     self.logger.error(f"Failed to set default network config parameter {param_name}: {e}")
+                     imported['errors'] += 1
+
+             return imported
+
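+         # Mirrors the fallback above for an empty sheet; note the solver_name
+         # default differs here ('default' vs 'highs' when the sheet is missing).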
+         if config_df.empty:
+             self.logger.info("Network Config sheet is empty, using default configuration")
+             # Set default network configuration
+             default_config = {
+                 'unmet_load_active': True,
+                 'discount_rate': 0.01,
+                 'solver_name': 'default',
+                 'currency': 'USD'
+             }
+
+             for param_name, param_value in default_config.items():
+                 try:
+                     if isinstance(param_value, bool):
+                         param_type = 'boolean'
+                     elif isinstance(param_value, float):
+                         param_type = 'real'
+                     elif isinstance(param_value, int):
+                         param_type = 'integer'
+                     else:
+                         param_type = 'string'
+
+                     set_network_config(
+                         conn,
+                         network_id,
+                         param_name,
+                         param_value,
+                         param_type,
+                         scenario_id=None,  # Network default
+                         description=f"Default {param_name} setting"
+                     )
+                     imported['parameters'] += 1
+                     self.logger.info(f"Set default network config: {param_name} = {param_value}")
+
+                 except Exception as e:
+                     self.logger.error(f"Failed to set default network config parameter {param_name}: {e}")
+                     imported['errors'] += 1
+
+             return imported
+
+         for _, row in config_df.iterrows():
+             try:
+                 param_name = str(row.get('Parameter', '')).strip()
+                 param_value = row.get('Value', '')
+                 param_type = str(row.get('Type', 'string')).strip()
+                 param_description = str(row.get('Description', '')).strip()
+
+                 if not param_name:
+                     continue
+
+                 # Validate parameter type and map Python types to database types
+                 valid_types = {'boolean', 'real', 'integer', 'string', 'json'}
+
+                 # Map Python type names to database type names
+                 type_mapping = {
+                     'bool': 'boolean',
+                     'float': 'real',
+                     'int': 'integer',
+                     'str': 'string'
+                 }
+
+                 # Convert Python type name to database type name if needed
+                 if param_type in type_mapping:
+                     param_type = type_mapping[param_type]
+
+                 if param_type not in valid_types:
+                     self.logger.error(f"Invalid parameter type '{param_type}' for parameter '{param_name}'. Must be one of {valid_types}")
+                     imported['errors'] += 1
+                     continue
+
+                 # Convert value based on type
+                 try:
+                     if param_type == 'boolean':
+                         # Handle various boolean representations
+                         if isinstance(param_value, bool):
+                             converted_value = param_value
+                         elif isinstance(param_value, str):
+                             converted_value = param_value.lower() in {'true', '1', 'yes', 'on'}
+                         elif isinstance(param_value, (int, float)):
+                             converted_value = bool(param_value)
+                         else:
+                             converted_value = False
+                     elif param_type == 'real':
+                         converted_value = float(param_value)
+                     elif param_type == 'integer':
+                         converted_value = int(float(param_value))  # Handle float strings like "1.0"
+                     elif param_type == 'json':
+                         if isinstance(param_value, str):
+                             converted_value = json.loads(param_value)
+                         else:
+                             converted_value = param_value
+                     else:  # string
+                         converted_value = str(param_value)
+                 except (ValueError, TypeError, json.JSONDecodeError) as e:
+                     self.logger.error(f"Failed to convert parameter '{param_name}' value '{param_value}' to type '{param_type}': {e}")
+                     imported['errors'] += 1
+                     continue
+
+                 # Use the proper set_network_config function from pyconvexity
+                 set_network_config(
+                     conn,
+                     network_id,
+                     param_name,
+                     converted_value,
+                     param_type,
+                     scenario_id=None,  # Network default
+                     description=param_description if param_description else None
+                 )
+                 imported['parameters'] += 1
+
+             except Exception as e:
+                 self.logger.error(f"Failed to import network config parameter {param_name}: {e}")
+                 imported['errors'] += 1
+
+         return imported
+
1012
+ def _validate_time_axis_compatibility(self, conn, network_id: int, excel_time_config: Dict[str, str]) -> None:
1013
+ """Validate that Excel time axis matches existing network time axis"""
1014
+ try:
1015
+ # Get existing network info
1016
+ existing_network = get_network_info(conn, network_id)
1017
+
1018
+ # Compare time axis parameters
1019
+ excel_start = excel_time_config.get('start_time', '').strip()
1020
+ excel_end = excel_time_config.get('end_time', '').strip()
1021
+ excel_interval = excel_time_config.get('time_resolution', '').strip()
1022
+
1023
+ existing_start = existing_network.get('time_start', '').strip()
1024
+ existing_end = existing_network.get('time_end', '').strip()
1025
+ existing_interval = existing_network.get('time_interval', '').strip()
1026
+
1027
+ self.logger.info(f"TIME AXIS DEBUG: Validating time axis compatibility")
1028
+ self.logger.info(f"TIME AXIS DEBUG: Excel: {excel_start} to {excel_end}, interval: {excel_interval}")
1029
+ self.logger.info(f"TIME AXIS DEBUG: Network: {existing_start} to {existing_end}, interval: {existing_interval}")
1030
+
1031
+ # Skip validation if Excel doesn't have time information (allow partial updates)
1032
+ if not excel_start or not excel_end or not excel_interval:
1033
+ self.logger.warning("TIME AXIS DEBUG: Excel Overview sheet missing time axis information - skipping validation")
1034
+ self.logger.warning("TIME AXIS DEBUG: Assuming Excel data is compatible with existing network time axis")
1035
+ return
1036
+
1037
+ # Normalize case and format for time interval comparison
1038
+ excel_interval_normalized = self._normalize_time_interval(excel_interval)
1039
+ existing_interval_normalized = self._normalize_time_interval(existing_interval)
1040
+
1041
+ self.logger.info(f"TIME AXIS DEBUG: Normalized intervals - Excel: '{excel_interval_normalized}', Network: '{existing_interval_normalized}'")
1042
+
1043
+ # Check if they match
1044
+ if (excel_start != existing_start or
1045
+ excel_end != existing_end or
1046
+ excel_interval_normalized != existing_interval_normalized):
1047
+
1048
+ self.logger.error(f"TIME AXIS DEBUG: MISMATCH DETECTED!")
1049
+ self.logger.error(f"TIME AXIS DEBUG: Start times - Excel: '{excel_start}', Network: '{existing_start}' (match: {excel_start == existing_start})")
1050
+ self.logger.error(f"TIME AXIS DEBUG: End times - Excel: '{excel_end}', Network: '{existing_end}' (match: {excel_end == existing_end})")
1051
+ self.logger.error(f"TIME AXIS DEBUG: Intervals - Excel: '{excel_interval_normalized}', Network: '{existing_interval_normalized}' (match: {excel_interval_normalized == existing_interval_normalized})")
1052
+
1053
+ raise ValueError(
1054
+ f"Time axis mismatch! "
1055
+ f"Excel has {excel_start} to {excel_end} ({excel_interval}), "
1056
+ f"but existing network has {existing_start} to {existing_end} ({existing_interval}). "
1057
+ f"Time axis must match exactly when importing into an existing network."
1058
+ )
1059
+ else:
1060
+ self.logger.info(f"TIME AXIS DEBUG: Time axis validation PASSED - Excel and network time axes match")
1061
+
1062
+ except Exception as e:
1063
+ if "Time axis mismatch" in str(e):
1064
+ raise # Re-raise validation errors
1065
+ else:
1066
+ # Log other errors but don't fail the import
1067
+ self.logger.error(f"Error during time axis validation: {e}")
1068
+ self.logger.warning("Continuing with import despite time axis validation error")
1069
+
+     def _normalize_time_interval(self, interval: str) -> str:
+         """Normalize time interval format for comparison"""
+         interval = interval.strip().upper()
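+         # e.g. 'pt1h' -> 'H', 'P1D' -> 'D', 'PT3H' -> '3H'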
+
+         # Handle common variations
+         if interval in ['H', '1H', 'PT1H', 'HOURLY']:
+             return 'H'
+         elif interval in ['D', '1D', 'P1D', 'DAILY']:
+             return 'D'
+         elif interval.endswith('H') and interval[:-1].isdigit():
+             return interval  # Already normalized (e.g., '2H', '3H')
+         elif interval.startswith('PT') and interval.endswith('H'):
+             # Convert PT3H -> 3H
+             hours = interval[2:-1]
+             return f'{hours}H'
+
+         return interval
+
+     def _calculate_import_stats(self, carriers_imported: Dict, components_imported: Dict,
+                                 timeseries_imported: Dict, config_imported: Dict) -> Dict[str, Any]:
+         """Calculate import statistics"""
+         total_created = carriers_imported['created'] + sum(
+             comp['created'] for comp in components_imported.values()
+         )
+         total_updated = carriers_imported['updated'] + sum(
+             comp['updated'] for comp in components_imported.values()
+         )
+         total_errors = carriers_imported['errors'] + sum(
+             comp['errors'] for comp in components_imported.values()
+         ) + timeseries_imported['errors'] + config_imported['errors']
+
+         return {
+             'total_created': total_created,
+             'total_updated': total_updated,
+             'total_errors': total_errors,
+             'carriers': carriers_imported,
+             'components': components_imported,
+             'timeseries': timeseries_imported,
+             'network_config': config_imported
+         }
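
For reference, a minimal call into this importer might look like the following sketch. The paths, network name, and callback are illustrative, not part of the package; only db_path and excel_path are required by the signature above, and the import path assumes the class is used directly from pyconvexity.io.excel_importer.

    from pyconvexity.io.excel_importer import ExcelModelImporter

    importer = ExcelModelImporter()
    result = importer.import_model_from_excel(
        db_path="energy_model.db",        # hypothetical database path
        excel_path="network_model.xlsx",  # hypothetical workbook path
        network_name="Demo Network",      # used only if the Overview sheet has no Name
        progress_callback=lambda pct, msg: print(pct, msg),
    )
    print(result["network_id"], result["stats"]["total_created"])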