pyconvexity 0.1.3__py3-none-any.whl → 0.3.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pyconvexity might be problematic.

@@ -14,14 +14,16 @@ import math
 # Import functions directly from pyconvexity
 from pyconvexity.core.database import open_connection, create_database_with_schema
 from pyconvexity.core.types import (
-    StaticValue, CreateNetworkRequest, CreateComponentRequest, TimeseriesPoint
+    StaticValue, CreateNetworkRequest, CreateComponentRequest
 )
-from pyconvexity.core.errors import PyConvexityError as DbError
+from pyconvexity.core.errors import PyConvexityError as DbError, ValidationError
 from pyconvexity.models import (
     create_network, create_carrier, insert_component, set_static_attribute,
-    get_bus_name_to_id_map, set_timeseries_attribute, get_component_type, get_attribute
+    get_bus_name_to_id_map, set_timeseries_attribute, get_component_type, get_attribute,
+    get_network_time_periods
 )
 from pyconvexity.validation import get_validation_rule
+from pyconvexity.timeseries import set_timeseries
 
 logger = logging.getLogger(__name__)
 
@@ -450,7 +452,7 @@ class NetCDFModelImporter:
         return create_network(conn, request)
 
     def _create_network_time_periods(self, conn, network, network_id: int) -> None:
-        """Create network time periods from PyPSA snapshots"""
+        """Create network time periods from PyPSA snapshots using optimized approach"""
        # Use our robust helper to extract datetime snapshots
         snapshots = self._extract_datetime_snapshots(network)
 
@@ -458,100 +460,632 @@ class NetCDFModelImporter:
             self.logger.warning("No valid snapshots found in PyPSA network, skipping time periods creation")
             return
 
-        # Insert time periods
-        for period_index, snapshot in enumerate(snapshots):
-            timestamp_str = snapshot.strftime('%Y-%m-%d %H:%M:%S')
-
-            conn.execute("""
-                INSERT INTO network_time_periods (network_id, timestamp, period_index)
-                VALUES (?, ?, ?)
-            """, (network_id, timestamp_str, period_index))
+        # Insert optimized time periods metadata
+        period_count = len(snapshots)
+        start_timestamp = int(snapshots[0].timestamp())
+
+        # Calculate interval in seconds
+        if len(snapshots) > 1:
+            interval_seconds = int((snapshots[1] - snapshots[0]).total_seconds())
+        else:
+            interval_seconds = 3600  # Default to hourly
+
+        conn.execute("""
+            INSERT INTO network_time_periods (network_id, period_count, start_timestamp, interval_seconds)
+            VALUES (?, ?, ?, ?)
+        """, (network_id, period_count, start_timestamp, interval_seconds))
 
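The replaced loop wrote one row per snapshot; the new code stores a single (period_count, start_timestamp, interval_seconds) row, so irregular snapshot spacing is collapsed into the first interval. A minimal reader-side sketch of expanding that metadata back into timestamps, assuming evenly spaced snapshots (expand_time_periods is a hypothetical helper, not part of the package):

    from datetime import datetime, timedelta, timezone

    def expand_time_periods(period_count, start_timestamp, interval_seconds):
        # Rebuild the snapshot list that the old per-row schema stored
        # explicitly; only valid when snapshots are evenly spaced.
        start = datetime.fromtimestamp(start_timestamp, tz=timezone.utc)
        return [start + timedelta(seconds=i * interval_seconds)
                for i in range(period_count)]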
 
         # Placeholder methods - in a full implementation, these would contain
         # the detailed import logic from the original netcdf_importer.py
 
     def _import_carriers(self, conn, network, network_id: int) -> int:
-        """Import carriers from PyPSA network"""
-        # Simplified implementation - full version would be from original file
+        """Import carriers from PyPSA network, discovering from both network and component levels"""
         count = 0
+        created_carriers = set()
+
+        # Discover all carriers from components (not just n.carriers table)
+        all_carriers = set()
 
         # Get carriers from network.carriers table if it exists
         if hasattr(network, 'carriers') and not network.carriers.empty:
-            for carrier_name, carrier_data in network.carriers.iterrows():
-                co2_emissions = carrier_data.get('co2_emissions', 0.0)
-                color = carrier_data.get('color', '#3498db')
-                nice_name = carrier_data.get('nice_name', carrier_name)
-
-                create_carrier(conn, network_id, carrier_name, co2_emissions, color, nice_name)
-                count += 1
+            all_carriers.update(network.carriers.index)
+
+        # Get carriers from generators
+        if hasattr(network, 'generators') and not network.generators.empty and 'carrier' in network.generators.columns:
+            component_carriers = set(network.generators.carrier.dropna().unique())
+            all_carriers.update(component_carriers)
+
+        # Get carriers from storage units
+        if hasattr(network, 'storage_units') and not network.storage_units.empty and 'carrier' in network.storage_units.columns:
+            component_carriers = set(network.storage_units.carrier.dropna().unique())
+            all_carriers.update(component_carriers)
+
+        # Get carriers from stores
+        if hasattr(network, 'stores') and not network.stores.empty and 'carrier' in network.stores.columns:
+            component_carriers = set(network.stores.carrier.dropna().unique())
+            all_carriers.update(component_carriers)
 
-        # Ensure we have essential carriers
-        if count == 0:
-            create_carrier(conn, network_id, 'AC', 0.0, '#3498db', 'AC Electricity')
+        # Get carriers from loads (if they have carriers)
+        if hasattr(network, 'loads') and not network.loads.empty and 'carrier' in network.loads.columns:
+            component_carriers = set(network.loads.carrier.dropna().unique())
+            all_carriers.update(component_carriers)
+
+        # Get carriers from buses (if they have carriers)
+        if hasattr(network, 'buses') and not network.buses.empty and 'carrier' in network.buses.columns:
+            component_carriers = set(network.buses.carrier.dropna().unique())
+            all_carriers.update(component_carriers)
+
+        # Convert to sorted list for consistent ordering
+        all_carriers = sorted(list(all_carriers))
+
+        # Define a color palette similar to the Python code
+        color_palette = [
+            '#1f77b4',  # C0 - blue
+            '#ff7f0e',  # C1 - orange
+            '#2ca02c',  # C2 - green
+            '#d62728',  # C3 - red
+            '#9467bd',  # C4 - purple
+            '#8c564b',  # C5 - brown
+            '#e377c2',  # C6 - pink
+            '#7f7f7f',  # C7 - gray
+            '#bcbd22',  # C8 - olive
+            '#17becf',  # C9 - cyan
+            '#aec7e8',  # light blue
+            '#ffbb78',  # light orange
+            '#98df8a',  # light green
+            '#ff9896',  # light red
+            '#c5b0d5',  # light purple
+        ]
+
+        # Create carriers from discovered list
+        for i, carrier_name in enumerate(all_carriers):
+            # Get carrier data from network.carriers if available
+            carrier_data = {}
+            if hasattr(network, 'carriers') and not network.carriers.empty and carrier_name in network.carriers.index:
+                # Use .iloc with index position to avoid fragmentation
+                carrier_idx = network.carriers.index.get_loc(carrier_name)
+                carrier_data = network.carriers.iloc[carrier_idx]
+
+            # Extract attributes with defaults
+            co2_emissions = carrier_data.get('co2_emissions', 0.0)
+
+            # Use color from network.carriers if available, otherwise assign from palette
+            if 'color' in carrier_data and pd.notna(carrier_data['color']):
+                color = carrier_data['color']
+            else:
+                color = color_palette[i % len(color_palette)]
+
+            nice_name = carrier_data.get('nice_name', None)
+
+            # Create the carrier
+            create_carrier(conn, network_id, carrier_name, co2_emissions, color, nice_name)
+            created_carriers.add(carrier_name)
             count += 1
 
+        # Ensure we have essential carriers for bus validation
+        # Buses can only use AC, DC, heat, or gas carriers according to database constraints
+        essential_carriers = {
+            'AC': {'co2_emissions': 0.0, 'color': '#3498db', 'nice_name': 'AC Electricity'},
+            'electricity': {'co2_emissions': 0.0, 'color': '#2ecc71', 'nice_name': 'Electricity'}
+        }
+
+        for carrier_name, carrier_props in essential_carriers.items():
+            if carrier_name not in created_carriers:
+                create_carrier(
+                    conn,
+                    network_id,
+                    carrier_name,
+                    carrier_props['co2_emissions'],
+                    carrier_props['color'],
+                    carrier_props['nice_name']
+                )
+                created_carriers.add(carrier_name)
+                count += 1
+
         return count
 
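Carrier discovery now unions network.carriers.index with the carrier columns of generators, storage units, stores, loads and buses, then assigns colors deterministically from the sorted list. A small illustration of the modulo cycling used above (carrier names are made up):

    palette = ['#1f77b4', '#ff7f0e', '#2ca02c']
    carriers = sorted({'wind', 'solar', 'gas', 'coal'})
    colors = {name: palette[i % len(palette)] for i, name in enumerate(carriers)}
    # {'coal': '#1f77b4', 'gas': '#ff7f0e', 'solar': '#2ca02c', 'wind': '#1f77b4'}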
     def _import_buses(self, conn, network, network_id: int, strict_validation: bool) -> int:
         """Import buses from PyPSA network"""
-        # Simplified implementation - full version would be from original file
         count = 0
 
-        if hasattr(network, 'buses') and not network.buses.empty:
-            for bus_name, bus_data in network.buses.iterrows():
-                # Extract coordinates
-                longitude = bus_data.get('x', None)
-                latitude = bus_data.get('y', None)
+        if not hasattr(network, 'buses') or network.buses.empty:
+            return count
+
+        for bus_name, bus_data in network.buses.iterrows():
+            try:
+                # Generate a unique name for this bus
+                unique_name = self._generate_unique_name(str(bus_name), 'BUS')
 
-                # Handle NaN values
-                if pd.isna(longitude):
+                # Extract and log coordinate data for debugging
+                x_value = bus_data.get('x', None)
+                y_value = bus_data.get('y', None)
+                self.logger.debug(f"Bus '{bus_name}' -> '{unique_name}': x={x_value} (type: {type(x_value)}), y={y_value} (type: {type(y_value)})")
+
+                # Handle NaN/None values properly
+                longitude = None if x_value is None or (hasattr(x_value, '__iter__') and len(str(x_value)) == 0) else float(x_value) if x_value != '' else None
+                latitude = None if y_value is None or (hasattr(y_value, '__iter__') and len(str(y_value)) == 0) else float(y_value) if y_value != '' else None
+
+                # Additional check for pandas NaN values
+                if longitude is not None and pd.isna(longitude):
                     longitude = None
-                if pd.isna(latitude):
+                if latitude is not None and pd.isna(latitude):
                     latitude = None
 
-                # Create component record
+                # Get or create carrier
+                carrier_name = bus_data.get('carrier', 'AC')
+                carrier_id = self._get_or_create_carrier(conn, network_id, carrier_name)
+
+                # Create component record using atomic function
+                # Note: PyPSA 'x'/'y' coordinates are mapped to 'longitude'/'latitude' columns here
                 request = CreateComponentRequest(
                     network_id=network_id,
                     component_type='BUS',
-                    name=str(bus_name),
-                    latitude=latitude,
-                    longitude=longitude
+                    name=unique_name,  # Use globally unique name
+                    latitude=latitude,  # PyPSA y -> latitude
+                    longitude=longitude,  # PyPSA x -> longitude
+                    carrier_id=carrier_id
                 )
                 component_id = insert_component(conn, request)
+
+                # Import bus attributes (location/coordinate data is handled above, not as attributes)
+                self._import_component_attributes(conn, component_id, bus_data, 'BUS', strict_validation)
+
+                # Import timeseries attributes for buses
+                self._import_component_timeseries(conn, network, component_id, bus_name, 'BUS', strict_validation)
+
                 count += 1
+
+            except Exception as e:
+                if strict_validation:
+                    raise
+                self.logger.warning(f"Failed to import bus {bus_name}: {e}")
+                continue
 
         return count
 
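The inline conditional expressions that coerce x_value/y_value above are hard to read. An equivalent, more explicit version of the same intent, as a sketch only (the released code keeps the one-liners, and the later pd.isna() pass still catches NaN):

    def coerce_coordinate(value):
        # None and empty strings mean "no coordinate"; anything else
        # must be float-convertible. Hypothetical helper, not in the wheel.
        if value is None or str(value) == '':
            return None
        return float(value)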
     # Additional placeholder methods for other component types
     def _import_generators(self, conn, network, network_id: int, strict_validation: bool, scatter_radius: float, location_map) -> int:
         """Import generators from PyPSA network"""
-        # Simplified - full implementation would be from original file
-        return len(network.generators) if hasattr(network, 'generators') else 0
+        count = 0
+
+        if not hasattr(network, 'generators') or network.generators.empty:
+            return count
+
+        # Get bus name to ID mapping
+        bus_name_to_id = get_bus_name_to_id_map(conn, network_id)
+
+        # Get master scenario ID
+        master_scenario_id = self._get_master_scenario_id(conn, network_id)
+
+        for gen_name, gen_data in network.generators.iterrows():
+            try:
+                # Get bus connection
+                bus_name = gen_data.get('bus')
+                bus_id = bus_name_to_id.get(bus_name) if bus_name else None
+
+                if not bus_id:
+                    self.logger.warning(f"Generator {gen_name}: bus '{bus_name}' not found, skipping")
+                    continue
+
+                # Get or create carrier
+                carrier_name = gen_data.get('carrier', 'AC')
+                carrier_id = self._get_or_create_carrier(conn, network_id, carrier_name)
+
+                # Generate coordinates near the bus
+                latitude, longitude = self._generate_component_coordinates(
+                    conn, bus_id, scatter_radius, location_map, gen_name
+                )
+
+                # Create component record
+                request = CreateComponentRequest(
+                    network_id=network_id,
+                    component_type='GENERATOR',
+                    name=str(gen_name),
+                    latitude=latitude,
+                    longitude=longitude,
+                    carrier_id=carrier_id,
+                    bus_id=bus_id
+                )
+                component_id = insert_component(conn, request)
+
+                # Import generator attributes
+                self._import_component_attributes(conn, component_id, gen_data, 'GENERATOR', strict_validation)
+
+                # Import timeseries attributes for generators
+                self._import_component_timeseries(conn, network, component_id, gen_name, 'GENERATOR', strict_validation)
+
+                count += 1
+
+            except Exception as e:
+                if strict_validation:
+                    raise
+                self.logger.warning(f"Failed to import generator {gen_name}: {e}")
+                continue
+
+        return count
 
     def _import_loads(self, conn, network, network_id: int, strict_validation: bool, scatter_radius: float, location_map) -> int:
         """Import loads from PyPSA network"""
-        # Simplified - full implementation would be from original file
-        return len(network.loads) if hasattr(network, 'loads') else 0
+        count = 0
+
+        if not hasattr(network, 'loads') or network.loads.empty:
+            return count
+
+        bus_map = get_bus_name_to_id_map(conn, network_id)
+        bus_coords = self._get_bus_coordinates_map(conn, network_id)
+
+        # Count components per bus for better distribution
+        components_per_bus = {}
+        for load_name, load_data in network.loads.iterrows():
+            bus_name = load_data['bus']
+            components_per_bus[bus_name] = components_per_bus.get(bus_name, 0) + 1
+
+        bus_component_counters = {}
+
+        for load_name, load_data in network.loads.iterrows():
+            try:
+                bus_id = bus_map.get(load_data['bus'])
+                if bus_id is None:
+                    self.logger.warning(f"Bus '{load_data['bus']}' not found for load '{load_name}'")
+                    continue
+
+                # Generate a unique name for this load
+                unique_name = self._generate_unique_name(str(load_name), 'LOAD')
+
+                # Try to get coordinates from CSV first, then fall back to scattered coordinates
+                latitude, longitude = None, None
+
+                # Check CSV coordinates first
+                csv_coords = self._get_csv_coordinates(unique_name, location_map)
+                if csv_coords:
+                    latitude, longitude = csv_coords
+                elif bus_id in bus_coords:
+                    # Fall back to scattered coordinates around the connected bus
+                    bus_lat, bus_lon = bus_coords[bus_id]
+                    bus_name = load_data['bus']
+
+                    # Get component index for this bus
+                    component_index = bus_component_counters.get(bus_name, 0)
+                    bus_component_counters[bus_name] = component_index + 1
+
+                    latitude, longitude = self._generate_scattered_coordinates(
+                        bus_lat, bus_lon, scatter_radius,
+                        components_per_bus[bus_name], component_index
+                    )
+
+                # Get carrier ID if carrier is specified
+                carrier_id = None
+                if 'carrier' in load_data and pd.notna(load_data['carrier']):
+                    carrier_id = self._get_or_create_carrier(conn, network_id, load_data['carrier'])
+
+                # Create component record using atomic function
+                request = CreateComponentRequest(
+                    network_id=network_id,
+                    component_type='LOAD',
+                    name=unique_name,  # Use globally unique name
+                    bus_id=bus_id,
+                    carrier_id=carrier_id,
+                    latitude=latitude,
+                    longitude=longitude
+                )
+                component_id = insert_component(conn, request)
+
+                # Import load attributes
+                self._import_component_attributes(conn, component_id, load_data, 'LOAD', strict_validation)
+
+                # Import timeseries attributes for loads
+                self._import_component_timeseries(conn, network, component_id, load_name, 'LOAD', strict_validation)
+
+                count += 1
+
+            except Exception as e:
+                if strict_validation:
+                    raise
+                self.logger.warning(f"Failed to import load {load_name}: {e}")
+                continue
+
+        return count
 
     def _import_lines(self, conn, network, network_id: int, strict_validation: bool, location_map) -> int:
         """Import lines from PyPSA network"""
-        # Simplified - full implementation would be from original file
-        return len(network.lines) if hasattr(network, 'lines') else 0
+        count = 0
+        name_counter = {}  # Track duplicate names
+
+        if not hasattr(network, 'lines') or network.lines.empty:
+            return count
+
+        bus_map = get_bus_name_to_id_map(conn, network_id)
+
+        for line_name, line_data in network.lines.iterrows():
+            try:
+                bus0_id = bus_map.get(line_data['bus0'])
+                bus1_id = bus_map.get(line_data['bus1'])
+
+                if bus0_id is None or bus1_id is None:
+                    self.logger.warning(f"Bus not found for line '{line_name}': bus0='{line_data['bus0']}', bus1='{line_data['bus1']}'")
+                    continue
+
+                # Handle duplicate names by appending counter
+                unique_name = line_name
+                if line_name in name_counter:
+                    name_counter[line_name] += 1
+                    unique_name = f"{line_name}_{name_counter[line_name]}"
+                    self.logger.warning(f"Duplicate line name '{line_name}' renamed to '{unique_name}'")
+                else:
+                    name_counter[line_name] = 0
+
+                # Check for CSV coordinates
+                latitude, longitude = None, None
+                csv_coords = self._get_csv_coordinates(unique_name, location_map)
+                if csv_coords:
+                    latitude, longitude = csv_coords
+
+                # Lines always use AC carrier
+                carrier_id = self._get_or_create_carrier(conn, network_id, 'AC')
+
+                # Create component record using atomic function
+                request = CreateComponentRequest(
+                    network_id=network_id,
+                    component_type='LINE',
+                    name=unique_name,  # Use deduplicated name
+                    bus0_id=bus0_id,
+                    bus1_id=bus1_id,
+                    carrier_id=carrier_id,
+                    latitude=latitude,
+                    longitude=longitude
+                )
+                component_id = insert_component(conn, request)
+
+                # Import line attributes
+                self._import_component_attributes(conn, component_id, line_data, 'LINE', strict_validation)
+
+                # Import timeseries attributes for lines
+                self._import_component_timeseries(conn, network, component_id, line_name, 'LINE', strict_validation)
+
+                count += 1
+
+            except Exception as e:
+                if strict_validation:
+                    raise
+                self.logger.warning(f"Failed to import line {line_name}: {e}")
+                continue
+
+        return count
 
     def _import_links(self, conn, network, network_id: int, strict_validation: bool, location_map) -> int:
         """Import links from PyPSA network"""
-        # Simplified - full implementation would be from original file
-        return len(network.links) if hasattr(network, 'links') else 0
+        count = 0
+
+        if not hasattr(network, 'links') or network.links.empty:
+            return count
+
+        bus_map = get_bus_name_to_id_map(conn, network_id)
+
+        for link_name, link_data in network.links.iterrows():
+            try:
+                bus0_id = bus_map.get(link_data['bus0'])
+                bus1_id = bus_map.get(link_data['bus1'])
+
+                if bus0_id is None or bus1_id is None:
+                    self.logger.warning(f"Bus not found for link '{link_name}': bus0='{link_data['bus0']}', bus1='{link_data['bus1']}'")
+                    continue
+
+                # Generate a unique name for this link
+                unique_name = self._generate_unique_name(str(link_name), 'LINK')
+
+                # Check for CSV coordinates
+                latitude, longitude = None, None
+                csv_coords = self._get_csv_coordinates(unique_name, location_map)
+                if csv_coords:
+                    latitude, longitude = csv_coords
+
+                # Get carrier ID if carrier is specified
+                carrier_id = None
+                if 'carrier' in link_data and pd.notna(link_data['carrier']):
+                    carrier_id = self._get_or_create_carrier(conn, network_id, link_data['carrier'])
+                else:
+                    # Default to DC for links
+                    carrier_id = self._get_or_create_carrier(conn, network_id, 'DC')
+
+                # Create component record using atomic function
+                request = CreateComponentRequest(
+                    network_id=network_id,
+                    component_type='LINK',
+                    name=unique_name,  # Use globally unique name
+                    bus0_id=bus0_id,
+                    bus1_id=bus1_id,
+                    carrier_id=carrier_id,
+                    latitude=latitude,
+                    longitude=longitude
+                )
+                component_id = insert_component(conn, request)
+
+                # Import link attributes
+                self._import_component_attributes(conn, component_id, link_data, 'LINK', strict_validation)
+
+                # Import timeseries attributes for links
+                self._import_component_timeseries(conn, network, component_id, link_name, 'LINK', strict_validation)
+
+                count += 1
+
+            except Exception as e:
+                if strict_validation:
+                    raise
+                self.logger.warning(f"Failed to import link {link_name}: {e}")
+                continue
+
+        return count
 
     def _import_storage_units(self, conn, network, network_id: int, strict_validation: bool, scatter_radius: float, location_map) -> int:
         """Import storage units from PyPSA network"""
-        # Simplified - full implementation would be from original file
-        return len(network.storage_units) if hasattr(network, 'storage_units') else 0
+        count = 0
+
+        if not hasattr(network, 'storage_units') or network.storage_units.empty:
+            return count
+
+        bus_map = get_bus_name_to_id_map(conn, network_id)
+        bus_coords = self._get_bus_coordinates_map(conn, network_id)
+
+        # Count components per bus for better distribution
+        components_per_bus = {}
+        for su_name, su_data in network.storage_units.iterrows():
+            bus_name = su_data['bus']
+            components_per_bus[bus_name] = components_per_bus.get(bus_name, 0) + 1
+
+        bus_component_counters = {}
+
+        for su_name, su_data in network.storage_units.iterrows():
+            try:
+                bus_id = bus_map.get(su_data['bus'])
+                if bus_id is None:
+                    self.logger.warning(f"Bus '{su_data['bus']}' not found for storage unit '{su_name}'")
+                    continue
+
+                # Generate a unique name for this storage unit
+                unique_name = self._generate_unique_name(str(su_name), 'STORAGE_UNIT')
+
+                # Try to get coordinates from CSV first, then fall back to scattered coordinates
+                latitude, longitude = None, None
+
+                # Check CSV coordinates first
+                csv_coords = self._get_csv_coordinates(unique_name, location_map)
+                if csv_coords:
+                    latitude, longitude = csv_coords
+                elif bus_id in bus_coords:
+                    # Fall back to scattered coordinates around the connected bus
+                    bus_lat, bus_lon = bus_coords[bus_id]
+                    bus_name = su_data['bus']
+
+                    # Get component index for this bus
+                    component_index = bus_component_counters.get(bus_name, 0)
+                    bus_component_counters[bus_name] = component_index + 1
+
+                    latitude, longitude = self._generate_scattered_coordinates(
+                        bus_lat, bus_lon, scatter_radius,
+                        components_per_bus[bus_name], component_index
+                    )
+
+                # Get carrier ID if carrier is specified
+                carrier_id = None
+                if 'carrier' in su_data and pd.notna(su_data['carrier']):
+                    carrier_id = self._get_or_create_carrier(conn, network_id, su_data['carrier'])
+
+                # Create component record using atomic function
+                request = CreateComponentRequest(
+                    network_id=network_id,
+                    component_type='STORAGE_UNIT',
+                    name=unique_name,  # Use globally unique name
+                    bus_id=bus_id,
+                    carrier_id=carrier_id,
+                    latitude=latitude,
+                    longitude=longitude
+                )
+                component_id = insert_component(conn, request)
+
+                # Import storage unit attributes
+                self._import_component_attributes(conn, component_id, su_data, 'STORAGE_UNIT', strict_validation)
+
+                # Import timeseries attributes for storage units
+                self._import_component_timeseries(conn, network, component_id, su_name, 'STORAGE_UNIT', strict_validation)
+
+                count += 1
+
+            except Exception as e:
+                if strict_validation:
+                    raise
+                self.logger.warning(f"Failed to import storage unit {su_name}: {e}")
+                continue
+
+        return count
 
     def _import_stores(self, conn, network, network_id: int, strict_validation: bool, scatter_radius: float, location_map) -> int:
         """Import stores from PyPSA network"""
-        # Simplified - full implementation would be from original file
-        return len(network.stores) if hasattr(network, 'stores') else 0
+        count = 0
+        name_counter = {}  # Track duplicate names
+
+        if not hasattr(network, 'stores') or network.stores.empty:
+            return count
+
+        bus_map = get_bus_name_to_id_map(conn, network_id)
+        bus_coords = self._get_bus_coordinates_map(conn, network_id)
+
+        # Count components per bus for better distribution
+        components_per_bus = {}
+        for store_name, store_data in network.stores.iterrows():
+            bus_name = store_data['bus']
+            components_per_bus[bus_name] = components_per_bus.get(bus_name, 0) + 1
+
+        bus_component_counters = {}  # Track how many components we've placed at each bus
+
+        for store_name, store_data in network.stores.iterrows():
+            try:
+                bus_id = bus_map.get(store_data['bus'])
+                if bus_id is None:
+                    self.logger.warning(f"Bus '{store_data['bus']}' not found for store '{store_name}'")
+                    continue
+
+                # Handle duplicate names by appending counter
+                unique_name = store_name
+                if store_name in name_counter:
+                    name_counter[store_name] += 1
+                    unique_name = f"{store_name}_{name_counter[store_name]}"
+                    self.logger.warning(f"Duplicate store name '{store_name}' renamed to '{unique_name}'")
+                else:
+                    name_counter[store_name] = 0
+
+                # Try to get coordinates from CSV first, then fall back to scattered coordinates
+                latitude, longitude = None, None
+
+                # Check CSV coordinates first
+                csv_coords = self._get_csv_coordinates(unique_name, location_map)
+                if csv_coords:
+                    latitude, longitude = csv_coords
+                elif bus_id in bus_coords:
+                    # Fall back to scattered coordinates around the connected bus
+                    bus_lat, bus_lon = bus_coords[bus_id]
+                    bus_name = store_data['bus']
+
+                    # Get component index for this bus
+                    component_index = bus_component_counters.get(bus_name, 0)
+                    bus_component_counters[bus_name] = component_index + 1
+
+                    latitude, longitude = self._generate_scattered_coordinates(
+                        bus_lat, bus_lon, scatter_radius,
+                        components_per_bus[bus_name], component_index
+                    )
+
+                # Get carrier ID if carrier is specified
+                carrier_id = None
+                if 'carrier' in store_data and pd.notna(store_data['carrier']):
+                    carrier_id = self._get_or_create_carrier(conn, network_id, store_data['carrier'])
+
+                # Create component record using atomic function
+                request = CreateComponentRequest(
+                    network_id=network_id,
+                    component_type='STORE',
+                    name=unique_name,  # Use deduplicated name
+                    bus_id=bus_id,
+                    carrier_id=carrier_id,
+                    latitude=latitude,
+                    longitude=longitude
+                )
+                component_id = insert_component(conn, request)
+
+                # Import store attributes
+                self._import_component_attributes(conn, component_id, store_data, 'STORE', strict_validation)
+
+                # Import timeseries attributes for stores
+                self._import_component_timeseries(conn, network, component_id, store_name, 'STORE', strict_validation)
+
+                count += 1
+
+            except Exception as e:
+                if strict_validation:
+                    raise
+                self.logger.warning(f"Failed to import store {store_name}: {e}")
+                continue
+
+        return count
 
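With that, all six component importers replace the old stub counts with the same skeleton and differ only in naming strategy (_generate_unique_name for buses, loads, links and storage units; a local name_counter for lines and stores), carrier defaults (AC for lines, DC for links), and whether they take one bus or a bus0/bus1 pair: bail out if the PyPSA table is missing or empty, resolve bus names through get_bus_name_to_id_map, pick coordinates from the CSV map or scatter around the connected bus, insert_component with a CreateComponentRequest, then import static attributes and timeseries, logging and continuing on per-component failure unless strict_validation is set.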
     def _get_bus_coordinates(self, conn, network_id: int) -> List[Tuple[float, float]]:
         """Get coordinates of all buses in the network that have valid coordinates"""
@@ -594,6 +1128,475 @@ class NetCDFModelImporter:
         return scatter_radius_degrees
 
     def _detect_and_load_location_csv(self, netcdf_path: str) -> Optional[Dict[str, Tuple[float, float]]]:
-        """Detect and load companion CSV file with component locations"""
-        # Simplified implementation - full version would be from original file
+        """
+        Detect and load companion CSV file with component locations.
+
+        Args:
+            netcdf_path: Path to the NetCDF file (e.g., /path/to/fileX.nc)
+
+        Returns:
+            Dictionary mapping component names to (latitude, longitude) tuples, or None if no CSV found
+        """
+        try:
+            # Construct expected CSV path: replace .nc with _locations.csv
+            netcdf_file = Path(netcdf_path)
+            csv_path = netcdf_file.parent / f"{netcdf_file.stem}_locations.csv"
+
+            if not csv_path.exists():
+                return None
+
+            # Parse the CSV file
+            try:
+                location_df = pd.read_csv(csv_path)
+
+                # Validate required columns
+                required_columns = {'name', 'longitude', 'latitude'}
+                if not required_columns.issubset(location_df.columns):
+                    missing_cols = required_columns - set(location_df.columns)
+                    self.logger.warning(f"Location CSV missing required columns: {missing_cols}. Found columns: {list(location_df.columns)}")
+                    return None
+
+                # Create lookup dictionary
+                location_map = {}
+                skipped_count = 0
+
+                for _, row in location_df.iterrows():
+                    name = row['name']
+                    longitude = row['longitude']
+                    latitude = row['latitude']
+
+                    # Skip rows with missing data
+                    if pd.isna(name) or pd.isna(longitude) or pd.isna(latitude):
+                        skipped_count += 1
+                        continue
+
+                    # Validate coordinate ranges
+                    if not (-180 <= longitude <= 180) or not (-90 <= latitude <= 90):
+                        self.logger.warning(f"Invalid coordinates for '{name}': longitude={longitude}, latitude={latitude}")
+                        skipped_count += 1
+                        continue
+
+                    location_map[str(name).strip()] = (float(latitude), float(longitude))
+
+                self.logger.info(f"Loaded {len(location_map)} component locations from CSV (skipped {skipped_count} invalid entries)")
+                return location_map
+
+            except Exception as e:
+                self.logger.error(f"Failed to parse location CSV {csv_path}: {e}")
+                return None
+
+        except Exception as e:
+            self.logger.warning(f"Error detecting location CSV: {e}")
+            return None
+
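So a network stored as fileX.nc picks up locations from fileX_locations.csv in the same directory. A minimal companion file would look like this (component names and coordinates invented for illustration):

    name,longitude,latitude
    north_bus,10.75,59.91
    wind_farm_1,5.32,60.39

Note that although the columns are ordered name, longitude, latitude, the lookup stores (latitude, longitude) tuples.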
+    def _get_master_scenario_id(self, conn, network_id: int) -> int:
+        """Get the master scenario ID for a network"""
+        cursor = conn.execute(
+            "SELECT id FROM scenarios WHERE network_id = ? AND is_master = 1",
+            (network_id,)
+        )
+        result = cursor.fetchone()
+        if not result:
+            raise ValueError(f"No master scenario found for network {network_id}")
+        return result[0]
+
+    def _get_or_create_carrier(self, conn, network_id: int, carrier_name: str) -> int:
+        """Get existing carrier ID or create new carrier"""
+        # Try to find existing carrier
+        cursor = conn.execute(
+            "SELECT id FROM carriers WHERE network_id = ? AND name = ?",
+            (network_id, carrier_name)
+        )
+        result = cursor.fetchone()
+        if result:
+            return result[0]
+
+        # Create new carrier
+        carrier_id = create_carrier(conn, network_id, carrier_name, 0.0, '#3498db', carrier_name)
+        return carrier_id
+
+    def _generate_component_coordinates(
+        self,
+        conn,
+        bus_id: int,
+        scatter_radius: float,
+        location_map: Optional[Dict],
+        component_name: str
+    ) -> Tuple[Optional[float], Optional[float]]:
+        """Generate coordinates for a component near its connected bus"""
+        # Check location map first
+        if location_map and component_name in location_map:
+            return location_map[component_name]
+
+        # Get bus coordinates
+        cursor = conn.execute(
+            "SELECT latitude, longitude FROM components WHERE id = ?",
+            (bus_id,)
+        )
+        result = cursor.fetchone()
+        if not result or result[0] is None or result[1] is None:
+            return None, None
+
+        bus_lat, bus_lon = result[0], result[1]
+
+        # Generate unique name-based offset
+        name_hash = hash(component_name) % 1000
+        angle = (name_hash / 1000.0) * 2 * math.pi
+
+        # Apply scatter radius
+        lat_offset = scatter_radius * math.cos(angle)
+        lon_offset = scatter_radius * math.sin(angle)
+
+        return bus_lat + lat_offset, bus_lon + lon_offset
+
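One caveat worth flagging: hash() on strings is salted per process (PYTHONHASHSEED), so the name-based angle above is not reproducible across runs. A stable alternative, sketched with a digest-based hash (not what the wheel ships):

    import hashlib
    import math

    def stable_angle(component_name):
        # Derive a deterministic angle in [0, 2*pi) from the name, so
        # repeated imports place a component at the same offset.
        digest = hashlib.md5(component_name.encode('utf-8')).hexdigest()
        return (int(digest, 16) % 1000) / 1000.0 * 2 * math.pi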
+    def _import_component_attributes(
+        self,
+        conn,
+        component_id: int,
+        component_data: pd.Series,
+        component_type: str,
+        strict_validation: bool
+    ):
+        """Import component attributes, excluding bus connection columns"""
+
+        # Get master scenario ID
+        network_id_result = conn.execute("SELECT network_id FROM components WHERE id = ?", (component_id,)).fetchone()
+        if not network_id_result:
+            self.logger.error(f"Could not find network_id for component {component_id}")
+            return
+
+        network_id = network_id_result[0]
+        scenario_id = self._get_master_scenario_id(conn, network_id)
+
+        # Skip these columns as they're handled in the components table
+        skip_columns = {
+            'bus', 'bus0', 'bus1', 'name',  # Bus connections and name
+            'x', 'y', 'location',  # Coordinate/location data (stored as latitude/longitude columns)
+            'carrier'  # Carrier reference (stored as carrier_id column)
+        }
+
+        attribute_count = 0
+        skipped_count = 0
+
+        for attr_name, value in component_data.items():
+            if attr_name in skip_columns:
+                skipped_count += 1
+                continue
+
+            if pd.isna(value):
+                skipped_count += 1
+                continue
+
+            # Convert value to appropriate format for our database and use smart attribute setting
+            try:
+                # Get validation rule to check expected data type
+                try:
+                    rule = get_validation_rule(conn, component_type, attr_name)
+                    expected_type = rule.data_type
+                except:
+                    expected_type = None
+
+                # Convert based on expected type or infer from value
+                if expected_type == 'boolean':
+                    # Handle boolean attributes that might come as int/float from PyPSA
+                    if isinstance(value, (bool, np.bool_)):
+                        static_value = StaticValue(bool(value))
+                    elif isinstance(value, (int, np.integer)):
+                        static_value = StaticValue(bool(value))  # 0 -> False, 1 -> True
+                    elif isinstance(value, (float, np.floating)):
+                        static_value = StaticValue(bool(int(value)))  # 0.0 -> False, 1.0 -> True
+                    else:
+                        static_value = StaticValue(str(value).lower() == 'true')
+                elif expected_type == 'int':
+                    # Handle integer attributes
+                    if isinstance(value, (int, np.integer)):
+                        static_value = StaticValue(int(value))
+                    elif isinstance(value, (float, np.floating)):
+                        if np.isfinite(value):
+                            static_value = StaticValue(int(value))
+                        else:
+                            skipped_count += 1
+                            continue
+                    elif isinstance(value, bool):
+                        static_value = StaticValue(int(value))
+                    else:
+                        static_value = StaticValue(int(float(str(value))))
+                elif expected_type == 'float':
+                    # Handle float attributes
+                    if isinstance(value, (float, np.floating)):
+                        if np.isfinite(value):
+                            static_value = StaticValue(float(value))
+                        else:
+                            skipped_count += 1
+                            continue
+                    elif isinstance(value, (int, np.integer)):
+                        static_value = StaticValue(float(value))
+                    elif isinstance(value, bool):
+                        static_value = StaticValue(float(value))
+                    else:
+                        static_value = StaticValue(float(str(value)))
+                else:
+                    # Fallback to type inference for unknown or string types
+                    if isinstance(value, bool):
+                        static_value = StaticValue(bool(value))
+                    elif isinstance(value, (int, np.integer)):
+                        static_value = StaticValue(int(value))
+                    elif isinstance(value, (float, np.floating)):
+                        if np.isfinite(value):
+                            static_value = StaticValue(float(value))
+                        else:
+                            skipped_count += 1
+                            continue  # Skip infinite/NaN values
+                    else:
+                        static_value = StaticValue(str(value))
+
+                # Use direct static attribute setting
+                set_static_attribute(conn, component_id, attr_name, static_value, scenario_id)
+                attribute_count += 1
+
+            except Exception as e:
+                # Handle validation errors from db_utils functions
+                if ("No validation rule found" in str(e) or
+                        "does not allow" in str(e) or
+                        "ValidationError" in str(type(e).__name__)):
+                    if strict_validation:
+                        raise
+                    else:
+                        self.logger.warning(f"Skipping undefined/invalid attribute '{attr_name}' for {component_type} component {component_id}: {e}")
+                        skipped_count += 1
+                        continue
+                else:
+                    # Log but don't fail on other attribute import errors (like type conversion issues)
+                    self.logger.warning(f"Skipping attribute {attr_name} for component {component_id}: {e}")
+                    skipped_count += 1
+
+    def _import_component_timeseries(
+        self,
+        conn,
+        network,
+        component_id: int,
+        component_name: str,
+        component_type: str,
+        strict_validation: bool
+    ):
+        """Import timeseries attributes from PyPSA network"""
+
+        # Get master scenario ID
+        network_id_result = conn.execute("SELECT network_id FROM components WHERE id = ?", (component_id,)).fetchone()
+        if not network_id_result:
+            self.logger.error(f"Could not find network_id for component {component_id}")
+            return
+
+        network_id = network_id_result[0]
+        scenario_id = self._get_master_scenario_id(conn, network_id)
+
+        # Map component types to their PyPSA timeseries DataFrames
+        timeseries_map = {
+            'BUS': getattr(network, 'buses_t', {}),
+            'GENERATOR': getattr(network, 'generators_t', {}),
+            'LOAD': getattr(network, 'loads_t', {}),
+            'LINE': getattr(network, 'lines_t', {}),
+            'LINK': getattr(network, 'links_t', {}),
+            'STORAGE_UNIT': getattr(network, 'storage_units_t', {}),
+            'STORE': getattr(network, 'stores_t', {})
+        }
+
+        component_timeseries = timeseries_map.get(component_type, {})
+
+        if not component_timeseries:
+            return
+
+        timeseries_count = 0
+
+        # Iterate through each timeseries attribute (e.g., 'p', 'q', 'p_set', 'p_max_pu', etc.)
+        for attr_name, timeseries_df in component_timeseries.items():
+            if component_name not in timeseries_df.columns:
+                continue
+
+            # Get the timeseries data for this component
+            component_series = timeseries_df[component_name]
+
+            # Skip if all values are NaN
+            if component_series.isna().all():
+                continue
+
+            try:
+                # Convert pandas Series to list of values (using optimized approach)
+                values = []
+
+                for value in component_series:
+                    # Skip NaN values by using 0.0 as default (PyPSA convention)
+                    if pd.isna(value):
+                        values.append(0.0)
+                    else:
+                        values.append(float(value))
+
+                if not values:
+                    self.logger.warning(f"No valid timeseries points for '{attr_name}' on {component_type} '{component_name}'")
+                    continue
+
+                # Use optimized timeseries attribute setting
+                set_timeseries_attribute(conn, component_id, attr_name, values, scenario_id)
+                timeseries_count += 1
+
+            except Exception as e:
+                if strict_validation:
+                    raise
+                else:
+                    self.logger.warning(f"Skipping timeseries attribute '{attr_name}' for {component_type} component '{component_name}': {e}")
+                    continue
+
+        if timeseries_count > 0:
+            self.logger.debug(f"Imported {timeseries_count} timeseries attributes for {component_type} '{component_name}'")
+
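The element-by-element loop above is equivalent to a vectorized fillna, with NaN mapped to 0.0 per PyPSA convention; for example:

    import pandas as pd

    series = pd.Series([1.5, float('nan'), 2.0])
    values = series.fillna(0.0).astype(float).tolist()  # [1.5, 0.0, 2.0]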
+    def _generate_unique_name(self, base_name: str, component_type: str) -> str:
+        """
+        Generate a unique name for a component, ensuring no duplicates across all component types.
+
+        Args:
+            base_name: The original name to start with
+            component_type: The type of component (used in the suffix if needed)
+
+        Returns:
+            A unique name that hasn't been used yet
+        """
+        # First try the base name
+        if base_name not in self._used_names:
+            self._used_names.add(base_name)
+            return base_name
+
+        # If base name is taken, try appending the component type
+        typed_name = f"{base_name}_{component_type.lower()}"
+        if typed_name not in self._used_names:
+            self._used_names.add(typed_name)
+            return typed_name
+
+        # If that's taken too, start adding numbers
+        counter = 1
+        while True:
+            unique_name = f"{base_name}_{counter}"
+            if unique_name not in self._used_names:
+                self._used_names.add(unique_name)
+                return unique_name
+            counter += 1
+
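Escalation order, assuming an importer whose _used_names set starts empty (illustration only):

    # importer._generate_unique_name("north", "BUS")  -> "north"
    # importer._generate_unique_name("north", "LOAD") -> "north_load"
    # importer._generate_unique_name("north", "LOAD") -> "north_1"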
+    def _generate_scattered_coordinates(
+        self,
+        bus_lat: float,
+        bus_lon: float,
+        scatter_radius: float,
+        component_count_at_bus: int,
+        component_index: int
+    ) -> Tuple[float, float]:
+        """
+        Generate scattered coordinates around a bus location.
+
+        Args:
+            bus_lat: Bus latitude
+            bus_lon: Bus longitude
+            scatter_radius: Radius in degrees to scatter within
+            component_count_at_bus: Total number of components at this bus
+            component_index: Index of this component (0-based)
+
+        Returns:
+            Tuple of (latitude, longitude) for the scattered position
+        """
+        if component_count_at_bus == 1:
+            # Single component - place it at a moderate distance from the bus
+            angle = random.uniform(0, 2 * math.pi)
+            distance = scatter_radius * random.uniform(0.5, 0.8)  # 50-80% of scatter radius
+        else:
+            # Multiple components - arrange in a rough circle with some randomness
+            base_angle = (2 * math.pi * component_index) / component_count_at_bus
+            angle_jitter = random.uniform(-math.pi/8, math.pi/8)  # ±22.5 degrees jitter
+            angle = base_angle + angle_jitter
+
+            # Vary distance randomly within the radius (use more of the available radius)
+            distance = scatter_radius * random.uniform(0.6, 1.0)  # 60-100% of scatter radius
+
+        # Calculate new coordinates
+        new_lat = bus_lat + distance * math.cos(angle)
+        new_lon = bus_lon + distance * math.sin(angle)
+
+        return new_lat, new_lon
+
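The scatter is computed in degrees on both axes, so the on-the-ground offset is anisotropic: a degree of latitude is roughly 111 km everywhere, while a degree of longitude shrinks by cos(latitude). A quick check, assuming a spherical Earth:

    import math

    def offset_km(scatter_radius_deg, latitude_deg):
        ns_km = scatter_radius_deg * 111.0
        ew_km = ns_km * math.cos(math.radians(latitude_deg))
        return ns_km, ew_km

    print(offset_km(0.01, 60.0))  # ~(1.11, 0.555): east-west is halved at 60N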
+    def _get_bus_coordinates_map(self, conn, network_id: int) -> Dict[int, Tuple[float, float]]:
+        """
+        Get a mapping from bus component ID to coordinates.
+
+        Returns:
+            Dictionary mapping bus component ID to (latitude, longitude) tuple
+        """
+        cursor = conn.execute("""
+            SELECT id, latitude, longitude FROM components
+            WHERE network_id = ? AND component_type = 'BUS'
+            AND latitude IS NOT NULL AND longitude IS NOT NULL
+            AND NOT (latitude = 0 AND longitude = 0)
+        """, (network_id,))
+
+        bus_coords = {row[0]: (row[1], row[2]) for row in cursor.fetchall()}
+        return bus_coords
+
+    def _resolve_original_component_name(self, unique_name: str) -> str:
+        """
+        Resolve a potentially modified unique name back to its original name for CSV lookup.
+
+        Args:
+            unique_name: The unique name that may have been modified (e.g., "component_1", "component_generator")
+
+        Returns:
+            The original name for CSV lookup
+        """
+        # Remove common suffixes added by _generate_unique_name
+        # Pattern 1: Remove "_NUMBER" suffix (e.g., "component_1" -> "component")
+        import re
+
+        # First try removing "_NUMBER" pattern
+        no_number_suffix = re.sub(r'_\d+$', '', unique_name)
+        if no_number_suffix != unique_name:
+            return no_number_suffix
+
+        # Then try removing "_COMPONENT_TYPE" pattern (e.g., "component_generator" -> "component")
+        component_types = ['bus', 'generator', 'load', 'line', 'link', 'storage_unit', 'store']
+        for comp_type in component_types:
+            suffix = f"_{comp_type.lower()}"
+            if unique_name.endswith(suffix):
+                return unique_name[:-len(suffix)]
+
+        # If no patterns match, return the original name
+        return unique_name
+
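Resolution examples (numeric suffixes are stripped before component-type suffixes, so a genuine trailing "_N" in an original name would also be stripped):

    # "plant_3"         -> "plant"   (numeric suffix removed first)
    # "plant_generator" -> "plant"   (component-type suffix removed)
    # "plant"           -> "plant"   (no pattern matches)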
+    def _get_csv_coordinates(
+        self,
+        component_name: str,
+        location_map: Optional[Dict[str, Tuple[float, float]]]
+    ) -> Optional[Tuple[float, float]]:
+        """
+        Get coordinates for a component from the CSV location map.
+
+        Args:
+            component_name: The component name (potentially modified for uniqueness)
+            location_map: Dictionary mapping original names to coordinates
+
+        Returns:
+            (latitude, longitude) tuple if found, None otherwise
+        """
+        if not location_map:
+            return None
+
+        # Try exact match first
+        if component_name in location_map:
+            coordinates = location_map[component_name]
+            self.logger.debug(f"CSV location exact match for '{component_name}': {coordinates}")
+            return coordinates
+
+        # Try resolving back to original name
+        original_name = self._resolve_original_component_name(component_name)
+        if original_name != component_name and original_name in location_map:
+            coordinates = location_map[original_name]
+            self.logger.debug(f"CSV location resolved match for '{component_name}' -> '{original_name}': {coordinates}")
+            return coordinates
+
+        # No match found
+        self.logger.debug(f"No CSV location found for component '{component_name}' (original: '{original_name}')")
         return None