pyconvexity 0.1.3__py3-none-any.whl → 0.3.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This release has been flagged as potentially problematic.


This version of pyconvexity might be problematic. See the package registry's advisory page for more details.

@@ -1,15 +1,16 @@
1
1
  """
2
2
  Network building functionality for PyPSA solver integration.
3
3
 
4
- Handles loading data from database and constructing PyPSA Network objects.
4
+ Simplified to always use MultiIndex format for consistent multi-period optimization.
5
5
  """
6
6
 
7
7
  import logging
8
+ import json
8
9
  import pandas as pd
9
10
  from typing import Dict, Any, Optional, Callable
10
11
 
11
12
  from pyconvexity.models import (
12
- list_components_by_type, get_network_time_periods, get_network_config
13
+ list_components_by_type, get_network_time_periods, get_network_info
13
14
  )
14
15
 
15
16
  logger = logging.getLogger(__name__)
@@ -19,9 +20,8 @@ class NetworkBuilder:
19
20
  """
20
21
  Builds PyPSA networks from database data.
21
22
 
22
- This class handles the complex process of loading network components,
23
- attributes, and time series data from the database and constructing
24
- a properly configured PyPSA Network object.
23
+ Simplified to always create MultiIndex snapshots for consistent multi-period optimization,
24
+ even for single-year models.
25
25
  """
26
26
 
27
27
  def __init__(self):
@@ -36,13 +36,8 @@ class NetworkBuilder:
36
36
  ) from e
37
37
 
38
38
  # Import batch loader for efficient data loading
39
- try:
40
- from pyconvexity.solvers.pypsa.batch_loader import PyPSABatchLoader
41
- self.batch_loader = PyPSABatchLoader()
42
- except ImportError:
43
- # Fallback to individual loading if batch loader not available
44
- self.batch_loader = None
45
- logger.warning("PyPSABatchLoader not available, using individual component loading")
39
+ from pyconvexity.solvers.pypsa.batch_loader import PyPSABatchLoader
40
+ self.batch_loader = PyPSABatchLoader()
46
41
 
47
42
  def build_network(
48
43
  self,
@@ -87,7 +82,7 @@ class NetworkBuilder:
87
82
  if progress_callback:
88
83
  progress_callback(20, "Loading components...")
89
84
 
90
- # Load all components
85
+ # Load all components using efficient batch loader
91
86
  self._load_components(conn, network_id, network, scenario_id, progress_callback)
92
87
 
93
88
  # NOTE: Snapshot weightings will be set AFTER multi-period optimization setup
@@ -184,35 +179,44 @@ class NetworkBuilder:
184
179
  }
185
180
 
186
181
  def _set_time_index(self, conn, network_id: int, network: 'pypsa.Network'):
187
- """Set time index from network time periods."""
182
+ """Set time index from network time periods - always create MultiIndex for consistency."""
188
183
  try:
189
184
  time_periods = get_network_time_periods(conn, network_id)
190
- if time_periods:
191
- # Convert to pandas DatetimeIndex
192
- timestamps = [pd.Timestamp(tp.formatted_time) for tp in time_periods]
193
-
194
- # Set the snapshots to the timestamps
195
- network.set_snapshots(timestamps)
185
+ if not time_periods:
186
+ logger.error("No time periods found for network")
187
+ return
196
188
 
197
- # Extract years for year-based statistics
198
- try:
199
- if hasattr(network.snapshots, 'year'):
200
- years = sorted(network.snapshots.year.unique())
201
- network._available_years = years
202
- logger.info(f"Extracted {len(years)} years from network.snapshots.year: {years}")
203
- else:
204
- # Manually extract years from timestamps
205
- years_from_timestamps = sorted(list(set([ts.year for ts in timestamps])))
206
- network._available_years = years_from_timestamps
207
- logger.info(f"Extracted {len(years_from_timestamps)} years from timestamps: {years_from_timestamps}")
208
-
209
- except Exception as year_error:
210
- logger.warning(f"Failed to extract years for year-based statistics: {year_error}")
211
- network._available_years = []
189
+ # Convert to pandas DatetimeIndex
190
+ timestamps = [pd.Timestamp(tp.formatted_time) for tp in time_periods]
191
+
192
+ # Extract unique years for investment periods
193
+ years = sorted(list(set([ts.year for ts in timestamps])))
194
+ logger.info(f"Found {len(years)} investment periods: {years}")
195
+
196
+ # Always create MultiIndex following PyPSA multi-investment tutorial format
197
+ # First level: investment periods (years), Second level: timesteps
198
+ multi_snapshots = []
199
+ for ts in timestamps:
200
+ multi_snapshots.append((ts.year, ts))
201
+
202
+ multi_index = pd.MultiIndex.from_tuples(multi_snapshots, names=['period', 'timestep'])
203
+
204
+ # Verify MultiIndex is unique (should always be true now with UTC timestamps)
205
+ if not multi_index.is_unique:
206
+ raise ValueError(f"Created MultiIndex is not unique! Check timestamp generation.")
207
+
208
+ logger.info(f"Created MultiIndex with {len(multi_index)} snapshots")
209
+ network.set_snapshots(multi_index)
210
+
211
+ # Set investment periods for multi-period optimization
212
+ network.investment_periods = years
213
+
214
+ # Store years for statistics
215
+ network._available_years = years
216
+
217
+ logger.info(f"Created MultiIndex with {len(multi_index)} snapshots across {len(years)} periods")
218
+ logger.info(f"Investment periods: {network.investment_periods}")
212
219
 
213
- else:
214
- logger.warning("No time periods found for network, year-based statistics will not be available")
215
- network._available_years = []
216
220
  except Exception as e:
217
221
  logger.error(f"Failed to set time index: {e}")
218
222
  network._available_years = []
@@ -260,15 +264,11 @@ class NetworkBuilder:
260
264
  scenario_id: Optional[int],
261
265
  progress_callback: Optional[Callable[[int, str], None]] = None
262
266
  ):
263
- """Load all network components."""
264
- # Load component connections if batch loader available
265
- if self.batch_loader:
266
- connections = self.batch_loader.batch_load_component_connections(conn, network_id)
267
- bus_id_to_name = connections['bus_id_to_name']
268
- carrier_id_to_name = connections['carrier_id_to_name']
269
- else:
270
- bus_id_to_name = self._build_bus_id_to_name_map(conn, network_id)
271
- carrier_id_to_name = self._build_carrier_id_to_name_map(conn, network_id)
267
+ """Load all network components using batch loader."""
268
+ # Load component connections
269
+ connections = self.batch_loader.batch_load_component_connections(conn, network_id)
270
+ bus_id_to_name = connections['bus_id_to_name']
271
+ carrier_id_to_name = connections['carrier_id_to_name']
272
272
 
273
273
  # Component type mapping for later identification
274
274
  component_type_map = {}
@@ -307,22 +307,25 @@ class NetworkBuilder:
307
307
  # Store component type mapping on network
308
308
  network._component_type_map = component_type_map
309
309
 
310
+
310
311
  def _load_buses(self, conn, network_id: int, network: 'pypsa.Network', scenario_id: Optional[int], component_type_map: Dict[str, str]):
311
312
  """Load bus components."""
312
313
  buses = list_components_by_type(conn, network_id, 'BUS')
313
314
  bus_ids = [bus.id for bus in buses]
314
315
 
315
- if self.batch_loader:
316
- bus_attributes = self.batch_loader.batch_load_component_attributes(conn, bus_ids, scenario_id)
317
- bus_timeseries = self.batch_loader.batch_load_component_timeseries(conn, bus_ids, scenario_id)
318
- else:
319
- bus_attributes = self._load_component_attributes_individually(conn, bus_ids, scenario_id)
320
- bus_timeseries = {}
316
+ bus_attributes = self.batch_loader.batch_load_component_attributes(conn, bus_ids, scenario_id)
317
+ bus_timeseries = self.batch_loader.batch_load_component_timeseries(conn, bus_ids, scenario_id)
321
318
 
322
319
  for bus in buses:
323
320
  attrs = bus_attributes.get(bus.id, {})
324
321
  timeseries = bus_timeseries.get(bus.id, {})
325
322
 
323
+ # Add coordinate data from components table (PyPSA uses 'x' for longitude, 'y' for latitude)
324
+ if bus.longitude is not None:
325
+ attrs['x'] = bus.longitude
326
+ if bus.latitude is not None:
327
+ attrs['y'] = bus.latitude
328
+
326
329
  # Merge timeseries into attributes
327
330
  attrs.update(timeseries)
328
331
 
@@ -338,12 +341,8 @@ class NetworkBuilder:
338
341
 
339
342
  generator_ids = [gen.id for gen in all_generators]
340
343
 
341
- if self.batch_loader:
342
- generator_attributes = self.batch_loader.batch_load_component_attributes(conn, generator_ids, scenario_id)
343
- generator_timeseries = self.batch_loader.batch_load_component_timeseries(conn, generator_ids, scenario_id)
344
- else:
345
- generator_attributes = self._load_component_attributes_individually(conn, generator_ids, scenario_id)
346
- generator_timeseries = {}
344
+ generator_attributes = self.batch_loader.batch_load_component_attributes(conn, generator_ids, scenario_id)
345
+ generator_timeseries = self.batch_loader.batch_load_component_timeseries(conn, generator_ids, scenario_id)
347
346
 
348
347
  for gen in all_generators:
349
348
  attrs = generator_attributes.get(gen.id, {})
@@ -373,12 +372,8 @@ class NetworkBuilder:
373
372
  loads = list_components_by_type(conn, network_id, 'LOAD')
374
373
  load_ids = [load.id for load in loads]
375
374
 
376
- if self.batch_loader:
377
- load_attributes = self.batch_loader.batch_load_component_attributes(conn, load_ids, scenario_id)
378
- load_timeseries = self.batch_loader.batch_load_component_timeseries(conn, load_ids, scenario_id)
379
- else:
380
- load_attributes = self._load_component_attributes_individually(conn, load_ids, scenario_id)
381
- load_timeseries = {}
375
+ load_attributes = self.batch_loader.batch_load_component_attributes(conn, load_ids, scenario_id)
376
+ load_timeseries = self.batch_loader.batch_load_component_timeseries(conn, load_ids, scenario_id)
382
377
 
383
378
  for load in loads:
384
379
  attrs = load_attributes.get(load.id, {})
@@ -405,12 +400,8 @@ class NetworkBuilder:
405
400
  lines = list_components_by_type(conn, network_id, 'LINE')
406
401
  line_ids = [line.id for line in lines]
407
402
 
408
- if self.batch_loader:
409
- line_attributes = self.batch_loader.batch_load_component_attributes(conn, line_ids, scenario_id)
410
- line_timeseries = self.batch_loader.batch_load_component_timeseries(conn, line_ids, scenario_id)
411
- else:
412
- line_attributes = self._load_component_attributes_individually(conn, line_ids, scenario_id)
413
- line_timeseries = {}
403
+ line_attributes = self.batch_loader.batch_load_component_attributes(conn, line_ids, scenario_id)
404
+ line_timeseries = self.batch_loader.batch_load_component_timeseries(conn, line_ids, scenario_id)
414
405
 
415
406
  for line in lines:
416
407
  attrs = line_attributes.get(line.id, {})
@@ -439,12 +430,8 @@ class NetworkBuilder:
439
430
  links = list_components_by_type(conn, network_id, 'LINK')
440
431
  link_ids = [link.id for link in links]
441
432
 
442
- if self.batch_loader:
443
- link_attributes = self.batch_loader.batch_load_component_attributes(conn, link_ids, scenario_id)
444
- link_timeseries = self.batch_loader.batch_load_component_timeseries(conn, link_ids, scenario_id)
445
- else:
446
- link_attributes = self._load_component_attributes_individually(conn, link_ids, scenario_id)
447
- link_timeseries = {}
433
+ link_attributes = self.batch_loader.batch_load_component_attributes(conn, link_ids, scenario_id)
434
+ link_timeseries = self.batch_loader.batch_load_component_timeseries(conn, link_ids, scenario_id)
448
435
 
449
436
  for link in links:
450
437
  attrs = link_attributes.get(link.id, {})
@@ -473,12 +460,8 @@ class NetworkBuilder:
473
460
  storage_units = list_components_by_type(conn, network_id, 'STORAGE_UNIT')
474
461
  storage_ids = [storage.id for storage in storage_units]
475
462
 
476
- if self.batch_loader:
477
- storage_attributes = self.batch_loader.batch_load_component_attributes(conn, storage_ids, scenario_id)
478
- storage_timeseries = self.batch_loader.batch_load_component_timeseries(conn, storage_ids, scenario_id)
479
- else:
480
- storage_attributes = self._load_component_attributes_individually(conn, storage_ids, scenario_id)
481
- storage_timeseries = {}
463
+ storage_attributes = self.batch_loader.batch_load_component_attributes(conn, storage_ids, scenario_id)
464
+ storage_timeseries = self.batch_loader.batch_load_component_timeseries(conn, storage_ids, scenario_id)
482
465
 
483
466
  for storage in storage_units:
484
467
  attrs = storage_attributes.get(storage.id, {})
@@ -505,12 +488,8 @@ class NetworkBuilder:
505
488
  stores = list_components_by_type(conn, network_id, 'STORE')
506
489
  store_ids = [store.id for store in stores]
507
490
 
508
- if self.batch_loader:
509
- store_attributes = self.batch_loader.batch_load_component_attributes(conn, store_ids, scenario_id)
510
- store_timeseries = self.batch_loader.batch_load_component_timeseries(conn, store_ids, scenario_id)
511
- else:
512
- store_attributes = self._load_component_attributes_individually(conn, store_ids, scenario_id)
513
- store_timeseries = {}
491
+ store_attributes = self.batch_loader.batch_load_component_attributes(conn, store_ids, scenario_id)
492
+ store_timeseries = self.batch_loader.batch_load_component_timeseries(conn, store_ids, scenario_id)
514
493
 
515
494
  for store in stores:
516
495
  attrs = store_attributes.get(store.id, {})
@@ -591,66 +570,3 @@ class NetworkBuilder:
591
570
  cursor = conn.execute("SELECT id, name FROM carriers WHERE network_id = ?", (network_id,))
592
571
  return {row[0]: row[1] for row in cursor.fetchall()}
593
572
 
594
- def _load_component_attributes_individually(self, conn, component_ids: list, scenario_id: Optional[int]) -> Dict[int, Dict[str, Any]]:
595
- """Fallback method to load component attributes individually."""
596
- from pyconvexity.models import get_attribute, list_component_attributes
597
- from pyconvexity.core.types import AttributeValue
598
- from pyconvexity.models.network import get_network_time_periods
599
- import pandas as pd
600
-
601
- # Get network time periods for proper timestamp alignment
602
- network_time_periods = None
603
- if component_ids:
604
- cursor = conn.execute("SELECT network_id FROM components WHERE id = ? LIMIT 1", (component_ids[0],))
605
- result = cursor.fetchone()
606
- if result:
607
- network_id = result[0]
608
- try:
609
- network_time_periods = get_network_time_periods(conn, network_id)
610
- except Exception as e:
611
- logger.warning(f"Failed to load network time periods: {e}")
612
-
613
- attributes = {}
614
- for comp_id in component_ids:
615
- try:
616
- attr_names = list_component_attributes(conn, comp_id)
617
- comp_attrs = {}
618
- for attr_name in attr_names:
619
- try:
620
- attr_value = get_attribute(conn, comp_id, attr_name, scenario_id)
621
- if attr_value is not None:
622
- # Handle different attribute value types
623
- if hasattr(attr_value, 'static_value') and attr_value.static_value is not None:
624
- # Static value
625
- comp_attrs[attr_name] = attr_value.static_value.value()
626
- elif hasattr(attr_value, 'timeseries_value') and attr_value.timeseries_value is not None:
627
- # Timeseries value - convert to pandas Series with proper timestamps
628
- timeseries_points = attr_value.timeseries_value
629
- if timeseries_points:
630
- # Sort by period_index to ensure correct order
631
- timeseries_points.sort(key=lambda x: x.period_index)
632
- values = [point.value for point in timeseries_points]
633
-
634
- # Create proper timestamps for PyPSA alignment
635
- if network_time_periods:
636
- timestamps = []
637
- for point in timeseries_points:
638
- if point.period_index < len(network_time_periods):
639
- tp = network_time_periods[point.period_index]
640
- timestamps.append(pd.Timestamp(tp.formatted_time))
641
- else:
642
- logger.warning(f"Period index {point.period_index} out of range for network time periods")
643
- timestamps.append(pd.Timestamp.now()) # Fallback
644
- comp_attrs[attr_name] = pd.Series(values, index=timestamps)
645
- else:
646
- # Fallback: use period_index as index
647
- period_indices = [point.period_index for point in timeseries_points]
648
- comp_attrs[attr_name] = pd.Series(values, index=period_indices)
649
- except Exception as e:
650
- logger.debug(f"Failed to load attribute {attr_name} for component {comp_id}: {e}")
651
- attributes[comp_id] = comp_attrs
652
- except Exception as e:
653
- logger.warning(f"Failed to load attributes for component {comp_id}: {e}")
654
- attributes[comp_id] = {}
655
-
656
- return attributes