pyconvexity-0.3.8.post7-py3-none-any.whl → pyconvexity-0.4.1-py3-none-any.whl

This diff compares the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (48)
  1. pyconvexity/__init__.py +87 -46
  2. pyconvexity/_version.py +1 -1
  3. pyconvexity/core/__init__.py +3 -5
  4. pyconvexity/core/database.py +111 -103
  5. pyconvexity/core/errors.py +16 -10
  6. pyconvexity/core/types.py +61 -54
  7. pyconvexity/data/__init__.py +0 -1
  8. pyconvexity/data/loaders/cache.py +65 -64
  9. pyconvexity/data/schema/01_core_schema.sql +134 -234
  10. pyconvexity/data/schema/02_data_metadata.sql +38 -168
  11. pyconvexity/data/schema/03_validation_data.sql +327 -264
  12. pyconvexity/data/sources/gem.py +169 -139
  13. pyconvexity/io/__init__.py +4 -10
  14. pyconvexity/io/excel_exporter.py +694 -480
  15. pyconvexity/io/excel_importer.py +817 -545
  16. pyconvexity/io/netcdf_exporter.py +66 -61
  17. pyconvexity/io/netcdf_importer.py +850 -619
  18. pyconvexity/models/__init__.py +109 -59
  19. pyconvexity/models/attributes.py +197 -178
  20. pyconvexity/models/carriers.py +70 -67
  21. pyconvexity/models/components.py +260 -236
  22. pyconvexity/models/network.py +202 -284
  23. pyconvexity/models/results.py +65 -55
  24. pyconvexity/models/scenarios.py +58 -88
  25. pyconvexity/solvers/__init__.py +5 -5
  26. pyconvexity/solvers/pypsa/__init__.py +3 -3
  27. pyconvexity/solvers/pypsa/api.py +150 -134
  28. pyconvexity/solvers/pypsa/batch_loader.py +165 -162
  29. pyconvexity/solvers/pypsa/builder.py +390 -291
  30. pyconvexity/solvers/pypsa/constraints.py +184 -162
  31. pyconvexity/solvers/pypsa/solver.py +968 -666
  32. pyconvexity/solvers/pypsa/storage.py +1377 -671
  33. pyconvexity/timeseries.py +63 -60
  34. pyconvexity/validation/__init__.py +14 -6
  35. pyconvexity/validation/rules.py +95 -84
  36. pyconvexity-0.4.1.dist-info/METADATA +46 -0
  37. pyconvexity-0.4.1.dist-info/RECORD +42 -0
  38. pyconvexity/data/__pycache__/__init__.cpython-313.pyc +0 -0
  39. pyconvexity/data/loaders/__pycache__/__init__.cpython-313.pyc +0 -0
  40. pyconvexity/data/loaders/__pycache__/cache.cpython-313.pyc +0 -0
  41. pyconvexity/data/schema/04_scenario_schema.sql +0 -122
  42. pyconvexity/data/schema/migrate_add_geometries.sql +0 -73
  43. pyconvexity/data/sources/__pycache__/__init__.cpython-313.pyc +0 -0
  44. pyconvexity/data/sources/__pycache__/gem.cpython-313.pyc +0 -0
  45. pyconvexity-0.3.8.post7.dist-info/METADATA +0 -138
  46. pyconvexity-0.3.8.post7.dist-info/RECORD +0 -49
  47. {pyconvexity-0.3.8.post7.dist-info → pyconvexity-0.4.1.dist-info}/WHEEL +0 -0
  48. {pyconvexity-0.3.8.post7.dist-info → pyconvexity-0.4.1.dist-info}/top_level.txt +0 -0
@@ -10,7 +10,9 @@ import pandas as pd
 from typing import Dict, Any, Optional, Callable

 from pyconvexity.models import (
-    list_components_by_type, get_network_time_periods, get_network_info
+    list_components_by_type,
+    get_network_time_periods,
+    get_network_info,
 )

 logger = logging.getLogger(__name__)
@@ -19,125 +21,134 @@ logger = logging.getLogger(__name__)
 class NetworkBuilder:
     """
     Builds PyPSA networks from database data.
-
+
     Simplified to always create MultiIndex snapshots for consistent multi-period optimization,
     even for single-year models.
     """
-
+
     def __init__(self):
         # Import PyPSA with error handling
         try:
             import pypsa
+
             self.pypsa = pypsa
         except ImportError as e:
             raise ImportError(
                 "PyPSA is not installed or could not be imported. "
                 "Please ensure it is installed correctly in the environment."
             ) from e
-
+
         # Import batch loader for efficient data loading
        from pyconvexity.solvers.pypsa.batch_loader import PyPSABatchLoader
+
        self.batch_loader = PyPSABatchLoader()
-
+
     def build_network(
-        self,
-        conn,
-        network_id: int,
+        self,
+        conn,
         scenario_id: Optional[int] = None,
-        progress_callback: Optional[Callable[[int, str], None]] = None
-    ) -> 'pypsa.Network':
+        progress_callback: Optional[Callable[[int, str], None]] = None,
+        include_unmet_loads: bool = True,
+    ) -> "pypsa.Network":
         """
-        Build complete PyPSA network from database.
-
+        Build complete PyPSA network from database (single network per database).
+
         Args:
             conn: Database connection
-            network_id: ID of network to build
             scenario_id: Optional scenario ID
             progress_callback: Optional progress callback
-
+            include_unmet_loads: Whether to include unmet load components (default: True)
+
         Returns:
             Configured PyPSA Network object
         """
         if progress_callback:
             progress_callback(0, "Loading network metadata...")
-
+
         # Load network info
-        network_info = self._load_network_info(conn, network_id)
-
+        network_info = self._load_network_info(conn)
+
         if progress_callback:
             progress_callback(5, f"Building network: {network_info['name']}")
-
+
         # Create PyPSA network
-        network = self.pypsa.Network(name=network_info['name'])
-
+        network = self.pypsa.Network(name=network_info["name"])
+
         # Set time index
-        self._set_time_index(conn, network_id, network)
-
+        self._set_time_index(conn, network)
+
         if progress_callback:
             progress_callback(15, "Loading carriers...")
-
+
         # Load carriers
-        self._load_carriers(conn, network_id, network)
-
+        self._load_carriers(conn, network)
+
         if progress_callback:
             progress_callback(20, "Loading components...")
-
+
         # Load all components using efficient batch loader
-        self._load_components(conn, network_id, network, scenario_id, progress_callback)
-
+        self._load_components(
+            conn, network, scenario_id, progress_callback, include_unmet_loads
+        )
+
         # NOTE: Snapshot weightings will be set AFTER multi-period optimization setup
         # in the solver, not here. This matches the old code's approach where PyPSA's
         # multi-period setup can reset snapshot weightings to 1.0
-
+
         if progress_callback:
             progress_callback(95, "Network build complete")
-
+
         return network
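Note on the new entry point: `network_id` is gone because a 0.4.1 database holds exactly one network, and `include_unmet_loads` is new. A minimal caller sketch against the signature above — the sqlite3 connection and the file name are assumptions for illustration, not part of this diff:

    import sqlite3

    from pyconvexity.solvers.pypsa.builder import NetworkBuilder

    conn = sqlite3.connect("model.db")  # assumed: one network per database file

    builder = NetworkBuilder()
    network = builder.build_network(
        conn,
        scenario_id=None,  # base scenario
        progress_callback=lambda pct, msg: print(f"{pct:3d}% {msg}"),
        include_unmet_loads=False,  # new in 0.4.1: skip UNMET_LOAD components
    )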
-
+
     def load_network_data(
-        self,
-        conn,
-        network_id: int,
-        scenario_id: Optional[int] = None
+        self, conn, scenario_id: Optional[int] = None
     ) -> Dict[str, Any]:
         """
-        Load network data as structured dictionary without building PyPSA network.
-
+        Load network data as structured dictionary without building PyPSA network (single network per database).
+
         Args:
             conn: Database connection
-            network_id: ID of network to load
             scenario_id: Optional scenario ID
-
+
         Returns:
             Dictionary with all network data
         """
         data = {
-            "network_info": self._load_network_info(conn, network_id),
-            "carriers": self._load_carriers_data(conn, network_id),
+            "network_info": self._load_network_info(conn),
+            "carriers": self._load_carriers_data(conn),
             "components": {},
-            "time_periods": []
+            "time_periods": [],
         }
-
+
         # Load time periods
         try:
-            time_periods = get_network_time_periods(conn, network_id)
+            time_periods = get_network_time_periods(conn)
             data["time_periods"] = [
                 {
                     "timestamp": tp.formatted_time,
                     "period_index": tp.period_index,
-                    "weight": tp.weight
+                    "weight": getattr(tp, "weight", 1.0),  # Weight not in new schema
                 }
                 for tp in time_periods
             ]
         except Exception as e:
             logger.warning(f"Failed to load time periods: {e}")
-
+
         # Load all component types
-        component_types = ['BUS', 'GENERATOR', 'UNMET_LOAD', 'LOAD', 'LINE', 'LINK', 'STORAGE_UNIT', 'STORE']
-
+        component_types = [
+            "BUS",
+            "GENERATOR",
+            "UNMET_LOAD",
+            "LOAD",
+            "LINE",
+            "LINK",
+            "STORAGE_UNIT",
+            "STORE",
+        ]
+
         for comp_type in component_types:
             try:
-                components = list_components_by_type(conn, network_id, comp_type)
+                components = list_components_by_type(conn, comp_type)
                 if components:
                     data["components"][comp_type.lower()] = [
                         {
@@ -149,409 +160,498 @@ class NetworkBuilder:
                             "carrier_id": comp.carrier_id,
                             "bus_id": comp.bus_id,
                             "bus0_id": comp.bus0_id,
-                            "bus1_id": comp.bus1_id
+                            "bus1_id": comp.bus1_id,
                         }
                         for comp in components
                     ]
             except Exception as e:
                 logger.warning(f"Failed to load {comp_type} components: {e}")
-
+
         return data
-
-    def _load_network_info(self, conn, network_id: int) -> Dict[str, Any]:
-        """Load network metadata."""
-        cursor = conn.execute("""
-            SELECT name, description, time_start, time_end, time_interval
-            FROM networks
-            WHERE id = ?
-        """, (network_id,))
-
-        row = cursor.fetchone()
-        if not row:
-            raise ValueError(f"Network with ID {network_id} not found")
-
-        return {
-            'name': row[0],
-            'description': row[1],
-            'time_start': row[2],
-            'time_end': row[3],
-            'time_interval': row[4]
-        }
-
-    def _set_time_index(self, conn, network_id: int, network: 'pypsa.Network'):
+
+    def _load_network_info(self, conn) -> Dict[str, Any]:
+        """Load network metadata (single network per database)."""
+        from pyconvexity.models import get_network_info
+
+        return get_network_info(conn)
+
+    def _set_time_index(self, conn, network: "pypsa.Network"):
         """Set time index from network time periods - always create MultiIndex for consistency."""
         try:
-            time_periods = get_network_time_periods(conn, network_id)
+            time_periods = get_network_time_periods(conn)
             if not time_periods:
                 logger.error("No time periods found for network")
                 return
-
+
             # Convert to pandas DatetimeIndex
             timestamps = [pd.Timestamp(tp.formatted_time) for tp in time_periods]
-
+
             # Extract unique years for investment periods
             years = sorted(list(set([ts.year for ts in timestamps])))
             logger.info(f"Found {len(years)} investment periods: {years}")
-
+
             # Always create MultiIndex following PyPSA multi-investment tutorial format
             # First level: investment periods (years), Second level: timesteps
             multi_snapshots = []
             for ts in timestamps:
                 multi_snapshots.append((ts.year, ts))
-
-            multi_index = pd.MultiIndex.from_tuples(multi_snapshots, names=['period', 'timestep'])
-
+
+            multi_index = pd.MultiIndex.from_tuples(
+                multi_snapshots, names=["period", "timestep"]
+            )
+
             # Verify MultiIndex is unique (should always be true now with UTC timestamps)
             if not multi_index.is_unique:
-                raise ValueError(f"Created MultiIndex is not unique! Check timestamp generation.")
-
+                raise ValueError(
+                    f"Created MultiIndex is not unique! Check timestamp generation."
+                )
+
             logger.info(f"Created MultiIndex with {len(multi_index)} snapshots")
             network.set_snapshots(multi_index)
-
+
             # Set investment periods for multi-period optimization
             network.investment_periods = years
-
+
             # Store years for statistics
             network._available_years = years
-
-            logger.info(f"Created MultiIndex with {len(multi_index)} snapshots across {len(years)} periods")
+
+            logger.info(
+                f"Created MultiIndex with {len(multi_index)} snapshots across {len(years)} periods"
+            )
             logger.info(f"Investment periods: {network.investment_periods}")
-
+
         except Exception as e:
             logger.error(f"Failed to set time index: {e}")
             network._available_years = []
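Note: `_set_time_index` now always builds the two-level (period, timestep) snapshot index in the style of PyPSA's multi-investment-period format, even for a single year. A self-contained sketch of the same construction, using made-up timestamps spanning two investment periods:

    import pandas as pd

    # Illustrative timestamps only; the builder reads these from the database
    timestamps = [
        pd.Timestamp("2030-01-01 00:00"),
        pd.Timestamp("2030-01-01 01:00"),
        pd.Timestamp("2040-01-01 00:00"),
    ]

    # First level: investment period (year); second level: the timestep itself
    multi_index = pd.MultiIndex.from_tuples(
        [(ts.year, ts) for ts in timestamps], names=["period", "timestep"]
    )

    assert multi_index.is_unique  # the builder raises ValueError otherwise
    years = sorted({ts.year for ts in timestamps})
    print(years)  # [2030, 2040] -> becomes network.investment_periods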
-
-    def _load_carriers(self, conn, network_id: int, network: 'pypsa.Network'):
-        """Load carriers into PyPSA network."""
-        carriers = self._load_carriers_data(conn, network_id)
+
+    def _load_carriers(self, conn, network: "pypsa.Network"):
+        """Load carriers into PyPSA network (single network per database)."""
+        carriers = self._load_carriers_data(conn)
         for carrier in carriers:
             filtered_attrs = self._filter_carrier_attrs(carrier)
-            network.add("Carrier", carrier['name'], **filtered_attrs)
-
-    def _load_carriers_data(self, conn, network_id: int) -> list:
-        """Load carrier data from database."""
-        cursor = conn.execute("""
+            network.add("Carrier", carrier["name"], **filtered_attrs)
+
+    def _load_carriers_data(self, conn) -> list:
+        """Load carrier data from database (single network per database)."""
+        cursor = conn.execute(
+            """
             SELECT name, co2_emissions, nice_name, color
             FROM carriers
-            WHERE network_id = ?
             ORDER BY name
-        """, (network_id,))
-
+        """
+        )
+
         carriers = []
         for row in cursor.fetchall():
-            carriers.append({
-                'name': row[0],
-                'co2_emissions': row[1],
-                'nice_name': row[2],
-                'color': row[3]
-            })
-
+            carriers.append(
+                {
+                    "name": row[0],
+                    "co2_emissions": row[1],
+                    "nice_name": row[2],
+                    "color": row[3],
+                }
+            )
+
         return carriers
-
+
     def _filter_carrier_attrs(self, carrier: Dict[str, Any]) -> Dict[str, Any]:
         """Filter carrier attributes for PyPSA compatibility."""
         filtered = {}
         for key, value in carrier.items():
-            if key != 'name' and value is not None:
+            if key != "name" and value is not None:
                 filtered[key] = value
         return filtered
-
+
     def _load_components(
-        self,
-        conn,
-        network_id: int,
-        network: 'pypsa.Network',
+        self,
+        conn,
+        network: "pypsa.Network",
         scenario_id: Optional[int],
-        progress_callback: Optional[Callable[[int, str], None]] = None
+        progress_callback: Optional[Callable[[int, str], None]] = None,
+        include_unmet_loads: bool = True,
     ):
-        """Load all network components using batch loader."""
+        """Load all network components using batch loader (single network per database)."""
         # Load component connections
-        connections = self.batch_loader.batch_load_component_connections(conn, network_id)
-        bus_id_to_name = connections['bus_id_to_name']
-        carrier_id_to_name = connections['carrier_id_to_name']
-
+        connections = self.batch_loader.batch_load_component_connections(conn)
+        bus_id_to_name = connections["bus_id_to_name"]
+        carrier_id_to_name = connections["carrier_id_to_name"]
+
         # Component type mapping for later identification
         component_type_map = {}
-
+
         # Load buses
         if progress_callback:
             progress_callback(25, "Loading buses...")
-        self._load_buses(conn, network_id, network, scenario_id, component_type_map)
-
-        # Load generators (including unmet loads)
+        self._load_buses(conn, network, scenario_id, component_type_map)
+
+        # Load generators (including unmet loads if requested)
         if progress_callback:
             progress_callback(35, "Loading generators...")
-        self._load_generators(conn, network_id, network, scenario_id, bus_id_to_name, carrier_id_to_name, component_type_map)
-
+        self._load_generators(
+            conn,
+            network,
+            scenario_id,
+            bus_id_to_name,
+            carrier_id_to_name,
+            component_type_map,
+            include_unmet_loads,
+        )
+
         # Load loads
         if progress_callback:
             progress_callback(50, "Loading loads...")
-        self._load_loads(conn, network_id, network, scenario_id, bus_id_to_name, carrier_id_to_name)
-
+        self._load_loads(conn, network, scenario_id, bus_id_to_name, carrier_id_to_name)
+
         # Load lines
         if progress_callback:
             progress_callback(65, "Loading lines...")
-        self._load_lines(conn, network_id, network, scenario_id, bus_id_to_name, carrier_id_to_name)
-
+        self._load_lines(conn, network, scenario_id, bus_id_to_name, carrier_id_to_name)
+
         # Load links
         if progress_callback:
             progress_callback(75, "Loading links...")
-        self._load_links(conn, network_id, network, scenario_id, bus_id_to_name, carrier_id_to_name)
-
+        self._load_links(conn, network, scenario_id, bus_id_to_name, carrier_id_to_name)
+
         # Load storage units
         if progress_callback:
             progress_callback(85, "Loading storage...")
-        self._load_storage_units(conn, network_id, network, scenario_id, bus_id_to_name, carrier_id_to_name)
-        self._load_stores(conn, network_id, network, scenario_id, bus_id_to_name, carrier_id_to_name)
-
+        self._load_storage_units(
+            conn, network, scenario_id, bus_id_to_name, carrier_id_to_name
+        )
+        self._load_stores(
+            conn, network, scenario_id, bus_id_to_name, carrier_id_to_name
+        )
+
         # Store component type mapping on network
         network._component_type_map = component_type_map
-
-
-    def _load_buses(self, conn, network_id: int, network: 'pypsa.Network', scenario_id: Optional[int], component_type_map: Dict[str, str]):
-        """Load bus components."""
-        buses = list_components_by_type(conn, network_id, 'BUS')
+
+    def _load_buses(
+        self,
+        conn,
+        network: "pypsa.Network",
+        scenario_id: Optional[int],
+        component_type_map: Dict[str, str],
+    ):
+        """Load bus components (single network per database)."""
+        buses = list_components_by_type(conn, "BUS")
         bus_ids = [bus.id for bus in buses]
-
-        bus_attributes = self.batch_loader.batch_load_component_attributes(conn, bus_ids, scenario_id)
-        bus_timeseries = self.batch_loader.batch_load_component_timeseries(conn, bus_ids, scenario_id)
-
+
+        bus_attributes = self.batch_loader.batch_load_component_attributes(
+            conn, bus_ids, scenario_id
+        )
+        bus_timeseries = self.batch_loader.batch_load_component_timeseries(
+            conn, bus_ids, scenario_id
+        )
+
         for bus in buses:
             attrs = bus_attributes.get(bus.id, {})
             timeseries = bus_timeseries.get(bus.id, {})
-
+
             # Add coordinate data from components table (PyPSA uses 'x' for longitude, 'y' for latitude)
             if bus.longitude is not None:
-                attrs['x'] = bus.longitude
+                attrs["x"] = bus.longitude
             if bus.latitude is not None:
-                attrs['y'] = bus.latitude
-
+                attrs["y"] = bus.latitude
+
             # Merge timeseries into attributes
             attrs.update(timeseries)
-
+
             network.add("Bus", bus.name, **attrs)
             component_type_map[bus.name] = bus.component_type
-
-    def _load_generators(self, conn, network_id: int, network: 'pypsa.Network', scenario_id: Optional[int],
-                         bus_id_to_name: Dict[int, str], carrier_id_to_name: Dict[int, str], component_type_map: Dict[str, str]):
-        """Load generator and unmet load components."""
-        generators = list_components_by_type(conn, network_id, 'GENERATOR')
-        unmet_loads = list_components_by_type(conn, network_id, 'UNMET_LOAD')
-        all_generators = generators + unmet_loads
-
+
+    def _load_generators(
+        self,
+        conn,
+        network: "pypsa.Network",
+        scenario_id: Optional[int],
+        bus_id_to_name: Dict[int, str],
+        carrier_id_to_name: Dict[int, str],
+        component_type_map: Dict[str, str],
+        include_unmet_loads: bool = True,
+    ):
+        """Load generator and unmet load components (single network per database)."""
+        generators = list_components_by_type(conn, "GENERATOR")
+
+        # Conditionally load unmet loads based on parameter
+        if include_unmet_loads:
+            unmet_loads = list_components_by_type(conn, "UNMET_LOAD")
+            all_generators = generators + unmet_loads
+            logger.info(
+                f"Loading {len(generators)} generators and {len(unmet_loads)} unmet loads"
+            )
+        else:
+            all_generators = generators
+            logger.info(f"Loading {len(generators)} generators (unmet loads disabled)")
+
         generator_ids = [gen.id for gen in all_generators]
-
-        generator_attributes = self.batch_loader.batch_load_component_attributes(conn, generator_ids, scenario_id)
-        generator_timeseries = self.batch_loader.batch_load_component_timeseries(conn, generator_ids, scenario_id)
-
+
+        generator_attributes = self.batch_loader.batch_load_component_attributes(
+            conn, generator_ids, scenario_id
+        )
+        generator_timeseries = self.batch_loader.batch_load_component_timeseries(
+            conn, generator_ids, scenario_id
+        )
+
         for gen in all_generators:
             attrs = generator_attributes.get(gen.id, {})
             timeseries = generator_timeseries.get(gen.id, {})
-
+
             # Set bus connection
             if gen.bus_id:
                 bus_name = bus_id_to_name.get(gen.bus_id, f"bus_{gen.bus_id}")
-                attrs['bus'] = bus_name
-
+                attrs["bus"] = bus_name
+
             # Set carrier
             if gen.carrier_id:
-                carrier_name = carrier_id_to_name.get(gen.carrier_id, '-')
-                attrs['carrier'] = carrier_name
+                carrier_name = carrier_id_to_name.get(gen.carrier_id, "-")
+                attrs["carrier"] = carrier_name
             else:
-                attrs['carrier'] = '-'
-
+                attrs["carrier"] = "-"
+
             # Merge timeseries into attributes
             attrs.update(timeseries)
-
+
             component_type_map[gen.name] = gen.component_type
             network.add("Generator", gen.name, **attrs)
-
-    def _load_loads(self, conn, network_id: int, network: 'pypsa.Network', scenario_id: Optional[int],
-                    bus_id_to_name: Dict[int, str], carrier_id_to_name: Dict[int, str]):
-        """Load load components."""
-        loads = list_components_by_type(conn, network_id, 'LOAD')
+
+    def _load_loads(
+        self,
+        conn,
+        network: "pypsa.Network",
+        scenario_id: Optional[int],
+        bus_id_to_name: Dict[int, str],
+        carrier_id_to_name: Dict[int, str],
+    ):
+        """Load load components (single network per database)."""
+        loads = list_components_by_type(conn, "LOAD")
         load_ids = [load.id for load in loads]
-
-        load_attributes = self.batch_loader.batch_load_component_attributes(conn, load_ids, scenario_id)
-        load_timeseries = self.batch_loader.batch_load_component_timeseries(conn, load_ids, scenario_id)
-
+
+        load_attributes = self.batch_loader.batch_load_component_attributes(
+            conn, load_ids, scenario_id
+        )
+        load_timeseries = self.batch_loader.batch_load_component_timeseries(
+            conn, load_ids, scenario_id
+        )
+
         for load in loads:
             attrs = load_attributes.get(load.id, {})
             timeseries = load_timeseries.get(load.id, {})
-
+
             if load.bus_id:
                 bus_name = bus_id_to_name.get(load.bus_id, f"bus_{load.bus_id}")
-                attrs['bus'] = bus_name
-
+                attrs["bus"] = bus_name
+
             if load.carrier_id:
-                carrier_name = carrier_id_to_name.get(load.carrier_id, '-')
-                attrs['carrier'] = carrier_name
+                carrier_name = carrier_id_to_name.get(load.carrier_id, "-")
+                attrs["carrier"] = carrier_name
             else:
-                attrs['carrier'] = '-'
-
+                attrs["carrier"] = "-"
+
             # Merge timeseries into attributes
             attrs.update(timeseries)
-
+
             network.add("Load", load.name, **attrs)
-
-    def _load_lines(self, conn, network_id: int, network: 'pypsa.Network', scenario_id: Optional[int],
-                    bus_id_to_name: Dict[int, str], carrier_id_to_name: Dict[int, str]):
-        """Load line components."""
-        lines = list_components_by_type(conn, network_id, 'LINE')
+
+    def _load_lines(
+        self,
+        conn,
+        network: "pypsa.Network",
+        scenario_id: Optional[int],
+        bus_id_to_name: Dict[int, str],
+        carrier_id_to_name: Dict[int, str],
+    ):
+        """Load line components (single network per database)."""
+        lines = list_components_by_type(conn, "LINE")
         line_ids = [line.id for line in lines]
-
-        line_attributes = self.batch_loader.batch_load_component_attributes(conn, line_ids, scenario_id)
-        line_timeseries = self.batch_loader.batch_load_component_timeseries(conn, line_ids, scenario_id)
-
+
+        line_attributes = self.batch_loader.batch_load_component_attributes(
+            conn, line_ids, scenario_id
+        )
+        line_timeseries = self.batch_loader.batch_load_component_timeseries(
+            conn, line_ids, scenario_id
+        )
+
         for line in lines:
             attrs = line_attributes.get(line.id, {})
             timeseries = line_timeseries.get(line.id, {})
-
+
             if line.bus0_id and line.bus1_id:
                 bus0_name = bus_id_to_name.get(line.bus0_id, f"bus_{line.bus0_id}")
                 bus1_name = bus_id_to_name.get(line.bus1_id, f"bus_{line.bus1_id}")
-                attrs['bus0'] = bus0_name
-                attrs['bus1'] = bus1_name
-
+                attrs["bus0"] = bus0_name
+                attrs["bus1"] = bus1_name
+
             if line.carrier_id:
-                carrier_name = carrier_id_to_name.get(line.carrier_id, 'AC')
-                attrs['carrier'] = carrier_name
+                carrier_name = carrier_id_to_name.get(line.carrier_id, "AC")
+                attrs["carrier"] = carrier_name
             else:
-                attrs['carrier'] = 'AC'
-
+                attrs["carrier"] = "AC"
+
             # Merge timeseries into attributes
             attrs.update(timeseries)
-
+
             network.add("Line", line.name, **attrs)
-
-    def _load_links(self, conn, network_id: int, network: 'pypsa.Network', scenario_id: Optional[int],
-                    bus_id_to_name: Dict[int, str], carrier_id_to_name: Dict[int, str]):
-        """Load link components."""
-        links = list_components_by_type(conn, network_id, 'LINK')
+
+    def _load_links(
+        self,
+        conn,
+        network: "pypsa.Network",
+        scenario_id: Optional[int],
+        bus_id_to_name: Dict[int, str],
+        carrier_id_to_name: Dict[int, str],
+    ):
+        """Load link components (single network per database)."""
+        links = list_components_by_type(conn, "LINK")
         link_ids = [link.id for link in links]
-
-        link_attributes = self.batch_loader.batch_load_component_attributes(conn, link_ids, scenario_id)
-        link_timeseries = self.batch_loader.batch_load_component_timeseries(conn, link_ids, scenario_id)
-
+
+        link_attributes = self.batch_loader.batch_load_component_attributes(
+            conn, link_ids, scenario_id
+        )
+        link_timeseries = self.batch_loader.batch_load_component_timeseries(
+            conn, link_ids, scenario_id
+        )
+
         for link in links:
             attrs = link_attributes.get(link.id, {})
             timeseries = link_timeseries.get(link.id, {})
-
+
             if link.bus0_id and link.bus1_id:
                 bus0_name = bus_id_to_name.get(link.bus0_id, f"bus_{link.bus0_id}")
                 bus1_name = bus_id_to_name.get(link.bus1_id, f"bus_{link.bus1_id}")
-                attrs['bus0'] = bus0_name
-                attrs['bus1'] = bus1_name
-
+                attrs["bus0"] = bus0_name
+                attrs["bus1"] = bus1_name
+
             if link.carrier_id:
-                carrier_name = carrier_id_to_name.get(link.carrier_id, 'DC')
-                attrs['carrier'] = carrier_name
+                carrier_name = carrier_id_to_name.get(link.carrier_id, "DC")
+                attrs["carrier"] = carrier_name
             else:
-                attrs['carrier'] = 'DC'
-
+                attrs["carrier"] = "DC"
+
             # Merge timeseries into attributes
             attrs.update(timeseries)
-
+
             network.add("Link", link.name, **attrs)
-
-    def _load_storage_units(self, conn, network_id: int, network: 'pypsa.Network', scenario_id: Optional[int],
-                            bus_id_to_name: Dict[int, str], carrier_id_to_name: Dict[int, str]):
-        """Load storage unit components."""
-        storage_units = list_components_by_type(conn, network_id, 'STORAGE_UNIT')
+
+    def _load_storage_units(
+        self,
+        conn,
+        network: "pypsa.Network",
+        scenario_id: Optional[int],
+        bus_id_to_name: Dict[int, str],
+        carrier_id_to_name: Dict[int, str],
+    ):
+        """Load storage unit components (single network per database)."""
+        storage_units = list_components_by_type(conn, "STORAGE_UNIT")
         storage_ids = [storage.id for storage in storage_units]
-
-        storage_attributes = self.batch_loader.batch_load_component_attributes(conn, storage_ids, scenario_id)
-        storage_timeseries = self.batch_loader.batch_load_component_timeseries(conn, storage_ids, scenario_id)
-
+
+        storage_attributes = self.batch_loader.batch_load_component_attributes(
+            conn, storage_ids, scenario_id
+        )
+        storage_timeseries = self.batch_loader.batch_load_component_timeseries(
+            conn, storage_ids, scenario_id
+        )
+
         for storage in storage_units:
             attrs = storage_attributes.get(storage.id, {})
             timeseries = storage_timeseries.get(storage.id, {})
-
+
             if storage.bus_id:
                 bus_name = bus_id_to_name.get(storage.bus_id, f"bus_{storage.bus_id}")
-                attrs['bus'] = bus_name
-
+                attrs["bus"] = bus_name
+
             if storage.carrier_id:
-                carrier_name = carrier_id_to_name.get(storage.carrier_id, '-')
-                attrs['carrier'] = carrier_name
+                carrier_name = carrier_id_to_name.get(storage.carrier_id, "-")
+                attrs["carrier"] = carrier_name
             else:
-                attrs['carrier'] = '-'
-
+                attrs["carrier"] = "-"
+
             # Merge timeseries into attributes
             attrs.update(timeseries)
-
+
             network.add("StorageUnit", storage.name, **attrs)
-
-    def _load_stores(self, conn, network_id: int, network: 'pypsa.Network', scenario_id: Optional[int],
-                     bus_id_to_name: Dict[int, str], carrier_id_to_name: Dict[int, str]):
-        """Load store components."""
-        stores = list_components_by_type(conn, network_id, 'STORE')
+
+    def _load_stores(
+        self,
+        conn,
+        network: "pypsa.Network",
+        scenario_id: Optional[int],
+        bus_id_to_name: Dict[int, str],
+        carrier_id_to_name: Dict[int, str],
+    ):
+        """Load store components (single network per database)."""
+        stores = list_components_by_type(conn, "STORE")
         store_ids = [store.id for store in stores]
-
-        store_attributes = self.batch_loader.batch_load_component_attributes(conn, store_ids, scenario_id)
-        store_timeseries = self.batch_loader.batch_load_component_timeseries(conn, store_ids, scenario_id)
-
+
+        store_attributes = self.batch_loader.batch_load_component_attributes(
+            conn, store_ids, scenario_id
+        )
+        store_timeseries = self.batch_loader.batch_load_component_timeseries(
+            conn, store_ids, scenario_id
+        )
+
         for store in stores:
             attrs = store_attributes.get(store.id, {})
             timeseries = store_timeseries.get(store.id, {})
-
+
             if store.bus_id:
                 bus_name = bus_id_to_name.get(store.bus_id, f"bus_{store.bus_id}")
                 attrs['bus'] = bus_name
-
+
             if store.carrier_id:
-                carrier_name = carrier_id_to_name.get(store.carrier_id, '-')
-                attrs['carrier'] = carrier_name
+                carrier_name = carrier_id_to_name.get(store.carrier_id, "-")
+                attrs["carrier"] = carrier_name
             else:
-                attrs['carrier'] = '-'
-
+                attrs["carrier"] = "-"
+
             # Merge timeseries into attributes
             attrs.update(timeseries)
-
+
             network.add("Store", store.name, **attrs)
-
-    def _set_snapshot_weightings(self, conn, network_id: int, network: 'pypsa.Network'):
-        """Set snapshot weightings from time periods."""
+
+    def _set_snapshot_weightings(self, conn, network: "pypsa.Network"):
+        """Set snapshot weightings from time periods (single network per database)."""
         try:
-            time_periods = get_network_time_periods(conn, network_id)
+            time_periods = get_network_time_periods(conn)
             if time_periods and len(network.snapshots) > 0:
-                # Create weightings array
-                weightings = []
-                for tp in time_periods:
-                    if tp.weight is not None:
-                        weightings.append(tp.weight)
-                    else:
-                        # Calculate from time interval if weight not specified
-                        network_config = get_network_config(conn, network_id)
-                        time_interval = network_config.get('time_interval', '1H')
-                        weight = self._parse_time_interval(time_interval)
-                        weightings.append(weight if weight else 1.0)
-
+                # Get network info to determine time interval
+                network_info = get_network_info(conn)
+                time_interval = network_info.get("time_interval", "PT1H")
+                weight = self._parse_time_interval(time_interval)
+                if weight is None:
+                    weight = 1.0
+
+                # Create weightings array - all periods get same weight
+                weightings = [weight] * len(time_periods)
+
                 if len(weightings) == len(network.snapshots):
                     # Set all three columns like the old code - critical for proper objective calculation
-                    network.snapshot_weightings.loc[:, 'objective'] = weightings
-                    network.snapshot_weightings.loc[:, 'generators'] = weightings
-                    network.snapshot_weightings.loc[:, 'stores'] = weightings
-                    logger.info(f"Set snapshot weightings for {len(weightings)} time periods (objective, generators, stores)")
+                    network.snapshot_weightings.loc[:, "objective"] = weightings
+                    network.snapshot_weightings.loc[:, "generators"] = weightings
+                    network.snapshot_weightings.loc[:, "stores"] = weightings
+                    logger.info(
+                        f"Set snapshot weightings for {len(weightings)} time periods (objective, generators, stores)"
+                    )
                 else:
-                    logger.warning(f"Mismatch between weightings ({len(weightings)}) and snapshots ({len(network.snapshots)})")
+                    logger.warning(
+                        f"Mismatch between weightings ({len(weightings)}) and snapshots ({len(network.snapshots)})"
+                    )
         except Exception as e:
             logger.warning(f"Failed to set snapshot weightings: {e}")
-
+
     def _parse_time_interval(self, time_interval: str) -> Optional[float]:
         """Parse time interval string to hours."""
         if not time_interval:
             return None
-
+
         try:
             # Handle pandas frequency strings
-            if time_interval.endswith('H'):
+            if time_interval.endswith("H"):
                 return float(time_interval[:-1])
-            elif time_interval.endswith('D'):
+            elif time_interval.endswith("D"):
                 return float(time_interval[:-1]) * 24
-            elif time_interval.endswith('M'):
+            elif time_interval.endswith("M"):
                 return float(time_interval[:-1]) / 60
-            elif time_interval.endswith('S'):
+            elif time_interval.endswith("S"):
                 return float(time_interval[:-1]) / 3600
             else:
                 # Try to parse as float (assume hours)
@@ -559,14 +659,13 @@ class NetworkBuilder:
         except (ValueError, TypeError):
             logger.warning(f"Could not parse time interval: {time_interval}")
             return None
-
-    def _build_bus_id_to_name_map(self, conn, network_id: int) -> Dict[int, str]:
-        """Build mapping from bus IDs to names."""
-        buses = list_components_by_type(conn, network_id, 'BUS')
+
+    def _build_bus_id_to_name_map(self, conn) -> Dict[int, str]:
+        """Build mapping from bus IDs to names (single network per database)."""
+        buses = list_components_by_type(conn, "BUS")
         return {bus.id: bus.name for bus in buses}
-
-    def _build_carrier_id_to_name_map(self, conn, network_id: int) -> Dict[int, str]:
-        """Build mapping from carrier IDs to names."""
-        cursor = conn.execute("SELECT id, name FROM carriers WHERE network_id = ?", (network_id,))
+
+    def _build_carrier_id_to_name_map(self, conn) -> Dict[int, str]:
+        """Build mapping from carrier IDs to names (single network per database)."""
+        cursor = conn.execute("SELECT id, name FROM carriers")
         return {row[0]: row[1] for row in cursor.fetchall()}
-
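Note on `_parse_time_interval`: it handles pandas-style suffixes ("1H", "30M", "15S"), but `_set_snapshot_weightings` above now defaults to the ISO 8601 string "PT1H". That value does not parse (float("PT1") raises ValueError), so the caller falls back to a weight of 1.0. A standalone copy of the parser for tracing only — the final `return float(time_interval)` sits between the two hunks above and is reconstructed here from its comment:

    # Illustrative module-level copy of the parser logic shown in the diff.
    def parse_time_interval(time_interval):
        if not time_interval:
            return None
        try:
            if time_interval.endswith("H"):
                return float(time_interval[:-1])
            elif time_interval.endswith("D"):
                return float(time_interval[:-1]) * 24
            elif time_interval.endswith("M"):
                return float(time_interval[:-1]) / 60
            elif time_interval.endswith("S"):
                return float(time_interval[:-1]) / 3600
            else:
                # Try to parse as float (assume hours)
                return float(time_interval)
        except (ValueError, TypeError):
            return None

    print(parse_time_interval("1H"))    # 1.0
    print(parse_time_interval("30M"))   # 0.5
    print(parse_time_interval("PT1H"))  # None -> caller substitutes 1.0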