pyconvexity 0.1.2__py3-none-any.whl → 0.1.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (43)
  1. pyconvexity/__init__.py +57 -8
  2. pyconvexity/_version.py +1 -2
  3. pyconvexity/core/__init__.py +0 -2
  4. pyconvexity/core/database.py +158 -0
  5. pyconvexity/core/types.py +105 -18
  6. pyconvexity/data/README.md +101 -0
  7. pyconvexity/data/__init__.py +18 -0
  8. pyconvexity/data/__pycache__/__init__.cpython-313.pyc +0 -0
  9. pyconvexity/data/loaders/__init__.py +3 -0
  10. pyconvexity/data/loaders/__pycache__/__init__.cpython-313.pyc +0 -0
  11. pyconvexity/data/loaders/__pycache__/cache.cpython-313.pyc +0 -0
  12. pyconvexity/data/loaders/cache.py +212 -0
  13. pyconvexity/data/schema/01_core_schema.sql +12 -12
  14. pyconvexity/data/schema/02_data_metadata.sql +17 -321
  15. pyconvexity/data/sources/__init__.py +5 -0
  16. pyconvexity/data/sources/__pycache__/__init__.cpython-313.pyc +0 -0
  17. pyconvexity/data/sources/__pycache__/gem.cpython-313.pyc +0 -0
  18. pyconvexity/data/sources/gem.py +412 -0
  19. pyconvexity/io/__init__.py +32 -0
  20. pyconvexity/io/excel_exporter.py +1012 -0
  21. pyconvexity/io/excel_importer.py +1109 -0
  22. pyconvexity/io/netcdf_exporter.py +192 -0
  23. pyconvexity/io/netcdf_importer.py +1602 -0
  24. pyconvexity/models/__init__.py +7 -0
  25. pyconvexity/models/attributes.py +209 -72
  26. pyconvexity/models/components.py +3 -0
  27. pyconvexity/models/network.py +17 -15
  28. pyconvexity/models/scenarios.py +177 -0
  29. pyconvexity/solvers/__init__.py +29 -0
  30. pyconvexity/solvers/pypsa/__init__.py +24 -0
  31. pyconvexity/solvers/pypsa/api.py +421 -0
  32. pyconvexity/solvers/pypsa/batch_loader.py +304 -0
  33. pyconvexity/solvers/pypsa/builder.py +566 -0
  34. pyconvexity/solvers/pypsa/constraints.py +321 -0
  35. pyconvexity/solvers/pypsa/solver.py +1106 -0
  36. pyconvexity/solvers/pypsa/storage.py +1574 -0
  37. pyconvexity/timeseries.py +327 -0
  38. pyconvexity/validation/rules.py +2 -2
  39. {pyconvexity-0.1.2.dist-info → pyconvexity-0.1.4.dist-info}/METADATA +5 -2
  40. pyconvexity-0.1.4.dist-info/RECORD +46 -0
  41. pyconvexity-0.1.2.dist-info/RECORD +0 -20
  42. {pyconvexity-0.1.2.dist-info → pyconvexity-0.1.4.dist-info}/WHEEL +0 -0
  43. {pyconvexity-0.1.2.dist-info → pyconvexity-0.1.4.dist-info}/top_level.txt +0 -0
pyconvexity/solvers/pypsa/builder.py (new file)
@@ -0,0 +1,566 @@
+ """
+ Network building functionality for PyPSA solver integration.
+
+ Simplified to always use the MultiIndex snapshot format for consistent multi-period optimization.
+ """
+
+ import logging
+
+ import pandas as pd
+ from typing import Dict, Any, Optional, Callable
+
+ from pyconvexity.models import (
+     list_components_by_type, get_network_time_periods,
+     get_network_config,  # used by _set_snapshot_weightings; assumed to be exported by pyconvexity.models
+ )
+
+ logger = logging.getLogger(__name__)
+
+
+ class NetworkBuilder:
+     """
+     Builds PyPSA networks from database data.
+
+     Always creates MultiIndex snapshots for consistent multi-period optimization,
+     even for single-year models.
+     """
+
+     def __init__(self):
+         # Import PyPSA lazily so the rest of the package works without it
+         try:
+             import pypsa
+             self.pypsa = pypsa
+         except ImportError as e:
+             raise ImportError(
+                 "PyPSA is not installed or could not be imported. "
+                 "Please ensure it is installed correctly in the environment."
+             ) from e
+
+         # Batch loader for efficient bulk reads from the database
+         from pyconvexity.solvers.pypsa.batch_loader import PyPSABatchLoader
+         self.batch_loader = PyPSABatchLoader()
+
+     def build_network(
+         self,
+         conn,
+         network_id: int,
+         scenario_id: Optional[int] = None,
+         progress_callback: Optional[Callable[[int, str], None]] = None
+     ) -> 'pypsa.Network':
+         """
+         Build a complete PyPSA network from the database.
+
+         Args:
+             conn: Database connection
+             network_id: ID of the network to build
+             scenario_id: Optional scenario ID
+             progress_callback: Optional progress callback
+
+         Returns:
+             Configured PyPSA Network object
+         """
+         if progress_callback:
+             progress_callback(0, "Loading network metadata...")
+
+         # Load network info
+         network_info = self._load_network_info(conn, network_id)
+
+         if progress_callback:
+             progress_callback(5, f"Building network: {network_info['name']}")
+
+         # Create PyPSA network
+         network = self.pypsa.Network(name=network_info['name'])
+
+         # Set time index
+         self._set_time_index(conn, network_id, network)
+
+         if progress_callback:
+             progress_callback(15, "Loading carriers...")
+
+         # Load carriers
+         self._load_carriers(conn, network_id, network)
+
+         if progress_callback:
+             progress_callback(20, "Loading components...")
+
+         # Load all components using the efficient batch loader
+         self._load_components(conn, network_id, network, scenario_id, progress_callback)
+
+         # NOTE: Snapshot weightings are set AFTER the multi-period optimization
+         # setup in the solver, not here. This matches the old code's approach,
+         # since PyPSA's multi-period setup can reset snapshot weightings to 1.0.
+
+         if progress_callback:
+             progress_callback(95, "Network build complete")
+
+         return network
+
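For orientation, a minimal usage sketch. The `?`-style query placeholders in this file suggest a sqlite3-compatible connection; the database path and network ID below are hypothetical.

import sqlite3
from pyconvexity.solvers.pypsa.builder import NetworkBuilder

def on_progress(percent: int, message: str) -> None:
    print(f"[{percent:3d}%] {message}")

conn = sqlite3.connect("energy_model.db")  # hypothetical database file
builder = NetworkBuilder()
network = builder.build_network(conn, network_id=1, progress_callback=on_progress)
print(network.investment_periods)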
+     def load_network_data(
+         self,
+         conn,
+         network_id: int,
+         scenario_id: Optional[int] = None
+     ) -> Dict[str, Any]:
+         """
+         Load network data as a structured dictionary without building a PyPSA network.
+
+         Args:
+             conn: Database connection
+             network_id: ID of the network to load
+             scenario_id: Optional scenario ID
+
+         Returns:
+             Dictionary with all network data
+         """
+         data = {
+             "network_info": self._load_network_info(conn, network_id),
+             "carriers": self._load_carriers_data(conn, network_id),
+             "components": {},
+             "time_periods": []
+         }
+
+         # Load time periods
+         try:
+             time_periods = get_network_time_periods(conn, network_id)
+             data["time_periods"] = [
+                 {
+                     "timestamp": tp.formatted_time,
+                     "period_index": tp.period_index,
+                     "weight": tp.weight
+                 }
+                 for tp in time_periods
+             ]
+         except Exception as e:
+             logger.warning(f"Failed to load time periods: {e}")
+
+         # Load all component types
+         component_types = ['BUS', 'GENERATOR', 'UNMET_LOAD', 'LOAD', 'LINE', 'LINK', 'STORAGE_UNIT', 'STORE']
+
+         for comp_type in component_types:
+             try:
+                 components = list_components_by_type(conn, network_id, comp_type)
+                 if components:
+                     data["components"][comp_type.lower()] = [
+                         {
+                             "id": comp.id,
+                             "name": comp.name,
+                             "component_type": comp.component_type,
+                             "longitude": comp.longitude,
+                             "latitude": comp.latitude,
+                             "carrier_id": comp.carrier_id,
+                             "bus_id": comp.bus_id,
+                             "bus0_id": comp.bus0_id,
+                             "bus1_id": comp.bus1_id
+                         }
+                         for comp in components
+                     ]
+             except Exception as e:
+                 logger.warning(f"Failed to load {comp_type} components: {e}")
+
+         return data
+
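The returned dictionary has roughly the following shape (all values hypothetical; the fields mirror the keys assembled above):

network_data = {
    "network_info": {"name": "toy-model", "description": None,
                     "time_start": "2030-01-01", "time_end": "2030-12-31",
                     "time_interval": "1H"},
    "carriers": [{"name": "wind", "co2_emissions": 0.0,
                  "nice_name": "Wind", "color": "#0077be"}],
    "components": {
        "bus": [{"id": 1, "name": "bus_north", "component_type": "BUS",
                 "longitude": 10.4, "latitude": 63.4, "carrier_id": None,
                 "bus_id": None, "bus0_id": None, "bus1_id": None}],
    },
    "time_periods": [{"timestamp": "2030-01-01 00:00",
                      "period_index": 0, "weight": 1.0}],
}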
+     def _load_network_info(self, conn, network_id: int) -> Dict[str, Any]:
+         """Load network metadata."""
+         cursor = conn.execute("""
+             SELECT name, description, time_start, time_end, time_interval
+             FROM networks
+             WHERE id = ?
+         """, (network_id,))
+
+         row = cursor.fetchone()
+         if not row:
+             raise ValueError(f"Network with ID {network_id} not found")
+
+         return {
+             'name': row[0],
+             'description': row[1],
+             'time_start': row[2],
+             'time_end': row[3],
+             'time_interval': row[4]
+         }
+
+     def _set_time_index(self, conn, network_id: int, network: 'pypsa.Network'):
+         """Set the time index from network time periods - always a MultiIndex for consistency."""
+         try:
+             time_periods = get_network_time_periods(conn, network_id)
+             if not time_periods:
+                 logger.error("No time periods found for network")
+                 return
+
+             # Convert to pandas Timestamps
+             timestamps = [pd.Timestamp(tp.formatted_time) for tp in time_periods]
+
+             # Extract unique years as investment periods
+             years = sorted({ts.year for ts in timestamps})
+             logger.info(f"Found {len(years)} investment periods: {years}")
+
+             # Always create a MultiIndex following the PyPSA multi-investment
+             # tutorial format: first level is the investment period (year),
+             # second level the timestep.
+             multi_snapshots = [(ts.year, ts) for ts in timestamps]
+             multi_index = pd.MultiIndex.from_tuples(multi_snapshots, names=['period', 'timestep'])
+
+             # Verify the MultiIndex is unique (should always hold with UTC timestamps)
+             if not multi_index.is_unique:
+                 raise ValueError("Created MultiIndex is not unique! Check timestamp generation.")
+
+             network.set_snapshots(multi_index)
+
+             # Enable multi-period optimization
+             network.investment_periods = years
+
+             # Store years for statistics
+             network._available_years = years
+
+             logger.info(f"Created MultiIndex with {len(multi_index)} snapshots across {len(years)} periods")
+             logger.info(f"Investment periods: {network.investment_periods}")
+
+         except Exception as e:
+             logger.error(f"Failed to set time index: {e}")
+             network._available_years = []
+
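A self-contained sketch of the snapshot structure this method produces, using plain pandas and made-up timestamps:

import pandas as pd

timestamps = pd.to_datetime(["2030-01-01 00:00", "2030-01-01 01:00",
                             "2040-01-01 00:00", "2040-01-01 01:00"])
snapshots = pd.MultiIndex.from_tuples([(ts.year, ts) for ts in timestamps],
                                      names=["period", "timestep"])
print(snapshots.get_level_values("period").unique().tolist())  # [2030, 2040]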
+     def _load_carriers(self, conn, network_id: int, network: 'pypsa.Network'):
+         """Load carriers into the PyPSA network."""
+         carriers = self._load_carriers_data(conn, network_id)
+         for carrier in carriers:
+             filtered_attrs = self._filter_carrier_attrs(carrier)
+             network.add("Carrier", carrier['name'], **filtered_attrs)
+
+     def _load_carriers_data(self, conn, network_id: int) -> list:
+         """Load carrier data from the database."""
+         cursor = conn.execute("""
+             SELECT name, co2_emissions, nice_name, color
+             FROM carriers
+             WHERE network_id = ?
+             ORDER BY name
+         """, (network_id,))
+
+         carriers = []
+         for row in cursor.fetchall():
+             carriers.append({
+                 'name': row[0],
+                 'co2_emissions': row[1],
+                 'nice_name': row[2],
+                 'color': row[3]
+             })
+
+         return carriers
+
+     def _filter_carrier_attrs(self, carrier: Dict[str, Any]) -> Dict[str, Any]:
+         """Drop the name key and None values so PyPSA's defaults apply."""
+         filtered = {}
+         for key, value in carrier.items():
+             if key != 'name' and value is not None:
+                 filtered[key] = value
+         return filtered
+
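The filtering step, inlined for illustration with hypothetical carrier values; None-valued columns are dropped so PyPSA falls back to its own defaults:

carrier = {"name": "gas", "co2_emissions": 0.2, "nice_name": None, "color": None}
filtered = {k: v for k, v in carrier.items() if k != "name" and v is not None}
print(filtered)  # {'co2_emissions': 0.2}
# network.add("Carrier", "gas", **filtered)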
+     def _load_components(
+         self,
+         conn,
+         network_id: int,
+         network: 'pypsa.Network',
+         scenario_id: Optional[int],
+         progress_callback: Optional[Callable[[int, str], None]] = None
+     ):
+         """Load all network components using the batch loader."""
+         # Load component connections
+         connections = self.batch_loader.batch_load_component_connections(conn, network_id)
+         bus_id_to_name = connections['bus_id_to_name']
+         carrier_id_to_name = connections['carrier_id_to_name']
+
+         # Component type mapping for later identification
+         component_type_map = {}
+
+         # Load buses
+         if progress_callback:
+             progress_callback(25, "Loading buses...")
+         self._load_buses(conn, network_id, network, scenario_id, component_type_map)
+
+         # Load generators (including unmet loads)
+         if progress_callback:
+             progress_callback(35, "Loading generators...")
+         self._load_generators(conn, network_id, network, scenario_id, bus_id_to_name, carrier_id_to_name, component_type_map)
+
+         # Load loads
+         if progress_callback:
+             progress_callback(50, "Loading loads...")
+         self._load_loads(conn, network_id, network, scenario_id, bus_id_to_name, carrier_id_to_name)
+
+         # Load lines
+         if progress_callback:
+             progress_callback(65, "Loading lines...")
+         self._load_lines(conn, network_id, network, scenario_id, bus_id_to_name, carrier_id_to_name)
+
+         # Load links
+         if progress_callback:
+             progress_callback(75, "Loading links...")
+         self._load_links(conn, network_id, network, scenario_id, bus_id_to_name, carrier_id_to_name)
+
+         # Load storage units and stores
+         if progress_callback:
+             progress_callback(85, "Loading storage...")
+         self._load_storage_units(conn, network_id, network, scenario_id, bus_id_to_name, carrier_id_to_name)
+         self._load_stores(conn, network_id, network, scenario_id, bus_id_to_name, carrier_id_to_name)
+
+         # Store the component type mapping on the network
+         network._component_type_map = component_type_map
+
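The connection maps returned by the batch loader are plain id-to-name dictionaries, roughly (values hypothetical):

connections = {
    "bus_id_to_name": {1: "bus_north", 2: "bus_south"},
    "carrier_id_to_name": {1: "wind", 2: "gas"},
}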
+     def _load_buses(self, conn, network_id: int, network: 'pypsa.Network', scenario_id: Optional[int], component_type_map: Dict[str, str]):
+         """Load bus components."""
+         buses = list_components_by_type(conn, network_id, 'BUS')
+         bus_ids = [bus.id for bus in buses]
+
+         bus_attributes = self.batch_loader.batch_load_component_attributes(conn, bus_ids, scenario_id)
+         bus_timeseries = self.batch_loader.batch_load_component_timeseries(conn, bus_ids, scenario_id)
+
+         for bus in buses:
+             attrs = bus_attributes.get(bus.id, {})
+             timeseries = bus_timeseries.get(bus.id, {})
+
+             # Merge timeseries into attributes
+             attrs.update(timeseries)
+
+             network.add("Bus", bus.name, **attrs)
+             component_type_map[bus.name] = bus.component_type
+
+     def _load_generators(self, conn, network_id: int, network: 'pypsa.Network', scenario_id: Optional[int],
+                          bus_id_to_name: Dict[int, str], carrier_id_to_name: Dict[int, str], component_type_map: Dict[str, str]):
+         """Load generator and unmet-load components."""
+         generators = list_components_by_type(conn, network_id, 'GENERATOR')
+         unmet_loads = list_components_by_type(conn, network_id, 'UNMET_LOAD')
+         all_generators = generators + unmet_loads
+
+         generator_ids = [gen.id for gen in all_generators]
+
+         generator_attributes = self.batch_loader.batch_load_component_attributes(conn, generator_ids, scenario_id)
+         generator_timeseries = self.batch_loader.batch_load_component_timeseries(conn, generator_ids, scenario_id)
+
+         for gen in all_generators:
+             attrs = generator_attributes.get(gen.id, {})
+             timeseries = generator_timeseries.get(gen.id, {})
+
+             # Set bus connection
+             if gen.bus_id:
+                 attrs['bus'] = bus_id_to_name.get(gen.bus_id, f"bus_{gen.bus_id}")
+
+             # Set carrier; '-' is the fallback name
+             if gen.carrier_id:
+                 attrs['carrier'] = carrier_id_to_name.get(gen.carrier_id, '-')
+             else:
+                 attrs['carrier'] = '-'
+
+             # Merge timeseries into attributes
+             attrs.update(timeseries)
+
+             component_type_map[gen.name] = gen.component_type
+             network.add("Generator", gen.name, **attrs)
+
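Static attributes and per-snapshot timeseries travel through the same `network.add` call. A sketch of one merged generator dict (names and numbers hypothetical):

import pandas as pd

snapshots = pd.MultiIndex.from_tuples(
    [(2030, pd.Timestamp("2030-01-01 00:00")),
     (2030, pd.Timestamp("2030-01-01 01:00"))],
    names=["period", "timestep"])
attrs = {
    "bus": "bus_north",    # resolved via bus_id_to_name
    "carrier": "wind",     # resolved via carrier_id_to_name
    "p_nom": 120.0,        # static attribute
    "p_max_pu": pd.Series([0.42, 0.37], index=snapshots),  # time-varying
}
# network.add("Generator", "wind_north", **attrs)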
+     def _load_loads(self, conn, network_id: int, network: 'pypsa.Network', scenario_id: Optional[int],
+                     bus_id_to_name: Dict[int, str], carrier_id_to_name: Dict[int, str]):
+         """Load load components."""
+         loads = list_components_by_type(conn, network_id, 'LOAD')
+         load_ids = [load.id for load in loads]
+
+         load_attributes = self.batch_loader.batch_load_component_attributes(conn, load_ids, scenario_id)
+         load_timeseries = self.batch_loader.batch_load_component_timeseries(conn, load_ids, scenario_id)
+
+         for load in loads:
+             attrs = load_attributes.get(load.id, {})
+             timeseries = load_timeseries.get(load.id, {})
+
+             if load.bus_id:
+                 attrs['bus'] = bus_id_to_name.get(load.bus_id, f"bus_{load.bus_id}")
+
+             if load.carrier_id:
+                 attrs['carrier'] = carrier_id_to_name.get(load.carrier_id, '-')
+             else:
+                 attrs['carrier'] = '-'
+
+             # Merge timeseries into attributes
+             attrs.update(timeseries)
+
+             network.add("Load", load.name, **attrs)
+
+     def _load_lines(self, conn, network_id: int, network: 'pypsa.Network', scenario_id: Optional[int],
+                     bus_id_to_name: Dict[int, str], carrier_id_to_name: Dict[int, str]):
+         """Load line components."""
+         lines = list_components_by_type(conn, network_id, 'LINE')
+         line_ids = [line.id for line in lines]
+
+         line_attributes = self.batch_loader.batch_load_component_attributes(conn, line_ids, scenario_id)
+         line_timeseries = self.batch_loader.batch_load_component_timeseries(conn, line_ids, scenario_id)
+
+         for line in lines:
+             attrs = line_attributes.get(line.id, {})
+             timeseries = line_timeseries.get(line.id, {})
+
+             if line.bus0_id and line.bus1_id:
+                 attrs['bus0'] = bus_id_to_name.get(line.bus0_id, f"bus_{line.bus0_id}")
+                 attrs['bus1'] = bus_id_to_name.get(line.bus1_id, f"bus_{line.bus1_id}")
+
+             # Lines default to the 'AC' carrier
+             if line.carrier_id:
+                 attrs['carrier'] = carrier_id_to_name.get(line.carrier_id, 'AC')
+             else:
+                 attrs['carrier'] = 'AC'
+
+             # Merge timeseries into attributes
+             attrs.update(timeseries)
+
+             network.add("Line", line.name, **attrs)
+
+     def _load_links(self, conn, network_id: int, network: 'pypsa.Network', scenario_id: Optional[int],
+                     bus_id_to_name: Dict[int, str], carrier_id_to_name: Dict[int, str]):
+         """Load link components."""
+         links = list_components_by_type(conn, network_id, 'LINK')
+         link_ids = [link.id for link in links]
+
+         link_attributes = self.batch_loader.batch_load_component_attributes(conn, link_ids, scenario_id)
+         link_timeseries = self.batch_loader.batch_load_component_timeseries(conn, link_ids, scenario_id)
+
+         for link in links:
+             attrs = link_attributes.get(link.id, {})
+             timeseries = link_timeseries.get(link.id, {})
+
+             if link.bus0_id and link.bus1_id:
+                 attrs['bus0'] = bus_id_to_name.get(link.bus0_id, f"bus_{link.bus0_id}")
+                 attrs['bus1'] = bus_id_to_name.get(link.bus1_id, f"bus_{link.bus1_id}")
+
+             # Links default to the 'DC' carrier
+             if link.carrier_id:
+                 attrs['carrier'] = carrier_id_to_name.get(link.carrier_id, 'DC')
+             else:
+                 attrs['carrier'] = 'DC'
+
+             # Merge timeseries into attributes
+             attrs.update(timeseries)
+
+             network.add("Link", link.name, **attrs)
+
+     def _load_storage_units(self, conn, network_id: int, network: 'pypsa.Network', scenario_id: Optional[int],
+                             bus_id_to_name: Dict[int, str], carrier_id_to_name: Dict[int, str]):
+         """Load storage unit components."""
+         storage_units = list_components_by_type(conn, network_id, 'STORAGE_UNIT')
+         storage_ids = [storage.id for storage in storage_units]
+
+         storage_attributes = self.batch_loader.batch_load_component_attributes(conn, storage_ids, scenario_id)
+         storage_timeseries = self.batch_loader.batch_load_component_timeseries(conn, storage_ids, scenario_id)
+
+         for storage in storage_units:
+             attrs = storage_attributes.get(storage.id, {})
+             timeseries = storage_timeseries.get(storage.id, {})
+
+             if storage.bus_id:
+                 attrs['bus'] = bus_id_to_name.get(storage.bus_id, f"bus_{storage.bus_id}")
+
+             if storage.carrier_id:
+                 attrs['carrier'] = carrier_id_to_name.get(storage.carrier_id, '-')
+             else:
+                 attrs['carrier'] = '-'
+
+             # Merge timeseries into attributes
+             attrs.update(timeseries)
+
+             network.add("StorageUnit", storage.name, **attrs)
+
+     def _load_stores(self, conn, network_id: int, network: 'pypsa.Network', scenario_id: Optional[int],
+                      bus_id_to_name: Dict[int, str], carrier_id_to_name: Dict[int, str]):
+         """Load store components."""
+         stores = list_components_by_type(conn, network_id, 'STORE')
+         store_ids = [store.id for store in stores]
+
+         store_attributes = self.batch_loader.batch_load_component_attributes(conn, store_ids, scenario_id)
+         store_timeseries = self.batch_loader.batch_load_component_timeseries(conn, store_ids, scenario_id)
+
+         for store in stores:
+             attrs = store_attributes.get(store.id, {})
+             timeseries = store_timeseries.get(store.id, {})
+
+             if store.bus_id:
+                 attrs['bus'] = bus_id_to_name.get(store.bus_id, f"bus_{store.bus_id}")
+
+             if store.carrier_id:
+                 attrs['carrier'] = carrier_id_to_name.get(store.carrier_id, '-')
+             else:
+                 attrs['carrier'] = '-'
+
+             # Merge timeseries into attributes
+             attrs.update(timeseries)
+
+             network.add("Store", store.name, **attrs)
+
+     def _set_snapshot_weightings(self, conn, network_id: int, network: 'pypsa.Network'):
+         """Set snapshot weightings from time periods."""
+         try:
+             time_periods = get_network_time_periods(conn, network_id)
+             if time_periods and len(network.snapshots) > 0:
+                 # Create the weightings array
+                 weightings = []
+                 for tp in time_periods:
+                     if tp.weight is not None:
+                         weightings.append(tp.weight)
+                     else:
+                         # Fall back to the network's time interval if no weight is stored
+                         network_config = get_network_config(conn, network_id)
+                         time_interval = network_config.get('time_interval', '1H')
+                         weight = self._parse_time_interval(time_interval)
+                         weightings.append(weight if weight else 1.0)
+
+                 if len(weightings) == len(network.snapshots):
+                     # Set all three columns, as in the old code - critical for a
+                     # correct objective calculation
+                     network.snapshot_weightings.loc[:, 'objective'] = weightings
+                     network.snapshot_weightings.loc[:, 'generators'] = weightings
+                     network.snapshot_weightings.loc[:, 'stores'] = weightings
+                     logger.info(f"Set snapshot weightings for {len(weightings)} time periods (objective, generators, stores)")
+                 else:
+                     logger.warning(f"Mismatch between weightings ({len(weightings)}) and snapshots ({len(network.snapshots)})")
+         except Exception as e:
+             logger.warning(f"Failed to set snapshot weightings: {e}")
+
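What this writes, sketched with plain pandas for a hypothetical 3-hourly model: one row per snapshot, the same weight in all three columns.

import pandas as pd

snapshots = pd.MultiIndex.from_tuples(
    [(2030, pd.Timestamp("2030-01-01 00:00")),
     (2030, pd.Timestamp("2030-01-01 03:00"))],
    names=["period", "timestep"])
weightings = pd.DataFrame(3.0, index=snapshots,
                          columns=["objective", "generators", "stores"])
print(weightings)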
+     def _parse_time_interval(self, time_interval: str) -> Optional[float]:
+         """Parse a time interval string to hours."""
+         if not time_interval:
+             return None
+
+         try:
+             # Frequency-style suffixes. Note the local convention: 'M' is read
+             # as minutes and 'S' as seconds (pandas offsets use 'T'/'min' for
+             # minutes and 'M' for month-end).
+             if time_interval.endswith('H'):
+                 return float(time_interval[:-1])
+             elif time_interval.endswith('D'):
+                 return float(time_interval[:-1]) * 24
+             elif time_interval.endswith('M'):
+                 return float(time_interval[:-1]) / 60
+             elif time_interval.endswith('S'):
+                 return float(time_interval[:-1]) / 3600
+             else:
+                 # Try to parse as a bare float (assume hours)
+                 return float(time_interval)
+         except (ValueError, TypeError):
+             logger.warning(f"Could not parse time interval: {time_interval}")
+             return None
+
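Worked examples of the parser's conventions (assuming PyPSA is importable so the class can be instantiated):

builder = NetworkBuilder()
for s in ["1H", "3H", "1D", "30M", "900S", "2.5", "weekly"]:
    print(s, "->", builder._parse_time_interval(s))
# 1H -> 1.0   3H -> 3.0   1D -> 24.0   30M -> 0.5
# 900S -> 0.25   2.5 -> 2.5 (bare number, read as hours)   weekly -> None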
+     def _build_bus_id_to_name_map(self, conn, network_id: int) -> Dict[int, str]:
+         """Build a mapping from bus IDs to names."""
+         buses = list_components_by_type(conn, network_id, 'BUS')
+         return {bus.id: bus.name for bus in buses}
+
+     def _build_carrier_id_to_name_map(self, conn, network_id: int) -> Dict[int, str]:
+         """Build a mapping from carrier IDs to names."""
+         cursor = conn.execute("SELECT id, name FROM carriers WHERE network_id = ?", (network_id,))
+         return {row[0]: row[1] for row in cursor.fetchall()}