pyconvexity 0.4.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pyconvexity has been flagged as potentially problematic.

Files changed (44):
  1. pyconvexity/__init__.py +241 -0
  2. pyconvexity/_version.py +1 -0
  3. pyconvexity/core/__init__.py +60 -0
  4. pyconvexity/core/database.py +485 -0
  5. pyconvexity/core/errors.py +106 -0
  6. pyconvexity/core/types.py +400 -0
  7. pyconvexity/dashboard.py +265 -0
  8. pyconvexity/data/README.md +101 -0
  9. pyconvexity/data/__init__.py +17 -0
  10. pyconvexity/data/loaders/__init__.py +3 -0
  11. pyconvexity/data/loaders/cache.py +213 -0
  12. pyconvexity/data/schema/01_core_schema.sql +420 -0
  13. pyconvexity/data/schema/02_data_metadata.sql +120 -0
  14. pyconvexity/data/schema/03_validation_data.sql +507 -0
  15. pyconvexity/data/sources/__init__.py +5 -0
  16. pyconvexity/data/sources/gem.py +442 -0
  17. pyconvexity/io/__init__.py +26 -0
  18. pyconvexity/io/excel_exporter.py +1226 -0
  19. pyconvexity/io/excel_importer.py +1381 -0
  20. pyconvexity/io/netcdf_exporter.py +191 -0
  21. pyconvexity/io/netcdf_importer.py +1802 -0
  22. pyconvexity/models/__init__.py +195 -0
  23. pyconvexity/models/attributes.py +730 -0
  24. pyconvexity/models/carriers.py +159 -0
  25. pyconvexity/models/components.py +611 -0
  26. pyconvexity/models/network.py +503 -0
  27. pyconvexity/models/results.py +148 -0
  28. pyconvexity/models/scenarios.py +234 -0
  29. pyconvexity/solvers/__init__.py +29 -0
  30. pyconvexity/solvers/pypsa/__init__.py +30 -0
  31. pyconvexity/solvers/pypsa/api.py +446 -0
  32. pyconvexity/solvers/pypsa/batch_loader.py +296 -0
  33. pyconvexity/solvers/pypsa/builder.py +655 -0
  34. pyconvexity/solvers/pypsa/clearing_price.py +678 -0
  35. pyconvexity/solvers/pypsa/constraints.py +405 -0
  36. pyconvexity/solvers/pypsa/solver.py +1442 -0
  37. pyconvexity/solvers/pypsa/storage.py +2096 -0
  38. pyconvexity/timeseries.py +330 -0
  39. pyconvexity/validation/__init__.py +25 -0
  40. pyconvexity/validation/rules.py +312 -0
  41. pyconvexity-0.4.8.dist-info/METADATA +148 -0
  42. pyconvexity-0.4.8.dist-info/RECORD +44 -0
  43. pyconvexity-0.4.8.dist-info/WHEEL +5 -0
  44. pyconvexity-0.4.8.dist-info/top_level.txt +1 -0
@@ -0,0 +1,655 @@
1
+ """
2
+ Network building functionality for PyPSA solver integration.
3
+
4
+ Simplified to always use MultiIndex format for consistent multi-period optimization.
5
+ """
6
+
7
+ import json
8
+ import pandas as pd
9
+ from typing import Dict, Any, Optional, Callable
10
+
11
+ from pyconvexity.models import (
12
+ list_components_by_type,
13
+ get_network_time_periods,
14
+ get_network_info,
15
+ )
16
+
17
+
18
class NetworkBuilder:
    """
    Builds PyPSA networks from database data.

    Simplified to always create MultiIndex snapshots for consistent multi-period optimization,
    even for single-year models.
    """

    def __init__(self, verbose: bool = False):
        """
        Initialize NetworkBuilder.

        Args:
            verbose: Enable detailed logging output
        """
        self.verbose = verbose

        # PyPSA is a heavy optional dependency; fail fast with a clear message
        # instead of letting an ImportError surface somewhere deep in a build.
        try:
            import pypsa
        except ImportError as e:
            raise ImportError(
                "PyPSA is not installed or could not be imported. "
                "Please ensure it is installed correctly in the environment."
            ) from e
        self.pypsa = pypsa

        # The batch loader performs efficient bulk reads of component
        # attributes and timeseries instead of per-component queries.
        from pyconvexity.solvers.pypsa.batch_loader import PyPSABatchLoader

        self.batch_loader = PyPSABatchLoader()
50
+
51
+ def build_network(
52
+ self,
53
+ conn,
54
+ scenario_id: Optional[int] = None,
55
+ progress_callback: Optional[Callable[[int, str], None]] = None,
56
+ include_unmet_loads: bool = True,
57
+ ) -> "pypsa.Network":
58
+ """
59
+ Build complete PyPSA network from database (single network per database).
60
+
61
+ Args:
62
+ conn: Database connection
63
+ scenario_id: Optional scenario ID
64
+ progress_callback: Optional progress callback
65
+ include_unmet_loads: Whether to include unmet load components (default: True)
66
+
67
+ Returns:
68
+ Configured PyPSA Network object
69
+ """
70
+ if progress_callback:
71
+ progress_callback(0, "Loading network metadata...")
72
+
73
+ # Load network info
74
+ network_info = self._load_network_info(conn)
75
+
76
+ if progress_callback:
77
+ progress_callback(5, f"Building network: {network_info['name']}")
78
+
79
+ # Create PyPSA network
80
+ network = self.pypsa.Network(name=network_info["name"])
81
+
82
+ # Set time index
83
+ self._set_time_index(conn, network)
84
+
85
+ if progress_callback:
86
+ progress_callback(15, "Loading carriers...")
87
+
88
+ # Load carriers
89
+ self._load_carriers(conn, network)
90
+
91
+ if progress_callback:
92
+ progress_callback(20, "Loading components...")
93
+
94
+ # Load all components using efficient batch loader
95
+ self._load_components(
96
+ conn, network, scenario_id, progress_callback, include_unmet_loads
97
+ )
98
+
99
+ # NOTE: Snapshot weightings will be set AFTER multi-period optimization setup
100
+ # in the solver, not here. This matches the old code's approach where PyPSA's
101
+ # multi-period setup can reset snapshot weightings to 1.0
102
+
103
+ if progress_callback:
104
+ progress_callback(95, "Network build complete")
105
+
106
+ return network
107
+
108
+ def load_network_data(
109
+ self, conn, scenario_id: Optional[int] = None
110
+ ) -> Dict[str, Any]:
111
+ """
112
+ Load network data as structured dictionary without building PyPSA network (single network per database).
113
+
114
+ Args:
115
+ conn: Database connection
116
+ scenario_id: Optional scenario ID
117
+
118
+ Returns:
119
+ Dictionary with all network data
120
+ """
121
+ data = {
122
+ "network_info": self._load_network_info(conn),
123
+ "carriers": self._load_carriers_data(conn),
124
+ "components": {},
125
+ "time_periods": [],
126
+ }
127
+
128
+ # Load time periods
129
+ try:
130
+ time_periods = get_network_time_periods(conn)
131
+ data["time_periods"] = [
132
+ {
133
+ "timestamp": tp.formatted_time,
134
+ "period_index": tp.period_index,
135
+ "weight": getattr(tp, "weight", 1.0), # Weight not in new schema
136
+ }
137
+ for tp in time_periods
138
+ ]
139
+ except Exception as e:
140
+ pass # Failed to load time periods
141
+
142
+ # Load all component types
143
+ component_types = [
144
+ "BUS",
145
+ "GENERATOR",
146
+ "UNMET_LOAD",
147
+ "LOAD",
148
+ "LINE",
149
+ "LINK",
150
+ "STORAGE_UNIT",
151
+ "STORE",
152
+ ]
153
+
154
+ for comp_type in component_types:
155
+ try:
156
+ components = list_components_by_type(conn, comp_type)
157
+ if components:
158
+ data["components"][comp_type.lower()] = [
159
+ {
160
+ "id": comp.id,
161
+ "name": comp.name,
162
+ "component_type": comp.component_type,
163
+ "longitude": comp.longitude,
164
+ "latitude": comp.latitude,
165
+ "carrier_id": comp.carrier_id,
166
+ "bus_id": comp.bus_id,
167
+ "bus0_id": comp.bus0_id,
168
+ "bus1_id": comp.bus1_id,
169
+ }
170
+ for comp in components
171
+ ]
172
+ except Exception as e:
173
+ pass # Failed to load components
174
+
175
+ return data
176
+
177
+ def _load_network_info(self, conn) -> Dict[str, Any]:
178
+ """Load network metadata (single network per database)."""
179
+ from pyconvexity.models import get_network_info
180
+
181
+ return get_network_info(conn)
182
+
183
+ def _set_time_index(self, conn, network: "pypsa.Network"):
184
+ """Set time index from network time periods - always create MultiIndex for consistency."""
185
+ try:
186
+ time_periods = get_network_time_periods(conn)
187
+ if not time_periods:
188
+ return
189
+
190
+ # Convert to pandas DatetimeIndex
191
+ timestamps = [pd.Timestamp(tp.formatted_time) for tp in time_periods]
192
+
193
+ # Extract unique years for investment periods
194
+ years = sorted(list(set([ts.year for ts in timestamps])))
195
+
196
+ # Always create MultiIndex following PyPSA multi-investment tutorial format
197
+ # First level: investment periods (years), Second level: timesteps
198
+ multi_snapshots = []
199
+ for ts in timestamps:
200
+ multi_snapshots.append((ts.year, ts))
201
+
202
+ multi_index = pd.MultiIndex.from_tuples(
203
+ multi_snapshots, names=["period", "timestep"]
204
+ )
205
+
206
+ # Verify MultiIndex is unique (should always be true now with UTC timestamps)
207
+ if not multi_index.is_unique:
208
+ raise ValueError(
209
+ f"Created MultiIndex is not unique! Check timestamp generation."
210
+ )
211
+
212
+ network.set_snapshots(multi_index)
213
+
214
+ # Set investment periods for multi-period optimization
215
+ network.investment_periods = years
216
+
217
+ # Store years for statistics
218
+ network._available_years = years
219
+
220
+ except Exception as e:
221
+ network._available_years = []
222
+
223
+ def _load_carriers(self, conn, network: "pypsa.Network"):
224
+ """Load carriers into PyPSA network (single network per database)."""
225
+ carriers = self._load_carriers_data(conn)
226
+ for carrier in carriers:
227
+ filtered_attrs = self._filter_carrier_attrs(carrier)
228
+ network.add("Carrier", carrier["name"], **filtered_attrs)
229
+
230
+ def _load_carriers_data(self, conn) -> list:
231
+ """Load carrier data from database (single network per database)."""
232
+ cursor = conn.execute(
233
+ """
234
+ SELECT name, co2_emissions, nice_name, color
235
+ FROM carriers
236
+ ORDER BY name
237
+ """
238
+ )
239
+
240
+ carriers = []
241
+ for row in cursor.fetchall():
242
+ carriers.append(
243
+ {
244
+ "name": row[0],
245
+ "co2_emissions": row[1],
246
+ "nice_name": row[2],
247
+ "color": row[3],
248
+ }
249
+ )
250
+
251
+ return carriers
252
+
253
+ def _filter_carrier_attrs(self, carrier: Dict[str, Any]) -> Dict[str, Any]:
254
+ """Filter carrier attributes for PyPSA compatibility."""
255
+ filtered = {}
256
+ for key, value in carrier.items():
257
+ if key != "name" and value is not None:
258
+ filtered[key] = value
259
+ return filtered
260
+
261
+ def _load_components(
262
+ self,
263
+ conn,
264
+ network: "pypsa.Network",
265
+ scenario_id: Optional[int],
266
+ progress_callback: Optional[Callable[[int, str], None]] = None,
267
+ include_unmet_loads: bool = True,
268
+ ):
269
+ """Load all network components using batch loader (single network per database)."""
270
+ # Load component connections
271
+ connections = self.batch_loader.batch_load_component_connections(conn)
272
+ bus_id_to_name = connections["bus_id_to_name"]
273
+ carrier_id_to_name = connections["carrier_id_to_name"]
274
+
275
+ # Component type mapping for later identification
276
+ component_type_map = {}
277
+
278
+ # Load buses
279
+ if progress_callback:
280
+ progress_callback(25, "Loading buses...")
281
+ self._load_buses(conn, network, scenario_id, component_type_map)
282
+
283
+ # Load generators (including unmet loads if requested)
284
+ if progress_callback:
285
+ progress_callback(35, "Loading generators...")
286
+ self._load_generators(
287
+ conn,
288
+ network,
289
+ scenario_id,
290
+ bus_id_to_name,
291
+ carrier_id_to_name,
292
+ component_type_map,
293
+ include_unmet_loads,
294
+ )
295
+
296
+ # Load loads
297
+ if progress_callback:
298
+ progress_callback(50, "Loading loads...")
299
+ self._load_loads(conn, network, scenario_id, bus_id_to_name, carrier_id_to_name)
300
+
301
+ # Load lines
302
+ if progress_callback:
303
+ progress_callback(65, "Loading lines...")
304
+ self._load_lines(conn, network, scenario_id, bus_id_to_name, carrier_id_to_name)
305
+
306
+ # Load links
307
+ if progress_callback:
308
+ progress_callback(75, "Loading links...")
309
+ self._load_links(conn, network, scenario_id, bus_id_to_name, carrier_id_to_name)
310
+
311
+ # Load storage units
312
+ if progress_callback:
313
+ progress_callback(85, "Loading storage...")
314
+ self._load_storage_units(
315
+ conn, network, scenario_id, bus_id_to_name, carrier_id_to_name
316
+ )
317
+ self._load_stores(
318
+ conn, network, scenario_id, bus_id_to_name, carrier_id_to_name
319
+ )
320
+
321
+ # Store component type mapping on network
322
+ network._component_type_map = component_type_map
323
+
324
+ def _load_buses(
325
+ self,
326
+ conn,
327
+ network: "pypsa.Network",
328
+ scenario_id: Optional[int],
329
+ component_type_map: Dict[str, str],
330
+ ):
331
+ """Load bus components (single network per database)."""
332
+ buses = list_components_by_type(conn, "BUS")
333
+ bus_ids = [bus.id for bus in buses]
334
+
335
+ bus_attributes = self.batch_loader.batch_load_component_attributes(
336
+ conn, bus_ids, scenario_id
337
+ )
338
+ bus_timeseries = self.batch_loader.batch_load_component_timeseries(
339
+ conn, bus_ids, scenario_id
340
+ )
341
+
342
+ for bus in buses:
343
+ attrs = bus_attributes.get(bus.id, {})
344
+ timeseries = bus_timeseries.get(bus.id, {})
345
+
346
+ # Add coordinate data from components table (PyPSA uses 'x' for longitude, 'y' for latitude)
347
+ if bus.longitude is not None:
348
+ attrs["x"] = bus.longitude
349
+ if bus.latitude is not None:
350
+ attrs["y"] = bus.latitude
351
+
352
+ # Merge timeseries into attributes
353
+ attrs.update(timeseries)
354
+
355
+ network.add("Bus", bus.name, **attrs)
356
+ component_type_map[bus.name] = bus.component_type
357
+
358
+ def _load_generators(
359
+ self,
360
+ conn,
361
+ network: "pypsa.Network",
362
+ scenario_id: Optional[int],
363
+ bus_id_to_name: Dict[int, str],
364
+ carrier_id_to_name: Dict[int, str],
365
+ component_type_map: Dict[str, str],
366
+ include_unmet_loads: bool = True,
367
+ ):
368
+ """Load generator and unmet load components (single network per database)."""
369
+ generators = list_components_by_type(conn, "GENERATOR")
370
+
371
+ # Conditionally load unmet loads based on parameter
372
+ if include_unmet_loads:
373
+ unmet_loads = list_components_by_type(conn, "UNMET_LOAD")
374
+ all_generators = generators + unmet_loads
375
+ else:
376
+ all_generators = generators
377
+
378
+ generator_ids = [gen.id for gen in all_generators]
379
+
380
+ generator_attributes = self.batch_loader.batch_load_component_attributes(
381
+ conn, generator_ids, scenario_id
382
+ )
383
+ generator_timeseries = self.batch_loader.batch_load_component_timeseries(
384
+ conn, generator_ids, scenario_id
385
+ )
386
+
387
+ for gen in all_generators:
388
+ attrs = generator_attributes.get(gen.id, {})
389
+ timeseries = generator_timeseries.get(gen.id, {})
390
+
391
+ # Set bus connection
392
+ if gen.bus_id:
393
+ bus_name = bus_id_to_name.get(gen.bus_id, f"bus_{gen.bus_id}")
394
+ attrs["bus"] = bus_name
395
+
396
+ # Set carrier
397
+ if gen.carrier_id:
398
+ carrier_name = carrier_id_to_name.get(gen.carrier_id, "-")
399
+ attrs["carrier"] = carrier_name
400
+ else:
401
+ attrs["carrier"] = "-"
402
+
403
+ # Merge timeseries into attributes
404
+ attrs.update(timeseries)
405
+
406
+ component_type_map[gen.name] = gen.component_type
407
+ network.add("Generator", gen.name, **attrs)
408
+
409
+ def _load_loads(
410
+ self,
411
+ conn,
412
+ network: "pypsa.Network",
413
+ scenario_id: Optional[int],
414
+ bus_id_to_name: Dict[int, str],
415
+ carrier_id_to_name: Dict[int, str],
416
+ ):
417
+ """Load load components (single network per database)."""
418
+ loads = list_components_by_type(conn, "LOAD")
419
+ load_ids = [load.id for load in loads]
420
+
421
+ load_attributes = self.batch_loader.batch_load_component_attributes(
422
+ conn, load_ids, scenario_id
423
+ )
424
+ load_timeseries = self.batch_loader.batch_load_component_timeseries(
425
+ conn, load_ids, scenario_id
426
+ )
427
+
428
+ for load in loads:
429
+ attrs = load_attributes.get(load.id, {})
430
+ timeseries = load_timeseries.get(load.id, {})
431
+
432
+ if load.bus_id:
433
+ bus_name = bus_id_to_name.get(load.bus_id, f"bus_{load.bus_id}")
434
+ attrs["bus"] = bus_name
435
+
436
+ if load.carrier_id:
437
+ carrier_name = carrier_id_to_name.get(load.carrier_id, "-")
438
+ attrs["carrier"] = carrier_name
439
+ else:
440
+ attrs["carrier"] = "-"
441
+
442
+ # Merge timeseries into attributes
443
+ attrs.update(timeseries)
444
+
445
+ network.add("Load", load.name, **attrs)
446
+
447
+ def _load_lines(
448
+ self,
449
+ conn,
450
+ network: "pypsa.Network",
451
+ scenario_id: Optional[int],
452
+ bus_id_to_name: Dict[int, str],
453
+ carrier_id_to_name: Dict[int, str],
454
+ ):
455
+ """Load line components (single network per database)."""
456
+ lines = list_components_by_type(conn, "LINE")
457
+ line_ids = [line.id for line in lines]
458
+
459
+ line_attributes = self.batch_loader.batch_load_component_attributes(
460
+ conn, line_ids, scenario_id
461
+ )
462
+ line_timeseries = self.batch_loader.batch_load_component_timeseries(
463
+ conn, line_ids, scenario_id
464
+ )
465
+
466
+ for line in lines:
467
+ attrs = line_attributes.get(line.id, {})
468
+ timeseries = line_timeseries.get(line.id, {})
469
+
470
+ if line.bus0_id and line.bus1_id:
471
+ bus0_name = bus_id_to_name.get(line.bus0_id, f"bus_{line.bus0_id}")
472
+ bus1_name = bus_id_to_name.get(line.bus1_id, f"bus_{line.bus1_id}")
473
+ attrs["bus0"] = bus0_name
474
+ attrs["bus1"] = bus1_name
475
+
476
+ if line.carrier_id:
477
+ carrier_name = carrier_id_to_name.get(line.carrier_id, "AC")
478
+ attrs["carrier"] = carrier_name
479
+ else:
480
+ attrs["carrier"] = "AC"
481
+
482
+ # Merge timeseries into attributes
483
+ attrs.update(timeseries)
484
+
485
+ network.add("Line", line.name, **attrs)
486
+
487
+ def _load_links(
488
+ self,
489
+ conn,
490
+ network: "pypsa.Network",
491
+ scenario_id: Optional[int],
492
+ bus_id_to_name: Dict[int, str],
493
+ carrier_id_to_name: Dict[int, str],
494
+ ):
495
+ """Load link components (single network per database)."""
496
+ links = list_components_by_type(conn, "LINK")
497
+ link_ids = [link.id for link in links]
498
+
499
+ link_attributes = self.batch_loader.batch_load_component_attributes(
500
+ conn, link_ids, scenario_id
501
+ )
502
+ link_timeseries = self.batch_loader.batch_load_component_timeseries(
503
+ conn, link_ids, scenario_id
504
+ )
505
+
506
+ for link in links:
507
+ attrs = link_attributes.get(link.id, {})
508
+ timeseries = link_timeseries.get(link.id, {})
509
+
510
+ if link.bus0_id and link.bus1_id:
511
+ bus0_name = bus_id_to_name.get(link.bus0_id, f"bus_{link.bus0_id}")
512
+ bus1_name = bus_id_to_name.get(link.bus1_id, f"bus_{link.bus1_id}")
513
+ attrs["bus0"] = bus0_name
514
+ attrs["bus1"] = bus1_name
515
+
516
+ if link.carrier_id:
517
+ carrier_name = carrier_id_to_name.get(link.carrier_id, "DC")
518
+ attrs["carrier"] = carrier_name
519
+ else:
520
+ attrs["carrier"] = "DC"
521
+
522
+ # Merge timeseries into attributes
523
+ attrs.update(timeseries)
524
+
525
+ network.add("Link", link.name, **attrs)
526
+
527
+ def _load_storage_units(
528
+ self,
529
+ conn,
530
+ network: "pypsa.Network",
531
+ scenario_id: Optional[int],
532
+ bus_id_to_name: Dict[int, str],
533
+ carrier_id_to_name: Dict[int, str],
534
+ ):
535
+ """Load storage unit components (single network per database)."""
536
+ storage_units = list_components_by_type(conn, "STORAGE_UNIT")
537
+ storage_ids = [storage.id for storage in storage_units]
538
+
539
+ storage_attributes = self.batch_loader.batch_load_component_attributes(
540
+ conn, storage_ids, scenario_id
541
+ )
542
+ storage_timeseries = self.batch_loader.batch_load_component_timeseries(
543
+ conn, storage_ids, scenario_id
544
+ )
545
+
546
+ for storage in storage_units:
547
+ attrs = storage_attributes.get(storage.id, {})
548
+ timeseries = storage_timeseries.get(storage.id, {})
549
+
550
+ if storage.bus_id:
551
+ bus_name = bus_id_to_name.get(storage.bus_id, f"bus_{storage.bus_id}")
552
+ attrs["bus"] = bus_name
553
+
554
+ if storage.carrier_id:
555
+ carrier_name = carrier_id_to_name.get(storage.carrier_id, "-")
556
+ attrs["carrier"] = carrier_name
557
+ else:
558
+ attrs["carrier"] = "-"
559
+
560
+ # Merge timeseries into attributes
561
+ attrs.update(timeseries)
562
+
563
+ network.add("StorageUnit", storage.name, **attrs)
564
+
565
+ def _load_stores(
566
+ self,
567
+ conn,
568
+ network: "pypsa.Network",
569
+ scenario_id: Optional[int],
570
+ bus_id_to_name: Dict[int, str],
571
+ carrier_id_to_name: Dict[int, str],
572
+ ):
573
+ """Load store components (single network per database)."""
574
+ stores = list_components_by_type(conn, "STORE")
575
+ store_ids = [store.id for store in stores]
576
+
577
+ store_attributes = self.batch_loader.batch_load_component_attributes(
578
+ conn, store_ids, scenario_id
579
+ )
580
+ store_timeseries = self.batch_loader.batch_load_component_timeseries(
581
+ conn, store_ids, scenario_id
582
+ )
583
+
584
+ for store in stores:
585
+ attrs = store_attributes.get(store.id, {})
586
+ timeseries = store_timeseries.get(store.id, {})
587
+
588
+ if store.bus_id:
589
+ bus_name = bus_id_to_name.get(store.bus_id, f"bus_{store.bus_id}")
590
+ attrs["bus"] = bus_name
591
+
592
+ if store.carrier_id:
593
+ carrier_name = carrier_id_to_name.get(store.carrier_id, "-")
594
+ attrs["carrier"] = carrier_name
595
+ else:
596
+ attrs["carrier"] = "-"
597
+
598
+ # Merge timeseries into attributes
599
+ attrs.update(timeseries)
600
+
601
+ network.add("Store", store.name, **attrs)
602
+
603
+ def _set_snapshot_weightings(self, conn, network: "pypsa.Network"):
604
+ """Set snapshot weightings from time periods (single network per database)."""
605
+ try:
606
+ time_periods = get_network_time_periods(conn)
607
+ if time_periods and len(network.snapshots) > 0:
608
+ # Get network info to determine time interval
609
+ network_info = get_network_info(conn)
610
+ time_interval = network_info.get("time_interval", "PT1H")
611
+ weight = self._parse_time_interval(time_interval)
612
+ if weight is None:
613
+ weight = 1.0
614
+
615
+ # Create weightings array - all periods get same weight
616
+ weightings = [weight] * len(time_periods)
617
+
618
+ if len(weightings) == len(network.snapshots):
619
+ # Set all three columns like the old code - critical for proper objective calculation
620
+ network.snapshot_weightings.loc[:, "objective"] = weightings
621
+ network.snapshot_weightings.loc[:, "generators"] = weightings
622
+ network.snapshot_weightings.loc[:, "stores"] = weightings
623
+ except Exception as e:
624
+ pass # Failed to set snapshot weightings
625
+
626
+ def _parse_time_interval(self, time_interval: str) -> Optional[float]:
627
+ """Parse time interval string to hours."""
628
+ if not time_interval:
629
+ return None
630
+
631
+ try:
632
+ # Handle pandas frequency strings
633
+ if time_interval.endswith("H"):
634
+ return float(time_interval[:-1])
635
+ elif time_interval.endswith("D"):
636
+ return float(time_interval[:-1]) * 24
637
+ elif time_interval.endswith("M"):
638
+ return float(time_interval[:-1]) / 60
639
+ elif time_interval.endswith("S"):
640
+ return float(time_interval[:-1]) / 3600
641
+ else:
642
+ # Try to parse as float (assume hours)
643
+ return float(time_interval)
644
+ except (ValueError, TypeError):
645
+ return None
646
+
647
+ def _build_bus_id_to_name_map(self, conn) -> Dict[int, str]:
648
+ """Build mapping from bus IDs to names (single network per database)."""
649
+ buses = list_components_by_type(conn, "BUS")
650
+ return {bus.id: bus.name for bus in buses}
651
+
652
+ def _build_carrier_id_to_name_map(self, conn) -> Dict[int, str]:
653
+ """Build mapping from carrier IDs to names (single network per database)."""
654
+ cursor = conn.execute("SELECT id, name FROM carriers")
655
+ return {row[0]: row[1] for row in cursor.fetchall()}