pyconvexity 0.4.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Note: this version of pyconvexity has been flagged as a potentially problematic release.
- pyconvexity/__init__.py +226 -0
- pyconvexity/_version.py +1 -0
- pyconvexity/core/__init__.py +60 -0
- pyconvexity/core/database.py +485 -0
- pyconvexity/core/errors.py +106 -0
- pyconvexity/core/types.py +400 -0
- pyconvexity/data/README.md +101 -0
- pyconvexity/data/__init__.py +17 -0
- pyconvexity/data/loaders/__init__.py +3 -0
- pyconvexity/data/loaders/cache.py +213 -0
- pyconvexity/data/schema/01_core_schema.sql +420 -0
- pyconvexity/data/schema/02_data_metadata.sql +120 -0
- pyconvexity/data/schema/03_validation_data.sql +506 -0
- pyconvexity/data/sources/__init__.py +5 -0
- pyconvexity/data/sources/gem.py +442 -0
- pyconvexity/io/__init__.py +26 -0
- pyconvexity/io/excel_exporter.py +1226 -0
- pyconvexity/io/excel_importer.py +1381 -0
- pyconvexity/io/netcdf_exporter.py +197 -0
- pyconvexity/io/netcdf_importer.py +1833 -0
- pyconvexity/models/__init__.py +195 -0
- pyconvexity/models/attributes.py +730 -0
- pyconvexity/models/carriers.py +159 -0
- pyconvexity/models/components.py +611 -0
- pyconvexity/models/network.py +503 -0
- pyconvexity/models/results.py +148 -0
- pyconvexity/models/scenarios.py +234 -0
- pyconvexity/solvers/__init__.py +29 -0
- pyconvexity/solvers/pypsa/__init__.py +24 -0
- pyconvexity/solvers/pypsa/api.py +460 -0
- pyconvexity/solvers/pypsa/batch_loader.py +307 -0
- pyconvexity/solvers/pypsa/builder.py +675 -0
- pyconvexity/solvers/pypsa/constraints.py +405 -0
- pyconvexity/solvers/pypsa/solver.py +1509 -0
- pyconvexity/solvers/pypsa/storage.py +2048 -0
- pyconvexity/timeseries.py +330 -0
- pyconvexity/validation/__init__.py +25 -0
- pyconvexity/validation/rules.py +312 -0
- pyconvexity-0.4.3.dist-info/METADATA +47 -0
- pyconvexity-0.4.3.dist-info/RECORD +42 -0
- pyconvexity-0.4.3.dist-info/WHEEL +5 -0
- pyconvexity-0.4.3.dist-info/top_level.txt +1 -0
pyconvexity/solvers/pypsa/builder.py +675 -0

@@ -0,0 +1,675 @@
"""
Network building functionality for PyPSA solver integration.

Simplified to always use MultiIndex format for consistent multi-period optimization.
"""

import logging
import json
import pandas as pd
from typing import Dict, Any, Optional, Callable

from pyconvexity.models import (
    list_components_by_type,
    get_network_time_periods,
    get_network_info,
)

logger = logging.getLogger(__name__)


class NetworkBuilder:
    """
    Builds PyPSA networks from database data.

    Simplified to always create MultiIndex snapshots for consistent multi-period optimization,
    even for single-year models.
    """

    def __init__(self, verbose: bool = False):
        """
        Initialize NetworkBuilder.

        Args:
            verbose: Enable detailed logging output
        """
        self.verbose = verbose

        # Import PyPSA with error handling
        try:
            import pypsa

            self.pypsa = pypsa
        except ImportError as e:
            raise ImportError(
                "PyPSA is not installed or could not be imported. "
                "Please ensure it is installed correctly in the environment."
            ) from e

        # Import batch loader for efficient data loading
        from pyconvexity.solvers.pypsa.batch_loader import PyPSABatchLoader

        self.batch_loader = PyPSABatchLoader()

    def build_network(
        self,
        conn,
        scenario_id: Optional[int] = None,
        progress_callback: Optional[Callable[[int, str], None]] = None,
        include_unmet_loads: bool = True,
    ) -> "pypsa.Network":
        """
        Build complete PyPSA network from database (single network per database).

        Args:
            conn: Database connection
            scenario_id: Optional scenario ID
            progress_callback: Optional progress callback
            include_unmet_loads: Whether to include unmet load components (default: True)

        Returns:
            Configured PyPSA Network object
        """
        if progress_callback:
            progress_callback(0, "Loading network metadata...")

        # Load network info
        network_info = self._load_network_info(conn)

        if progress_callback:
            progress_callback(5, f"Building network: {network_info['name']}")

        # Create PyPSA network
        network = self.pypsa.Network(name=network_info["name"])

        # Set time index
        self._set_time_index(conn, network)

        if progress_callback:
            progress_callback(15, "Loading carriers...")

        # Load carriers
        self._load_carriers(conn, network)

        if progress_callback:
            progress_callback(20, "Loading components...")

        # Load all components using the efficient batch loader
        self._load_components(
            conn, network, scenario_id, progress_callback, include_unmet_loads
        )

        # NOTE: Snapshot weightings are set AFTER multi-period optimization setup
        # in the solver, not here. This matches the old code's approach, since PyPSA's
        # multi-period setup can reset snapshot weightings to 1.0.

        if progress_callback:
            progress_callback(95, "Network build complete")

        return network

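For orientation, a minimal usage sketch (assuming a SQLite-style connection object for `conn`, as the `conn.execute` calls below suggest; the database path and callback name are hypothetical):

# --- illustrative example, not part of builder.py ---
import sqlite3

from pyconvexity.solvers.pypsa.builder import NetworkBuilder

def report(pct: int, msg: str) -> None:
    # Progress callback: receives a stage percentage and a description
    print(f"[{pct:3d}%] {msg}")

conn = sqlite3.connect("model.db")  # hypothetical database path
builder = NetworkBuilder(verbose=True)
network = builder.build_network(conn, progress_callback=report)
print(len(network.snapshots), network.investment_periods)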
    def load_network_data(
        self, conn, scenario_id: Optional[int] = None
    ) -> Dict[str, Any]:
        """
        Load network data as a structured dictionary without building a PyPSA network (single network per database).

        Args:
            conn: Database connection
            scenario_id: Optional scenario ID

        Returns:
            Dictionary with all network data
        """
        data = {
            "network_info": self._load_network_info(conn),
            "carriers": self._load_carriers_data(conn),
            "components": {},
            "time_periods": [],
        }

        # Load time periods
        try:
            time_periods = get_network_time_periods(conn)
            data["time_periods"] = [
                {
                    "timestamp": tp.formatted_time,
                    "period_index": tp.period_index,
                    "weight": getattr(tp, "weight", 1.0),  # Weight not in new schema
                }
                for tp in time_periods
            ]
        except Exception as e:
            logger.warning(f"Failed to load time periods: {e}")

        # Load all component types
        component_types = [
            "BUS",
            "GENERATOR",
            "UNMET_LOAD",
            "LOAD",
            "LINE",
            "LINK",
            "STORAGE_UNIT",
            "STORE",
        ]

        for comp_type in component_types:
            try:
                components = list_components_by_type(conn, comp_type)
                if components:
                    data["components"][comp_type.lower()] = [
                        {
                            "id": comp.id,
                            "name": comp.name,
                            "component_type": comp.component_type,
                            "longitude": comp.longitude,
                            "latitude": comp.latitude,
                            "carrier_id": comp.carrier_id,
                            "bus_id": comp.bus_id,
                            "bus0_id": comp.bus0_id,
                            "bus1_id": comp.bus1_id,
                        }
                        for comp in components
                    ]
            except Exception as e:
                logger.warning(f"Failed to load {comp_type} components: {e}")

        return data

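The shape of the returned dictionary, sketched with illustrative placeholders (field names come straight from the code above; the values are not real data):

# --- illustrative example, not part of builder.py ---
data = builder.load_network_data(conn)
# {
#     "network_info": {...},  # whatever get_network_info(conn) returns
#     "carriers": [{"name": ..., "co2_emissions": ..., "nice_name": ..., "color": ...}],
#     "time_periods": [{"timestamp": ..., "period_index": ..., "weight": 1.0}],
#     "components": {
#         "bus": [{"id": ..., "name": ..., "bus_id": None, ...}],
#         "generator": [...],  # keys are the component type names, lower-cased
#     },
# }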
    def _load_network_info(self, conn) -> Dict[str, Any]:
        """Load network metadata (single network per database)."""
        from pyconvexity.models import get_network_info

        return get_network_info(conn)

    def _set_time_index(self, conn, network: "pypsa.Network"):
        """Set time index from network time periods - always create a MultiIndex for consistency."""
        try:
            time_periods = get_network_time_periods(conn)
            if not time_periods:
                logger.error("No time periods found for network")
                return

            # Convert to pandas timestamps
            timestamps = [pd.Timestamp(tp.formatted_time) for tp in time_periods]

            # Extract unique years for investment periods
            years = sorted({ts.year for ts in timestamps})

            # Always create a MultiIndex following the PyPSA multi-investment tutorial format.
            # First level: investment periods (years); second level: timesteps.
            multi_snapshots = [(ts.year, ts) for ts in timestamps]

            multi_index = pd.MultiIndex.from_tuples(
                multi_snapshots, names=["period", "timestep"]
            )

            # Verify MultiIndex is unique (should always be true now with UTC timestamps)
            if not multi_index.is_unique:
                raise ValueError(
                    "Created MultiIndex is not unique! Check timestamp generation."
                )

            network.set_snapshots(multi_index)

            # Set investment periods for multi-period optimization
            network.investment_periods = years

            # Store years for statistics
            network._available_years = years

            logger.info(
                f"Time index: {len(multi_index)} snapshots across {len(years)} investment periods: {years}"
            )

        except Exception as e:
            logger.error(f"Failed to set time index: {e}")
            network._available_years = []

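A small standalone sketch of the snapshot structure this method produces, using made-up hourly timestamps spanning two investment periods:

# --- illustrative example, not part of builder.py ---
import pandas as pd

# Hypothetical hourly timesteps in two investment periods (2030 and 2040)
timestamps = list(pd.date_range("2030-01-01", periods=3, freq="h")) + list(
    pd.date_range("2040-01-01", periods=3, freq="h")
)

multi_index = pd.MultiIndex.from_tuples(
    [(ts.year, ts) for ts in timestamps], names=["period", "timestep"]
)
print(multi_index.is_unique)                   # True
print(sorted({ts.year for ts in timestamps}))  # [2030, 2040]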
    def _load_carriers(self, conn, network: "pypsa.Network"):
        """Load carriers into PyPSA network (single network per database)."""
        carriers = self._load_carriers_data(conn)
        for carrier in carriers:
            filtered_attrs = self._filter_carrier_attrs(carrier)
            network.add("Carrier", carrier["name"], **filtered_attrs)

    def _load_carriers_data(self, conn) -> list:
        """Load carrier data from database (single network per database)."""
        cursor = conn.execute(
            """
            SELECT name, co2_emissions, nice_name, color
            FROM carriers
            ORDER BY name
            """
        )

        carriers = []
        for row in cursor.fetchall():
            carriers.append(
                {
                    "name": row[0],
                    "co2_emissions": row[1],
                    "nice_name": row[2],
                    "color": row[3],
                }
            )

        return carriers

    def _filter_carrier_attrs(self, carrier: Dict[str, Any]) -> Dict[str, Any]:
        """Filter carrier attributes for PyPSA compatibility: drop the name key and any None values."""
        filtered = {}
        for key, value in carrier.items():
            if key != "name" and value is not None:
                filtered[key] = value
        return filtered

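The effect of the None-filtering on one illustrative carrier row:

# --- illustrative example, not part of builder.py ---
carrier = {"name": "gas", "co2_emissions": 0.2, "nice_name": None, "color": None}
# _filter_carrier_attrs drops the "name" key (it is passed positionally to
# network.add) and any None values, so the resulting call is:
#   network.add("Carrier", "gas", co2_emissions=0.2)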
    def _load_components(
        self,
        conn,
        network: "pypsa.Network",
        scenario_id: Optional[int],
        progress_callback: Optional[Callable[[int, str], None]] = None,
        include_unmet_loads: bool = True,
    ):
        """Load all network components using batch loader (single network per database)."""
        # Load component connections
        connections = self.batch_loader.batch_load_component_connections(conn)
        bus_id_to_name = connections["bus_id_to_name"]
        carrier_id_to_name = connections["carrier_id_to_name"]

        # Component type mapping for later identification
        component_type_map = {}

        # Load buses
        if progress_callback:
            progress_callback(25, "Loading buses...")
        self._load_buses(conn, network, scenario_id, component_type_map)

        # Load generators (including unmet loads if requested)
        if progress_callback:
            progress_callback(35, "Loading generators...")
        self._load_generators(
            conn,
            network,
            scenario_id,
            bus_id_to_name,
            carrier_id_to_name,
            component_type_map,
            include_unmet_loads,
        )

        # Load loads
        if progress_callback:
            progress_callback(50, "Loading loads...")
        self._load_loads(conn, network, scenario_id, bus_id_to_name, carrier_id_to_name)

        # Load lines
        if progress_callback:
            progress_callback(65, "Loading lines...")
        self._load_lines(conn, network, scenario_id, bus_id_to_name, carrier_id_to_name)

        # Load links
        if progress_callback:
            progress_callback(75, "Loading links...")
        self._load_links(conn, network, scenario_id, bus_id_to_name, carrier_id_to_name)

        # Load storage units and stores
        if progress_callback:
            progress_callback(85, "Loading storage...")
        self._load_storage_units(
            conn, network, scenario_id, bus_id_to_name, carrier_id_to_name
        )
        self._load_stores(
            conn, network, scenario_id, bus_id_to_name, carrier_id_to_name
        )

        # Store component type mapping on network
        network._component_type_map = component_type_map

    def _load_buses(
        self,
        conn,
        network: "pypsa.Network",
        scenario_id: Optional[int],
        component_type_map: Dict[str, str],
    ):
        """Load bus components (single network per database)."""
        buses = list_components_by_type(conn, "BUS")
        bus_ids = [bus.id for bus in buses]

        bus_attributes = self.batch_loader.batch_load_component_attributes(
            conn, bus_ids, scenario_id
        )
        bus_timeseries = self.batch_loader.batch_load_component_timeseries(
            conn, bus_ids, scenario_id
        )

        for bus in buses:
            attrs = bus_attributes.get(bus.id, {})
            timeseries = bus_timeseries.get(bus.id, {})

            # Add coordinate data from components table (PyPSA uses 'x' for longitude, 'y' for latitude)
            if bus.longitude is not None:
                attrs["x"] = bus.longitude
            if bus.latitude is not None:
                attrs["y"] = bus.latitude

            # Merge timeseries into attributes
            attrs.update(timeseries)

            network.add("Bus", bus.name, **attrs)
            component_type_map[bus.name] = bus.component_type

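A quick way to inspect the result of the coordinate mapping (standard PyPSA static-data access; the bus name below is illustrative):

# --- illustrative example, not part of builder.py ---
# network.buses is a pandas DataFrame indexed by bus name;
# longitude lands in column "x" and latitude in "y", as set above.
print(network.buses[["x", "y"]].head())

# The builder also records each component's database type by name
# (attached to the network at the end of _load_components):
#   network._component_type_map["some_bus"]  ->  "BUS"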
    def _load_generators(
        self,
        conn,
        network: "pypsa.Network",
        scenario_id: Optional[int],
        bus_id_to_name: Dict[int, str],
        carrier_id_to_name: Dict[int, str],
        component_type_map: Dict[str, str],
        include_unmet_loads: bool = True,
    ):
        """Load generator and unmet load components (single network per database)."""
        generators = list_components_by_type(conn, "GENERATOR")

        # Conditionally load unmet loads based on parameter
        if include_unmet_loads:
            unmet_loads = list_components_by_type(conn, "UNMET_LOAD")
            all_generators = generators + unmet_loads
            if self.verbose:
                logger.info(
                    f"Loading {len(generators)} generators and {len(unmet_loads)} unmet loads"
                )
        else:
            all_generators = generators
            if self.verbose:
                logger.info(f"Loading {len(generators)} generators (unmet loads disabled)")

        generator_ids = [gen.id for gen in all_generators]

        generator_attributes = self.batch_loader.batch_load_component_attributes(
            conn, generator_ids, scenario_id
        )
        generator_timeseries = self.batch_loader.batch_load_component_timeseries(
            conn, generator_ids, scenario_id
        )

        for gen in all_generators:
            attrs = generator_attributes.get(gen.id, {})
            timeseries = generator_timeseries.get(gen.id, {})

            # Set bus connection
            if gen.bus_id:
                bus_name = bus_id_to_name.get(gen.bus_id, f"bus_{gen.bus_id}")
                attrs["bus"] = bus_name

            # Set carrier
            if gen.carrier_id:
                carrier_name = carrier_id_to_name.get(gen.carrier_id, "-")
                attrs["carrier"] = carrier_name
            else:
                attrs["carrier"] = "-"

            # Merge timeseries into attributes
            attrs.update(timeseries)

            component_type_map[gen.name] = gen.component_type
            network.add("Generator", gen.name, **attrs)

    def _load_loads(
        self,
        conn,
        network: "pypsa.Network",
        scenario_id: Optional[int],
        bus_id_to_name: Dict[int, str],
        carrier_id_to_name: Dict[int, str],
    ):
        """Load load components (single network per database)."""
        loads = list_components_by_type(conn, "LOAD")
        load_ids = [load.id for load in loads]

        load_attributes = self.batch_loader.batch_load_component_attributes(
            conn, load_ids, scenario_id
        )
        load_timeseries = self.batch_loader.batch_load_component_timeseries(
            conn, load_ids, scenario_id
        )

        for load in loads:
            attrs = load_attributes.get(load.id, {})
            timeseries = load_timeseries.get(load.id, {})

            if load.bus_id:
                bus_name = bus_id_to_name.get(load.bus_id, f"bus_{load.bus_id}")
                attrs["bus"] = bus_name

            if load.carrier_id:
                carrier_name = carrier_id_to_name.get(load.carrier_id, "-")
                attrs["carrier"] = carrier_name
            else:
                attrs["carrier"] = "-"

            # Merge timeseries into attributes
            attrs.update(timeseries)

            network.add("Load", load.name, **attrs)

    def _load_lines(
        self,
        conn,
        network: "pypsa.Network",
        scenario_id: Optional[int],
        bus_id_to_name: Dict[int, str],
        carrier_id_to_name: Dict[int, str],
    ):
        """Load line components (single network per database)."""
        lines = list_components_by_type(conn, "LINE")
        line_ids = [line.id for line in lines]

        line_attributes = self.batch_loader.batch_load_component_attributes(
            conn, line_ids, scenario_id
        )
        line_timeseries = self.batch_loader.batch_load_component_timeseries(
            conn, line_ids, scenario_id
        )

        for line in lines:
            attrs = line_attributes.get(line.id, {})
            timeseries = line_timeseries.get(line.id, {})

            if line.bus0_id and line.bus1_id:
                bus0_name = bus_id_to_name.get(line.bus0_id, f"bus_{line.bus0_id}")
                bus1_name = bus_id_to_name.get(line.bus1_id, f"bus_{line.bus1_id}")
                attrs["bus0"] = bus0_name
                attrs["bus1"] = bus1_name

            if line.carrier_id:
                carrier_name = carrier_id_to_name.get(line.carrier_id, "AC")
                attrs["carrier"] = carrier_name
            else:
                attrs["carrier"] = "AC"

            # Merge timeseries into attributes
            attrs.update(timeseries)

            network.add("Line", line.name, **attrs)

    def _load_links(
        self,
        conn,
        network: "pypsa.Network",
        scenario_id: Optional[int],
        bus_id_to_name: Dict[int, str],
        carrier_id_to_name: Dict[int, str],
    ):
        """Load link components (single network per database)."""
        links = list_components_by_type(conn, "LINK")
        link_ids = [link.id for link in links]

        link_attributes = self.batch_loader.batch_load_component_attributes(
            conn, link_ids, scenario_id
        )
        link_timeseries = self.batch_loader.batch_load_component_timeseries(
            conn, link_ids, scenario_id
        )

        for link in links:
            attrs = link_attributes.get(link.id, {})
            timeseries = link_timeseries.get(link.id, {})

            if link.bus0_id and link.bus1_id:
                bus0_name = bus_id_to_name.get(link.bus0_id, f"bus_{link.bus0_id}")
                bus1_name = bus_id_to_name.get(link.bus1_id, f"bus_{link.bus1_id}")
                attrs["bus0"] = bus0_name
                attrs["bus1"] = bus1_name

            if link.carrier_id:
                carrier_name = carrier_id_to_name.get(link.carrier_id, "DC")
                attrs["carrier"] = carrier_name
            else:
                attrs["carrier"] = "DC"

            # Merge timeseries into attributes
            attrs.update(timeseries)

            network.add("Link", link.name, **attrs)

    def _load_storage_units(
        self,
        conn,
        network: "pypsa.Network",
        scenario_id: Optional[int],
        bus_id_to_name: Dict[int, str],
        carrier_id_to_name: Dict[int, str],
    ):
        """Load storage unit components (single network per database)."""
        storage_units = list_components_by_type(conn, "STORAGE_UNIT")
        storage_ids = [storage.id for storage in storage_units]

        storage_attributes = self.batch_loader.batch_load_component_attributes(
            conn, storage_ids, scenario_id
        )
        storage_timeseries = self.batch_loader.batch_load_component_timeseries(
            conn, storage_ids, scenario_id
        )

        for storage in storage_units:
            attrs = storage_attributes.get(storage.id, {})
            timeseries = storage_timeseries.get(storage.id, {})

            if storage.bus_id:
                bus_name = bus_id_to_name.get(storage.bus_id, f"bus_{storage.bus_id}")
                attrs["bus"] = bus_name

            if storage.carrier_id:
                carrier_name = carrier_id_to_name.get(storage.carrier_id, "-")
                attrs["carrier"] = carrier_name
            else:
                attrs["carrier"] = "-"

            # Merge timeseries into attributes
            attrs.update(timeseries)

            network.add("StorageUnit", storage.name, **attrs)

    def _load_stores(
        self,
        conn,
        network: "pypsa.Network",
        scenario_id: Optional[int],
        bus_id_to_name: Dict[int, str],
        carrier_id_to_name: Dict[int, str],
    ):
        """Load store components (single network per database)."""
        stores = list_components_by_type(conn, "STORE")
        store_ids = [store.id for store in stores]

        store_attributes = self.batch_loader.batch_load_component_attributes(
            conn, store_ids, scenario_id
        )
        store_timeseries = self.batch_loader.batch_load_component_timeseries(
            conn, store_ids, scenario_id
        )

        for store in stores:
            attrs = store_attributes.get(store.id, {})
            timeseries = store_timeseries.get(store.id, {})

            if store.bus_id:
                bus_name = bus_id_to_name.get(store.bus_id, f"bus_{store.bus_id}")
                attrs["bus"] = bus_name

            if store.carrier_id:
                carrier_name = carrier_id_to_name.get(store.carrier_id, "-")
                attrs["carrier"] = carrier_name
            else:
                attrs["carrier"] = "-"

            # Merge timeseries into attributes
            attrs.update(timeseries)

            network.add("Store", store.name, **attrs)

    def _set_snapshot_weightings(self, conn, network: "pypsa.Network"):
        """Set snapshot weightings from time periods (single network per database)."""
        try:
            time_periods = get_network_time_periods(conn)
            if time_periods and len(network.snapshots) > 0:
                # Get network info to determine time interval
                network_info = get_network_info(conn)
                time_interval = network_info.get("time_interval", "PT1H")
                weight = self._parse_time_interval(time_interval)
                if weight is None:
                    weight = 1.0

                # Create weightings array - all periods get same weight
                weightings = [weight] * len(time_periods)

                if len(weightings) == len(network.snapshots):
                    # Set all three columns like the old code - critical for proper objective calculation
                    network.snapshot_weightings.loc[:, "objective"] = weightings
                    network.snapshot_weightings.loc[:, "generators"] = weightings
                    network.snapshot_weightings.loc[:, "stores"] = weightings
                else:
                    logger.warning(
                        f"Mismatch between weightings ({len(weightings)}) and snapshots ({len(network.snapshots)})"
                    )
        except Exception as e:
            logger.warning(f"Failed to set snapshot weightings: {e}")

    def _parse_time_interval(self, time_interval: str) -> Optional[float]:
        """Parse a time interval string to hours."""
        if not time_interval:
            return None

        try:
            # Handle ISO 8601 durations such as "PT1H" (the default used in
            # _set_snapshot_weightings) by stripping the "PT" prefix, then
            # pandas-style frequency strings ("1H", "1D", "30M", "3600S")
            if time_interval.startswith("PT"):
                time_interval = time_interval[2:]
            if time_interval.endswith("H"):
                return float(time_interval[:-1])
            elif time_interval.endswith("D"):
                return float(time_interval[:-1]) * 24
            elif time_interval.endswith("M"):
                return float(time_interval[:-1]) / 60
            elif time_interval.endswith("S"):
                return float(time_interval[:-1]) / 3600
            else:
                # Try to parse as a float (assume hours)
                return float(time_interval)
        except (ValueError, TypeError):
            logger.warning(f"Could not parse time interval: {time_interval}")
            return None

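Worked conversions, following the branches above:

# --- worked examples for _parse_time_interval, not part of builder.py ---
# "PT1H"  -> "1H" after stripping "PT" -> 1.0   (hours)
# "3H"    -> 3.0
# "1D"    -> 24.0
# "30M"   -> 0.5
# "1800S" -> 0.5
# "2.5"   -> 2.5   (bare number, assumed hours)
# "foo"   -> None  (warning logged; the caller falls back to a weight of 1.0)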
    def _build_bus_id_to_name_map(self, conn) -> Dict[int, str]:
        """Build mapping from bus IDs to names (single network per database)."""
        buses = list_components_by_type(conn, "BUS")
        return {bus.id: bus.name for bus in buses}

    def _build_carrier_id_to_name_map(self, conn) -> Dict[int, str]:
        """Build mapping from carrier IDs to names (single network per database)."""
        cursor = conn.execute("SELECT id, name FROM carriers")
        return {row[0]: row[1] for row in cursor.fetchall()}