pyconvexity 0.1.1-py3-none-any.whl → 0.1.3-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pyconvexity might be problematic.
- pyconvexity/__init__.py +30 -6
- pyconvexity/_version.py +1 -1
- pyconvexity/data/README.md +101 -0
- pyconvexity/data/__init__.py +18 -0
- pyconvexity/data/__pycache__/__init__.cpython-313.pyc +0 -0
- pyconvexity/data/loaders/__init__.py +3 -0
- pyconvexity/data/loaders/__pycache__/__init__.cpython-313.pyc +0 -0
- pyconvexity/data/loaders/__pycache__/cache.cpython-313.pyc +0 -0
- pyconvexity/data/loaders/cache.py +212 -0
- pyconvexity/data/sources/__init__.py +5 -0
- pyconvexity/data/sources/__pycache__/__init__.cpython-313.pyc +0 -0
- pyconvexity/data/sources/__pycache__/gem.cpython-313.pyc +0 -0
- pyconvexity/data/sources/gem.py +412 -0
- pyconvexity/io/__init__.py +32 -0
- pyconvexity/io/excel_exporter.py +991 -0
- pyconvexity/io/excel_importer.py +1112 -0
- pyconvexity/io/netcdf_exporter.py +192 -0
- pyconvexity/io/netcdf_importer.py +599 -0
- pyconvexity/models/__init__.py +7 -0
- pyconvexity/models/attributes.py +3 -1
- pyconvexity/models/components.py +3 -0
- pyconvexity/models/scenarios.py +177 -0
- pyconvexity/solvers/__init__.py +29 -0
- pyconvexity/solvers/pypsa/__init__.py +24 -0
- pyconvexity/solvers/pypsa/api.py +398 -0
- pyconvexity/solvers/pypsa/batch_loader.py +311 -0
- pyconvexity/solvers/pypsa/builder.py +656 -0
- pyconvexity/solvers/pypsa/constraints.py +321 -0
- pyconvexity/solvers/pypsa/solver.py +1255 -0
- pyconvexity/solvers/pypsa/storage.py +2207 -0
- {pyconvexity-0.1.1.dist-info → pyconvexity-0.1.3.dist-info}/METADATA +5 -2
- pyconvexity-0.1.3.dist-info/RECORD +45 -0
- pyconvexity-0.1.1.dist-info/RECORD +0 -20
- {pyconvexity-0.1.1.dist-info → pyconvexity-0.1.3.dist-info}/WHEEL +0 -0
- {pyconvexity-0.1.1.dist-info → pyconvexity-0.1.3.dist-info}/top_level.txt +0 -0
pyconvexity/solvers/pypsa/builder.py
@@ -0,0 +1,656 @@
"""
Network building functionality for PyPSA solver integration.

Handles loading data from database and constructing PyPSA Network objects.
"""

import logging
import pandas as pd
from typing import Dict, Any, Optional, Callable

from pyconvexity.models import (
    list_components_by_type, get_network_time_periods, get_network_config
)

logger = logging.getLogger(__name__)


class NetworkBuilder:
    """
    Builds PyPSA networks from database data.

    This class handles the complex process of loading network components,
    attributes, and time series data from the database and constructing
    a properly configured PyPSA Network object.
    """

    def __init__(self):
        # Import PyPSA with error handling
        try:
            import pypsa
            self.pypsa = pypsa
        except ImportError as e:
            raise ImportError(
                "PyPSA is not installed or could not be imported. "
                "Please ensure it is installed correctly in the environment."
            ) from e

        # Import batch loader for efficient data loading
        try:
            from pyconvexity.solvers.pypsa.batch_loader import PyPSABatchLoader
            self.batch_loader = PyPSABatchLoader()
        except ImportError:
            # Fall back to individual loading if batch loader not available
            self.batch_loader = None
            logger.warning("PyPSABatchLoader not available, using individual component loading")

    def build_network(
        self,
        conn,
        network_id: int,
        scenario_id: Optional[int] = None,
        progress_callback: Optional[Callable[[int, str], None]] = None
    ) -> 'pypsa.Network':
        """
        Build complete PyPSA network from database.

        Args:
            conn: Database connection
            network_id: ID of network to build
            scenario_id: Optional scenario ID
            progress_callback: Optional progress callback

        Returns:
            Configured PyPSA Network object
        """
        if progress_callback:
            progress_callback(0, "Loading network metadata...")

        # Load network info
        network_info = self._load_network_info(conn, network_id)

        if progress_callback:
            progress_callback(5, f"Building network: {network_info['name']}")

        # Create PyPSA network
        network = self.pypsa.Network(name=network_info['name'])

        # Set time index
        self._set_time_index(conn, network_id, network)

        if progress_callback:
            progress_callback(15, "Loading carriers...")

        # Load carriers
        self._load_carriers(conn, network_id, network)

        if progress_callback:
            progress_callback(20, "Loading components...")

        # Load all components
        self._load_components(conn, network_id, network, scenario_id, progress_callback)

        # NOTE: Snapshot weightings will be set AFTER multi-period optimization setup
        # in the solver, not here. This matches the old code's approach, where PyPSA's
        # multi-period setup can reset snapshot weightings to 1.0.

        if progress_callback:
            progress_callback(95, "Network build complete")

        return network

    def load_network_data(
        self,
        conn,
        network_id: int,
        scenario_id: Optional[int] = None
    ) -> Dict[str, Any]:
        """
        Load network data as structured dictionary without building PyPSA network.

        Args:
            conn: Database connection
            network_id: ID of network to load
            scenario_id: Optional scenario ID

        Returns:
            Dictionary with all network data
        """
        data = {
            "network_info": self._load_network_info(conn, network_id),
            "carriers": self._load_carriers_data(conn, network_id),
            "components": {},
            "time_periods": []
        }

        # Load time periods
        try:
            time_periods = get_network_time_periods(conn, network_id)
            data["time_periods"] = [
                {
                    "timestamp": tp.formatted_time,
                    "period_index": tp.period_index,
                    "weight": tp.weight
                }
                for tp in time_periods
            ]
        except Exception as e:
            logger.warning(f"Failed to load time periods: {e}")

        # Load all component types
        component_types = ['BUS', 'GENERATOR', 'UNMET_LOAD', 'LOAD', 'LINE', 'LINK', 'STORAGE_UNIT', 'STORE']

        for comp_type in component_types:
            try:
                components = list_components_by_type(conn, network_id, comp_type)
                if components:
                    data["components"][comp_type.lower()] = [
                        {
                            "id": comp.id,
                            "name": comp.name,
                            "component_type": comp.component_type,
                            "longitude": comp.longitude,
                            "latitude": comp.latitude,
                            "carrier_id": comp.carrier_id,
                            "bus_id": comp.bus_id,
                            "bus0_id": comp.bus0_id,
                            "bus1_id": comp.bus1_id
                        }
                        for comp in components
                    ]
            except Exception as e:
                logger.warning(f"Failed to load {comp_type} components: {e}")

        return data

    def _load_network_info(self, conn, network_id: int) -> Dict[str, Any]:
        """Load network metadata."""
        cursor = conn.execute("""
            SELECT name, description, time_start, time_end, time_interval
            FROM networks
            WHERE id = ?
        """, (network_id,))

        row = cursor.fetchone()
        if not row:
            raise ValueError(f"Network with ID {network_id} not found")

        return {
            'name': row[0],
            'description': row[1],
            'time_start': row[2],
            'time_end': row[3],
            'time_interval': row[4]
        }

    def _set_time_index(self, conn, network_id: int, network: 'pypsa.Network'):
        """Set time index from network time periods."""
        try:
            time_periods = get_network_time_periods(conn, network_id)
            if time_periods:
                # Convert to pandas DatetimeIndex
                timestamps = [pd.Timestamp(tp.formatted_time) for tp in time_periods]

                # Set the snapshots to the timestamps
                network.set_snapshots(timestamps)

                # Extract years for year-based statistics
                try:
                    if hasattr(network.snapshots, 'year'):
                        years = sorted(network.snapshots.year.unique())
                        network._available_years = years
                        logger.info(f"Extracted {len(years)} years from network.snapshots.year: {years}")
                    else:
                        # Manually extract years from timestamps
                        years_from_timestamps = sorted(list(set([ts.year for ts in timestamps])))
                        network._available_years = years_from_timestamps
                        logger.info(f"Extracted {len(years_from_timestamps)} years from timestamps: {years_from_timestamps}")

                except Exception as year_error:
                    logger.warning(f"Failed to extract years for year-based statistics: {year_error}")
                    network._available_years = []

            else:
                logger.warning("No time periods found for network, year-based statistics will not be available")
                network._available_years = []
        except Exception as e:
            logger.error(f"Failed to set time index: {e}")
            network._available_years = []

    def _load_carriers(self, conn, network_id: int, network: 'pypsa.Network'):
        """Load carriers into PyPSA network."""
        carriers = self._load_carriers_data(conn, network_id)
        for carrier in carriers:
            filtered_attrs = self._filter_carrier_attrs(carrier)
            network.add("Carrier", carrier['name'], **filtered_attrs)

    def _load_carriers_data(self, conn, network_id: int) -> list:
        """Load carrier data from database."""
        cursor = conn.execute("""
            SELECT name, co2_emissions, nice_name, color
            FROM carriers
            WHERE network_id = ?
            ORDER BY name
        """, (network_id,))

        carriers = []
        for row in cursor.fetchall():
            carriers.append({
                'name': row[0],
                'co2_emissions': row[1],
                'nice_name': row[2],
                'color': row[3]
            })

        return carriers

    def _filter_carrier_attrs(self, carrier: Dict[str, Any]) -> Dict[str, Any]:
        """Filter carrier attributes for PyPSA compatibility."""
        filtered = {}
        for key, value in carrier.items():
            if key != 'name' and value is not None:
                filtered[key] = value
        return filtered

    def _load_components(
        self,
        conn,
        network_id: int,
        network: 'pypsa.Network',
        scenario_id: Optional[int],
        progress_callback: Optional[Callable[[int, str], None]] = None
    ):
        """Load all network components."""
        # Load component connections if batch loader available
        if self.batch_loader:
            connections = self.batch_loader.batch_load_component_connections(conn, network_id)
            bus_id_to_name = connections['bus_id_to_name']
            carrier_id_to_name = connections['carrier_id_to_name']
        else:
            bus_id_to_name = self._build_bus_id_to_name_map(conn, network_id)
            carrier_id_to_name = self._build_carrier_id_to_name_map(conn, network_id)

        # Component type mapping for later identification
        component_type_map = {}

        # Load buses
        if progress_callback:
            progress_callback(25, "Loading buses...")
        self._load_buses(conn, network_id, network, scenario_id, component_type_map)

        # Load generators (including unmet loads)
        if progress_callback:
            progress_callback(35, "Loading generators...")
        self._load_generators(conn, network_id, network, scenario_id, bus_id_to_name, carrier_id_to_name, component_type_map)

        # Load loads
        if progress_callback:
            progress_callback(50, "Loading loads...")
        self._load_loads(conn, network_id, network, scenario_id, bus_id_to_name, carrier_id_to_name)

        # Load lines
        if progress_callback:
            progress_callback(65, "Loading lines...")
        self._load_lines(conn, network_id, network, scenario_id, bus_id_to_name, carrier_id_to_name)

        # Load links
        if progress_callback:
            progress_callback(75, "Loading links...")
        self._load_links(conn, network_id, network, scenario_id, bus_id_to_name, carrier_id_to_name)

        # Load storage units
        if progress_callback:
            progress_callback(85, "Loading storage...")
        self._load_storage_units(conn, network_id, network, scenario_id, bus_id_to_name, carrier_id_to_name)
        self._load_stores(conn, network_id, network, scenario_id, bus_id_to_name, carrier_id_to_name)

        # Store component type mapping on network
        network._component_type_map = component_type_map

    def _load_buses(self, conn, network_id: int, network: 'pypsa.Network', scenario_id: Optional[int], component_type_map: Dict[str, str]):
        """Load bus components."""
        buses = list_components_by_type(conn, network_id, 'BUS')
        bus_ids = [bus.id for bus in buses]

        if self.batch_loader:
            bus_attributes = self.batch_loader.batch_load_component_attributes(conn, bus_ids, scenario_id)
            bus_timeseries = self.batch_loader.batch_load_component_timeseries(conn, bus_ids, scenario_id)
        else:
            bus_attributes = self._load_component_attributes_individually(conn, bus_ids, scenario_id)
            bus_timeseries = {}

        for bus in buses:
            attrs = bus_attributes.get(bus.id, {})
            timeseries = bus_timeseries.get(bus.id, {})

            # Merge timeseries into attributes
            attrs.update(timeseries)

            network.add("Bus", bus.name, **attrs)
            component_type_map[bus.name] = bus.component_type

    def _load_generators(self, conn, network_id: int, network: 'pypsa.Network', scenario_id: Optional[int],
                         bus_id_to_name: Dict[int, str], carrier_id_to_name: Dict[int, str], component_type_map: Dict[str, str]):
        """Load generator and unmet load components."""
        generators = list_components_by_type(conn, network_id, 'GENERATOR')
        unmet_loads = list_components_by_type(conn, network_id, 'UNMET_LOAD')
        all_generators = generators + unmet_loads

        generator_ids = [gen.id for gen in all_generators]

        if self.batch_loader:
            generator_attributes = self.batch_loader.batch_load_component_attributes(conn, generator_ids, scenario_id)
            generator_timeseries = self.batch_loader.batch_load_component_timeseries(conn, generator_ids, scenario_id)
        else:
            generator_attributes = self._load_component_attributes_individually(conn, generator_ids, scenario_id)
            generator_timeseries = {}

        for gen in all_generators:
            attrs = generator_attributes.get(gen.id, {})
            timeseries = generator_timeseries.get(gen.id, {})

            # Set bus connection
            if gen.bus_id:
                bus_name = bus_id_to_name.get(gen.bus_id, f"bus_{gen.bus_id}")
                attrs['bus'] = bus_name

            # Set carrier
            if gen.carrier_id:
                carrier_name = carrier_id_to_name.get(gen.carrier_id, '-')
                attrs['carrier'] = carrier_name
            else:
                attrs['carrier'] = '-'

            # Merge timeseries into attributes
            attrs.update(timeseries)

            component_type_map[gen.name] = gen.component_type
            network.add("Generator", gen.name, **attrs)

    def _load_loads(self, conn, network_id: int, network: 'pypsa.Network', scenario_id: Optional[int],
                    bus_id_to_name: Dict[int, str], carrier_id_to_name: Dict[int, str]):
        """Load load components."""
        loads = list_components_by_type(conn, network_id, 'LOAD')
        load_ids = [load.id for load in loads]

        if self.batch_loader:
            load_attributes = self.batch_loader.batch_load_component_attributes(conn, load_ids, scenario_id)
            load_timeseries = self.batch_loader.batch_load_component_timeseries(conn, load_ids, scenario_id)
        else:
            load_attributes = self._load_component_attributes_individually(conn, load_ids, scenario_id)
            load_timeseries = {}

        for load in loads:
            attrs = load_attributes.get(load.id, {})
            timeseries = load_timeseries.get(load.id, {})

            if load.bus_id:
                bus_name = bus_id_to_name.get(load.bus_id, f"bus_{load.bus_id}")
                attrs['bus'] = bus_name

            if load.carrier_id:
                carrier_name = carrier_id_to_name.get(load.carrier_id, '-')
                attrs['carrier'] = carrier_name
            else:
                attrs['carrier'] = '-'

            # Merge timeseries into attributes
            attrs.update(timeseries)

            network.add("Load", load.name, **attrs)

    def _load_lines(self, conn, network_id: int, network: 'pypsa.Network', scenario_id: Optional[int],
                    bus_id_to_name: Dict[int, str], carrier_id_to_name: Dict[int, str]):
        """Load line components."""
        lines = list_components_by_type(conn, network_id, 'LINE')
        line_ids = [line.id for line in lines]

        if self.batch_loader:
            line_attributes = self.batch_loader.batch_load_component_attributes(conn, line_ids, scenario_id)
            line_timeseries = self.batch_loader.batch_load_component_timeseries(conn, line_ids, scenario_id)
        else:
            line_attributes = self._load_component_attributes_individually(conn, line_ids, scenario_id)
            line_timeseries = {}

        for line in lines:
            attrs = line_attributes.get(line.id, {})
            timeseries = line_timeseries.get(line.id, {})

            if line.bus0_id and line.bus1_id:
                bus0_name = bus_id_to_name.get(line.bus0_id, f"bus_{line.bus0_id}")
                bus1_name = bus_id_to_name.get(line.bus1_id, f"bus_{line.bus1_id}")
                attrs['bus0'] = bus0_name
                attrs['bus1'] = bus1_name

            if line.carrier_id:
                carrier_name = carrier_id_to_name.get(line.carrier_id, 'AC')
                attrs['carrier'] = carrier_name
            else:
                attrs['carrier'] = 'AC'

            # Merge timeseries into attributes
            attrs.update(timeseries)

            network.add("Line", line.name, **attrs)

    def _load_links(self, conn, network_id: int, network: 'pypsa.Network', scenario_id: Optional[int],
                    bus_id_to_name: Dict[int, str], carrier_id_to_name: Dict[int, str]):
        """Load link components."""
        links = list_components_by_type(conn, network_id, 'LINK')
        link_ids = [link.id for link in links]

        if self.batch_loader:
            link_attributes = self.batch_loader.batch_load_component_attributes(conn, link_ids, scenario_id)
            link_timeseries = self.batch_loader.batch_load_component_timeseries(conn, link_ids, scenario_id)
        else:
            link_attributes = self._load_component_attributes_individually(conn, link_ids, scenario_id)
            link_timeseries = {}

        for link in links:
            attrs = link_attributes.get(link.id, {})
            timeseries = link_timeseries.get(link.id, {})

            if link.bus0_id and link.bus1_id:
                bus0_name = bus_id_to_name.get(link.bus0_id, f"bus_{link.bus0_id}")
                bus1_name = bus_id_to_name.get(link.bus1_id, f"bus_{link.bus1_id}")
                attrs['bus0'] = bus0_name
                attrs['bus1'] = bus1_name

            if link.carrier_id:
                carrier_name = carrier_id_to_name.get(link.carrier_id, 'DC')
                attrs['carrier'] = carrier_name
            else:
                attrs['carrier'] = 'DC'

            # Merge timeseries into attributes
            attrs.update(timeseries)

            network.add("Link", link.name, **attrs)

    def _load_storage_units(self, conn, network_id: int, network: 'pypsa.Network', scenario_id: Optional[int],
                            bus_id_to_name: Dict[int, str], carrier_id_to_name: Dict[int, str]):
        """Load storage unit components."""
        storage_units = list_components_by_type(conn, network_id, 'STORAGE_UNIT')
        storage_ids = [storage.id for storage in storage_units]

        if self.batch_loader:
            storage_attributes = self.batch_loader.batch_load_component_attributes(conn, storage_ids, scenario_id)
            storage_timeseries = self.batch_loader.batch_load_component_timeseries(conn, storage_ids, scenario_id)
        else:
            storage_attributes = self._load_component_attributes_individually(conn, storage_ids, scenario_id)
            storage_timeseries = {}

        for storage in storage_units:
            attrs = storage_attributes.get(storage.id, {})
            timeseries = storage_timeseries.get(storage.id, {})

            if storage.bus_id:
                bus_name = bus_id_to_name.get(storage.bus_id, f"bus_{storage.bus_id}")
                attrs['bus'] = bus_name

            if storage.carrier_id:
                carrier_name = carrier_id_to_name.get(storage.carrier_id, '-')
                attrs['carrier'] = carrier_name
            else:
                attrs['carrier'] = '-'

            # Merge timeseries into attributes
            attrs.update(timeseries)

            network.add("StorageUnit", storage.name, **attrs)

    def _load_stores(self, conn, network_id: int, network: 'pypsa.Network', scenario_id: Optional[int],
                     bus_id_to_name: Dict[int, str], carrier_id_to_name: Dict[int, str]):
        """Load store components."""
        stores = list_components_by_type(conn, network_id, 'STORE')
        store_ids = [store.id for store in stores]

        if self.batch_loader:
            store_attributes = self.batch_loader.batch_load_component_attributes(conn, store_ids, scenario_id)
            store_timeseries = self.batch_loader.batch_load_component_timeseries(conn, store_ids, scenario_id)
        else:
            store_attributes = self._load_component_attributes_individually(conn, store_ids, scenario_id)
            store_timeseries = {}

        for store in stores:
            attrs = store_attributes.get(store.id, {})
            timeseries = store_timeseries.get(store.id, {})

            if store.bus_id:
                bus_name = bus_id_to_name.get(store.bus_id, f"bus_{store.bus_id}")
                attrs['bus'] = bus_name

            if store.carrier_id:
                carrier_name = carrier_id_to_name.get(store.carrier_id, '-')
                attrs['carrier'] = carrier_name
            else:
                attrs['carrier'] = '-'

            # Merge timeseries into attributes
            attrs.update(timeseries)

            network.add("Store", store.name, **attrs)

    def _set_snapshot_weightings(self, conn, network_id: int, network: 'pypsa.Network'):
        """Set snapshot weightings from time periods."""
        try:
            time_periods = get_network_time_periods(conn, network_id)
            if time_periods and len(network.snapshots) > 0:
                # Create weightings array
                weightings = []
                for tp in time_periods:
                    if tp.weight is not None:
                        weightings.append(tp.weight)
                    else:
                        # Calculate from time interval if weight not specified
                        network_config = get_network_config(conn, network_id)
                        time_interval = network_config.get('time_interval', '1H')
                        weight = self._parse_time_interval(time_interval)
                        weightings.append(weight if weight else 1.0)

                if len(weightings) == len(network.snapshots):
                    # Set all three columns like the old code - critical for proper objective calculation
                    network.snapshot_weightings.loc[:, 'objective'] = weightings
                    network.snapshot_weightings.loc[:, 'generators'] = weightings
                    network.snapshot_weightings.loc[:, 'stores'] = weightings
                    logger.info(f"Set snapshot weightings for {len(weightings)} time periods (objective, generators, stores)")
                else:
                    logger.warning(f"Mismatch between weightings ({len(weightings)}) and snapshots ({len(network.snapshots)})")
        except Exception as e:
            logger.warning(f"Failed to set snapshot weightings: {e}")

    def _parse_time_interval(self, time_interval: str) -> Optional[float]:
        """Parse time interval string to hours."""
        if not time_interval:
            return None

        try:
            # Handle pandas frequency strings
            if time_interval.endswith('H'):
                return float(time_interval[:-1])
            elif time_interval.endswith('D'):
                return float(time_interval[:-1]) * 24
            elif time_interval.endswith('M'):
                return float(time_interval[:-1]) / 60
            elif time_interval.endswith('S'):
                return float(time_interval[:-1]) / 3600
            else:
                # Try to parse as float (assume hours)
                return float(time_interval)
        except (ValueError, TypeError):
            logger.warning(f"Could not parse time interval: {time_interval}")
            return None

    def _build_bus_id_to_name_map(self, conn, network_id: int) -> Dict[int, str]:
        """Build mapping from bus IDs to names."""
        buses = list_components_by_type(conn, network_id, 'BUS')
        return {bus.id: bus.name for bus in buses}

    def _build_carrier_id_to_name_map(self, conn, network_id: int) -> Dict[int, str]:
        """Build mapping from carrier IDs to names."""
        cursor = conn.execute("SELECT id, name FROM carriers WHERE network_id = ?", (network_id,))
        return {row[0]: row[1] for row in cursor.fetchall()}

    def _load_component_attributes_individually(self, conn, component_ids: list, scenario_id: Optional[int]) -> Dict[int, Dict[str, Any]]:
        """Fallback method to load component attributes individually."""
        from pyconvexity.models import get_attribute, list_component_attributes
        from pyconvexity.core.types import AttributeValue
        from pyconvexity.models.network import get_network_time_periods
        import pandas as pd

        # Get network time periods for proper timestamp alignment
        network_time_periods = None
        if component_ids:
            cursor = conn.execute("SELECT network_id FROM components WHERE id = ? LIMIT 1", (component_ids[0],))
            result = cursor.fetchone()
            if result:
                network_id = result[0]
                try:
                    network_time_periods = get_network_time_periods(conn, network_id)
                except Exception as e:
                    logger.warning(f"Failed to load network time periods: {e}")

        attributes = {}
        for comp_id in component_ids:
            try:
                attr_names = list_component_attributes(conn, comp_id)
                comp_attrs = {}
                for attr_name in attr_names:
                    try:
                        attr_value = get_attribute(conn, comp_id, attr_name, scenario_id)
                        if attr_value is not None:
                            # Handle different attribute value types
                            if hasattr(attr_value, 'static_value') and attr_value.static_value is not None:
                                # Static value
                                comp_attrs[attr_name] = attr_value.static_value.value()
                            elif hasattr(attr_value, 'timeseries_value') and attr_value.timeseries_value is not None:
                                # Timeseries value - convert to pandas Series with proper timestamps
                                timeseries_points = attr_value.timeseries_value
                                if timeseries_points:
                                    # Sort by period_index to ensure correct order
                                    timeseries_points.sort(key=lambda x: x.period_index)
                                    values = [point.value for point in timeseries_points]

                                    # Create proper timestamps for PyPSA alignment
                                    if network_time_periods:
                                        timestamps = []
                                        for point in timeseries_points:
                                            if point.period_index < len(network_time_periods):
                                                tp = network_time_periods[point.period_index]
                                                timestamps.append(pd.Timestamp(tp.formatted_time))
                                            else:
                                                logger.warning(f"Period index {point.period_index} out of range for network time periods")
                                                timestamps.append(pd.Timestamp.now())  # Fallback
                                        comp_attrs[attr_name] = pd.Series(values, index=timestamps)
                                    else:
                                        # Fallback: use period_index as index
                                        period_indices = [point.period_index for point in timeseries_points]
                                        comp_attrs[attr_name] = pd.Series(values, index=period_indices)
                    except Exception as e:
                        logger.debug(f"Failed to load attribute {attr_name} for component {comp_id}: {e}")
                attributes[comp_id] = comp_attrs
            except Exception as e:
                logger.warning(f"Failed to load attributes for component {comp_id}: {e}")
                attributes[comp_id] = {}

        return attributes
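
For context, a minimal usage sketch of the builder added above. It assumes the package and PyPSA are installed and that the database is a SQLite file matching the schema the queries expect (the `?` placeholders suggest sqlite3-style connections); the database path and network ID here are hypothetical.

import sqlite3

from pyconvexity.solvers.pypsa.builder import NetworkBuilder

def log_progress(percent: int, message: str) -> None:
    # Matches the Callable[[int, str], None] progress_callback signature
    print(f"[{percent:3d}%] {message}")

conn = sqlite3.connect("energy_model.db")  # hypothetical database path
builder = NetworkBuilder()                 # raises ImportError if PyPSA is missing
network = builder.build_network(conn, network_id=1, progress_callback=log_progress)
print(len(network.snapshots), "snapshots,", len(network.generators), "generators")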
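Note that `_parse_time_interval` converts everything to hours, and as written reads a trailing `M` as minutes rather than pandas' month-end alias. A small sketch of the resulting conversions (instantiating via `__new__` to sidestep the PyPSA import in `__init__`, since the parser uses no instance state):

from pyconvexity.solvers.pypsa.builder import NetworkBuilder

parser = NetworkBuilder.__new__(NetworkBuilder)   # skip __init__; no PyPSA needed
assert parser._parse_time_interval("3H") == 3.0       # hours
assert parser._parse_time_interval("1D") == 24.0      # days -> hours
assert parser._parse_time_interval("30M") == 0.5      # minutes -> hours
assert parser._parse_time_interval("900S") == 0.25    # seconds -> hours
assert parser._parse_time_interval("0.5") == 0.5      # bare number read as hours
assert parser._parse_time_interval("weekly") is None  # unparseable -> None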