pyconvexity 0.4.0__py3-none-any.whl → 0.4.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyconvexity/__init__.py +87 -46
- pyconvexity/_version.py +1 -1
- pyconvexity/core/__init__.py +3 -5
- pyconvexity/core/database.py +111 -103
- pyconvexity/core/errors.py +16 -10
- pyconvexity/core/types.py +61 -54
- pyconvexity/data/__init__.py +0 -1
- pyconvexity/data/loaders/cache.py +65 -64
- pyconvexity/data/schema/01_core_schema.sql +134 -234
- pyconvexity/data/schema/02_data_metadata.sql +38 -168
- pyconvexity/data/schema/03_validation_data.sql +327 -264
- pyconvexity/data/sources/gem.py +169 -139
- pyconvexity/io/__init__.py +4 -10
- pyconvexity/io/excel_exporter.py +694 -480
- pyconvexity/io/excel_importer.py +817 -545
- pyconvexity/io/netcdf_exporter.py +66 -61
- pyconvexity/io/netcdf_importer.py +850 -619
- pyconvexity/models/__init__.py +109 -59
- pyconvexity/models/attributes.py +197 -178
- pyconvexity/models/carriers.py +70 -67
- pyconvexity/models/components.py +260 -236
- pyconvexity/models/network.py +202 -284
- pyconvexity/models/results.py +65 -55
- pyconvexity/models/scenarios.py +58 -88
- pyconvexity/solvers/__init__.py +5 -5
- pyconvexity/solvers/pypsa/__init__.py +3 -3
- pyconvexity/solvers/pypsa/api.py +150 -134
- pyconvexity/solvers/pypsa/batch_loader.py +165 -162
- pyconvexity/solvers/pypsa/builder.py +390 -291
- pyconvexity/solvers/pypsa/constraints.py +184 -162
- pyconvexity/solvers/pypsa/solver.py +968 -663
- pyconvexity/solvers/pypsa/storage.py +1377 -671
- pyconvexity/timeseries.py +63 -60
- pyconvexity/validation/__init__.py +14 -6
- pyconvexity/validation/rules.py +95 -84
- pyconvexity-0.4.1.dist-info/METADATA +46 -0
- pyconvexity-0.4.1.dist-info/RECORD +42 -0
- pyconvexity/data/schema/04_scenario_schema.sql +0 -122
- pyconvexity/data/schema/migrate_add_geometries.sql +0 -73
- pyconvexity-0.4.0.dist-info/METADATA +0 -138
- pyconvexity-0.4.0.dist-info/RECORD +0 -44
- {pyconvexity-0.4.0.dist-info → pyconvexity-0.4.1.dist-info}/WHEEL +0 -0
- {pyconvexity-0.4.0.dist-info → pyconvexity-0.4.1.dist-info}/top_level.txt +0 -0
@@ -14,41 +14,50 @@ import math
 # Import functions directly from pyconvexity
 from pyconvexity.core.database import open_connection, create_database_with_schema
 from pyconvexity.core.types import (
-    StaticValue,
+    StaticValue,
+    CreateNetworkRequest,
+    CreateComponentRequest,
 )
 from pyconvexity.core.errors import PyConvexityError as DbError, ValidationError
 from pyconvexity.models import (
-    create_network,
-    ...
-    ...
+    create_network,
+    create_carrier,
+    insert_component,
+    set_static_attribute,
+    get_bus_name_to_id_map,
+    set_timeseries_attribute,
+    get_component_type,
+    get_attribute,
+    get_network_time_periods,
 )
 from pyconvexity.validation import get_validation_rule
 from pyconvexity.timeseries import set_timeseries

 logger = logging.getLogger(__name__)

+
 class NetCDFModelImporter:
     """Import PyPSA NetCDF files into PyConvexity database format"""

     def __init__(self):
         self.logger = logging.getLogger(__name__)
         # Set random seed for reproducible coordinate generation
         random.seed(42)
         np.random.seed(42)
         self._used_names = set()  # Global registry of all used names

     def import_netcdf_to_database(
-        self,
-        netcdf_path: str,
-        db_path: str,
+        self,
+        netcdf_path: str,
+        db_path: str,
         network_name: str,
         network_description: Optional[str] = None,
         progress_callback: Optional[Callable[[int, str], None]] = None,
-        strict_validation: bool = False
+        strict_validation: bool = False,
     ) -> Dict[str, Any]:
         """
         Import a PyPSA NetCDF file into a new database.

         Args:
             netcdf_path: Path to the PyPSA NetCDF file
             db_path: Path where to create the database
@@ -58,26 +67,29 @@ class NetCDFModelImporter:
             strict_validation: Whether to skip undefined attributes rather than failing completely.
                 If True, will fail on any attribute not defined in the database schema.
                 If False (default), will skip undefined attributes with warnings.

         Returns:
             Dictionary with import results and statistics
         """
         try:
             if progress_callback:
                 progress_callback(0, "Starting NetCDF import...")

             # Import PyPSA
             pypsa = self._import_pypsa()

             if progress_callback:
                 progress_callback(5, "Loading PyPSA network from NetCDF...")

             # Load the PyPSA network
             network = pypsa.Network(netcdf_path)

             if progress_callback:
-                progress_callback(
-                    ...
+                progress_callback(
+                    15,
+                    f"Loaded network: {len(network.buses)} buses, {len(network.generators)} generators",
+                )
+
             # Use the shared import logic
             return self._import_network_to_database(
                 network=network,
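For orientation, here is a minimal usage sketch matching the signature above. The import path, file names, and callback are assumptions for illustration; only the class, method, parameters, and result keys come from this diff.

```python
# Minimal sketch, assuming the class is importable from
# pyconvexity.io.netcdf_importer (path inferred from the file list above).
from pyconvexity.io.netcdf_importer import NetCDFModelImporter


def on_progress(pct: int, message: str) -> None:
    # progress_callback receives (progress: int, message: str)
    print(f"[{pct:3d}%] {message}")


importer = NetCDFModelImporter()
result = importer.import_netcdf_to_database(
    netcdf_path="model.nc",        # hypothetical input file
    db_path="model.db",            # hypothetical output database
    network_name="example-network",
    progress_callback=on_progress,
    strict_validation=False,       # skip undefined attributes with warnings
)
print(result["success"], result["stats"])
```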
@@ -87,9 +99,9 @@ class NetCDFModelImporter:
                 progress_callback=progress_callback,
                 strict_validation=strict_validation,
                 import_source="NetCDF",
-                netcdf_path=netcdf_path
+                netcdf_path=netcdf_path,
             )

         except Exception as e:
             self.logger.error(f"Error importing NetCDF: {e}", exc_info=True)
             if progress_callback:
@@ -97,17 +109,17 @@ class NetCDFModelImporter:
             raise

     def import_csv_to_database(
-        self,
-        csv_directory: str,
-        db_path: str,
+        self,
+        csv_directory: str,
+        db_path: str,
         network_name: str,
         network_description: Optional[str] = None,
         progress_callback: Optional[Callable[[int, str], None]] = None,
-        strict_validation: bool = False
+        strict_validation: bool = False,
     ) -> Dict[str, Any]:
         """
         Import a PyPSA network from CSV files into a new database.

         Args:
             csv_directory: Path to the directory containing PyPSA CSV files
             db_path: Path where to create the database
@@ -115,29 +127,29 @@ class NetCDFModelImporter:
             network_description: Optional description
             progress_callback: Optional callback for progress updates (progress: int, message: str)
             strict_validation: Whether to skip undefined attributes rather than failing

         Returns:
             Dictionary with import results and statistics
         """
         try:
             if progress_callback:
                 progress_callback(0, "Starting PyPSA CSV import...")

             # Import PyPSA
             pypsa = self._import_pypsa()

             if progress_callback:
                 progress_callback(5, "Validating CSV files...")

             # Validate CSV directory and files before attempting import
             self._validate_csv_directory(csv_directory)

             if progress_callback:
                 progress_callback(10, "Loading PyPSA network from CSV files...")

             # Load the PyPSA network from CSV directory
             network = pypsa.Network()

             try:
                 network.import_from_csv_folder(csv_directory)
             except Exception as e:
@@ -151,13 +163,16 @@ class NetCDFModelImporter:
                 elif "KeyError" in str(e):
                     error_msg += f"\n\nThis indicates a required column is missing from one of your CSV files. "
                     error_msg += "Please ensure your CSV files follow the PyPSA format specification."

                 self.logger.error(error_msg)
                 raise ValueError(error_msg)

             if progress_callback:
-                progress_callback(
-                    ...
+                progress_callback(
+                    20,
+                    f"Loaded network: {len(network.buses)} buses, {len(network.generators)} generators",
+                )
+
             # Use the shared import logic
             return self._import_network_to_database(
                 network=network,
@@ -166,9 +181,9 @@ class NetCDFModelImporter:
                 network_description=network_description,
                 progress_callback=progress_callback,
                 strict_validation=strict_validation,
-                import_source="CSV"
+                import_source="CSV",
             )

         except Exception as e:
             self.logger.error(f"Error importing PyPSA CSV: {e}", exc_info=True)
             if progress_callback:
@@ -179,6 +194,7 @@ class NetCDFModelImporter:
         """Import PyPSA with standard error handling."""
         try:
             import pypsa
+
             return pypsa
         except ImportError as e:
             self.logger.error(f"Failed to import PyPSA: {e}", exc_info=True)
@@ -187,45 +203,59 @@ class NetCDFModelImporter:
                 "Please ensure it is installed correctly in the environment."
             ) from e
         except Exception as e:
-            self.logger.error(
+            self.logger.error(
+                f"An unexpected error occurred during PyPSA import: {e}", exc_info=True
+            )
             raise

     def _validate_csv_directory(self, csv_directory: str) -> None:
         """Validate that the CSV directory contains valid PyPSA CSV files"""
         import os
         import pandas as pd

         csv_path = Path(csv_directory)
         if not csv_path.exists():
             raise ValueError(f"CSV directory does not exist: {csv_directory}")

         if not csv_path.is_dir():
             raise ValueError(f"Path is not a directory: {csv_directory}")

         # Find CSV files
         csv_files = list(csv_path.glob("*.csv"))
         if not csv_files:
             raise ValueError(f"No CSV files found in directory: {csv_directory}")

         # Check each CSV file for basic validity
-        component_files = [
-            ...
+        component_files = [
+            "buses.csv",
+            "generators.csv",
+            "loads.csv",
+            "lines.csv",
+            "links.csv",
+            "storage_units.csv",
+            "stores.csv",
+        ]
+        required_files = ["buses.csv"]  # At minimum, we need buses
+
         # Check for required files
         existing_files = [f.name for f in csv_files]
         missing_required = [f for f in required_files if f not in existing_files]
         if missing_required:
             raise ValueError(f"Missing required CSV files: {missing_required}")

         # Validate each component CSV file that exists
         for csv_file in csv_files:
             if csv_file.name in component_files:
                 try:
                     df = pd.read_csv(csv_file, nrows=0)  # Just read headers
-                    if
-                        raise ValueError(
+                    if "name" not in df.columns:
+                        raise ValueError(
+                            f"CSV file '{csv_file.name}' is missing required 'name' column. Found columns: {list(df.columns)}"
+                        )
                 except Exception as e:
-                    raise ValueError(
+                    raise ValueError(
+                        f"Error reading CSV file '{csv_file.name}': {str(e)}"
+                    )

     def _import_network_to_database(
         self,
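The validation above only hard-requires buses.csv with a "name" column; the other component CSVs are optional. A minimal sketch of a folder that would pass it (the extra columns and values here are illustrative assumptions, not taken from the diff):

```python
# Sketch of a minimal PyPSA-style CSV folder for _validate_csv_directory.
from pathlib import Path
import pandas as pd

csv_dir = Path("pypsa_csv_example")
csv_dir.mkdir(exist_ok=True)

# buses.csv is the only required file; it must have a "name" column.
pd.DataFrame({"name": ["bus0", "bus1"], "x": [13.4, 8.7], "y": [52.5, 50.1]}).to_csv(
    csv_dir / "buses.csv", index=False
)
# Optional component file, also keyed by "name".
pd.DataFrame({"name": ["gen0"], "bus": ["bus0"], "carrier": ["AC"]}).to_csv(
    csv_dir / "generators.csv", index=False
)
```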
@@ -236,7 +266,7 @@ class NetCDFModelImporter:
         progress_callback: Optional[Callable[[int, str], None]] = None,
         strict_validation: bool = False,
         import_source: str = "PyPSA",
-        netcdf_path: Optional[str] = None
+        netcdf_path: Optional[str] = None,
     ) -> Dict[str, Any]:
         """
         Shared logic to import a PyPSA network object into a database.
@@ -245,106 +275,112 @@ class NetCDFModelImporter:
         try:
             if progress_callback:
                 progress_callback(0, "Starting network import...")

             # Create the database with schema using atomic utility
             create_database_with_schema(db_path)

             if progress_callback:
                 progress_callback(5, "Database schema created")

             # Connect to database
             conn = open_connection(db_path)

             try:
                 # Load companion location CSV if available (for NetCDF imports only)
                 location_map = None
                 if import_source == "NetCDF" and netcdf_path:
                     location_map = self._detect_and_load_location_csv(netcdf_path)

                 # Create the network record
-                ...
+                self._create_network_record(
                     conn, network, network_name, network_description
                 )

                 if progress_callback:
-                    progress_callback(10,
+                    progress_callback(10, "Created network record")

-                # ...
-                scenarios = cursor.fetchall()
-                if scenarios:
-                    main_scenario = next((s for s in scenarios if s[2] == True), None)  # is_master = True
-                    if not main_scenario:
-                        self.logger.warning(f"No master scenario found in scenarios: {scenarios}")
-                else:
-                    self.logger.error(f"No scenarios found after network creation - database trigger may have failed")
+                # Note: In the new schema, the base network uses scenario_id = NULL
+                # No master scenario record is needed in the scenarios table

                 # Create network time periods from PyPSA snapshots
-                self._create_network_time_periods(conn, network
+                self._create_network_time_periods(conn, network)
+
                 if progress_callback:
                     progress_callback(15, f"Created network time periods")

                 # Import carriers
-                carriers_count = self._import_carriers(conn, network
+                carriers_count = self._import_carriers(conn, network)
+
                 if progress_callback:
                     progress_callback(20, f"Imported {carriers_count} carriers")

                 # Import buses
-                buses_count = self._import_buses(conn, network,
+                buses_count = self._import_buses(conn, network, strict_validation)
+
                 if progress_callback:
                     progress_callback(25, f"Imported {buses_count} buses")

                 # Calculate scatter radius for non-bus components based on bus separation
-                bus_coordinates = self._get_bus_coordinates(conn
+                bus_coordinates = self._get_bus_coordinates(conn)
                 scatter_radius = self._calculate_bus_separation_radius(bus_coordinates)

                 # Import generators
-                generators_count = self._import_generators(
+                generators_count = self._import_generators(
+                    conn, network, strict_validation, scatter_radius, location_map
+                )
+
                 if progress_callback:
                     progress_callback(30, f"Imported {generators_count} generators")

                 # Import loads
-                loads_count = self._import_loads(
+                loads_count = self._import_loads(
+                    conn, network, strict_validation, scatter_radius, location_map
+                )
+
                 if progress_callback:
                     progress_callback(35, f"Imported {loads_count} loads")

                 # Import lines
-                lines_count = self._import_lines(
+                lines_count = self._import_lines(
+                    conn, network, strict_validation, location_map
+                )
+
                 if progress_callback:
                     progress_callback(40, f"Imported {lines_count} lines")

                 # Import links
-                links_count = self._import_links(
+                links_count = self._import_links(
+                    conn, network, strict_validation, location_map
+                )
+
                 if progress_callback:
                     progress_callback(45, f"Imported {links_count} links")

                 # Import storage units
-                storage_units_count = self._import_storage_units(
+                storage_units_count = self._import_storage_units(
+                    conn, network, strict_validation, scatter_radius, location_map
+                )
+
                 if progress_callback:
-                    progress_callback(
+                    progress_callback(
+                        50, f"Imported {storage_units_count} storage units"
+                    )
+
                 # Import stores
-                stores_count = self._import_stores(
+                stores_count = self._import_stores(
+                    conn, network, strict_validation, scatter_radius, location_map
+                )
+
                 if progress_callback:
                     progress_callback(55, f"Imported {stores_count} stores")

                 conn.commit()

                 if progress_callback:
                     progress_callback(100, "Import completed successfully")

                 # Collect final statistics
                 stats = {
-                    "network_id": network_id,
                     "network_name": network_name,
                     "carriers": carriers_count,
                     "buses": buses_count,
@@ -354,21 +390,30 @@ class NetCDFModelImporter:
                     "links": links_count,
                     "storage_units": storage_units_count,
                     "stores": stores_count,
-                    "total_components": (
-                        ...
+                    "total_components": (
+                        buses_count
+                        + generators_count
+                        + loads_count
+                        + lines_count
+                        + links_count
+                        + storage_units_count
+                        + stores_count
+                    ),
+                    "snapshots": (
+                        len(network.snapshots) if hasattr(network, "snapshots") else 0
+                    ),
                 }

                 return {
                     "success": True,
                     "message": f"Network imported successfully from {import_source}",
                     "db_path": db_path,
-                    "stats": stats
+                    "stats": stats,
                 }

             finally:
                 conn.close()

         except Exception as e:
             self.logger.error(f"Error importing network: {e}", exc_info=True)
             if progress_callback:
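A short sketch of consuming the dictionary returned above. Only the keys assembled in this method are used; the `result` variable is assumed to come from one of the hypothetical import calls shown earlier.

```python
# Sketch: inspect the result dict built by _import_network_to_database().
if result["success"]:
    stats = result["stats"]
    print(
        f'{result["message"]}: network "{stats["network_name"]}" with '
        f'{stats["total_components"]} components ({stats["buses"]} buses) '
        f'and {stats["snapshots"]} snapshots -> {result["db_path"]}'
    )
else:
    print("Import failed:", result.get("message"))
```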
@@ -378,25 +423,27 @@ class NetCDFModelImporter:
     # Helper methods for the import process
     # Note: These are simplified versions of the methods from the original netcdf_importer.py
     # The full implementation would include all the detailed import logic for each component type

     def _extract_datetime_snapshots(self, network) -> pd.DatetimeIndex:
         """Extract datetime snapshots from a PyPSA network"""
-        if not hasattr(network,
+        if not hasattr(network, "snapshots") or len(network.snapshots) == 0:
             self.logger.warning("No snapshots found in PyPSA network")
             return pd.DatetimeIndex([])

         snapshots = network.snapshots

         try:
             # Try direct conversion first (works for simple DatetimeIndex)
             return pd.to_datetime(snapshots)
         except (TypeError, ValueError) as e:
             # Handle MultiIndex case
-            if hasattr(snapshots,
+            if hasattr(snapshots, "nlevels") and snapshots.nlevels > 1:
                 # Try to use the timesteps attribute if available (common in multi-period networks)
-                if hasattr(network,
+                if hasattr(network, "timesteps") and isinstance(
+                    network.timesteps, pd.DatetimeIndex
+                ):
                     return network.timesteps

                 # Try to extract datetime from the last level of the MultiIndex
                 try:
                     # Get the last level (usually the timestep level)
@@ -404,337 +451,421 @@ class NetCDFModelImporter:
                     datetime_snapshots = pd.to_datetime(last_level)
                     return datetime_snapshots
                 except Exception as multi_e:
-                    self.logger.warning(
+                    self.logger.warning(
+                        f"Failed to extract datetime from MultiIndex: {multi_e}"
+                    )

             # Final fallback: create a default hourly range
-            self.logger.warning(
+            self.logger.warning(
+                "Could not extract datetime snapshots, creating default hourly range"
+            )
+            default_start = pd.Timestamp("2024-01-01 00:00:00")
+            default_end = pd.Timestamp("2024-01-01 23:59:59")
+            return pd.date_range(start=default_start, end=default_end, freq="H")

     def _create_network_record(
-        self,
-        conn,
-        network,
+        self,
+        conn,
+        network,
         network_name: str,
-        network_description: Optional[str] = None
-    ) ->
+        network_description: Optional[str] = None,
+    ) -> None:
         """Create the network record and return network ID"""
         # Extract time information from PyPSA network using our robust helper
         snapshots = self._extract_datetime_snapshots(network)

         if len(snapshots) > 0:
-            time_start = snapshots.min().strftime(
-            time_end = snapshots.max().strftime(
+            time_start = snapshots.min().strftime("%Y-%m-%d %H:%M:%S")
+            time_end = snapshots.max().strftime("%Y-%m-%d %H:%M:%S")

             # Try to infer time interval
             if len(snapshots) > 1:
                 freq = pd.infer_freq(snapshots)
-                time_interval = freq or
+                time_interval = freq or "H"  # Default to hourly if can't infer
             else:
-                time_interval =
+                time_interval = "H"
         else:
             # Default time range if no snapshots
-            time_start =
-            time_end =
-            time_interval =
+            time_start = "2024-01-01 00:00:00"
+            time_end = "2024-01-01 23:59:59"
+            time_interval = "H"

-        description =
+        description = (
+            network_description
+            or f"Imported from PyPSA NetCDF on {pd.Timestamp.now().strftime('%Y-%m-%d %H:%M:%S')}"
+        )

         request = CreateNetworkRequest(
             name=network_name,
             description=description,
             time_resolution=time_interval,
             start_time=time_start,
-            end_time=time_end
+            end_time=time_end,
         )
+        create_network(conn, request)  # Single network per database

-    def _create_network_time_periods(self, conn, network
-        """Create network time periods from PyPSA snapshots using optimized approach"""
+    def _create_network_time_periods(self, conn, network) -> None:
+        """Create network time periods from PyPSA snapshots using optimized approach (single network per database)"""
         # Use our robust helper to extract datetime snapshots
         snapshots = self._extract_datetime_snapshots(network)

         if len(snapshots) == 0:
-            self.logger.warning(
+            self.logger.warning(
+                "No valid snapshots found in PyPSA network, skipping time periods creation"
+            )
             return

         # Insert optimized time periods metadata
         period_count = len(snapshots)
         start_timestamp = int(snapshots[0].timestamp())

         # Calculate interval in seconds
         if len(snapshots) > 1:
             interval_seconds = int((snapshots[1] - snapshots[0]).total_seconds())
         else:
             interval_seconds = 3600  # Default to hourly

-        conn.execute(
-            ...
+        conn.execute(
+            """
+            INSERT INTO network_time_periods (period_count, start_timestamp, interval_seconds)
+            VALUES (?, ?, ?)
+            """,
+            (period_count, start_timestamp, interval_seconds),
+        )

     # Placeholder methods - in a full implementation, these would contain
     # the detailed import logic from the original netcdf_importer.py

-    def _import_carriers(self, conn, network
-        """Import carriers from PyPSA network, discovering from both network and component levels"""
+    def _import_carriers(self, conn, network) -> int:
+        """Import carriers from PyPSA network, discovering from both network and component levels (single network per database)"""
         count = 0
         created_carriers = set()

         # Discover all carriers from components (not just n.carriers table)
         all_carriers = set()

         # Get carriers from network.carriers table if it exists
-        if hasattr(network,
+        if hasattr(network, "carriers") and not network.carriers.empty:
             all_carriers.update(network.carriers.index)

         # Get carriers from generators
-        if
+        if (
+            hasattr(network, "generators")
+            and not network.generators.empty
+            and "carrier" in network.generators.columns
+        ):
             component_carriers = set(network.generators.carrier.dropna().unique())
             all_carriers.update(component_carriers)

         # Get carriers from storage units, stores, loads and buses (same pattern)
         ...

         # Convert to sorted list for consistent ordering
         all_carriers = sorted(list(all_carriers))

         # Define a color palette similar to the Python code
         color_palette = [
-            ...
+            "#1f77b4",  # C0 - blue
+            "#ff7f0e",  # C1 - orange
+            "#2ca02c",  # C2 - green
+            "#d62728",  # C3 - red
+            "#9467bd",  # C4 - purple
+            "#8c564b",  # C5 - brown
+            "#e377c2",  # C6 - pink
+            "#7f7f7f",  # C7 - gray
+            "#bcbd22",  # C8 - olive
+            "#17becf",  # C9 - cyan
+            "#aec7e8",  # light blue
+            "#ffbb78",  # light orange
+            "#98df8a",  # light green
+            "#ff9896",  # light red
+            "#c5b0d5",  # light purple
         ]

         # Create carriers from discovered list
         for i, carrier_name in enumerate(all_carriers):
             # Get carrier data from network.carriers if available
             carrier_data = {}
-            if
+            if (
+                hasattr(network, "carriers")
+                and not network.carriers.empty
+                and carrier_name in network.carriers.index
+            ):
                 # Use .iloc with index position to avoid fragmentation
                 carrier_idx = network.carriers.index.get_loc(carrier_name)
                 carrier_data = network.carriers.iloc[carrier_idx]

             # Extract attributes with defaults
-            co2_emissions = carrier_data.get(
+            co2_emissions = carrier_data.get("co2_emissions", 0.0)

             # Use color from network.carriers if available, otherwise assign from palette
-            if
-                color = carrier_data[
+            if "color" in carrier_data and pd.notna(carrier_data["color"]):
+                color = carrier_data["color"]
             else:
                 color = color_palette[i % len(color_palette)]

-            nice_name = carrier_data.get(
+            nice_name = carrier_data.get("nice_name", None)

             # Create the carrier
-            create_carrier(conn,
+            create_carrier(conn, carrier_name, co2_emissions, color, nice_name)
             created_carriers.add(carrier_name)
             count += 1

         # Ensure we have essential carriers for bus validation
         # Buses can only use AC, DC, heat, or gas carriers according to database constraints
         essential_carriers = {
-            ...
+            "AC": {
+                "co2_emissions": 0.0,
+                "color": "#3498db",
+                "nice_name": "AC Electricity",
+            },
+            "electricity": {
+                "co2_emissions": 0.0,
+                "color": "#2ecc71",
+                "nice_name": "Electricity",
+            },
         }

         for carrier_name, carrier_props in essential_carriers.items():
             if carrier_name not in created_carriers:
                 create_carrier(
-                    conn,
-                    carrier_props[
-                    carrier_props[
-                    carrier_props['nice_name']
+                    conn,
+                    carrier_name,
+                    carrier_props["co2_emissions"],
+                    carrier_props["color"],
+                    carrier_props["nice_name"],
                 )
                 created_carriers.add(carrier_name)
                 count += 1

         return count

-    def _import_buses(self, conn, network,
-        """Import buses from PyPSA network"""
+    def _import_buses(self, conn, network, strict_validation: bool) -> int:
+        """Import buses from PyPSA network (single network per database)"""
         count = 0
-        if not hasattr(network,
+        if not hasattr(network, "buses") or network.buses.empty:
             return count

         for bus_name, bus_data in network.buses.iterrows():
             try:
                 # Generate a unique name for this bus
-                unique_name = self._generate_unique_name(str(bus_name),
+                unique_name = self._generate_unique_name(str(bus_name), "BUS")

                 # Extract and log coordinate data for debugging
-                x_value = bus_data.get(
-                y_value = bus_data.get(
-                self.logger.debug(
+                x_value = bus_data.get("x", None)
+                y_value = bus_data.get("y", None)
+                self.logger.debug(
+                    f"Bus '{bus_name}' -> '{unique_name}': x={x_value} (type: {type(x_value)}), y={y_value} (type: {type(y_value)})"
+                )

                 # Handle NaN/None values properly
-                longitude =
+                longitude = (
+                    None
+                    if x_value is None
+                    or (hasattr(x_value, "__iter__") and len(str(x_value)) == 0)
+                    else float(x_value) if x_value != "" else None
+                )
+                latitude = (
+                    None
+                    if y_value is None
+                    or (hasattr(y_value, "__iter__") and len(str(y_value)) == 0)
+                    else float(y_value) if y_value != "" else None
+                )

                 # Additional check for pandas NaN values
                 if longitude is not None and pd.isna(longitude):
                     longitude = None
                 if latitude is not None and pd.isna(latitude):
                     latitude = None

                 # Get or create carrier
-                carrier_name = bus_data.get(
-                carrier_id = self._get_or_create_carrier(conn,
+                carrier_name = bus_data.get("carrier", "AC")
+                carrier_id = self._get_or_create_carrier(conn, carrier_name)

                 # Create component record using atomic function
                 # Note: PyPSA 'x'/'y' coordinates are mapped to 'longitude'/'latitude' columns here
                 request = CreateComponentRequest(
-                    component_type='BUS',
+                    component_type="BUS",
                     name=unique_name,  # Use globally unique name
                     latitude=latitude,  # PyPSA y -> latitude
                     longitude=longitude,  # PyPSA x -> longitude
-                    carrier_id=carrier_id
+                    carrier_id=carrier_id,
                 )
                 component_id = insert_component(conn, request)

                 # Import bus attributes and timeseries
-                self._import_component_attributes(
+                self._import_component_attributes(
+                    conn, component_id, bus_data, "BUS", strict_validation
+                )
-                self._import_component_timeseries(
+                self._import_component_timeseries(
+                    conn, network, component_id, bus_name, "BUS", strict_validation
+                )
                 count += 1

             except Exception as e:
                 if strict_validation:
                     raise
                 self.logger.warning(f"Failed to import bus {bus_name}: {e}")
                 continue

         return count

     # Additional placeholder methods for other component types
-    def _import_generators(
+    def _import_generators(
+        self,
+        conn,
+        network,
+        strict_validation: bool,
+        scatter_radius: float,
+        location_map,
+    ) -> int:
+        """Import generators from PyPSA network (single network per database)"""
         count = 0
-        if not hasattr(network,
+        if not hasattr(network, "generators") or network.generators.empty:
             return count

         # Get bus name to ID mapping
-        bus_name_to_id = get_bus_name_to_id_map(conn
+        bus_name_to_id = get_bus_name_to_id_map(conn)

         # Get master scenario ID
-        master_scenario_id =
+        master_scenario_id = None

         for gen_name, gen_data in network.generators.iterrows():
             try:
                 # Get bus connection
-                bus_name = gen_data.get(
+                bus_name = gen_data.get("bus")
                 bus_id = bus_name_to_id.get(bus_name) if bus_name else None
                 if not bus_id:
-                    self.logger.warning(
+                    self.logger.warning(
+                        f"Generator {gen_name}: bus '{bus_name}' not found, skipping"
+                    )
                     continue

                 # Get or create carrier, generate coordinates near the bus
-                carrier_name = gen_data.get(
-                carrier_id = self._get_or_create_carrier(conn,
+                carrier_name = gen_data.get("carrier", "AC")
+                carrier_id = self._get_or_create_carrier(conn, carrier_name)
                 latitude, longitude = self._generate_component_coordinates(
                     conn, bus_id, scatter_radius, location_map, gen_name
                 )

                 request = CreateComponentRequest(
-                    component_type='GENERATOR',
+                    component_type="GENERATOR",
                     name=str(gen_name),
                     latitude=latitude,
                     longitude=longitude,
                     carrier_id=carrier_id,
-                    bus_id=bus_id
+                    bus_id=bus_id,
                 )
                 component_id = insert_component(conn, request)

                 # Import generator attributes and timeseries, then count += 1
                 ...

             except Exception as e:
                 if strict_validation:
                     raise
                 self.logger.warning(f"Failed to import generator {gen_name}: {e}")
                 continue

         return count

-    def _import_loads(
+    def _import_loads(
+        self,
+        conn,
+        network,
+        strict_validation: bool,
+        scatter_radius: float,
+        location_map,
+    ) -> int:
+        """Import loads from PyPSA network (single network per database)"""
         count = 0
-        if not hasattr(network,
+        if not hasattr(network, "loads") or network.loads.empty:
             return count

-        bus_map = get_bus_name_to_id_map(conn
-        bus_coords = self._get_bus_coordinates_map(conn
+        bus_map = get_bus_name_to_id_map(conn)
+        bus_coords = self._get_bus_coordinates_map(conn)

         # Count components per bus for better distribution
         components_per_bus = {}
         for load_name, load_data in network.loads.iterrows():
-            bus_name = load_data[
+            bus_name = load_data["bus"]
             components_per_bus[bus_name] = components_per_bus.get(bus_name, 0) + 1

         bus_component_counters = {}

         for load_name, load_data in network.loads.iterrows():
             try:
-                bus_id = bus_map.get(load_data[
+                bus_id = bus_map.get(load_data["bus"])
                 if bus_id is None:
-                    self.logger.warning(
+                    self.logger.warning(
+                        f"Bus '{load_data['bus']}' not found for load '{load_name}'"
+                    )
                     continue

                 # Generate a unique name, then try CSV coordinates first
-                unique_name = self._generate_unique_name(str(load_name),
+                unique_name = self._generate_unique_name(str(load_name), "LOAD")
                 latitude, longitude = None, None
                 csv_coords = self._get_csv_coordinates(unique_name, location_map)
                 if csv_coords:
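Because snapshots are stored only as a single (period_count, start_timestamp, interval_seconds) row, the full time axis has to be rebuilt on read. A minimal sketch of that expansion; only the table and column names come from the INSERT above, everything else (connection handling, function name) is an assumption:

```python
# Sketch: expand the compact network_time_periods row into a DatetimeIndex.
import pandas as pd


def load_time_periods(conn) -> pd.DatetimeIndex:
    row = conn.execute(
        "SELECT period_count, start_timestamp, interval_seconds FROM network_time_periods"
    ).fetchone()
    if row is None:
        return pd.DatetimeIndex([])
    period_count, start_timestamp, interval_seconds = row
    start = pd.Timestamp(start_timestamp, unit="s")
    return pd.date_range(
        start=start, periods=period_count, freq=pd.Timedelta(seconds=interval_seconds)
    )
```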
@@ -742,211 +873,242 @@ class NetCDFModelImporter:
                 elif bus_id in bus_coords:
                     # Fall back to scattered coordinates around the connected bus
                     bus_lat, bus_lon = bus_coords[bus_id]
-                    bus_name = load_data[
+                    bus_name = load_data["bus"]

                     # Get component index for this bus
                     component_index = bus_component_counters.get(bus_name, 0)
                     bus_component_counters[bus_name] = component_index + 1

                     latitude, longitude = self._generate_scattered_coordinates(
-                        bus_lat,
+                        bus_lat,
+                        bus_lon,
+                        scatter_radius,
+                        components_per_bus[bus_name],
+                        component_index,
                     )

                 # Get carrier ID if carrier is specified
                 carrier_id = None
-                if
-                    carrier_id = self._get_or_create_carrier(conn,
+                if "carrier" in load_data and pd.notna(load_data["carrier"]):
+                    carrier_id = self._get_or_create_carrier(conn, load_data["carrier"])

                 # Create component record using atomic function
                 request = CreateComponentRequest(
-                    component_type='LOAD',
+                    component_type="LOAD",
                     name=unique_name,  # Use globally unique name
                     bus_id=bus_id,
                     carrier_id=carrier_id,
                     latitude=latitude,
-                    longitude=longitude
+                    longitude=longitude,
                 )
                 component_id = insert_component(conn, request)

                 # Import load attributes and timeseries, then count += 1
                 ...

         return count

-    def _import_lines(
+    def _import_lines(
+        self, conn, network, strict_validation: bool, location_map
+    ) -> int:
+        """Import lines from PyPSA network (single network per database)"""
         count = 0
         name_counter = {}  # Track duplicate names
-        if not hasattr(network,
+        if not hasattr(network, "lines") or network.lines.empty:
             return count

-        bus_map = get_bus_name_to_id_map(conn
+        bus_map = get_bus_name_to_id_map(conn)

         for line_name, line_data in network.lines.iterrows():
             try:
-                bus0_id = bus_map.get(line_data[
-                bus1_id = bus_map.get(line_data[
+                bus0_id = bus_map.get(line_data["bus0"])
+                bus1_id = bus_map.get(line_data["bus1"])
                 if bus0_id is None or bus1_id is None:
-                    self.logger.warning(
+                    self.logger.warning(
+                        f"Bus not found for line '{line_name}': bus0='{line_data['bus0']}', bus1='{line_data['bus1']}'"
+                    )
                     continue

                 # Handle duplicate names by appending counter
                 unique_name = line_name
                 if line_name in name_counter:
                     name_counter[line_name] += 1
                     unique_name = f"{line_name}_{name_counter[line_name]}"
-                    self.logger.warning(
+                    self.logger.warning(
+                        f"Duplicate line name '{line_name}' renamed to '{unique_name}'"
+                    )
                 else:
                     name_counter[line_name] = 0

                 # Check for CSV coordinates
                 latitude, longitude = None, None
                 csv_coords = self._get_csv_coordinates(unique_name, location_map)
                 if csv_coords:
                     latitude, longitude = csv_coords

                 # Lines always use AC carrier
-                carrier_id = self._get_or_create_carrier(conn,
+                carrier_id = self._get_or_create_carrier(conn, "AC")

                 request = CreateComponentRequest(
-                    component_type='LINE',
+                    component_type="LINE",
                     name=unique_name,  # Use deduplicated name
                     bus0_id=bus0_id,
                     bus1_id=bus1_id,
                     carrier_id=carrier_id,
                     latitude=latitude,
-                    longitude=longitude
+                    longitude=longitude,
                 )
                 component_id = insert_component(conn, request)

                 # Import line attributes and timeseries, then count += 1
                 ...

         return count

-    def _import_links(
+    def _import_links(
+        self, conn, network, strict_validation: bool, location_map
+    ) -> int:
+        """Import links from PyPSA network (single network per database)"""
         count = 0
-        if not hasattr(network,
+        if not hasattr(network, "links") or network.links.empty:
             return count

-        bus_map = get_bus_name_to_id_map(conn
+        bus_map = get_bus_name_to_id_map(conn)

         for link_name, link_data in network.links.iterrows():
             try:
-                bus0_id = bus_map.get(link_data[
-                bus1_id = bus_map.get(link_data[
+                bus0_id = bus_map.get(link_data["bus0"])
+                bus1_id = bus_map.get(link_data["bus1"])
                 if bus0_id is None or bus1_id is None:
-                    self.logger.warning(
+                    self.logger.warning(
+                        f"Bus not found for link '{link_name}': bus0='{link_data['bus0']}', bus1='{link_data['bus1']}'"
+                    )
                     continue

                 # Generate a unique name and check for CSV coordinates
-                unique_name = self._generate_unique_name(str(link_name),
+                unique_name = self._generate_unique_name(str(link_name), "LINK")
                 latitude, longitude = None, None
                 csv_coords = self._get_csv_coordinates(unique_name, location_map)
                 if csv_coords:
                     latitude, longitude = csv_coords

                 # Get carrier ID if carrier is specified
                 carrier_id = None
-                if
-                    carrier_id = self._get_or_create_carrier(conn,
+                if "carrier" in link_data and pd.notna(link_data["carrier"]):
+                    carrier_id = self._get_or_create_carrier(conn, link_data["carrier"])
                 else:
                     # Default to DC for links
-                    carrier_id = self._get_or_create_carrier(conn,
+                    carrier_id = self._get_or_create_carrier(conn, "DC")

                 request = CreateComponentRequest(
-                    component_type='LINK',
+                    component_type="LINK",
                     name=unique_name,  # Use globally unique name
                     bus0_id=bus0_id,
                     bus1_id=bus1_id,
                     carrier_id=carrier_id,
                     latitude=latitude,
-                    longitude=longitude
+                    longitude=longitude,
                 )
                 component_id = insert_component(conn, request)

                 # Import link attributes and timeseries, then count += 1
                 ...

         return count

-    def _import_storage_units(
+    def _import_storage_units(
+        self,
+        conn,
+        network,
+        strict_validation: bool,
+        scatter_radius: float,
+        location_map,
+    ) -> int:
         """Import storage units from PyPSA network"""
         count = 0
-        if not hasattr(network,
+        if not hasattr(network, "storage_units") or network.storage_units.empty:
             return count

-        bus_map = get_bus_name_to_id_map(conn
-        bus_coords = self._get_bus_coordinates_map(conn
+        bus_map = get_bus_name_to_id_map(conn)
+        bus_coords = self._get_bus_coordinates_map(conn)

         # Count components per bus for better distribution
         components_per_bus = {}
         for su_name, su_data in network.storage_units.iterrows():
-            bus_name = su_data[
+            bus_name = su_data["bus"]
             components_per_bus[bus_name] = components_per_bus.get(bus_name, 0) + 1

         bus_component_counters = {}

         for su_name, su_data in network.storage_units.iterrows():
             try:
-                bus_id = bus_map.get(su_data[
+                bus_id = bus_map.get(su_data["bus"])
                 if bus_id is None:
-                    self.logger.warning(
+                    self.logger.warning(
+                        f"Bus '{su_data['bus']}' not found for storage unit '{su_name}'"
+                    )
                     continue

                 # Generate a unique name, then try CSV coordinates first
-                unique_name = self._generate_unique_name(str(su_name),
+                unique_name = self._generate_unique_name(str(su_name), "STORAGE_UNIT")
                 latitude, longitude = None, None
                 csv_coords = self._get_csv_coordinates(unique_name, location_map)
                 if csv_coords:
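The scattered-coordinate fallback used by the load, storage unit, and store importers is only called here, not shown. One plausible way to place several components around their bus, consistent with the (bus_lat, bus_lon, scatter_radius, total_at_bus, component_index) call signature, is even spacing on a circle. This is an illustrative sketch under that assumption, not the package's actual implementation:

```python
# Illustrative sketch of a scatter helper matching the call pattern
# _generate_scattered_coordinates(bus_lat, bus_lon, radius, total, index).
# The even circular spacing is an assumption, not taken from the diff.
import math


def scatter_around_bus(
    bus_lat: float, bus_lon: float, radius_deg: float, total: int, index: int
) -> tuple:
    if total <= 1:
        return bus_lat, bus_lon
    angle = 2 * math.pi * index / total  # even angular spacing per component
    return (
        bus_lat + radius_deg * math.sin(angle),
        bus_lon + radius_deg * math.cos(angle),
    )
```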
@@ -954,88 +1116,112 @@ class NetCDFModelImporter:
                 elif bus_id in bus_coords:
                     # Fall back to scattered coordinates around the connected bus
                     bus_lat, bus_lon = bus_coords[bus_id]
-                    bus_name = su_data[
+                    bus_name = su_data["bus"]

                     # Get component index for this bus
                     component_index = bus_component_counters.get(bus_name, 0)
                     bus_component_counters[bus_name] = component_index + 1

                     latitude, longitude = self._generate_scattered_coordinates(
-                        bus_lat,
+                        bus_lat,
+                        bus_lon,
+                        scatter_radius,
+                        components_per_bus[bus_name],
+                        component_index,
                     )

                 # Get carrier ID if carrier is specified
                 carrier_id = None
-                if
-                    carrier_id = self._get_or_create_carrier(conn,
+                if "carrier" in su_data and pd.notna(su_data["carrier"]):
+                    carrier_id = self._get_or_create_carrier(conn, su_data["carrier"])

                 # Create component record using atomic function
                 request = CreateComponentRequest(
-                    component_type='STORAGE_UNIT',
+                    component_type="STORAGE_UNIT",
                     name=unique_name,  # Use globally unique name
                     bus_id=bus_id,
                     carrier_id=carrier_id,
                     latitude=latitude,
-                    longitude=longitude
+                    longitude=longitude,
                 )
                 component_id = insert_component(conn, request)

                 # Import storage unit attributes and timeseries
-                self._import_component_attributes(
+                self._import_component_attributes(
+                    conn, component_id, su_data, "STORAGE_UNIT", strict_validation
+                )
-                self._import_component_timeseries(
+                self._import_component_timeseries(
+                    conn,
+                    network,
+                    component_id,
+                    su_name,
+                    "STORAGE_UNIT",
+                    strict_validation,
+                )
                 count += 1

             except Exception as e:
                 if strict_validation:
                     raise
                 self.logger.warning(f"Failed to import storage unit {su_name}: {e}")
                 continue

         return count

-    def _import_stores(
+    def _import_stores(
+        self,
+        conn,
+        network,
+        strict_validation: bool,
+        scatter_radius: float,
+        location_map,
+    ) -> int:
+        """Import stores from PyPSA network (single network per database)"""
         count = 0
         name_counter = {}  # Track duplicate names
-        if not hasattr(network,
+        if not hasattr(network, "stores") or network.stores.empty:
             return count

-        bus_map = get_bus_name_to_id_map(conn
-        bus_coords = self._get_bus_coordinates_map(conn
+        bus_map = get_bus_name_to_id_map(conn)
+        bus_coords = self._get_bus_coordinates_map(conn)

         # Count components per bus for better distribution
         components_per_bus = {}
         for store_name, store_data in network.stores.iterrows():
-            bus_name = store_data[
+            bus_name = store_data["bus"]
             components_per_bus[bus_name] = components_per_bus.get(bus_name, 0) + 1

-        bus_component_counters =
+        bus_component_counters = (
+            {}
+        )  # Track how many components we've placed at each bus

         for store_name, store_data in network.stores.iterrows():
             try:
-                bus_id = bus_map.get(store_data[
+                bus_id = bus_map.get(store_data["bus"])
                 if bus_id is None:
-                    self.logger.warning(
+                    self.logger.warning(
+                        f"Bus '{store_data['bus']}' not found for store '{store_name}'"
+                    )
                     continue

                 # Handle duplicate names by appending counter
                 unique_name = store_name
                 if store_name in name_counter:
                     name_counter[store_name] += 1
                     unique_name = f"{store_name}_{name_counter[store_name]}"
-                    self.logger.warning(
+                    self.logger.warning(
+                        f"Duplicate store name '{store_name}' renamed to '{unique_name}'"
+                    )
                 else:
                     name_counter[store_name] = 0

                 # Try to get coordinates from CSV first, then fall back to scattered coordinates
                 latitude, longitude = None, None
                 csv_coords = self._get_csv_coordinates(unique_name, location_map)
                 if csv_coords:
@@ -1043,97 +1229,114 @@ class NetCDFModelImporter:
                 elif bus_id in bus_coords:
                     # Fall back to scattered coordinates around the connected bus
                     bus_lat, bus_lon = bus_coords[bus_id]
-                    bus_name = store_data[
-
+                    bus_name = store_data["bus"]
+
                     # Get component index for this bus
                     component_index = bus_component_counters.get(bus_name, 0)
                     bus_component_counters[bus_name] = component_index + 1
-
+
                     latitude, longitude = self._generate_scattered_coordinates(
-                        bus_lat,
-
+                        bus_lat,
+                        bus_lon,
+                        scatter_radius,
+                        components_per_bus[bus_name],
+                        component_index,
                     )
-
+
                 # Get carrier ID if carrier is specified
                 carrier_id = None
-                if
-                    carrier_id = self._get_or_create_carrier(
-
+                if "carrier" in store_data and pd.notna(store_data["carrier"]):
+                    carrier_id = self._get_or_create_carrier(
+                        conn, store_data["carrier"]
+                    )
+
                 # Create component record using atomic function
                 request = CreateComponentRequest(
-
-                    component_type='STORE',
+                    component_type="STORE",
                     name=unique_name,  # Use deduplicated name
                     bus_id=bus_id,
                     carrier_id=carrier_id,
                     latitude=latitude,
-                    longitude=longitude
+                    longitude=longitude,
                 )
                 component_id = insert_component(conn, request)
-
+
                 # Import store attributes
-                self._import_component_attributes(
-
+                self._import_component_attributes(
+                    conn, component_id, store_data, "STORE", strict_validation
+                )
+
                 # Import timeseries attributes for stores
-                self._import_component_timeseries(
-
+                self._import_component_timeseries(
+                    conn, network, component_id, store_name, "STORE", strict_validation
+                )
+
                 count += 1
-
+
             except Exception as e:
                 if strict_validation:
                     raise
                 self.logger.warning(f"Failed to import store {store_name}: {e}")
                 continue
-
+
         return count
 
-    def _get_bus_coordinates(self, conn
-        """Get coordinates of all buses in the network that have valid coordinates"""
-        cursor = conn.execute(
+    def _get_bus_coordinates(self, conn) -> List[Tuple[float, float]]:
+        """Get coordinates of all buses in the network that have valid coordinates (single network per database)"""
+        cursor = conn.execute(
+            """
             SELECT latitude, longitude FROM components
-            WHERE
+            WHERE component_type = 'BUS'
             AND latitude IS NOT NULL AND longitude IS NOT NULL
             AND NOT (latitude = 0 AND longitude = 0)
-            """,
-
+            """,
+            (),
+        )
+
         coordinates = [(row[0], row[1]) for row in cursor.fetchall()]
         return coordinates
 
-    def _calculate_bus_separation_radius(
+    def _calculate_bus_separation_radius(
+        self, bus_coordinates: List[Tuple[float, float]]
+    ) -> float:
         """Calculate the minimum separation between buses and return a radius for scattering"""
         if len(bus_coordinates) < 2:
             return 0.01  # ~1km at equator
-
-        min_distance_degrees = float(
+
+        min_distance_degrees = float("inf")
         min_separation_threshold = 0.001  # ~100m threshold to exclude co-located buses
-
+
         for i, (lat1, lon1) in enumerate(bus_coordinates):
-            for j, (lat2, lon2) in enumerate(bus_coordinates[i+1:], i+1):
+            for j, (lat2, lon2) in enumerate(bus_coordinates[i + 1 :], i + 1):
                 # Simple Euclidean distance in degrees
-                distance_degrees = math.sqrt((lat2 - lat1)**2 + (lon2 - lon1)**2)
-
+                distance_degrees = math.sqrt((lat2 - lat1) ** 2 + (lon2 - lon1) ** 2)
+
                 if distance_degrees > min_separation_threshold:
                     min_distance_degrees = min(min_distance_degrees, distance_degrees)
-
-        if min_distance_degrees == float(
+
+        if min_distance_degrees == float("inf"):
             scatter_radius_degrees = 0.05  # ~5km default
         else:
             scatter_radius_degrees = min_distance_degrees * 0.25
-
+
         # Ensure reasonable bounds: between 1km and 100km equivalent in degrees
-        min_radius = 0.01
-        max_radius = 1.0
-        scatter_radius_degrees = max(
-
+        min_radius = 0.01  # ~1km
+        max_radius = 1.0  # ~100km
+        scatter_radius_degrees = max(
+            min_radius, min(max_radius, scatter_radius_degrees)
+        )
+
         return scatter_radius_degrees
 
-    def _detect_and_load_location_csv(
+    def _detect_and_load_location_csv(
+        self, netcdf_path: str
+    ) -> Optional[Dict[str, Tuple[float, float]]]:
         """
         Detect and load companion CSV file with component locations.
-
+
         Args:
             netcdf_path: Path to the NetCDF file (e.g., /path/to/fileX.nc)
-
+
         Returns:
             Dictionary mapping component names to (latitude, longitude) tuples, or None if no CSV found
         """
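Note: for reference, a minimal sketch of the companion locations file this method looks for. For a network exported to `fileX.nc`, the importer checks for `fileX_locations.csv` in the same directory; only the `name`, `longitude`, and `latitude` headers are required, and the component names and coordinates below are purely illustrative:

    name,longitude,latitude
    north_bus_battery,13.40,52.52
    south_bus_store,11.58,48.14

Rows with missing values, or with coordinates outside [-180, 180] longitude / [-90, 90] latitude, are skipped by the parsing loop shown in the next hunk.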
@@ -1141,152 +1344,145 @@ class NetCDFModelImporter:
             # Construct expected CSV path: replace .nc with _locations.csv
             netcdf_file = Path(netcdf_path)
             csv_path = netcdf_file.parent / f"{netcdf_file.stem}_locations.csv"
-
+
             if not csv_path.exists():
                 return None
-
+
             # Parse the CSV file
             try:
                 location_df = pd.read_csv(csv_path)
-
+
                 # Validate required columns
-                required_columns = {
+                required_columns = {"name", "longitude", "latitude"}
                 if not required_columns.issubset(location_df.columns):
                     missing_cols = required_columns - set(location_df.columns)
-                    self.logger.warning(
+                    self.logger.warning(
+                        f"Location CSV missing required columns: {missing_cols}. Found columns: {list(location_df.columns)}"
+                    )
                     return None
-
+
                 # Create lookup dictionary
                 location_map = {}
                 skipped_count = 0
-
+
                 for _, row in location_df.iterrows():
-                    name = row[
-                    longitude = row[
-                    latitude = row[
-
+                    name = row["name"]
+                    longitude = row["longitude"]
+                    latitude = row["latitude"]
+
                     # Skip rows with missing data
                     if pd.isna(name) or pd.isna(longitude) or pd.isna(latitude):
                         skipped_count += 1
                         continue
-
+
                     # Validate coordinate ranges
                     if not (-180 <= longitude <= 180) or not (-90 <= latitude <= 90):
-                        self.logger.warning(
+                        self.logger.warning(
+                            f"Invalid coordinates for '{name}': longitude={longitude}, latitude={latitude}"
+                        )
                         skipped_count += 1
                         continue
-
-                    location_map[str(name).strip()] = (
-
-
+
+                    location_map[str(name).strip()] = (
+                        float(latitude),
+                        float(longitude),
+                    )
+
+                self.logger.info(
+                    f"Loaded {len(location_map)} component locations from CSV (skipped {skipped_count} invalid entries)"
+                )
                 return location_map
-
+
             except Exception as e:
                 self.logger.error(f"Failed to parse location CSV {csv_path}: {e}")
                 return None
-
+
         except Exception as e:
             self.logger.warning(f"Error detecting location CSV: {e}")
             return None
-
-    def
-        """Get
-        cursor = conn.execute(
-            "SELECT id FROM scenarios WHERE network_id = ? AND is_master = 1",
-            (network_id,)
-        )
-        result = cursor.fetchone()
-        if not result:
-            raise ValueError(f"No master scenario found for network {network_id}")
-        return result[0]
-
-    def _get_or_create_carrier(self, conn, network_id: int, carrier_name: str) -> int:
-        """Get existing carrier ID or create new carrier"""
+
+    def _get_or_create_carrier(self, conn, carrier_name: str) -> int:
+        """Get existing carrier ID or create new carrier (single network per database)"""
         # Try to find existing carrier
-        cursor = conn.execute(
-            "SELECT id FROM carriers WHERE network_id = ? AND name = ?",
-            (network_id, carrier_name)
-        )
+        cursor = conn.execute("SELECT id FROM carriers WHERE name = ?", (carrier_name,))
         result = cursor.fetchone()
         if result:
             return result[0]
-
+
         # Create new carrier
-        carrier_id = create_carrier(conn,
+        carrier_id = create_carrier(conn, carrier_name, 0.0, "#3498db", carrier_name)
         return carrier_id
-
+
     def _generate_component_coordinates(
-        self,
-        conn,
-        bus_id: int,
-        scatter_radius: float,
-        location_map: Optional[Dict],
-        component_name: str
+        self,
+        conn,
+        bus_id: int,
+        scatter_radius: float,
+        location_map: Optional[Dict],
+        component_name: str,
     ) -> Tuple[Optional[float], Optional[float]]:
         """Generate coordinates for a component near its connected bus"""
         # Check location map first
         if location_map and component_name in location_map:
             return location_map[component_name]
-
+
         # Get bus coordinates
         cursor = conn.execute(
-            "SELECT latitude, longitude FROM components WHERE id = ?",
-            (bus_id,)
+            "SELECT latitude, longitude FROM components WHERE id = ?", (bus_id,)
         )
         result = cursor.fetchone()
         if not result or result[0] is None or result[1] is None:
             return None, None
-
+
         bus_lat, bus_lon = result[0], result[1]
-
+
         # Generate unique name-based offset
         name_hash = hash(component_name) % 1000
         angle = (name_hash / 1000.0) * 2 * math.pi
-
+
         # Apply scatter radius
         lat_offset = scatter_radius * math.cos(angle)
         lon_offset = scatter_radius * math.sin(angle)
-
+
         return bus_lat + lat_offset, bus_lon + lon_offset
-
+
     def _import_component_attributes(
-        self,
-        conn,
-        component_id: int,
-        component_data: pd.Series,
+        self,
+        conn,
+        component_id: int,
+        component_data: pd.Series,
         component_type: str,
-        strict_validation: bool
+        strict_validation: bool,
     ):
         """Import component attributes, excluding bus connection columns"""
-
+
         # Get master scenario ID
-
-
-            self.logger.error(f"Could not find network_id for component {component_id}")
-            return
-
-        network_id = network_id_result[0]
-        scenario_id = self._get_master_scenario_id(conn, network_id)
-
+        scenario_id = None
+
         # Skip these columns as they're handled in the components table
         skip_columns = {
-
-
-
+            "bus",
+            "bus0",
+            "bus1",
+            "name",  # Bus connections and name
+            "x",
+            "y",
+            "location",  # Coordinate/location data (stored as latitude/longitude columns)
+            "carrier",  # Carrier reference (stored as carrier_id column)
         }
-
+
         attribute_count = 0
         skipped_count = 0
-
+
         for attr_name, value in component_data.items():
             if attr_name in skip_columns:
                 skipped_count += 1
                 continue
-
+
             if pd.isna(value):
                 skipped_count += 1
                 continue
-
+
             # Convert value to appropriate format for our database and use smart attribute setting
             try:
                 # Get validation rule to check expected data type
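Note: a minimal sketch of what a single attribute write reduces to under the new single-network convention, assuming an already open connection `conn` and an existing `component_id` (both hypothetical here); the attribute name and value are illustrative only:

    from pyconvexity.core.types import StaticValue
    from pyconvexity.models import set_static_attribute

    # scenario_id=None writes to the base data, replacing the old
    # per-network master-scenario lookup removed in this version.
    set_static_attribute(conn, component_id, "p_nom", StaticValue(150.0), None)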
@@ -1295,19 +1491,21 @@ class NetCDFModelImporter:
                     expected_type = rule.data_type
                 except:
                     expected_type = None
-
+
                 # Convert based on expected type or infer from value
-                if expected_type ==
+                if expected_type == "boolean":
                     # Handle boolean attributes that might come as int/float from PyPSA
                     if isinstance(value, (bool, np.bool_)):
                         static_value = StaticValue(bool(value))
                     elif isinstance(value, (int, np.integer)):
                         static_value = StaticValue(bool(value))  # 0 -> False, 1 -> True
                     elif isinstance(value, (float, np.floating)):
-                        static_value = StaticValue(
+                        static_value = StaticValue(
+                            bool(int(value))
+                        )  # 0.0 -> False, 1.0 -> True
                     else:
-                        static_value = StaticValue(str(value).lower() ==
-                elif expected_type ==
+                        static_value = StaticValue(str(value).lower() == "true")
+                elif expected_type == "int":
                     # Handle integer attributes
                     if isinstance(value, (int, np.integer)):
                         static_value = StaticValue(int(value))
@@ -1321,7 +1519,7 @@ class NetCDFModelImporter:
                         static_value = StaticValue(int(value))
                     else:
                         static_value = StaticValue(int(float(str(value))))
-                elif expected_type ==
+                elif expected_type == "float":
                     # Handle float attributes
                     if isinstance(value, (float, np.floating)):
                         if np.isfinite(value):
@@ -1349,27 +1547,35 @@ class NetCDFModelImporter:
                             continue  # Skip infinite/NaN values
                 else:
                     static_value = StaticValue(str(value))
-
+
                 # Use direct static attribute setting
-                set_static_attribute(
+                set_static_attribute(
+                    conn, component_id, attr_name, static_value, scenario_id
+                )
                 attribute_count += 1
-
+
             except Exception as e:
                 # Handle validation errors from db_utils functions
-                if (
-                    "
-                    "
+                if (
+                    "No validation rule found" in str(e)
+                    or "does not allow" in str(e)
+                    or "ValidationError" in str(type(e).__name__)
+                ):
                     if strict_validation:
                         raise
                     else:
-                        self.logger.warning(
+                        self.logger.warning(
+                            f"Skipping undefined/invalid attribute '{attr_name}' for {component_type} component {component_id}: {e}"
+                        )
                     skipped_count += 1
                     continue
                 else:
                     # Log but don't fail on other attribute import errors (like type conversion issues)
-                    self.logger.warning(
+                    self.logger.warning(
+                        f"Skipping attribute {attr_name} for component {component_id}: {e}"
+                    )
                     skipped_count += 1
-
+
     def _import_component_timeseries(
         self,
         conn,
@@ -1377,86 +1583,88 @@ class NetCDFModelImporter:
         component_id: int,
         component_name: str,
         component_type: str,
-        strict_validation: bool
+        strict_validation: bool,
     ):
         """Import timeseries attributes from PyPSA network"""
-
+
         # Get master scenario ID
-
-
-            self.logger.error(f"Could not find network_id for component {component_id}")
-            return
-
-        network_id = network_id_result[0]
-        scenario_id = self._get_master_scenario_id(conn, network_id)
-
+        scenario_id = None
+
         # Map component types to their PyPSA timeseries DataFrames
         timeseries_map = {
-
-
-
-
-
-
-
+            "BUS": getattr(network, "buses_t", {}),
+            "GENERATOR": getattr(network, "generators_t", {}),
+            "LOAD": getattr(network, "loads_t", {}),
+            "LINE": getattr(network, "lines_t", {}),
+            "LINK": getattr(network, "links_t", {}),
+            "STORAGE_UNIT": getattr(network, "storage_units_t", {}),
+            "STORE": getattr(network, "stores_t", {}),
         }
-
+
         component_timeseries = timeseries_map.get(component_type, {})
-
+
         if not component_timeseries:
             return
-
+
         timeseries_count = 0
-
+
         # Iterate through each timeseries attribute (e.g., 'p', 'q', 'p_set', 'p_max_pu', etc.)
         for attr_name, timeseries_df in component_timeseries.items():
             if component_name not in timeseries_df.columns:
                 continue
-
+
             # Get the timeseries data for this component
             component_series = timeseries_df[component_name]
-
+
             # Skip if all values are NaN
             if component_series.isna().all():
                 continue
-
+
             try:
                 # Convert pandas Series to list of values (using optimized approach)
                 values = []
-
+
                 for value in component_series:
                     # Skip NaN values by using 0.0 as default (PyPSA convention)
                     if pd.isna(value):
                         values.append(0.0)
                     else:
                         values.append(float(value))
-
+
                 if not values:
-                    self.logger.warning(
+                    self.logger.warning(
+                        f"No valid timeseries points for '{attr_name}' on {component_type} '{component_name}'"
+                    )
                     continue
-
+
                 # Use optimized timeseries attribute setting
-                set_timeseries_attribute(
+                set_timeseries_attribute(
+                    conn, component_id, attr_name, values, scenario_id
+                )
                 timeseries_count += 1
-
+
             except Exception as e:
                 if strict_validation:
                     raise
                 else:
-                    self.logger.warning(
+                    self.logger.warning(
+                        f"Skipping timeseries attribute '{attr_name}' for {component_type} component '{component_name}': {e}"
+                    )
                 continue
-
+
         if timeseries_count > 0:
-            self.logger.debug(
-
+            self.logger.debug(
+                f"Imported {timeseries_count} timeseries attributes for {component_type} '{component_name}'"
+            )
+
     def _generate_unique_name(self, base_name: str, component_type: str) -> str:
         """
         Generate a unique name for a component, ensuring no duplicates across all component types.
-
+
         Args:
             base_name: The original name to start with
             component_type: The type of component (used in the suffix if needed)
-
+
         Returns:
             A unique name that hasn't been used yet
         """
@@ -1464,13 +1672,13 @@ class NetCDFModelImporter:
         if base_name not in self._used_names:
             self._used_names.add(base_name)
             return base_name
-
+
         # If base name is taken, try appending the component type
         typed_name = f"{base_name}_{component_type.lower()}"
         if typed_name not in self._used_names:
             self._used_names.add(typed_name)
             return typed_name
-
+
         # If that's taken too, start adding numbers
         counter = 1
         while True:
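Note: a minimal standalone sketch of the fallback order implemented above (the names and the pre-populated set are illustrative; unlike the method, this sketch does not record the chosen name in the used set):

    used = {"battery", "battery_store"}

    def pick_name(base, comp_type, used):
        # 1) original name, 2) name + component type, 3) name + component type + counter
        if base not in used:
            return base
        typed = f"{base}_{comp_type.lower()}"
        if typed not in used:
            return typed
        n = 1
        while f"{typed}_{n}" in used:
            n += 1
        return f"{typed}_{n}"

    print(pick_name("battery", "STORE", used))  # -> "battery_store_1"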
@@ -1479,124 +1687,147 @@ class NetCDFModelImporter:
                 self._used_names.add(unique_name)
                 return unique_name
             counter += 1
-
+
     def _generate_scattered_coordinates(
-        self,
-        bus_lat: float,
-        bus_lon: float,
+        self,
+        bus_lat: float,
+        bus_lon: float,
         scatter_radius: float,
         component_count_at_bus: int,
-        component_index: int
+        component_index: int,
     ) -> Tuple[float, float]:
         """
         Generate scattered coordinates around a bus location.
-
+
         Args:
             bus_lat: Bus latitude
-            bus_lon: Bus longitude
+            bus_lon: Bus longitude
             scatter_radius: Radius in degrees to scatter within
             component_count_at_bus: Total number of components at this bus
             component_index: Index of this component (0-based)
-
+
         Returns:
             Tuple of (latitude, longitude) for the scattered position
         """
         if component_count_at_bus == 1:
             # Single component - place it at a moderate distance from the bus
             angle = random.uniform(0, 2 * math.pi)
-            distance = scatter_radius * random.uniform(
+            distance = scatter_radius * random.uniform(
+                0.5, 0.8
+            )  # 50-80% of scatter radius
         else:
             # Multiple components - arrange in a rough circle with some randomness
             base_angle = (2 * math.pi * component_index) / component_count_at_bus
-            angle_jitter = random.uniform(
+            angle_jitter = random.uniform(
+                -math.pi / 8, math.pi / 8
+            )  # ±22.5 degrees jitter
             angle = base_angle + angle_jitter
-
+
             # Vary distance randomly within the radius (use more of the available radius)
-            distance = scatter_radius * random.uniform(
-
+            distance = scatter_radius * random.uniform(
+                0.6, 1.0
+            )  # 60-100% of scatter radius
+
         # Calculate new coordinates
         new_lat = bus_lat + distance * math.cos(angle)
         new_lon = bus_lon + distance * math.sin(angle)
-
+
         return new_lat, new_lon
-
-    def _get_bus_coordinates_map(self, conn
+
+    def _get_bus_coordinates_map(self, conn) -> Dict[int, Tuple[float, float]]:
         """
         Get a mapping from bus component ID to coordinates.
-
+
         Returns:
             Dictionary mapping bus component ID to (latitude, longitude) tuple
         """
-        cursor = conn.execute(
+        cursor = conn.execute(
+            """
             SELECT id, latitude, longitude FROM components
-            WHERE
+            WHERE component_type = 'BUS'
             AND latitude IS NOT NULL AND longitude IS NOT NULL
             AND NOT (latitude = 0 AND longitude = 0)
-            """,
-
+            """,
+            (),
+        )
+
         bus_coords = {row[0]: (row[1], row[2]) for row in cursor.fetchall()}
         return bus_coords
-
+
     def _resolve_original_component_name(self, unique_name: str) -> str:
         """
         Resolve a potentially modified unique name back to its original name for CSV lookup.
-
+
         Args:
             unique_name: The unique name that may have been modified (e.g., "component_1", "component_generator")
-
+
         Returns:
             The original name for CSV lookup
         """
         # Remove common suffixes added by _generate_unique_name
         # Pattern 1: Remove "_NUMBER" suffix (e.g., "component_1" -> "component")
         import re
-
+
         # First try removing "_NUMBER" pattern
-        no_number_suffix = re.sub(r
+        no_number_suffix = re.sub(r"_\d+$", "", unique_name)
         if no_number_suffix != unique_name:
             return no_number_suffix
-
+
         # Then try removing "_COMPONENT_TYPE" pattern (e.g., "component_generator" -> "component")
-        component_types = [
+        component_types = [
+            "bus",
+            "generator",
+            "load",
+            "line",
+            "link",
+            "storage_unit",
+            "store",
+        ]
         for comp_type in component_types:
             suffix = f"_{comp_type.lower()}"
             if unique_name.endswith(suffix):
-                return unique_name[
-
+                return unique_name[: -len(suffix)]
+
         # If no patterns match, return the original name
         return unique_name
-
+
     def _get_csv_coordinates(
-        self,
-        component_name: str,
-        location_map: Optional[Dict[str, Tuple[float, float]]]
+        self,
+        component_name: str,
+        location_map: Optional[Dict[str, Tuple[float, float]]],
     ) -> Optional[Tuple[float, float]]:
         """
         Get coordinates for a component from the CSV location map.
-
+
         Args:
             component_name: The component name (potentially modified for uniqueness)
             location_map: Dictionary mapping original names to coordinates
-
+
         Returns:
             (latitude, longitude) tuple if found, None otherwise
         """
         if not location_map:
             return None
-
+
         # Try exact match first
         if component_name in location_map:
             coordinates = location_map[component_name]
-            self.logger.debug(
+            self.logger.debug(
+                f"CSV location exact match for '{component_name}': {coordinates}"
+            )
             return coordinates
-
+
         # Try resolving back to original name
         original_name = self._resolve_original_component_name(component_name)
         if original_name != component_name and original_name in location_map:
             coordinates = location_map[original_name]
-            self.logger.debug(
+            self.logger.debug(
+                f"CSV location resolved match for '{component_name}' -> '{original_name}': {coordinates}"
+            )
             return coordinates
-
+
         # No match found
-        self.logger.debug(
+        self.logger.debug(
+            f"No CSV location found for component '{component_name}' (original: '{original_name}')"
+        )
         return None