pyconvexity 0.1.3__py3-none-any.whl → 0.1.4__py3-none-any.whl
This diff reflects the changes between publicly available package versions as released to their public registry and is provided for informational purposes only.
- pyconvexity/__init__.py +27 -2
- pyconvexity/_version.py +1 -2
- pyconvexity/core/__init__.py +0 -2
- pyconvexity/core/database.py +158 -0
- pyconvexity/core/types.py +105 -18
- pyconvexity/data/__pycache__/__init__.cpython-313.pyc +0 -0
- pyconvexity/data/loaders/__pycache__/__init__.cpython-313.pyc +0 -0
- pyconvexity/data/loaders/__pycache__/cache.cpython-313.pyc +0 -0
- pyconvexity/data/schema/01_core_schema.sql +12 -12
- pyconvexity/data/schema/02_data_metadata.sql +17 -321
- pyconvexity/data/sources/__pycache__/__init__.cpython-313.pyc +0 -0
- pyconvexity/data/sources/__pycache__/gem.cpython-313.pyc +0 -0
- pyconvexity/data/sources/gem.py +5 -5
- pyconvexity/io/excel_exporter.py +34 -13
- pyconvexity/io/excel_importer.py +48 -51
- pyconvexity/io/netcdf_importer.py +1054 -51
- pyconvexity/models/attributes.py +209 -72
- pyconvexity/models/network.py +17 -15
- pyconvexity/solvers/pypsa/api.py +24 -1
- pyconvexity/solvers/pypsa/batch_loader.py +37 -44
- pyconvexity/solvers/pypsa/builder.py +62 -152
- pyconvexity/solvers/pypsa/solver.py +104 -253
- pyconvexity/solvers/pypsa/storage.py +740 -1373
- pyconvexity/timeseries.py +327 -0
- pyconvexity/validation/rules.py +2 -2
- {pyconvexity-0.1.3.dist-info → pyconvexity-0.1.4.dist-info}/METADATA +1 -1
- pyconvexity-0.1.4.dist-info/RECORD +46 -0
- pyconvexity-0.1.3.dist-info/RECORD +0 -45
- {pyconvexity-0.1.3.dist-info → pyconvexity-0.1.4.dist-info}/WHEEL +0 -0
- {pyconvexity-0.1.3.dist-info → pyconvexity-0.1.4.dist-info}/top_level.txt +0 -0

pyconvexity/solvers/pypsa/solver.py

@@ -1,7 +1,7 @@
 """
 Solving functionality for PyPSA networks.
 
-
+Simplified to always use multi-period optimization for consistency.
 """
 
 import logging
@@ -16,10 +16,9 @@ logger = logging.getLogger(__name__)
 
 class NetworkSolver:
     """
-
+    Simplified PyPSA network solver that always uses multi-period optimization.
 
-    This
-    multi-period optimization setup, and result extraction.
+    This ensures consistent behavior for both single-year and multi-year models.
     """
 
     def __init__(self):
@@ -33,6 +32,61 @@ class NetworkSolver:
                 "Please ensure it is installed correctly in the environment."
             ) from e
 
+    def _get_user_settings_path(self):
+        """Get the path to the user settings file (same location as Tauri uses)"""
+        try:
+            import platform
+            import os
+            from pathlib import Path
+
+            system = platform.system()
+            if system == "Darwin":  # macOS
+                home = Path.home()
+                app_data_dir = home / "Library" / "Application Support" / "com.convexity.desktop"
+            elif system == "Windows":
+                app_data_dir = Path(os.environ.get("APPDATA", "")) / "com.convexity.desktop"
+            else:  # Linux
+                home = Path.home()
+                app_data_dir = home / ".local" / "share" / "com.convexity.desktop"
+
+            settings_file = app_data_dir / "user_settings.json"
+            return settings_file if settings_file.exists() else None
+
+        except Exception as e:
+            logger.warning(f"Failed to determine user settings path: {e}")
+            return None
+
+    def _resolve_default_solver(self) -> str:
+        """Resolve 'default' solver to user's preferred solver"""
+        try:
+            import json
+
+            settings_path = self._get_user_settings_path()
+            if not settings_path:
+                logger.debug("User settings file not found, using 'highs' as default solver")
+                return 'highs'
+
+            with open(settings_path, 'r') as f:
+                user_settings = json.load(f)
+
+            # Get default solver from user settings
+            default_solver = user_settings.get('default_solver', 'highs')
+            logger.info(f"📖 Read default solver from user settings: {default_solver}")
+
+            # Validate that it's a known solver
+            known_solvers = ['highs', 'gurobi', 'gurobi (barrier)', 'gurobi (barrier homogeneous)',
+                             'gurobi (barrier+crossover balanced)', 'gurobi (dual simplex)', 'cplex', 'glpk', 'cbc', 'scip']
+
+            if default_solver in known_solvers:
+                return default_solver
+            else:
+                logger.warning(f"Unknown default solver '{default_solver}' in user settings, falling back to 'highs'")
+                return 'highs'
+
+        except Exception as e:
+            logger.warning(f"Failed to read default solver from user settings: {e}")
+            return 'highs'
+
     def solve_network(
         self,
         network: 'pypsa.Network',
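
Note on the new settings lookup above: _resolve_default_solver only reads the
'default_solver' key from the desktop app's user_settings.json and falls back to
'highs' in every failure path. A minimal standalone sketch of that lookup (the
macOS path and the key come from the diff; the example value "gurobi" and the
standalone form are illustrative assumptions):

    import json
    from pathlib import Path

    # macOS location from _get_user_settings_path; Windows uses %APPDATA% and
    # Linux uses ~/.local/share, both under com.convexity.desktop.
    settings_path = (Path.home() / "Library" / "Application Support"
                     / "com.convexity.desktop" / "user_settings.json")

    # Hypothetical file contents: {"default_solver": "gurobi"}
    if settings_path.exists():
        with open(settings_path) as f:
            preferred = json.load(f).get("default_solver", "highs")
    else:
        preferred = "highs"  # same fallback the solver applies without a settings file

    print(preferred)
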
@@ -71,23 +125,17 @@
         # Get solver configuration
         actual_solver_name, solver_config = self._get_solver_config(solver_name, solver_options)
 
-        # Always use multi-period mode for consistency
-        # Extract years from network snapshots
-        if hasattr(network, '_available_years') and network._available_years:
-            years = network._available_years
-        elif hasattr(network.snapshots, 'year'):
-            years = sorted(network.snapshots.year.unique())
-        else:
-            # If no year info, use a single default year
-            years = [2020]  # Default single year
 
-
+        years = list(network.investment_periods)
         effective_discount_rate = discount_rate if discount_rate is not None else 0.05  # Default 5%
-        logger.info(f"Configuring multi-period optimization with discount rate {effective_discount_rate}")
-        network = self._configure_multi_period_optimization(network, years, effective_discount_rate)
 
-
-
+        logger.info(f"Multi-period optimization with {len(years)} periods: {years}")
+        logger.info(f"Discount rate: {effective_discount_rate}")
+
+        # Calculate investment period weightings with discount rate
+        self._calculate_investment_weightings(network, effective_discount_rate)
+
+        # Set snapshot weightings after multi-period setup
         if conn and network_id:
             self._set_snapshot_weightings_after_multiperiod(conn, network_id, network)
 
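
Note on the hunk above: solve_network now takes the periods directly from
network.investment_periods and weights each period by a discount rate via
_calculate_investment_weightings (whose body is unchanged in this release and not
shown here, but whose docstring says it matches the old solver). A sketch of the
standard PyPSA pattern for discounted investment-period weightings, which this
presumably follows; the years and the 10-year final period are illustrative:

    import numpy as np
    import pandas as pd
    import pypsa

    n = pypsa.Network()
    years = [2030, 2040, 2050]
    timesteps = pd.date_range("2030-01-01", periods=24, freq="h")
    n.snapshots = pd.MultiIndex.from_product([years, timesteps],
                                             names=["period", "timestep"])
    n.investment_periods = years

    # Each period covers the years until the next one; assume 10 years for the last.
    n.investment_period_weightings["years"] = list(np.diff(years)) + [10]

    r = 0.05  # the 5% default discount rate used by solve_network
    t = 0
    for period, nyears in n.investment_period_weightings["years"].items():
        # Objective weighting = sum of discount factors over the period's years.
        n.investment_period_weightings.at[period, "objective"] = sum(
            1 / (1 + r) ** k for k in range(t, t + nyears)
        )
        t += nyears
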
@@ -99,23 +147,47 @@
             logger.info(f"Applying {len(optimization_constraints)} optimization-time constraints")
             extra_functionality = self._create_extra_functionality(optimization_constraints, constraint_applicator)
 
-        # Solver diagnostics
+        # Solver diagnostics
         logger.info(f"=== PYPSA SOLVER DIAGNOSTICS ===")
-        logger.info(f"
-        logger.info(f"
+        logger.info(f"Solver: {actual_solver_name}")
+        logger.info(f"Investment periods: {years}")
+        logger.info(f"Snapshots: {len(network.snapshots)} (MultiIndex)")
         if solver_config:
             logger.info(f"Solver options: {solver_config}")
-        logger.info(f"Multi-period optimization: {self._is_multi_period_network(network)}")
-        logger.info(f"Investment periods: {getattr(network, 'investment_periods', 'None')}")
         logger.info(f"=== END PYPSA SOLVER DIAGNOSTICS ===")
 
-        #
-        logger.info(f"Solving network with {actual_solver_name}")
+        # Always solve with multi-period optimization
+        logger.info(f"Solving network with multi-period optimization using {actual_solver_name}")
+
+        # DEBUG: Check network structure before solving
+        logger.info(f"DEBUG: Network snapshots type: {type(network.snapshots)}")
+        logger.info(f"DEBUG: Network snapshots names: {getattr(network.snapshots, 'names', 'No names')}")
+        logger.info(f"DEBUG: Network snapshots shape: {len(network.snapshots)}")
+        logger.info(f"DEBUG: First 3 snapshots: {network.snapshots[:3].tolist()}")
+
+        # Check some timeseries data structure
+        if hasattr(network, 'generators_t') and hasattr(network.generators_t, 'p_max_pu'):
+            if not network.generators_t.p_max_pu.empty:
+                logger.info(f"DEBUG: generators_t.p_max_pu type: {type(network.generators_t.p_max_pu)}")
+                logger.info(f"DEBUG: generators_t.p_max_pu index type: {type(network.generators_t.p_max_pu.index)}")
+                logger.info(f"DEBUG: generators_t.p_max_pu index names: {getattr(network.generators_t.p_max_pu.index, 'names', 'No names')}")
+                logger.info(f"DEBUG: generators_t.p_max_pu shape: {network.generators_t.p_max_pu.shape}")
+                logger.info(f"DEBUG: First 3 p_max_pu index values: {network.generators_t.p_max_pu.index[:3].tolist()}")
+
+        if hasattr(network, 'loads_t') and hasattr(network.loads_t, 'p_set'):
+            if not network.loads_t.p_set.empty:
+                logger.info(f"DEBUG: loads_t.p_set type: {type(network.loads_t.p_set)}")
+                logger.info(f"DEBUG: loads_t.p_set index type: {type(network.loads_t.p_set.index)}")
+                logger.info(f"DEBUG: loads_t.p_set index names: {getattr(network.loads_t.p_set.index, 'names', 'No names')}")
+                logger.info(f"DEBUG: loads_t.p_set shape: {network.loads_t.p_set.shape}")
+                logger.info(f"DEBUG: First 3 p_set index values: {network.loads_t.p_set.index[:3].tolist()}")
 
         if solver_config:
-            result =
+            result = network.optimize(solver_name=actual_solver_name, multi_investment_periods=True,
+                                      extra_functionality=extra_functionality, **solver_config)
         else:
-            result =
+            result = network.optimize(solver_name=actual_solver_name, multi_investment_periods=True,
+                                      extra_functionality=extra_functionality)
 
         solve_time = time.time() - start_time
 
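
Note on the hunk above: both code paths now always call
network.optimize(..., multi_investment_periods=True) and pass the optional
extra_functionality callback straight through. A minimal, self-contained toy
example of that call (bus, load and generator names, costs and demand are made
up; any installed solver can replace "highs"):

    import pandas as pd
    import pypsa

    n = pypsa.Network()
    years = [2030, 2040]
    timesteps = pd.date_range("2030-01-01", periods=4, freq="h")
    n.snapshots = pd.MultiIndex.from_product([years, timesteps],
                                             names=["period", "timestep"])
    n.investment_periods = years

    n.add("Bus", "bus")
    n.add("Load", "demand", bus="bus", p_set=100.0)
    n.add("Generator", "gas", bus="bus", p_nom_extendable=True,
          capital_cost=1000.0, marginal_cost=50.0)

    def extra_functionality(n, snapshots):
        # Hook for optimization-time constraints (the solver builds this from
        # _create_extra_functionality); a no-op in this sketch.
        pass

    n.optimize(solver_name="highs", multi_investment_periods=True,
               extra_functionality=extra_functionality)
    print(n.generators.p_nom_opt)
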
@@ -161,6 +233,7 @@ class NetworkSolver:
         except Exception as e:
             solve_time = time.time() - start_time
             logger.error(f"Solve failed after {solve_time:.2f} seconds: {e}")
+            logger.exception("Full solve error traceback:")
 
             return {
                 "success": False,
@@ -185,8 +258,10 @@
         """
         # Handle "default" solver
         if solver_name == 'default':
-
-
+            # Try to read user's default solver preference
+            actual_solver = self._resolve_default_solver()
+            logger.info(f"Resolved 'default' solver to: {actual_solver}")
+            return actual_solver, solver_options
 
         # Handle special Gurobi configurations
         if solver_name == 'gurobi (barrier)':
@@ -275,85 +350,6 @@
         logger.warning(f"Unknown solver name '{solver_name}' - falling back to 'highs'")
         return 'highs', solver_options
 
-    def _solve_with_config(self, network: 'pypsa.Network', solver_name: str, solver_config: Dict[str, Any], job_id: Optional[str], extra_functionality=None) -> Any:
-        """Solve network with specific solver configuration."""
-        # Check if multi-period optimization is needed
-        is_multi_period = self._is_multi_period_network(network)
-
-        # Add extra_functionality to solver config if provided
-        if extra_functionality:
-            solver_config = solver_config.copy()  # Don't modify original
-            solver_config['extra_functionality'] = extra_functionality
-
-        if is_multi_period:
-            return network.optimize(solver_name=solver_name, multi_investment_periods=True, **solver_config)
-        else:
-            return network.optimize(solver_name=solver_name, **solver_config)
-
-    def _solve_standard(self, network: 'pypsa.Network', solver_name: str, job_id: Optional[str], extra_functionality=None) -> Any:
-        """Solve network with standard configuration."""
-        # Check if multi-period optimization is needed
-        is_multi_period = self._is_multi_period_network(network)
-
-        if extra_functionality:
-            if is_multi_period:
-                return network.optimize(solver_name=solver_name, multi_investment_periods=True, extra_functionality=extra_functionality)
-            else:
-                return network.optimize(solver_name=solver_name, extra_functionality=extra_functionality)
-        else:
-            if is_multi_period:
-                return network.optimize(solver_name=solver_name, multi_investment_periods=True)
-            else:
-                return network.optimize(solver_name=solver_name)
-
-    def _is_multi_period_network(self, network: 'pypsa.Network') -> bool:
-        """
-        Determine if the network requires multi-period optimization.
-
-        Multi-period optimization is needed when:
-        1. Network has investment_periods attribute with multiple periods
-        2. Network snapshots are MultiIndex with period/timestep structure
-        3. Network has generators with build_year attributes
-
-        Args:
-            network: PyPSA Network object
-
-        Returns:
-            True if multi-period optimization is needed, False otherwise
-        """
-        try:
-            # Check if network has investment_periods
-            if hasattr(network, 'investment_periods') and network.investment_periods is not None:
-                periods = list(network.investment_periods)
-                if len(periods) > 1:
-                    return True
-                elif len(periods) == 1:
-                    # Even with single period, check if we have build_year constraints
-                    if hasattr(network, 'generators') and not network.generators.empty:
-                        if 'build_year' in network.generators.columns:
-                            build_year_gens = network.generators[network.generators['build_year'].notna()]
-                            if not build_year_gens.empty:
-                                return True
-
-            # Check if snapshots are MultiIndex (period, timestep structure)
-            if hasattr(network, 'snapshots') and hasattr(network.snapshots, 'names'):
-                if network.snapshots.names and len(network.snapshots.names) >= 2:
-                    if network.snapshots.names[0] == 'period':
-                        return True
-
-            # Check if we have generators with build_year (fallback check)
-            if hasattr(network, 'generators') and not network.generators.empty:
-                if 'build_year' in network.generators.columns:
-                    build_year_gens = network.generators[network.generators['build_year'].notna()]
-                    if not build_year_gens.empty:
-                        # If we have build_year but no proper multi-period setup, we should still try multi-period
-                        return True
-
-            return False
-
-        except Exception as e:
-            logger.error(f"Error checking multi-period status: {e}")
-            return False
 
     def _create_extra_functionality(self, optimization_constraints: list, constraint_applicator) -> callable:
         """
@@ -477,80 +473,6 @@
             logger.warning(f"Could not parse time interval '{time_interval}': {e}")
             return None
 
-    def _configure_multi_period_optimization(self, network: 'pypsa.Network', years: list, discount_rate: float) -> 'pypsa.Network':
-        """
-        Configure network for multi-period optimization (works for single or multiple years).
-
-        Args:
-            network: PyPSA Network object
-            years: List of years in the network
-            discount_rate: Discount rate for investment calculations
-
-        Returns:
-            Configured network
-        """
-        try:
-            import pandas as pd
-
-            logger.info(f"Configuring multi-period optimization for years: {years}")
-            logger.info(f"Current snapshots: {len(network.snapshots)} time steps")
-
-            # Handle case where snapshots don't have year info but years were extracted manually
-            if not hasattr(network.snapshots, 'year'):
-                if len(years) > 0:
-                    # Use the manually extracted years from timestamps
-                    # Create MultiIndex snapshots by dividing existing snapshots among the years
-                    snapshots_per_year = len(network.snapshots) // len(years)
-                    multi_snapshots = []
-
-                    for i, year in enumerate(years):
-                        start_idx = i * snapshots_per_year
-                        end_idx = (i + 1) * snapshots_per_year if i < len(years) - 1 else len(network.snapshots)
-                        year_snapshots = network.snapshots[start_idx:end_idx]
-                        for snapshot in year_snapshots:
-                            multi_snapshots.append((year, snapshot))
-
-                    logger.info(f"Created {len(multi_snapshots)} multi-period snapshots from {len(network.snapshots)} original snapshots")
-
-                else:
-                    # Only use 2020 fallback if no years were extracted at all (should be rare)
-                    single_year = 2020
-                    multi_snapshots = [(single_year, snapshot) for snapshot in network.snapshots]
-                    years = [single_year]
-                    logger.warning(f"No years provided, using fallback year {single_year}")
-            else:
-                # Create MultiIndex snapshots from existing year-based snapshots
-                multi_snapshots = []
-                for year in years:
-                    year_snapshots = network.snapshots[network.snapshots.year == year]
-                    for snapshot in year_snapshots:
-                        multi_snapshots.append((year, snapshot))
-
-                logger.info(f"Created {len(multi_snapshots)} multi-period snapshots from year-based snapshots")
-
-            # Set MultiIndex snapshots and investment periods
-            network.snapshots = pd.MultiIndex.from_tuples(multi_snapshots, names=['period', 'timestep'])
-            network.investment_periods = years
-            print(network.investment_periods)  # Match old code debug output
-
-            logger.info(f"Set investment_periods: {network.investment_periods}")
-            logger.info(f"MultiIndex snapshots created with levels: {network.snapshots.names}")
-
-            # Calculate investment period weightings with discount rate
-            self._calculate_investment_weightings(network, discount_rate)
-
-            # Configure build year constraints for multi-period optimization
-            self._configure_build_year_constraints(network, years)
-
-            logger.info(f"Successfully configured multi-period optimization for {len(years)} investment periods")
-
-        except Exception as e:
-            logger.error(f"Failed to configure multi-period optimization: {e}")
-            logger.exception("Full traceback:")
-            # Don't re-raise - let the solve continue with original configuration
-
-        return network
-
     def _calculate_investment_weightings(self, network: 'pypsa.Network', discount_rate: float) -> None:
         """
         Calculate investment period weightings using discount rate - matching old PyPSA solver exactly.
@@ -644,77 +566,6 @@
             logger.error(f"Failed to calculate investment weightings: {e}")
             logger.exception("Full traceback:")
 
-    def _configure_build_year_constraints(self, network: 'pypsa.Network', years: list) -> None:
-        """
-        Configure build year constraints for multi-period optimization.
-
-        In PyPSA multi-period optimization, generators should only be available for investment
-        starting from their build year. This method ensures proper constraint setup.
-
-        Args:
-            network: PyPSA Network object
-            years: List of investment periods (years)
-        """
-        try:
-            import pandas as pd
-
-            logger.info("Configuring build year constraints for multi-period optimization")
-
-            # Check if we have generators with build_year attributes
-            if not hasattr(network, 'generators') or network.generators.empty:
-                logger.warning("No generators found, skipping build year constraints")
-                return
-
-            if 'build_year' not in network.generators.columns:
-                logger.warning("No build_year column found in generators, skipping build year constraints")
-                return
-
-            # Get generators with build year information
-            generators_with_build_year = network.generators[network.generators['build_year'].notna()]
-
-            if generators_with_build_year.empty:
-                logger.warning("No generators have build_year values, skipping build year constraints")
-                return
-
-            logger.info(f"Applying build year constraints to {len(generators_with_build_year)} generators")
-
-            # Check if generators have proper extendable capacity settings
-            if 'p_nom_extendable' in network.generators.columns:
-                extendable_generators = generators_with_build_year[generators_with_build_year['p_nom_extendable'] == True]
-
-                if extendable_generators.empty:
-                    logger.warning("No generators are marked as extendable (p_nom_extendable=True). Build year constraints only apply to extendable generators.")
-                    return
-
-                logger.info(f"Found {len(extendable_generators)} extendable generators with build years")
-            else:
-                logger.warning("No p_nom_extendable column found - cannot determine which generators are extendable")
-                return
-
-            # Verify that build years align with investment periods
-            build_years = set(generators_with_build_year['build_year'].astype(int))
-            investment_years = set(years)
-
-            unmatched_build_years = build_years - investment_years
-            if unmatched_build_years:
-                logger.warning(f"Some generators have build years not in investment periods: {sorted(unmatched_build_years)}")
-
-            matched_build_years = build_years & investment_years
-            logger.info(f"Generators with build years matching investment periods: {sorted(matched_build_years)}")
-
-            # Store build year information for potential custom constraint application
-            network._build_year_info = {
-                'generators_with_build_year': generators_with_build_year.index.tolist(),
-                'build_years': generators_with_build_year['build_year'].to_dict(),
-                'investment_periods': years,
-                'extendable_generators': extendable_generators.index.tolist() if 'extendable_generators' in locals() else []
-            }
-
-            logger.info("Build year constraint configuration completed")
-
-        except Exception as e:
-            logger.error(f"Failed to configure build year constraints: {e}")
-            logger.exception("Full traceback:")
 
     def _extract_solve_results(self, network: 'pypsa.Network', result: Any, solve_time: float, solver_name: str, run_id: str) -> Dict[str, Any]:
         """