pyconvexity-0.3.8.post3-py3-none-any.whl → pyconvexity-0.3.8.post5-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of pyconvexity has been flagged as potentially problematic by the registry.
- pyconvexity/_version.py +1 -1
- pyconvexity/data/schema/01_core_schema.sql +51 -0
- pyconvexity/data/schema/migrate_add_geometries.sql +73 -0
- pyconvexity/models/__init__.py +49 -5
- pyconvexity/models/carriers.py +156 -0
- pyconvexity/models/components.py +120 -0
- pyconvexity/models/network.py +67 -1
- pyconvexity/models/results.py +138 -0
- pyconvexity/models/scenarios.py +102 -114
- pyconvexity/solvers/pypsa/api.py +31 -9
- pyconvexity/solvers/pypsa/constraints.py +5 -78
- pyconvexity/solvers/pypsa/solver.py +193 -73
- {pyconvexity-0.3.8.post3.dist-info → pyconvexity-0.3.8.post5.dist-info}/METADATA +1 -1
- {pyconvexity-0.3.8.post3.dist-info → pyconvexity-0.3.8.post5.dist-info}/RECORD +16 -13
- {pyconvexity-0.3.8.post3.dist-info → pyconvexity-0.3.8.post5.dist-info}/WHEEL +0 -0
- {pyconvexity-0.3.8.post3.dist-info → pyconvexity-0.3.8.post5.dist-info}/top_level.txt +0 -0
pyconvexity/models/results.py ADDED
@@ -0,0 +1,138 @@
+"""
+Results and statistics operations for PyConvexity.
+
+Provides operations for querying solve results and statistics.
+"""
+
+import sqlite3
+import json
+import logging
+from typing import Dict, Any, Optional
+from dataclasses import dataclass
+
+from pyconvexity.core.errors import ValidationError
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class SolveResults:
+    """Represents solve results for a scenario."""
+    network_statistics: Dict[str, Any]
+    metadata: Dict[str, Any]
+    status: str
+    objective_value: Optional[float]
+    solve_time: float
+
+
+@dataclass
+class YearlyResults:
+    """Represents yearly solve results."""
+    year: int
+    network_statistics: Dict[str, Any]
+    metadata: Dict[str, Any]
+
+
+def get_solve_results(
+    conn: sqlite3.Connection,
+    network_id: int,
+    scenario_id: Optional[int] = None
+) -> Optional[SolveResults]:
+    """
+    Get overall solve results for a scenario.
+
+    Args:
+        conn: Database connection
+        network_id: Network ID
+        scenario_id: Scenario ID (uses master scenario if None)
+
+    Returns:
+        SolveResults object or None if no results found
+    """
+    # Resolve scenario ID if not provided
+    if scenario_id is None:
+        from pyconvexity.models.scenarios import get_master_scenario
+        scenario = get_master_scenario(conn, network_id)
+        scenario_id = scenario.id
+
+    cursor = conn.execute("""
+        SELECT results_json, metadata_json, solve_status, objective_value, solve_time_seconds
+        FROM network_solve_results
+        WHERE network_id = ? AND scenario_id = ?
+        ORDER BY created_at DESC
+        LIMIT 1
+    """, (network_id, scenario_id))
+
+    row = cursor.fetchone()
+    if not row:
+        return None
+
+    try:
+        results_json = json.loads(row[0]) if row[0] else {}
+        metadata_json = json.loads(row[1]) if row[1] else {}
+
+        # Extract network_statistics from results_json
+        network_statistics = results_json.get('network_statistics', {})
+
+        return SolveResults(
+            network_statistics=network_statistics,
+            metadata=metadata_json,
+            status=row[2] or 'unknown',
+            objective_value=row[3],
+            solve_time=row[4] or 0.0
+        )
+    except json.JSONDecodeError as e:
+        logger.error(f"Error parsing JSON for scenario {scenario_id}: {e}")
+        return None
+
+
+def get_yearly_results(
+    conn: sqlite3.Connection,
+    network_id: int,
+    scenario_id: Optional[int] = None
+) -> Dict[int, YearlyResults]:
+    """
+    Get year-by-year solve results for a scenario.
+
+    Args:
+        conn: Database connection
+        network_id: Network ID
+        scenario_id: Scenario ID (uses master scenario if None)
+
+    Returns:
+        Dictionary mapping years to YearlyResults objects
+    """
+    # Resolve scenario ID if not provided
+    if scenario_id is None:
+        from pyconvexity.models.scenarios import get_master_scenario
+        scenario = get_master_scenario(conn, network_id)
+        scenario_id = scenario.id
+
+    cursor = conn.execute("""
+        SELECT year, results_json, metadata_json
+        FROM network_solve_results_by_year
+        WHERE network_id = ? AND scenario_id = ?
+        ORDER BY year
+    """, (network_id, scenario_id))
+
+    yearly_results = {}
+    for row in cursor.fetchall():
+        year = row[0]
+        try:
+            results_json = json.loads(row[1]) if row[1] else {}
+            metadata_json = json.loads(row[2]) if row[2] else {}
+
+            # Extract network_statistics from results_json
+            network_statistics = results_json.get('network_statistics', {})
+
+            yearly_results[year] = YearlyResults(
+                year=year,
+                network_statistics=network_statistics,
+                metadata=metadata_json
+            )
+        except json.JSONDecodeError as e:
+            logger.error(f"Error parsing JSON for year {year}: {e}")
+            continue
+
+    return yearly_results
+
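The new module is consumed directly with a sqlite3 connection. The following is an illustrative sketch, not part of the diff; the database path and network_id are placeholders.

import sqlite3

from pyconvexity.models.results import get_solve_results, get_yearly_results

# Placeholder path and IDs: point these at your own pyconvexity database.
conn = sqlite3.connect("energy_model.db")

# Omitting scenario_id falls back to the master scenario via get_master_scenario().
results = get_solve_results(conn, network_id=1)
if results is None:
    print("No solve results stored yet")
else:
    print(f"status={results.status} objective={results.objective_value} time={results.solve_time}s")

# Year-by-year breakdown, keyed by year.
for year, yearly in sorted(get_yearly_results(conn, network_id=1).items()):
    # Keys inside network_statistics depend on what the solver stored.
    print(year, sorted(yearly.network_statistics))

conn.close()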
pyconvexity/models/scenarios.py CHANGED
@@ -1,177 +1,165 @@
 """
 Scenario management operations for PyConvexity.
 
-Provides operations for
+Provides operations for listing, querying, and managing scenarios.
 """
 
 import sqlite3
 import logging
 from typing import List, Optional
-from
+from dataclasses import dataclass
 
-from pyconvexity.core.errors import ValidationError
+from pyconvexity.core.errors import ValidationError
 
 logger = logging.getLogger(__name__)
 
 
-
-
-
-
-
-
+@dataclass
+class Scenario:
+    """Represents a scenario in the network."""
+    id: int
+    network_id: int
+    name: str
+    description: Optional[str]
+    is_master: bool
+    created_at: str
+    updated_at: Optional[str] = None
+
+
+def list_scenarios(conn: sqlite3.Connection, network_id: int) -> List[Scenario]:
     """
-
+    List all scenarios for a network.
 
     Args:
         conn: Database connection
-        network_id: ID
-        name: Name of the scenario
-        description: Optional description
-        is_master: Whether this is a master scenario (default False)
+        network_id: Network ID
 
     Returns:
-
-
-    Raises:
-        ValidationError: If network doesn't exist or scenario name conflicts
-        DatabaseError: If creation fails
+        List of Scenario objects ordered by master first, then by creation date
     """
+    cursor = conn.execute("""
+        SELECT id, network_id, name, description, is_master, created_at, updated_at
+        FROM scenarios
+        WHERE network_id = ?
+        ORDER BY is_master DESC, created_at
+    """, (network_id,))
 
-
-
-
-
-
-
-
-
-
-
-
-    raise ValidationError(f"Scenario with name '{name}' already exists in network {network_id}")
-
-    # Insert the scenario (database triggers will handle master scenario uniqueness)
-    cursor = conn.execute(
-        "INSERT INTO scenarios (network_id, name, description, is_master, created_at) "
-        "VALUES (?, ?, ?, ?, datetime('now'))",
-        (network_id, name, description, is_master)
-    )
-
-    scenario_id = cursor.lastrowid
-    if not scenario_id:
-        raise DatabaseError("Failed to create scenario")
+    scenarios = []
+    for row in cursor.fetchall():
+        scenarios.append(Scenario(
+            id=row[0],
+            network_id=row[1],
+            name=row[2],
+            description=row[3],
+            is_master=bool(row[4]),
+            created_at=row[5],
+            updated_at=row[6]
+        ))
 
-
-    return scenario_id
+    return scenarios
 
 
-def
+def get_scenario_by_name(conn: sqlite3.Connection, network_id: int, name: str) -> Scenario:
     """
-
+    Get a scenario by name.
 
     Args:
         conn: Database connection
-        network_id: ID
+        network_id: Network ID
+        name: Scenario name
 
     Returns:
-
+        Scenario object
 
     Raises:
-
+        ValidationError: If scenario doesn't exist
     """
+    cursor = conn.execute("""
+        SELECT id, network_id, name, description, is_master, created_at, updated_at
+        FROM scenarios
+        WHERE network_id = ? AND name = ?
+    """, (network_id, name))
 
-
-
-    "
-
-
-
+    row = cursor.fetchone()
+    if not row:
+        raise ValidationError(f"Scenario '{name}' not found for network {network_id}")
+
+    return Scenario(
+        id=row[0],
+        network_id=row[1],
+        name=row[2],
+        description=row[3],
+        is_master=bool(row[4]),
+        created_at=row[5],
+        updated_at=row[6]
     )
-
-    scenarios = []
-    for row in cursor.fetchall():
-        scenarios.append({
-            'id': row[0],
-            'network_id': row[1],
-            'name': row[2],
-            'description': row[3],
-            'is_master': bool(row[4]),
-            'created_at': row[5],
-        })
-
-    logger.debug(f"Found {len(scenarios)} scenarios for network {network_id}")
-    return scenarios
 
 
-def
+def get_scenario_by_id(conn: sqlite3.Connection, scenario_id: int) -> Scenario:
     """
-    Get a
+    Get a scenario by ID.
 
     Args:
         conn: Database connection
-        scenario_id: ID
+        scenario_id: Scenario ID
 
     Returns:
-        Scenario
+        Scenario object
 
     Raises:
-        ValidationError: If scenario
-        DatabaseError: If query fails
+        ValidationError: If scenario doesn't exist
     """
-
-
-
-
-
-        (scenario_id,)
-    )
+    cursor = conn.execute("""
+        SELECT id, network_id, name, description, is_master, created_at, updated_at
+        FROM scenarios
+        WHERE id = ?
+    """, (scenario_id,))
 
     row = cursor.fetchone()
     if not row:
         raise ValidationError(f"Scenario with ID {scenario_id} not found")
 
-    return
-
-
-
-
-
-
-
+    return Scenario(
+        id=row[0],
+        network_id=row[1],
+        name=row[2],
+        description=row[3],
+        is_master=bool(row[4]),
+        created_at=row[5],
+        updated_at=row[6]
+    )
 
 
-def
+def get_master_scenario(conn: sqlite3.Connection, network_id: int) -> Scenario:
     """
-
+    Get the master scenario for a network.
 
     Args:
         conn: Database connection
-
+        network_id: Network ID
+
+    Returns:
+        Scenario object for the master scenario
 
     Raises:
-        ValidationError: If scenario
-        DatabaseError: If deletion fails
+        ValidationError: If master scenario doesn't exist
     """
-
-
-
-
-
-    )
+    cursor = conn.execute("""
+        SELECT id, network_id, name, description, is_master, created_at, updated_at
+        FROM scenarios
+        WHERE network_id = ? AND is_master = TRUE
+    """, (network_id,))
 
     row = cursor.fetchone()
     if not row:
-        raise ValidationError(f"
-
-
-
-
-
-
-
-
-
-
-    logger.info(f"Deleted scenario {scenario_id}")
+        raise ValidationError(f"No master scenario found for network {network_id}")
+
+    return Scenario(
+        id=row[0],
+        network_id=row[1],
+        name=row[2],
+        description=row[3],
+        is_master=bool(row[4]),
+        created_at=row[5],
+        updated_at=row[6]
+    )
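Because these helpers now return Scenario dataclasses rather than raw dicts and bare IDs, callers get attribute access. An illustrative sketch, not part of the diff; the database path and network_id are placeholders.

import sqlite3

from pyconvexity.models.scenarios import get_master_scenario, list_scenarios

conn = sqlite3.connect("energy_model.db")  # placeholder path

master = get_master_scenario(conn, network_id=1)
print(f"master scenario: id={master.id} name={master.name!r}")

# Master scenario sorts first, then the rest by creation date.
for scenario in list_scenarios(conn, network_id=1):
    flag = "*" if scenario.is_master else " "
    print(f"{flag} {scenario.id:>4} {scenario.name} (created {scenario.created_at})")

conn.close()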
pyconvexity/solvers/pypsa/api.py CHANGED
@@ -25,7 +25,8 @@ def solve_network(
     constraints_dsl: Optional[str] = None,
     discount_rate: Optional[float] = None,
     progress_callback: Optional[Callable[[int, str], None]] = None,
-    return_detailed_results: bool = True
+    return_detailed_results: bool = True,
+    custom_solver_config: Optional[Dict[str, Any]] = None
 ) -> Dict[str, Any]:
     """
     Complete solve workflow: build PyPSA network from database, solve, store results.
@@ -38,12 +39,15 @@ def solve_network(
         db_path: Path to the database file
         network_id: ID of the network to solve
         scenario_id: Optional scenario ID (uses master scenario if None)
-        solver_name: Solver to use (default: "highs")
+        solver_name: Solver to use (default: "highs"). Use "custom" for custom_solver_config.
         solver_options: Optional solver-specific options
         constraints_dsl: Optional DSL constraints to apply
         discount_rate: Optional discount rate for multi-period optimization
         progress_callback: Optional callback for progress updates (progress: int, message: str)
         return_detailed_results: If True, return comprehensive results; if False, return simple status
+        custom_solver_config: Optional custom solver configuration when solver_name="custom"
+            Format: {"solver": "actual_solver_name", "solver_options": {...}}
+            Example: {"solver": "gurobi", "solver_options": {"Method": 2, "Crossover": 0}}
 
     Returns:
         Dictionary with solve results - comprehensive if return_detailed_results=True, simple status otherwise
@@ -64,7 +68,9 @@ def solve_network(
             progress_callback(8, "Loaded network configuration")
 
         # Use configuration values with parameter overrides
-
+        # Note: network_config already has default of 0.0 from get_network_config()
+        effective_discount_rate = discount_rate if discount_rate is not None else network_config.get('discount_rate')
+        logger.info(f"Using discount rate: {effective_discount_rate} (from {'parameter override' if discount_rate is not None else 'network config'})")
 
         # Build network
         if progress_callback:
@@ -76,10 +82,18 @@ def solve_network(
         if progress_callback:
             progress_callback(50, f"Network built: {len(network.buses)} buses, {len(network.generators)} generators")
 
-        # Create constraint applicator
+        # Create constraint applicator and apply constraints BEFORE solve
        constraint_applicator = ConstraintApplicator()
 
-        #
+        # Apply constraints before solving (network modifications like GlobalConstraints)
+        if progress_callback:
+            progress_callback(60, "Applying constraints...")
+
+        constraint_applicator.apply_constraints(
+            conn, network_id, network, scenario_id, constraints_dsl
+        )
+
+        # Solve network
         if progress_callback:
             progress_callback(70, f"Solving with {solver_name}...")
 
@@ -92,7 +106,8 @@ def solve_network(
             conn=conn,
             network_id=network_id,
             scenario_id=scenario_id,
-            constraint_applicator=constraint_applicator
+            constraint_applicator=constraint_applicator,
+            custom_solver_config=custom_solver_config
         )
 
         if progress_callback:
@@ -192,8 +207,10 @@ def solve_pypsa_network(
     scenario_id: Optional[int] = None,
     solver_name: str = "highs",
     solver_options: Optional[Dict[str, Any]] = None,
+    discount_rate: Optional[float] = None,
     store_results: bool = True,
-    progress_callback: Optional[Callable[[int, str], None]] = None
+    progress_callback: Optional[Callable[[int, str], None]] = None,
+    custom_solver_config: Optional[Dict[str, Any]] = None
 ) -> Dict[str, Any]:
     """
     Solve PyPSA network and optionally store results back to database.
@@ -206,10 +223,13 @@ def solve_pypsa_network(
         db_path: Path to the database file (needed for result storage)
         network_id: ID of the network (for result storage)
         scenario_id: Optional scenario ID
-        solver_name: Solver to use (default: "highs")
+        solver_name: Solver to use (default: "highs"). Use "custom" for custom_solver_config.
         solver_options: Optional solver-specific options
+        discount_rate: Optional discount rate for multi-period optimization (default: 0.0)
         store_results: Whether to store results back to database (default: True)
         progress_callback: Optional callback for progress updates
+        custom_solver_config: Optional custom solver configuration when solver_name="custom"
+            Format: {"solver": "actual_solver_name", "solver_options": {...}}
 
     Returns:
         Dictionary with solve results and statistics
@@ -226,7 +246,9 @@ def solve_pypsa_network(
         solve_result = solver.solve_network(
             network,
             solver_name=solver_name,
-            solver_options=solver_options
+            solver_options=solver_options,
+            discount_rate=discount_rate,
+            custom_solver_config=custom_solver_config
         )
 
         if progress_callback:
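The new custom_solver_config parameter routes solver_name="custom" to an arbitrary installed solver with its own option set, as documented in the docstring above. An illustrative call follows; the database path, network ID, and Gurobi options are placeholders (the option values simply mirror the docstring example).

from pyconvexity.solvers.pypsa.api import solve_network

result = solve_network(
    db_path="energy_model.db",   # placeholder path
    network_id=1,                # placeholder ID
    solver_name="custom",
    custom_solver_config={
        # Format documented above: the actual solver plus its options.
        "solver": "gurobi",
        "solver_options": {"Method": 2, "Crossover": 0},
    },
    progress_callback=lambda pct, msg: print(f"[{pct:3d}%] {msg}"),
)
# The exact keys depend on return_detailed_results (see the docstring above).
print(result)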
pyconvexity/solvers/pypsa/constraints.py CHANGED
@@ -169,7 +169,8 @@ class ConstraintApplicator:
 
         logger.info(f"Constraint breakdown: {len(model_constraints)} model constraints, {len(network_constraints)} network constraints")
 
-        # Apply network constraints
+        # Apply network constraints ONLY (they modify the network structure before solve)
+        # Model constraints will be applied later by the solver via extra_functionality
         if network_constraints:
             network_constraints.sort(key=lambda x: x['priority'])
             for constraint in network_constraints:
@@ -193,88 +194,14 @@ class ConstraintApplicator:
                     # Continue with other constraints instead of failing the entire solve
                     continue
 
-            #
+            # Skip model constraints here - they will be applied by the solver during optimization
+            # via extra_functionality to ensure they have access to the actual optimization model
             if model_constraints:
-
+                logger.info(f"Skipping {len(model_constraints)} model constraints - will be applied during solve")
 
         except Exception as e:
             logger.error(f"Failed to apply custom constraints: {e}", exc_info=True)
 
-    def _apply_model_constraints(self, network: 'pypsa.Network', model_constraints: list):
-        """
-        Apply model constraints that need access to the optimization model.
-
-        This creates the optimization model, applies constraints to it, and then
-        replaces PyPSA's solve method to use the pre-constrained model.
-
-        Args:
-            network: PyPSA Network object
-            model_constraints: List of model constraint dictionaries
-        """
-        try:
-            logger.info(f"Applying {len(model_constraints)} model constraints...")
-
-            # Create the optimization model (same as PyPSA would do internally)
-            logger.info("Creating optimization model for constraint application...")
-            model = network.optimize.create_model()
-            logger.info(f"Created optimization model with {len(model.variables)} variable groups")
-
-            # Sort constraints by priority
-            sorted_constraints = sorted(model_constraints, key=lambda x: x['priority'])
-
-            # Apply each model constraint
-            for constraint in sorted_constraints:
-                try:
-                    constraint_code = constraint['constraint_code']
-                    constraint_name = constraint['name']
-
-                    logger.info(f"Applying model constraint '{constraint_name}' (priority {constraint['priority']})")
-
-                    # Create execution environment with network, model, and utilities
-                    exec_globals = {
-                        'n': network,
-                        'network': network,
-                        'model': model,
-                        'm': model,
-                        'snapshots': network.snapshots,
-                        'pd': pd,
-                        'np': np,
-                        'xr': __import__('xarray'),  # Import xarray for DataArray operations
-                    }
-
-                    # Execute the constraint code
-                    exec(constraint_code, exec_globals)
-                    logger.info(f"Successfully applied model constraint '{constraint_name}'")
-
-                except Exception as e:
-                    error_msg = f"Failed to apply model constraint '{constraint.get('name', 'unknown')}': {e}"
-                    logger.error(error_msg, exc_info=True)
-                    # Continue with other constraints instead of failing
-                    continue
-
-            # Store the constrained model for the solver to use
-            # We'll replace PyPSA's solve_model method to use our pre-constrained model
-            logger.info("Replacing PyPSA's solve method to use pre-constrained model...")
-
-            # Store original methods
-            original_optimize = network.optimize
-            original_solve_model = original_optimize.solve_model
-
-            # Create a wrapper that uses our pre-constrained model
-            def constrained_solve_model(*args, **kwargs):
-                """Use the pre-constrained model instead of creating a new one."""
-                logger.info("Using pre-constrained model for solve...")
-                return original_solve_model(model, *args, **kwargs)
-
-            # Replace the solve_model method
-            network.optimize.solve_model = constrained_solve_model
-
-            logger.info(f"Successfully applied {len(model_constraints)} model constraints")
-
-        except Exception as e:
-            logger.error(f"Failed to apply model constraints: {e}", exc_info=True)
-            # Don't re-raise - let the solve continue without constraints rather than fail completely
-
     def _apply_dsl_constraints(self, network: 'pypsa.Network', constraints_dsl: str):
         """
         Apply DSL constraints to the network.