pyconvexity 0.3.8.post2.tar.gz → 0.3.8.post4.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pyconvexity might be problematic.

Files changed (53)
  1. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/PKG-INFO +1 -1
  2. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/pyproject.toml +2 -2
  3. pyconvexity-0.3.8.post4/src/pyconvexity/_version.py +1 -0
  4. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/solvers/pypsa/api.py +25 -8
  5. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/solvers/pypsa/constraints.py +5 -78
  6. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/solvers/pypsa/solver.py +320 -24
  7. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity.egg-info/PKG-INFO +1 -1
  8. pyconvexity-0.3.8.post2/src/pyconvexity/_version.py +0 -1
  9. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/README.md +0 -0
  10. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/setup.cfg +0 -0
  11. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/__init__.py +0 -0
  12. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/core/__init__.py +0 -0
  13. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/core/database.py +0 -0
  14. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/core/errors.py +0 -0
  15. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/core/types.py +0 -0
  16. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/README.md +0 -0
  17. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/__init__.py +0 -0
  18. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/__pycache__/__init__.cpython-313.pyc +0 -0
  19. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/loaders/__init__.py +0 -0
  20. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/loaders/__pycache__/__init__.cpython-313.pyc +0 -0
  21. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/loaders/__pycache__/cache.cpython-313.pyc +0 -0
  22. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/loaders/cache.py +0 -0
  23. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/schema/01_core_schema.sql +0 -0
  24. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/schema/02_data_metadata.sql +0 -0
  25. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/schema/03_validation_data.sql +0 -0
  26. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/schema/04_scenario_schema.sql +0 -0
  27. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/sources/__init__.py +0 -0
  28. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/sources/__pycache__/__init__.cpython-313.pyc +0 -0
  29. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/sources/__pycache__/gem.cpython-313.pyc +0 -0
  30. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/sources/gem.py +0 -0
  31. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/io/__init__.py +0 -0
  32. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/io/excel_exporter.py +0 -0
  33. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/io/excel_importer.py +0 -0
  34. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/io/netcdf_exporter.py +0 -0
  35. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/io/netcdf_importer.py +0 -0
  36. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/models/__init__.py +0 -0
  37. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/models/attributes.py +0 -0
  38. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/models/components.py +0 -0
  39. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/models/network.py +0 -0
  40. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/models/scenarios.py +0 -0
  41. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/solvers/__init__.py +0 -0
  42. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/solvers/pypsa/__init__.py +0 -0
  43. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/solvers/pypsa/batch_loader.py +0 -0
  44. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/solvers/pypsa/builder.py +0 -0
  45. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/solvers/pypsa/storage.py +0 -0
  46. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/timeseries.py +0 -0
  47. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/validation/__init__.py +0 -0
  48. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity/validation/rules.py +0 -0
  49. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity.egg-info/SOURCES.txt +0 -0
  50. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity.egg-info/dependency_links.txt +0 -0
  51. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity.egg-info/requires.txt +0 -0
  52. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/src/pyconvexity.egg-info/top_level.txt +0 -0
  53. {pyconvexity-0.3.8.post2 → pyconvexity-0.3.8.post4}/tests/test_core_types.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: pyconvexity
- Version: 0.3.8.post2
+ Version: 0.3.8.post4
  Summary: Python library for energy system modeling and optimization with PyPSA
  Author-email: Convexity Team <info@convexity.com>
  License: MIT
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
  [project]
  name = "pyconvexity"
- version = "0.3.8.post2"
+ version = "0.3.8post4"
  description = "Python library for energy system modeling and optimization with PyPSA"
  readme = "README.md"
  license = {text = "MIT"}
@@ -81,7 +81,7 @@ profile = "black"
  line_length = 100
 
  [tool.mypy]
- python_version = "0.3.8.post2"
+ python_version = "0.3.8post4"
  warn_return_any = true
  warn_unused_configs = true
  disallow_untyped_defs = true
@@ -0,0 +1 @@
+ __version__ = "0.3.8post4"
@@ -25,7 +25,8 @@ def solve_network(
  constraints_dsl: Optional[str] = None,
  discount_rate: Optional[float] = None,
  progress_callback: Optional[Callable[[int, str], None]] = None,
- return_detailed_results: bool = True
+ return_detailed_results: bool = True,
+ custom_solver_config: Optional[Dict[str, Any]] = None
  ) -> Dict[str, Any]:
  """
  Complete solve workflow: build PyPSA network from database, solve, store results.
@@ -38,12 +39,15 @@ def solve_network(
  db_path: Path to the database file
  network_id: ID of the network to solve
  scenario_id: Optional scenario ID (uses master scenario if None)
- solver_name: Solver to use (default: "highs")
+ solver_name: Solver to use (default: "highs"). Use "custom" for custom_solver_config.
  solver_options: Optional solver-specific options
  constraints_dsl: Optional DSL constraints to apply
  discount_rate: Optional discount rate for multi-period optimization
  progress_callback: Optional callback for progress updates (progress: int, message: str)
  return_detailed_results: If True, return comprehensive results; if False, return simple status
+ custom_solver_config: Optional custom solver configuration when solver_name="custom"
+ Format: {"solver": "actual_solver_name", "solver_options": {...}}
+ Example: {"solver": "gurobi", "solver_options": {"Method": 2, "Crossover": 0}}
 
  Returns:
  Dictionary with solve results - comprehensive if return_detailed_results=True, simple status otherwise
@@ -76,10 +80,18 @@ def solve_network(
  if progress_callback:
  progress_callback(50, f"Network built: {len(network.buses)} buses, {len(network.generators)} generators")
 
- # Create constraint applicator (constraints will be applied during solve via extra_functionality)
+ # Create constraint applicator and apply constraints BEFORE solve
  constraint_applicator = ConstraintApplicator()
 
- # Solve network (constraints are applied during optimization)
+ # Apply constraints before solving (network modifications like GlobalConstraints)
+ if progress_callback:
+ progress_callback(60, "Applying constraints...")
+
+ constraint_applicator.apply_constraints(
+ conn, network_id, network, scenario_id, constraints_dsl
+ )
+
+ # Solve network
  if progress_callback:
  progress_callback(70, f"Solving with {solver_name}...")
 
@@ -92,7 +104,8 @@ def solve_network(
  conn=conn,
  network_id=network_id,
  scenario_id=scenario_id,
- constraint_applicator=constraint_applicator
+ constraint_applicator=constraint_applicator,
+ custom_solver_config=custom_solver_config
  )
 
  if progress_callback:
@@ -193,7 +206,8 @@ def solve_pypsa_network(
  solver_name: str = "highs",
  solver_options: Optional[Dict[str, Any]] = None,
  store_results: bool = True,
- progress_callback: Optional[Callable[[int, str], None]] = None
+ progress_callback: Optional[Callable[[int, str], None]] = None,
+ custom_solver_config: Optional[Dict[str, Any]] = None
  ) -> Dict[str, Any]:
  """
  Solve PyPSA network and optionally store results back to database.
@@ -206,10 +220,12 @@ def solve_pypsa_network(
  db_path: Path to the database file (needed for result storage)
  network_id: ID of the network (for result storage)
  scenario_id: Optional scenario ID
- solver_name: Solver to use (default: "highs")
+ solver_name: Solver to use (default: "highs"). Use "custom" for custom_solver_config.
  solver_options: Optional solver-specific options
  store_results: Whether to store results back to database (default: True)
  progress_callback: Optional callback for progress updates
+ custom_solver_config: Optional custom solver configuration when solver_name="custom"
+ Format: {"solver": "actual_solver_name", "solver_options": {...}}
 
  Returns:
  Dictionary with solve results and statistics
@@ -226,7 +242,8 @@ def solve_pypsa_network(
  solve_result = solver.solve_network(
  network,
  solver_name=solver_name,
- solver_options=solver_options
+ solver_options=solver_options,
+ custom_solver_config=custom_solver_config
  )
 
  if progress_callback:
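The new custom_solver_config parameter threads from solve_network down into the solver layer. A minimal usage sketch, assuming the import path mirrors src/pyconvexity/solvers/pypsa/api.py; the database path, network ID and callback are placeholders rather than values taken from the package:

    from pyconvexity.solvers.pypsa.api import solve_network

    # Route the solve through an arbitrary installed solver via solver_name="custom".
    result = solve_network(
        db_path="model.db",            # placeholder path
        network_id=1,                  # placeholder ID
        scenario_id=None,              # None = master scenario
        solver_name="custom",
        custom_solver_config={
            "solver": "gurobi",
            "solver_options": {"Method": 2, "Crossover": 0},
        },
        progress_callback=lambda pct, msg: print(f"{pct:3d}% {msg}"),
    )
    print(result)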
@@ -169,7 +169,8 @@ class ConstraintApplicator:
 
  logger.info(f"Constraint breakdown: {len(model_constraints)} model constraints, {len(network_constraints)} network constraints")
 
- # Apply network constraints first (they modify the network structure)
+ # Apply network constraints ONLY (they modify the network structure before solve)
+ # Model constraints will be applied later by the solver via extra_functionality
  if network_constraints:
  network_constraints.sort(key=lambda x: x['priority'])
  for constraint in network_constraints:
@@ -193,88 +194,14 @@ class ConstraintApplicator:
  # Continue with other constraints instead of failing the entire solve
  continue
 
- # Apply model constraints (they need access to the optimization model)
+ # Skip model constraints here - they will be applied by the solver during optimization
+ # via extra_functionality to ensure they have access to the actual optimization model
  if model_constraints:
- self._apply_model_constraints(network, model_constraints)
+ logger.info(f"Skipping {len(model_constraints)} model constraints - will be applied during solve")
 
  except Exception as e:
  logger.error(f"Failed to apply custom constraints: {e}", exc_info=True)
 
- def _apply_model_constraints(self, network: 'pypsa.Network', model_constraints: list):
- """
- Apply model constraints that need access to the optimization model.
-
- This creates the optimization model, applies constraints to it, and then
- replaces PyPSA's solve method to use the pre-constrained model.
-
- Args:
- network: PyPSA Network object
- model_constraints: List of model constraint dictionaries
- """
- try:
- logger.info(f"Applying {len(model_constraints)} model constraints...")
-
- # Create the optimization model (same as PyPSA would do internally)
- logger.info("Creating optimization model for constraint application...")
- model = network.optimize.create_model()
- logger.info(f"Created optimization model with {len(model.variables)} variable groups")
-
- # Sort constraints by priority
- sorted_constraints = sorted(model_constraints, key=lambda x: x['priority'])
-
- # Apply each model constraint
- for constraint in sorted_constraints:
- try:
- constraint_code = constraint['constraint_code']
- constraint_name = constraint['name']
-
- logger.info(f"Applying model constraint '{constraint_name}' (priority {constraint['priority']})")
-
- # Create execution environment with network, model, and utilities
- exec_globals = {
- 'n': network,
- 'network': network,
- 'model': model,
- 'm': model,
- 'snapshots': network.snapshots,
- 'pd': pd,
- 'np': np,
- 'xr': __import__('xarray'), # Import xarray for DataArray operations
- }
-
- # Execute the constraint code
- exec(constraint_code, exec_globals)
- logger.info(f"Successfully applied model constraint '{constraint_name}'")
-
- except Exception as e:
- error_msg = f"Failed to apply model constraint '{constraint.get('name', 'unknown')}': {e}"
- logger.error(error_msg, exc_info=True)
- # Continue with other constraints instead of failing
- continue
-
- # Store the constrained model for the solver to use
- # We'll replace PyPSA's solve_model method to use our pre-constrained model
- logger.info("Replacing PyPSA's solve method to use pre-constrained model...")
-
- # Store original methods
- original_optimize = network.optimize
- original_solve_model = original_optimize.solve_model
-
- # Create a wrapper that uses our pre-constrained model
- def constrained_solve_model(*args, **kwargs):
- """Use the pre-constrained model instead of creating a new one."""
- logger.info("Using pre-constrained model for solve...")
- return original_solve_model(model, *args, **kwargs)
-
- # Replace the solve_model method
- network.optimize.solve_model = constrained_solve_model
-
- logger.info(f"Successfully applied {len(model_constraints)} model constraints")
-
- except Exception as e:
- logger.error(f"Failed to apply model constraints: {e}", exc_info=True)
- # Don't re-raise - let the solve continue without constraints rather than fail completely
-
  def _apply_dsl_constraints(self, network: 'pypsa.Network', constraints_dsl: str):
  """
  Apply DSL constraints to the network.
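With the removal above, model constraints now reach the optimizer exclusively through PyPSA's extra_functionality hook, which runs after PyPSA has built the linopy model, so constraint code sees the same model that actually gets solved. A hedged illustration of that mechanism (the cap value, constraint name and input file below are invented for the example, not shipped constraint code):

    import pypsa

    def apply_model_constraints(n, snapshots):
        # Called by PyPSA after the linopy model is built; n.model is the real model.
        m = n.model
        p_nom = m.variables["Generator-p_nom"]   # extendable generator capacities
        m.add_constraints(p_nom.sum() <= 10_000, name="total-p_nom-cap")  # example cap

    # n must already be populated with buses, generators, loads, snapshots, ...
    n = pypsa.Network("my_network.nc")           # placeholder input file
    n.optimize(solver_name="highs", extra_functionality=apply_model_constraints)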
@@ -75,7 +75,11 @@ class NetworkSolver:
 
  # Validate that it's a known solver
  known_solvers = ['highs', 'gurobi', 'gurobi (barrier)', 'gurobi (barrier homogeneous)',
- 'gurobi (barrier+crossover balanced)', 'gurobi (dual simplex)', 'cplex', 'glpk', 'cbc', 'scip']
+ 'gurobi (barrier+crossover balanced)', 'gurobi (dual simplex)',
+ 'mosek', 'mosek (default)', 'mosek (barrier)', 'mosek (barrier+crossover)', 'mosek (dual simplex)',
+ 'copt', 'copt (barrier)', 'copt (barrier homogeneous)', 'copt (barrier+crossover)',
+ 'copt (dual simplex)', 'copt (concurrent)',
+ 'cplex', 'glpk', 'cbc', 'scip']
 
  if default_solver in known_solvers:
  return default_solver
@@ -97,17 +101,21 @@
  conn=None,
  network_id: Optional[int] = None,
  scenario_id: Optional[int] = None,
- constraint_applicator=None
+ constraint_applicator=None,
+ custom_solver_config: Optional[Dict[str, Any]] = None
  ) -> Dict[str, Any]:
  """
  Solve PyPSA network and return results.
 
  Args:
  network: PyPSA Network object to solve
- solver_name: Solver to use (default: "highs")
+ solver_name: Solver to use (default: "highs"). Use "custom" for custom_solver_config.
  solver_options: Optional solver-specific options
  discount_rate: Optional discount rate for multi-period optimization
  job_id: Optional job ID for tracking
+ custom_solver_config: Optional custom solver configuration when solver_name="custom"
+ Format: {"solver": "actual_solver_name", "solver_options": {...}}
+ Example: {"solver": "gurobi", "solver_options": {"Method": 2, "Crossover": 0}}
 
  Returns:
  Dictionary with solve results and metadata
@@ -123,7 +131,7 @@
 
  try:
  # Get solver configuration
- actual_solver_name, solver_config = self._get_solver_config(solver_name, solver_options)
+ actual_solver_name, solver_config = self._get_solver_config(solver_name, solver_options, custom_solver_config)
 
 
  years = list(network.investment_periods)
@@ -139,17 +147,17 @@
  if conn and network_id:
  self._set_snapshot_weightings_after_multiperiod(conn, network_id, network)
 
- # Prepare optimization constraints with type detection
+ # Prepare optimization constraints - ONLY model constraints
+ # Network constraints were already applied before solve in api.py
  extra_functionality = None
  model_constraints = []
- network_constraints = []
 
  if conn and network_id and constraint_applicator:
  optimization_constraints = constraint_applicator.get_optimization_constraints(conn, network_id, scenario_id)
  if optimization_constraints:
  logger.info(f"Found {len(optimization_constraints)} optimization constraints")
 
- # Separate constraints by type
+ # Filter for model constraints only (network constraints already applied)
  for constraint in optimization_constraints:
  constraint_code = constraint.get('constraint_code', '')
  constraint_type = self._detect_constraint_type(constraint_code)
@@ -157,21 +165,19 @@
 
  if constraint_type == "model_constraint":
  model_constraints.append(constraint)
- logger.info(f"Detected model constraint: {constraint_name}")
+ logger.info(f"Will apply model constraint during solve: {constraint_name}")
  else:
- network_constraints.append(constraint)
- logger.info(f"Detected network constraint: {constraint_name}")
+ logger.info(f"Skipping network constraint (already applied): {constraint_name}")
 
- logger.info(f"Constraint breakdown: {len(model_constraints)} model constraints, {len(network_constraints)} network constraints")
+ logger.info(f"Will apply {len(model_constraints)} model constraints during optimization")
 
- # Create extra_functionality for ALL constraints (both model and network)
- all_constraints = model_constraints + network_constraints
- if all_constraints:
- extra_functionality = self._create_extra_functionality(all_constraints, constraint_applicator)
- logger.info(f"Prepared {len(all_constraints)} constraints for optimization-time application")
+ # Create extra_functionality for model constraints only
+ if model_constraints:
+ extra_functionality = self._create_extra_functionality(model_constraints, constraint_applicator)
+ logger.info(f"Prepared {len(model_constraints)} model constraints for optimization-time application")
 
- # NOTE: Model constraints are now applied DURING solve via extra_functionality
- # This ensures they are applied to the actual model PyPSA creates, not a separate model
+ # NOTE: Model constraints are applied DURING solve via extra_functionality
+ # Network constraints were already applied to the network structure before solve
 
  # Solver diagnostics
  logger.info(f"=== PYPSA SOLVER DIAGNOSTICS ===")
@@ -271,17 +277,40 @@
  "objective_value": None
  }
 
- def _get_solver_config(self, solver_name: str, solver_options: Optional[Dict[str, Any]] = None) -> tuple[str, Optional[Dict[str, Any]]]:
+ def _get_solver_config(self, solver_name: str, solver_options: Optional[Dict[str, Any]] = None,
+ custom_solver_config: Optional[Dict[str, Any]] = None) -> tuple[str, Optional[Dict[str, Any]]]:
  """
  Get the actual solver name and options for special solver configurations.
 
  Args:
- solver_name: The solver name (e.g., 'gurobi (barrier)', 'highs')
+ solver_name: The solver name (e.g., 'gurobi (barrier)', 'highs', 'custom')
  solver_options: Optional additional solver options
+ custom_solver_config: Optional custom solver configuration for solver_name='custom'
+ Format: {"solver": "actual_solver_name", "solver_options": {...}}
 
  Returns:
  Tuple of (actual_solver_name, solver_options_dict)
  """
+ # Handle "custom" solver with custom configuration
+ if solver_name == 'custom':
+ if not custom_solver_config:
+ raise ValueError("custom_solver_config must be provided when solver_name='custom'")
+
+ if 'solver' not in custom_solver_config:
+ raise ValueError("custom_solver_config must contain 'solver' key with the actual solver name")
+
+ actual_solver = custom_solver_config['solver']
+ custom_options = custom_solver_config.get('solver_options', {})
+
+ # Merge with any additional solver_options passed separately
+ if solver_options:
+ merged_options = {'solver_options': {**custom_options, **solver_options}}
+ else:
+ merged_options = {'solver_options': custom_options} if custom_options else None
+
+ logger.info(f"Using custom solver configuration: {actual_solver} with options: {custom_options}")
+ return actual_solver, merged_options
+
  # Handle "default" solver
  if solver_name == 'default':
  # Try to read user's default solver preference
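The "custom" branch above defers entirely to custom_solver_config and layers any separately passed solver_options on top of it. A standalone sketch of that merge order (resolve_custom is a hypothetical helper written only for illustration, not part of pyconvexity):

    from typing import Any, Dict, Optional, Tuple

    def resolve_custom(custom_solver_config: Dict[str, Any],
                       solver_options: Optional[Dict[str, Any]] = None) -> Tuple[str, Optional[Dict[str, Any]]]:
        # Mirrors the branch above: 'solver' is required; options from the two sources
        # are merged, with the separately passed solver_options taking precedence.
        if "solver" not in custom_solver_config:
            raise ValueError("custom_solver_config must contain 'solver'")
        actual = custom_solver_config["solver"]
        base = custom_solver_config.get("solver_options", {})
        if solver_options:
            return actual, {"solver_options": {**base, **solver_options}}
        return actual, ({"solver_options": base} if base else None)

    name, opts = resolve_custom({"solver": "gurobi", "solver_options": {"Method": 2}},
                                solver_options={"TimeLimit": 1800})
    assert name == "gurobi"
    assert opts == {"solver_options": {"Method": 2, "TimeLimit": 1800}}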
@@ -296,7 +325,7 @@
  'Method': 2, # Barrier
  'Crossover': 0, # Skip crossover
  'MIPGap': 0.05, # 5% gap
- 'Threads': 4, # Use all cores
+ 'Threads': 0, # Use all cores (0 = auto)
  'Presolve': 2, # Aggressive presolve
  'ConcurrentMIP': 1, # Parallel root strategies
  'BarConvTol': 1e-4, # Relaxed barrier convergence
@@ -317,7 +346,7 @@
  'Method': 2, # Barrier
  'Crossover': 0, # Skip crossover
  'MIPGap': 0.05,
- 'Threads': 4,
+ 'Threads': 0, # Use all cores (0 = auto)
  'Presolve': 2,
  'ConcurrentMIP': 1,
  'BarConvTol': 1e-4,
@@ -338,7 +367,7 @@
  'Method': 2,
  'Crossover': 1, # Dual crossover
  'MIPGap': 0.01,
- 'Threads': 4,
+ 'Threads': 0, # Use all cores (0 = auto)
  'Presolve': 2,
  'Heuristics': 0.1,
  'Cuts': 2,
@@ -367,8 +396,275 @@
  gurobi_dual_options.update(solver_options)
  return 'gurobi', gurobi_dual_options
 
+ # Handle special Mosek configurations
+ elif solver_name == 'mosek (default)':
+ # No custom options - let Mosek use its default configuration
+ mosek_default_options = {
+ 'solver_options': {
+ 'MSK_DPAR_MIO_REL_GAP_CONST': 0.05, # MIP relative gap tolerance (5% to match Gurobi)
+ 'MSK_IPAR_MIO_MAX_TIME': 3600, # Max time 1 hour
+ # Safe MIP performance improvements
+ 'MSK_IPAR_MIO_HEURISTIC_LEVEL': 2, # Moderate heuristics (safe, helps find good solutions faster)
+ 'MSK_IPAR_MIO_SYMMETRY_LEVEL': 2, # Moderate symmetry detection (safe, can dramatically speed up symmetric problems)
+ 'MSK_IPAR_MIO_PRESOLVE_AGGREGATOR_USE': 1, # MIP presolve aggregator (safe, helps reduce problem size)
+ # Logging
+ 'MSK_IPAR_LOG': 4, # Moderate logging (was 10)
+ 'MSK_IPAR_LOG_INTPNT': 1, # Log interior-point progress
+ 'MSK_IPAR_LOG_SIM': 4, # Log simplex progress
+ 'MSK_IPAR_LOG_MIO': 2, # Reduced MIP logging (was 4)
+ 'MSK_IPAR_LOG_MIO_FREQ': 10, # Log MIP every 10 seconds
+ }
+ }
+ if solver_options:
+ mosek_default_options['solver_options'].update(solver_options)
+ logger.info(f"Using Mosek with default configuration (auto-select optimizer) and moderate MIP strategies")
+ return 'mosek', mosek_default_options
+
+ elif solver_name == 'mosek (barrier)':
+ mosek_barrier_options = {
+ 'solver_options': {
+ 'MSK_IPAR_INTPNT_BASIS': 0, # Skip crossover (barrier-only) - 0 = MSK_BI_NEVER
+ 'MSK_DPAR_INTPNT_TOL_REL_GAP': 1e-4, # Match Gurobi barrier tolerance
+ 'MSK_DPAR_INTPNT_TOL_PFEAS': 1e-5, # Match Gurobi primal feasibility
+ 'MSK_DPAR_INTPNT_TOL_DFEAS': 1e-5, # Match Gurobi dual feasibility
+ # Removed MSK_DPAR_INTPNT_TOL_INFEAS - was 1000x tighter than other tolerances!
+ 'MSK_IPAR_NUM_THREADS': 0, # Use all available cores (0 = auto)
+ 'MSK_IPAR_PRESOLVE_USE': 2, # Aggressive presolve (match Gurobi Presolve=2)
+ 'MSK_IPAR_PRESOLVE_LINDEP_USE': 1, # Linear dependency check
+ 'MSK_DPAR_MIO_REL_GAP_CONST': 0.05, # Match Gurobi 5% MIP gap
+ 'MSK_IPAR_MIO_NODE_OPTIMIZER': 4, # Use interior-point for MIP nodes
+ 'MSK_IPAR_MIO_ROOT_OPTIMIZER': 4, # Use interior-point for MIP root
+ # Safe MIP performance improvements
+ 'MSK_IPAR_MIO_HEURISTIC_LEVEL': 4, # Aggressive heuristics (was 2, match Gurobi's aggressive approach)
+ 'MSK_IPAR_MIO_SYMMETRY_LEVEL': 2, # Moderate symmetry detection
+ 'MSK_IPAR_MIO_PRESOLVE_AGGREGATOR_USE': 1, # MIP presolve aggregator
+ 'MSK_DPAR_MIO_MAX_TIME': 3600, # Max time 1 hour
+ # Logging
+ 'MSK_IPAR_LOG': 4, # Moderate logging
+ 'MSK_IPAR_LOG_INTPNT': 1, # Log interior-point progress
+ 'MSK_IPAR_LOG_MIO': 2, # Reduced MIP logging
+ 'MSK_IPAR_LOG_MIO_FREQ': 10, # Log MIP every 10 seconds
+ }
+ }
+ if solver_options:
+ mosek_barrier_options['solver_options'].update(solver_options)
+ logger.info(f"Using Mosek Barrier with aggressive presolve and relaxed tolerances")
+ return 'mosek', mosek_barrier_options
+
+ elif solver_name == 'mosek (barrier+crossover)':
+ mosek_barrier_crossover_options = {
+ 'solver_options': {
+ 'MSK_IPAR_INTPNT_BASIS': 1, # Always crossover (1 = MSK_BI_ALWAYS)
+ 'MSK_DPAR_INTPNT_TOL_REL_GAP': 1e-4, # Match Gurobi barrier tolerance (was 1e-6)
+ 'MSK_DPAR_INTPNT_TOL_PFEAS': 1e-5, # Match Gurobi (was 1e-6)
+ 'MSK_DPAR_INTPNT_TOL_DFEAS': 1e-5, # Match Gurobi (was 1e-6)
+ 'MSK_IPAR_NUM_THREADS': 0, # Use all available cores (0 = auto)
+ 'MSK_IPAR_PRESOLVE_USE': 1, # Force presolve
+ 'MSK_IPAR_PRESOLVE_LINDEP_USE': 1, # Linear dependency check
+ 'MSK_DPAR_MIO_REL_GAP_CONST': 0.05, # Match Gurobi 5% MIP gap (was 1e-6)
+ 'MSK_IPAR_MIO_NODE_OPTIMIZER': 4, # Use interior-point for MIP nodes
+ 'MSK_IPAR_MIO_ROOT_OPTIMIZER': 4, # Use interior-point for MIP root
+ # Safe MIP performance improvements
+ 'MSK_IPAR_MIO_HEURISTIC_LEVEL': 2, # Moderate heuristics (safe, helps find good solutions faster)
+ 'MSK_IPAR_MIO_SYMMETRY_LEVEL': 2, # Moderate symmetry detection (safe, can dramatically speed up symmetric problems)
+ 'MSK_IPAR_MIO_PRESOLVE_AGGREGATOR_USE': 1, # MIP presolve aggregator (safe, helps reduce problem size)
+ 'MSK_DPAR_MIO_MAX_TIME': 3600, # Max time 1 hour (safety limit)
+ # Logging
+ 'MSK_IPAR_LOG': 4, # Moderate logging (was 10)
+ 'MSK_IPAR_LOG_INTPNT': 1, # Log interior-point progress
+ 'MSK_IPAR_LOG_MIO': 2, # Reduced MIP logging (was 4)
+ 'MSK_IPAR_LOG_MIO_FREQ': 10, # Log MIP every 10 seconds
+ # Note: Don't force MSK_IPAR_OPTIMIZER - let Mosek choose based on problem type
+ }
+ }
+ if solver_options:
+ mosek_barrier_crossover_options['solver_options'].update(solver_options)
+ logger.info(f"Using Mosek Barrier+Crossover configuration with Gurobi-matched tolerances and moderate MIP strategies")
+ return 'mosek', mosek_barrier_crossover_options
+
+ elif solver_name == 'mosek (dual simplex)':
+ mosek_dual_options = {
+ 'solver_options': {
+ 'MSK_IPAR_NUM_THREADS': 0, # Use all available cores (0 = automatic)
+ 'MSK_IPAR_PRESOLVE_USE': 1, # Force presolve
+ 'MSK_IPAR_SIM_SCALING': 2, # Aggressive scaling (2 = MSK_SCALING_AGGRESSIVE)
+ 'MSK_DPAR_MIO_REL_GAP_CONST': 0.05, # Match Gurobi 5% MIP gap (was 1e-6)
+ 'MSK_IPAR_MIO_NODE_OPTIMIZER': 1, # Use dual simplex for MIP nodes (1 = MSK_OPTIMIZER_DUAL_SIMPLEX)
+ 'MSK_IPAR_MIO_ROOT_OPTIMIZER': 1, # Use dual simplex for MIP root
+ # Safe MIP performance improvements
+ 'MSK_IPAR_MIO_HEURISTIC_LEVEL': 2, # Moderate heuristics (safe, helps find good solutions faster)
+ 'MSK_IPAR_MIO_SYMMETRY_LEVEL': 2, # Moderate symmetry detection (safe, can dramatically speed up symmetric problems)
+ 'MSK_IPAR_MIO_PRESOLVE_AGGREGATOR_USE': 1, # MIP presolve aggregator (safe, helps reduce problem size)
+ 'MSK_DPAR_MIO_MAX_TIME': 3600, # Max time 1 hour (safety limit)
+ # Logging
+ 'MSK_IPAR_LOG': 4, # Moderate logging (was 10)
+ 'MSK_IPAR_LOG_SIM': 4, # Log simplex progress
+ 'MSK_IPAR_LOG_MIO': 2, # Reduced MIP logging (was 4)
+ 'MSK_IPAR_LOG_MIO_FREQ': 10, # Log MIP every 10 seconds
+ # Note: For pure LP, set optimizer; for MILP, only set node/root optimizers
+ }
+ }
+ if solver_options:
+ mosek_dual_options['solver_options'].update(solver_options)
+ logger.info(f"Using Mosek Dual Simplex configuration with Gurobi-matched tolerances and moderate MIP strategies")
+ return 'mosek', mosek_dual_options
+
  # Check if this is a known valid solver name
- elif solver_name in ['highs', 'gurobi', 'cplex', 'glpk', 'cbc', 'scip', 'copt', 'mosek']:
+ elif solver_name == 'mosek':
+ # Add default MILP-friendly settings for plain Mosek
+ mosek_defaults = {
+ 'solver_options': {
+ 'MSK_DPAR_MIO_REL_GAP_CONST': 0.05, # Match Gurobi 5% MIP gap (was 1e-4)
+ 'MSK_IPAR_MIO_MAX_TIME': 3600, # Max time 1 hour
+ 'MSK_IPAR_NUM_THREADS': 0, # Use all cores (0 = auto)
+ # CRITICAL: Use interior-point for MIP (much faster than simplex)
+ 'MSK_IPAR_MIO_NODE_OPTIMIZER': 4, # Use interior-point for MIP nodes (4 = MSK_OPTIMIZER_INTPNT)
+ 'MSK_IPAR_MIO_ROOT_OPTIMIZER': 4, # Use interior-point for MIP root (4 = MSK_OPTIMIZER_INTPNT)
+ # Safe MIP performance improvements
+ 'MSK_IPAR_MIO_HEURISTIC_LEVEL': 2, # Moderate heuristics (safe, helps find good solutions faster)
+ 'MSK_IPAR_MIO_SYMMETRY_LEVEL': 2, # Moderate symmetry detection (safe, can dramatically speed up symmetric problems)
+ 'MSK_IPAR_MIO_PRESOLVE_AGGREGATOR_USE': 1, # MIP presolve aggregator (safe, helps reduce problem size)
+ # Logging
+ 'MSK_IPAR_LOG': 4, # Moderate logging
+ 'MSK_IPAR_LOG_MIO': 2, # Log MIP occasionally
+ }
+ }
+ if solver_options:
+ mosek_defaults['solver_options'].update(solver_options)
+ logger.info(f"Using Mosek with barrier method for MIP (interior-point for root/nodes)")
+ return solver_name, mosek_defaults
+
+ elif solver_name == 'gurobi':
+ # Add default MILP-friendly settings for plain Gurobi (for consistency)
+ gurobi_defaults = {
+ 'solver_options': {
+ 'MIPGap': 1e-4, # 0.01% gap
+ 'TimeLimit': 3600, # 1 hour
+ 'Threads': 0, # Use all cores
+ 'OutputFlag': 1, # Enable output
+ }
+ }
+ if solver_options:
+ gurobi_defaults['solver_options'].update(solver_options)
+ logger.info(f"Using Gurobi with default MILP-friendly settings")
+ return solver_name, gurobi_defaults
+
+ # Handle special COPT configurations
+ elif solver_name == 'copt (barrier)':
+ copt_barrier_options = {
+ 'solver_options': {
+ 'LpMethod': 2, # Barrier method
+ 'Crossover': 0, # Skip crossover for speed
+ 'RelGap': 0.05, # 5% MIP gap (match Gurobi)
+ 'TimeLimit': 3600, # 1 hour time limit
+ 'Threads': 2, # 2 threads (memory-conscious)
+ 'Presolve': 3, # Aggressive presolve
+ 'Scaling': 1, # Enable scaling
+ 'FeasTol': 1e-5, # Match Gurobi feasibility
+ 'DualTol': 1e-5, # Match Gurobi dual tolerance
+ # MIP performance settings
+ 'CutLevel': 2, # Normal cut generation
+ 'HeurLevel': 3, # Aggressive heuristics
+ 'StrongBranching': 1, # Fast strong branching
+ }
+ }
+ if solver_options:
+ copt_barrier_options['solver_options'].update(solver_options)
+ logger.info(f"Using COPT Barrier configuration (fast interior-point method)")
+ return 'copt', copt_barrier_options
+
+ elif solver_name == 'copt (barrier homogeneous)':
+ copt_barrier_homogeneous_options = {
+ 'solver_options': {
+ 'LpMethod': 2, # Barrier method
+ 'Crossover': 0, # Skip crossover
+ 'BarHomogeneous': 1, # Use homogeneous self-dual form
+ 'RelGap': 0.05, # 5% MIP gap
+ 'TimeLimit': 3600, # 1 hour
+ 'Threads': 2, # 2 threads (memory-conscious)
+ 'Presolve': 3, # Aggressive presolve
+ 'Scaling': 1, # Enable scaling
+ 'FeasTol': 1e-5,
+ 'DualTol': 1e-5,
+ # MIP performance settings
+ 'CutLevel': 2, # Normal cuts
+ 'HeurLevel': 3, # Aggressive heuristics
+ 'StrongBranching': 1, # Fast strong branching
+ }
+ }
+ if solver_options:
+ copt_barrier_homogeneous_options['solver_options'].update(solver_options)
+ logger.info(f"Using COPT Barrier Homogeneous configuration")
+ return 'copt', copt_barrier_homogeneous_options
+
+ elif solver_name == 'copt (barrier+crossover)':
+ copt_barrier_crossover_options = {
+ 'solver_options': {
+ 'LpMethod': 2, # Barrier method
+ 'Crossover': 1, # Enable crossover for better solutions
+ 'RelGap': 0.05, # 5% MIP gap (relaxed for faster solves)
+ 'TimeLimit': 3600, # 1 hour
+ 'Threads': 2, # 2 threads
+ 'Presolve': 3, # Aggressive presolve
+ 'Scaling': 1, # Enable scaling
+ 'FeasTol': 1e-6, # Tighter feasibility
+ 'DualTol': 1e-6, # Tighter dual tolerance
+ # MIP performance settings
+ 'CutLevel': 3, # Aggressive cuts
+ 'HeurLevel': 3, # Aggressive heuristics
+ 'StrongBranching': 2, # Normal strong branching
+ }
+ }
+ if solver_options:
+ copt_barrier_crossover_options['solver_options'].update(solver_options)
+ logger.info(f"Using COPT Barrier+Crossover configuration (balanced performance)")
+ return 'copt', copt_barrier_crossover_options
+
+ elif solver_name == 'copt (dual simplex)':
+ copt_dual_simplex_options = {
+ 'solver_options': {
+ 'LpMethod': 1, # Dual simplex method
+ 'RelGap': 0.05, # 5% MIP gap
+ 'TimeLimit': 3600, # 1 hour
+ 'Threads': 2, # 2 threads
+ 'Presolve': 3, # Aggressive presolve
+ 'Scaling': 1, # Enable scaling
+ 'FeasTol': 1e-6,
+ 'DualTol': 1e-6,
+ # MIP performance settings
+ 'CutLevel': 2, # Normal cuts
+ 'HeurLevel': 2, # Normal heuristics
+ 'StrongBranching': 1, # Fast strong branching
+ }
+ }
+ if solver_options:
+ copt_dual_simplex_options['solver_options'].update(solver_options)
+ logger.info(f"Using COPT Dual Simplex configuration (robust method)")
+ return 'copt', copt_dual_simplex_options
+
+ elif solver_name == 'copt (concurrent)':
+ copt_concurrent_options = {
+ 'solver_options': {
+ 'LpMethod': 4, # Concurrent (simplex + barrier)
+ 'RelGap': 0.05, # 5% MIP gap
+ 'TimeLimit': 3600, # 1 hour
+ 'Threads': 2, # 2 threads
+ 'Presolve': 3, # Aggressive presolve
+ 'Scaling': 1, # Enable scaling
+ 'FeasTol': 1e-5,
+ 'DualTol': 1e-5,
+ # MIP performance settings
+ 'CutLevel': 2, # Normal cuts
+ 'HeurLevel': 3, # Aggressive heuristics
+ 'StrongBranching': 1, # Fast strong branching
+ }
+ }
+ if solver_options:
+ copt_concurrent_options['solver_options'].update(solver_options)
+ logger.info(f"Using COPT Concurrent configuration (parallel simplex + barrier)")
+ return 'copt', copt_concurrent_options
+
+ elif solver_name in ['highs', 'cplex', 'glpk', 'cbc', 'scip', 'copt']:
  return solver_name, solver_options
 
  else:
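The preset names registered above can be passed straight through the public solve entry point; internally they resolve to the base solver plus the canned option block, and anything supplied via solver_options is layered on top via dict.update. A hedged sketch (import path inferred from the file layout; the path and ID are placeholders):

    from pyconvexity.solvers.pypsa.api import solve_network

    result = solve_network(
        db_path="model.db",                              # placeholder path
        network_id=1,                                    # placeholder ID
        solver_name="mosek (barrier)",                   # preset added in this release
        solver_options={"MSK_DPAR_MIO_MAX_TIME": 900},   # overrides the preset's 3600 s limit
    )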
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: pyconvexity
- Version: 0.3.8.post2
+ Version: 0.3.8.post4
  Summary: Python library for energy system modeling and optimization with PyPSA
  Author-email: Convexity Team <info@convexity.com>
  License: MIT
@@ -1 +0,0 @@
- __version__ = "0.3.8.post2"