pyconvexity 0.3.8.post3.tar.gz → 0.3.8.post4.tar.gz

This diff shows the content of publicly released package versions as they appear in their public registries. It is provided for informational purposes only and reflects the changes between the two versions.

Potentially problematic release: this version of pyconvexity has been flagged as possibly problematic by the registry.

Files changed (53)
  1. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/PKG-INFO +1 -1
  2. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/pyproject.toml +2 -2
  3. pyconvexity-0.3.8.post4/src/pyconvexity/_version.py +1 -0
  4. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/solvers/pypsa/api.py +25 -8
  5. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/solvers/pypsa/constraints.py +5 -78
  6. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/solvers/pypsa/solver.py +228 -54
  7. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity.egg-info/PKG-INFO +1 -1
  8. pyconvexity-0.3.8.post3/src/pyconvexity/_version.py +0 -1
  9. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/README.md +0 -0
  10. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/setup.cfg +0 -0
  11. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/__init__.py +0 -0
  12. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/core/__init__.py +0 -0
  13. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/core/database.py +0 -0
  14. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/core/errors.py +0 -0
  15. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/core/types.py +0 -0
  16. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/README.md +0 -0
  17. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/__init__.py +0 -0
  18. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/__pycache__/__init__.cpython-313.pyc +0 -0
  19. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/loaders/__init__.py +0 -0
  20. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/loaders/__pycache__/__init__.cpython-313.pyc +0 -0
  21. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/loaders/__pycache__/cache.cpython-313.pyc +0 -0
  22. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/loaders/cache.py +0 -0
  23. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/schema/01_core_schema.sql +0 -0
  24. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/schema/02_data_metadata.sql +0 -0
  25. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/schema/03_validation_data.sql +0 -0
  26. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/schema/04_scenario_schema.sql +0 -0
  27. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/sources/__init__.py +0 -0
  28. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/sources/__pycache__/__init__.cpython-313.pyc +0 -0
  29. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/sources/__pycache__/gem.cpython-313.pyc +0 -0
  30. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/data/sources/gem.py +0 -0
  31. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/io/__init__.py +0 -0
  32. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/io/excel_exporter.py +0 -0
  33. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/io/excel_importer.py +0 -0
  34. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/io/netcdf_exporter.py +0 -0
  35. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/io/netcdf_importer.py +0 -0
  36. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/models/__init__.py +0 -0
  37. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/models/attributes.py +0 -0
  38. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/models/components.py +0 -0
  39. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/models/network.py +0 -0
  40. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/models/scenarios.py +0 -0
  41. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/solvers/__init__.py +0 -0
  42. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/solvers/pypsa/__init__.py +0 -0
  43. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/solvers/pypsa/batch_loader.py +0 -0
  44. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/solvers/pypsa/builder.py +0 -0
  45. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/solvers/pypsa/storage.py +0 -0
  46. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/timeseries.py +0 -0
  47. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/validation/__init__.py +0 -0
  48. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/validation/rules.py +0 -0
  49. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity.egg-info/SOURCES.txt +0 -0
  50. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity.egg-info/dependency_links.txt +0 -0
  51. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity.egg-info/requires.txt +0 -0
  52. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity.egg-info/top_level.txt +0 -0
  53. {pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/tests/test_core_types.py +0 -0

{pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: pyconvexity
- Version: 0.3.8.post3
+ Version: 0.3.8.post4
  Summary: Python library for energy system modeling and optimization with PyPSA
  Author-email: Convexity Team <info@convexity.com>
  License: MIT

{pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

  [project]
  name = "pyconvexity"
- version = "0.3.8.post3"
+ version = "0.3.8post4"
  description = "Python library for energy system modeling and optimization with PyPSA"
  readme = "README.md"
  license = {text = "MIT"}
@@ -81,7 +81,7 @@ profile = "black"
  line_length = 100

  [tool.mypy]
- python_version = "0.3.8.post3"
+ python_version = "0.3.8post4"
  warn_return_any = true
  warn_unused_configs = true
  disallow_untyped_defs = true

pyconvexity-0.3.8.post4/src/pyconvexity/_version.py (new file)
@@ -0,0 +1 @@
+ __version__ = "0.3.8post4"
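
The new version string is written as "0.3.8post4" in pyproject.toml and _version.py but appears as "0.3.8.post4" in the generated PKG-INFO; under PEP 440 both spellings identify the same release. A small check with the packaging library illustrates this (assumes packaging is installed; not part of this diff):

    from packaging.version import Version

    # PEP 440 normalizes the implicit post-release spelling "0.3.8post4"
    # to the canonical "0.3.8.post4", so both strings denote one version.
    assert Version("0.3.8post4") == Version("0.3.8.post4")
    print(Version("0.3.8post4"))  # prints: 0.3.8.post4
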

{pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/solvers/pypsa/api.py
@@ -25,7 +25,8 @@ def solve_network(
  constraints_dsl: Optional[str] = None,
  discount_rate: Optional[float] = None,
  progress_callback: Optional[Callable[[int, str], None]] = None,
- return_detailed_results: bool = True
+ return_detailed_results: bool = True,
+ custom_solver_config: Optional[Dict[str, Any]] = None
  ) -> Dict[str, Any]:
  """
  Complete solve workflow: build PyPSA network from database, solve, store results.
@@ -38,12 +39,15 @@ def solve_network(
  db_path: Path to the database file
  network_id: ID of the network to solve
  scenario_id: Optional scenario ID (uses master scenario if None)
- solver_name: Solver to use (default: "highs")
+ solver_name: Solver to use (default: "highs"). Use "custom" for custom_solver_config.
  solver_options: Optional solver-specific options
  constraints_dsl: Optional DSL constraints to apply
  discount_rate: Optional discount rate for multi-period optimization
  progress_callback: Optional callback for progress updates (progress: int, message: str)
  return_detailed_results: If True, return comprehensive results; if False, return simple status
+ custom_solver_config: Optional custom solver configuration when solver_name="custom"
+ Format: {"solver": "actual_solver_name", "solver_options": {...}}
+ Example: {"solver": "gurobi", "solver_options": {"Method": 2, "Crossover": 0}}

  Returns:
  Dictionary with solve results - comprehensive if return_detailed_results=True, simple status otherwise
@@ -76,10 +80,18 @@ def solve_network(
  if progress_callback:
  progress_callback(50, f"Network built: {len(network.buses)} buses, {len(network.generators)} generators")

- # Create constraint applicator (constraints will be applied during solve via extra_functionality)
+ # Create constraint applicator and apply constraints BEFORE solve
  constraint_applicator = ConstraintApplicator()

- # Solve network (constraints are applied during optimization)
+ # Apply constraints before solving (network modifications like GlobalConstraints)
+ if progress_callback:
+ progress_callback(60, "Applying constraints...")
+
+ constraint_applicator.apply_constraints(
+ conn, network_id, network, scenario_id, constraints_dsl
+ )
+
+ # Solve network
  if progress_callback:
  progress_callback(70, f"Solving with {solver_name}...")

@@ -92,7 +104,8 @@ def solve_network(
  conn=conn,
  network_id=network_id,
  scenario_id=scenario_id,
- constraint_applicator=constraint_applicator
+ constraint_applicator=constraint_applicator,
+ custom_solver_config=custom_solver_config
  )

  if progress_callback:
@@ -193,7 +206,8 @@ def solve_pypsa_network(
  solver_name: str = "highs",
  solver_options: Optional[Dict[str, Any]] = None,
  store_results: bool = True,
- progress_callback: Optional[Callable[[int, str], None]] = None
+ progress_callback: Optional[Callable[[int, str], None]] = None,
+ custom_solver_config: Optional[Dict[str, Any]] = None
  ) -> Dict[str, Any]:
  """
  Solve PyPSA network and optionally store results back to database.
@@ -206,10 +220,12 @@ def solve_pypsa_network(
  db_path: Path to the database file (needed for result storage)
  network_id: ID of the network (for result storage)
  scenario_id: Optional scenario ID
- solver_name: Solver to use (default: "highs")
+ solver_name: Solver to use (default: "highs"). Use "custom" for custom_solver_config.
  solver_options: Optional solver-specific options
  store_results: Whether to store results back to database (default: True)
  progress_callback: Optional callback for progress updates
+ custom_solver_config: Optional custom solver configuration when solver_name="custom"
+ Format: {"solver": "actual_solver_name", "solver_options": {...}}

  Returns:
  Dictionary with solve results and statistics
@@ -226,7 +242,8 @@ def solve_pypsa_network(
  solve_result = solver.solve_network(
  network,
  solver_name=solver_name,
- solver_options=solver_options
+ solver_options=solver_options,
+ custom_solver_config=custom_solver_config
  )

  if progress_callback:
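
Taken together, the api.py changes thread a new custom_solver_config argument from solve_network through solve_pypsa_network down to the solver, and move constraint application ahead of the solve. A minimal usage sketch of the new parameter (the import path follows the file layout above; the database path and network ID are placeholders, not values from this diff):

    from pyconvexity.solvers.pypsa.api import solve_network

    result = solve_network(
        db_path="network.db",      # placeholder path
        network_id=1,              # placeholder ID
        solver_name="custom",      # tells the solver layer to read custom_solver_config
        custom_solver_config={
            "solver": "gurobi",
            "solver_options": {"Method": 2, "Crossover": 0},
        },
    )
    print(result.get("objective_value"))
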

{pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/solvers/pypsa/constraints.py
@@ -169,7 +169,8 @@ class ConstraintApplicator:

  logger.info(f"Constraint breakdown: {len(model_constraints)} model constraints, {len(network_constraints)} network constraints")

- # Apply network constraints first (they modify the network structure)
+ # Apply network constraints ONLY (they modify the network structure before solve)
+ # Model constraints will be applied later by the solver via extra_functionality
  if network_constraints:
  network_constraints.sort(key=lambda x: x['priority'])
  for constraint in network_constraints:
@@ -193,88 +194,14 @@ class ConstraintApplicator:
  # Continue with other constraints instead of failing the entire solve
  continue

- # Apply model constraints (they need access to the optimization model)
+ # Skip model constraints here - they will be applied by the solver during optimization
+ # via extra_functionality to ensure they have access to the actual optimization model
  if model_constraints:
- self._apply_model_constraints(network, model_constraints)
+ logger.info(f"Skipping {len(model_constraints)} model constraints - will be applied during solve")

  except Exception as e:
  logger.error(f"Failed to apply custom constraints: {e}", exc_info=True)

- def _apply_model_constraints(self, network: 'pypsa.Network', model_constraints: list):
- """
- Apply model constraints that need access to the optimization model.
-
- This creates the optimization model, applies constraints to it, and then
- replaces PyPSA's solve method to use the pre-constrained model.
-
- Args:
- network: PyPSA Network object
- model_constraints: List of model constraint dictionaries
- """
- try:
- logger.info(f"Applying {len(model_constraints)} model constraints...")
-
- # Create the optimization model (same as PyPSA would do internally)
- logger.info("Creating optimization model for constraint application...")
- model = network.optimize.create_model()
- logger.info(f"Created optimization model with {len(model.variables)} variable groups")
-
- # Sort constraints by priority
- sorted_constraints = sorted(model_constraints, key=lambda x: x['priority'])
-
- # Apply each model constraint
- for constraint in sorted_constraints:
- try:
- constraint_code = constraint['constraint_code']
- constraint_name = constraint['name']
-
- logger.info(f"Applying model constraint '{constraint_name}' (priority {constraint['priority']})")
-
- # Create execution environment with network, model, and utilities
- exec_globals = {
- 'n': network,
- 'network': network,
- 'model': model,
- 'm': model,
- 'snapshots': network.snapshots,
- 'pd': pd,
- 'np': np,
- 'xr': __import__('xarray'), # Import xarray for DataArray operations
- }
-
- # Execute the constraint code
- exec(constraint_code, exec_globals)
- logger.info(f"Successfully applied model constraint '{constraint_name}'")
-
- except Exception as e:
- error_msg = f"Failed to apply model constraint '{constraint.get('name', 'unknown')}': {e}"
- logger.error(error_msg, exc_info=True)
- # Continue with other constraints instead of failing
- continue
-
- # Store the constrained model for the solver to use
- # We'll replace PyPSA's solve_model method to use our pre-constrained model
- logger.info("Replacing PyPSA's solve method to use pre-constrained model...")
-
- # Store original methods
- original_optimize = network.optimize
- original_solve_model = original_optimize.solve_model
-
- # Create a wrapper that uses our pre-constrained model
- def constrained_solve_model(*args, **kwargs):
- """Use the pre-constrained model instead of creating a new one."""
- logger.info("Using pre-constrained model for solve...")
- return original_solve_model(model, *args, **kwargs)
-
- # Replace the solve_model method
- network.optimize.solve_model = constrained_solve_model
-
- logger.info(f"Successfully applied {len(model_constraints)} model constraints")
-
- except Exception as e:
- logger.error(f"Failed to apply model constraints: {e}", exc_info=True)
- # Don't re-raise - let the solve continue without constraints rather than fail completely
-
  def _apply_dsl_constraints(self, network: 'pypsa.Network', constraints_dsl: str):
  """
  Apply DSL constraints to the network.
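
The deleted _apply_model_constraints helper used to exec user constraint code against a separately created model and then monkey-patch network.optimize.solve_model; the release replaces that with PyPSA's extra_functionality hook, which runs inside optimize() against the model PyPSA actually solves. A rough sketch of that hook pattern, with an invented stand-in constraint (the variable key and example network come from PyPSA's linopy-based API and are assumptions, not code from this package):

    import pypsa

    def apply_model_constraints(n, snapshots):
        # Called by PyPSA during optimization; n.model is the live linopy model.
        m = n.model
        gen_p = m.variables["Generator-p"]  # generator dispatch variables
        # Invented example constraint: total dispatch is non-negative, a trivial
        # stand-in for the user-supplied constraint code this package executes here.
        m.add_constraints(gen_p.sum(), ">=", 0, name="example-model-constraint")

    n = pypsa.examples.ac_dc_meshed()  # small built-in example network
    n.optimize(extra_functionality=apply_model_constraints, solver_name="highs")
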

{pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity/solvers/pypsa/solver.py
@@ -77,6 +77,8 @@ class NetworkSolver:
  known_solvers = ['highs', 'gurobi', 'gurobi (barrier)', 'gurobi (barrier homogeneous)',
  'gurobi (barrier+crossover balanced)', 'gurobi (dual simplex)',
  'mosek', 'mosek (default)', 'mosek (barrier)', 'mosek (barrier+crossover)', 'mosek (dual simplex)',
+ 'copt', 'copt (barrier)', 'copt (barrier homogeneous)', 'copt (barrier+crossover)',
+ 'copt (dual simplex)', 'copt (concurrent)',
  'cplex', 'glpk', 'cbc', 'scip']

  if default_solver in known_solvers:
@@ -99,17 +101,21 @@ class NetworkSolver:
  conn=None,
  network_id: Optional[int] = None,
  scenario_id: Optional[int] = None,
- constraint_applicator=None
+ constraint_applicator=None,
+ custom_solver_config: Optional[Dict[str, Any]] = None
  ) -> Dict[str, Any]:
  """
  Solve PyPSA network and return results.

  Args:
  network: PyPSA Network object to solve
- solver_name: Solver to use (default: "highs")
+ solver_name: Solver to use (default: "highs"). Use "custom" for custom_solver_config.
  solver_options: Optional solver-specific options
  discount_rate: Optional discount rate for multi-period optimization
  job_id: Optional job ID for tracking
+ custom_solver_config: Optional custom solver configuration when solver_name="custom"
+ Format: {"solver": "actual_solver_name", "solver_options": {...}}
+ Example: {"solver": "gurobi", "solver_options": {"Method": 2, "Crossover": 0}}

  Returns:
  Dictionary with solve results and metadata
@@ -125,7 +131,7 @@ class NetworkSolver:

  try:
  # Get solver configuration
- actual_solver_name, solver_config = self._get_solver_config(solver_name, solver_options)
+ actual_solver_name, solver_config = self._get_solver_config(solver_name, solver_options, custom_solver_config)


  years = list(network.investment_periods)
@@ -141,17 +147,17 @@ class NetworkSolver:
  if conn and network_id:
  self._set_snapshot_weightings_after_multiperiod(conn, network_id, network)

- # Prepare optimization constraints with type detection
+ # Prepare optimization constraints - ONLY model constraints
+ # Network constraints were already applied before solve in api.py
  extra_functionality = None
  model_constraints = []
- network_constraints = []

  if conn and network_id and constraint_applicator:
  optimization_constraints = constraint_applicator.get_optimization_constraints(conn, network_id, scenario_id)
  if optimization_constraints:
  logger.info(f"Found {len(optimization_constraints)} optimization constraints")

- # Separate constraints by type
+ # Filter for model constraints only (network constraints already applied)
  for constraint in optimization_constraints:
  constraint_code = constraint.get('constraint_code', '')
  constraint_type = self._detect_constraint_type(constraint_code)
@@ -159,21 +165,19 @@ class NetworkSolver:

  if constraint_type == "model_constraint":
  model_constraints.append(constraint)
- logger.info(f"Detected model constraint: {constraint_name}")
+ logger.info(f"Will apply model constraint during solve: {constraint_name}")
  else:
- network_constraints.append(constraint)
- logger.info(f"Detected network constraint: {constraint_name}")
+ logger.info(f"Skipping network constraint (already applied): {constraint_name}")

- logger.info(f"Constraint breakdown: {len(model_constraints)} model constraints, {len(network_constraints)} network constraints")
+ logger.info(f"Will apply {len(model_constraints)} model constraints during optimization")

- # Create extra_functionality for ALL constraints (both model and network)
- all_constraints = model_constraints + network_constraints
- if all_constraints:
- extra_functionality = self._create_extra_functionality(all_constraints, constraint_applicator)
- logger.info(f"Prepared {len(all_constraints)} constraints for optimization-time application")
+ # Create extra_functionality for model constraints only
+ if model_constraints:
+ extra_functionality = self._create_extra_functionality(model_constraints, constraint_applicator)
+ logger.info(f"Prepared {len(model_constraints)} model constraints for optimization-time application")

- # NOTE: Model constraints are now applied DURING solve via extra_functionality
- # This ensures they are applied to the actual model PyPSA creates, not a separate model
+ # NOTE: Model constraints are applied DURING solve via extra_functionality
+ # Network constraints were already applied to the network structure before solve

  # Solver diagnostics
  logger.info(f"=== PYPSA SOLVER DIAGNOSTICS ===")
@@ -273,17 +277,40 @@ class NetworkSolver:
  "objective_value": None
  }

- def _get_solver_config(self, solver_name: str, solver_options: Optional[Dict[str, Any]] = None) -> tuple[str, Optional[Dict[str, Any]]]:
+ def _get_solver_config(self, solver_name: str, solver_options: Optional[Dict[str, Any]] = None,
+ custom_solver_config: Optional[Dict[str, Any]] = None) -> tuple[str, Optional[Dict[str, Any]]]:
  """
  Get the actual solver name and options for special solver configurations.

  Args:
- solver_name: The solver name (e.g., 'gurobi (barrier)', 'highs')
+ solver_name: The solver name (e.g., 'gurobi (barrier)', 'highs', 'custom')
  solver_options: Optional additional solver options
+ custom_solver_config: Optional custom solver configuration for solver_name='custom'
+ Format: {"solver": "actual_solver_name", "solver_options": {...}}

  Returns:
  Tuple of (actual_solver_name, solver_options_dict)
  """
+ # Handle "custom" solver with custom configuration
+ if solver_name == 'custom':
+ if not custom_solver_config:
+ raise ValueError("custom_solver_config must be provided when solver_name='custom'")
+
+ if 'solver' not in custom_solver_config:
+ raise ValueError("custom_solver_config must contain 'solver' key with the actual solver name")
+
+ actual_solver = custom_solver_config['solver']
+ custom_options = custom_solver_config.get('solver_options', {})
+
+ # Merge with any additional solver_options passed separately
+ if solver_options:
+ merged_options = {'solver_options': {**custom_options, **solver_options}}
+ else:
+ merged_options = {'solver_options': custom_options} if custom_options else None
+
+ logger.info(f"Using custom solver configuration: {actual_solver} with options: {custom_options}")
+ return actual_solver, merged_options
+
  # Handle "default" solver
  if solver_name == 'default':
  # Try to read user's default solver preference
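
The new 'custom' branch resolves the real solver name from custom_solver_config and merges its solver_options with any options passed separately, with the separately passed options winning on key collisions. A standalone illustration of that merge rule (plain Python, not the library's code):

    custom_solver_config = {"solver": "gurobi", "solver_options": {"Method": 2, "Crossover": 0}}
    solver_options = {"Crossover": 1}  # passed separately; overrides the custom options on conflict

    actual_solver = custom_solver_config["solver"]
    merged = {"solver_options": {**custom_solver_config.get("solver_options", {}), **solver_options}}

    print(actual_solver)  # gurobi
    print(merged)         # {'solver_options': {'Method': 2, 'Crossover': 1}}
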
@@ -298,7 +325,7 @@ class NetworkSolver:
  'Method': 2, # Barrier
  'Crossover': 0, # Skip crossover
  'MIPGap': 0.05, # 5% gap
- 'Threads': 4, # Use all cores
+ 'Threads': 0, # Use all cores (0 = auto)
  'Presolve': 2, # Aggressive presolve
  'ConcurrentMIP': 1, # Parallel root strategies
  'BarConvTol': 1e-4, # Relaxed barrier convergence
@@ -319,7 +346,7 @@ class NetworkSolver:
  'Method': 2, # Barrier
  'Crossover': 0, # Skip crossover
  'MIPGap': 0.05,
- 'Threads': 4,
+ 'Threads': 0, # Use all cores (0 = auto)
  'Presolve': 2,
  'ConcurrentMIP': 1,
  'BarConvTol': 1e-4,
@@ -340,7 +367,7 @@ class NetworkSolver:
  'Method': 2,
  'Crossover': 1, # Dual crossover
  'MIPGap': 0.01,
- 'Threads': 4,
+ 'Threads': 0, # Use all cores (0 = auto)
  'Presolve': 2,
  'Heuristics': 0.1,
  'Cuts': 2,
@@ -374,67 +401,85 @@ class NetworkSolver:
  # No custom options - let Mosek use its default configuration
  mosek_default_options = {
  'solver_options': {
- 'MSK_IPAR_LOG': 10, # Enable full logging (10 = verbose)
+ 'MSK_DPAR_MIO_REL_GAP_CONST': 0.05, # MIP relative gap tolerance (5% to match Gurobi)
+ 'MSK_IPAR_MIO_MAX_TIME': 3600, # Max time 1 hour
+ # Safe MIP performance improvements
+ 'MSK_IPAR_MIO_HEURISTIC_LEVEL': 2, # Moderate heuristics (safe, helps find good solutions faster)
+ 'MSK_IPAR_MIO_SYMMETRY_LEVEL': 2, # Moderate symmetry detection (safe, can dramatically speed up symmetric problems)
+ 'MSK_IPAR_MIO_PRESOLVE_AGGREGATOR_USE': 1, # MIP presolve aggregator (safe, helps reduce problem size)
+ # Logging
+ 'MSK_IPAR_LOG': 4, # Moderate logging (was 10)
  'MSK_IPAR_LOG_INTPNT': 1, # Log interior-point progress
  'MSK_IPAR_LOG_SIM': 4, # Log simplex progress
- 'MSK_IPAR_LOG_MIO': 4, # Log MIP progress (4 = full)
+ 'MSK_IPAR_LOG_MIO': 2, # Reduced MIP logging (was 4)
  'MSK_IPAR_LOG_MIO_FREQ': 10, # Log MIP every 10 seconds
  }
  }
  if solver_options:
  mosek_default_options['solver_options'].update(solver_options)
- logger.info(f"Using Mosek with default configuration (auto-select optimizer)")
+ logger.info(f"Using Mosek with default configuration (auto-select optimizer) and moderate MIP strategies")
  return 'mosek', mosek_default_options

  elif solver_name == 'mosek (barrier)':
  mosek_barrier_options = {
  'solver_options': {
  'MSK_IPAR_INTPNT_BASIS': 0, # Skip crossover (barrier-only) - 0 = MSK_BI_NEVER
- 'MSK_DPAR_INTPNT_TOL_REL_GAP': 1e-5, # Relaxed relative gap tolerance
- 'MSK_DPAR_INTPNT_TOL_PFEAS': 1e-6, # Primal feasibility tolerance
- 'MSK_DPAR_INTPNT_TOL_DFEAS': 1e-6, # Dual feasibility tolerance
- 'MSK_DPAR_INTPNT_TOL_INFEAS': 1e-8, # Infeasibility tolerance
- 'MSK_IPAR_NUM_THREADS': 4, # Number of threads
- 'MSK_IPAR_PRESOLVE_USE': 1, # Force presolve (1 = ON)
- 'MSK_IPAR_PRESOLVE_LINDEP_USE': 1, # Linear dependency check (1 = ON)
- 'MSK_DPAR_MIO_REL_GAP_CONST': 1e-5, # MIP relative gap tolerance
- 'MSK_IPAR_MIO_NODE_OPTIMIZER': 4, # Use interior-point for MIP nodes (4 = MSK_OPTIMIZER_INTPNT)
- 'MSK_IPAR_MIO_ROOT_OPTIMIZER': 4, # Use interior-point for MIP root (4 = MSK_OPTIMIZER_INTPNT)
- 'MSK_IPAR_LOG': 10, # Enable full logging (10 = verbose)
+ 'MSK_DPAR_INTPNT_TOL_REL_GAP': 1e-4, # Match Gurobi barrier tolerance
+ 'MSK_DPAR_INTPNT_TOL_PFEAS': 1e-5, # Match Gurobi primal feasibility
+ 'MSK_DPAR_INTPNT_TOL_DFEAS': 1e-5, # Match Gurobi dual feasibility
+ # Removed MSK_DPAR_INTPNT_TOL_INFEAS - was 1000x tighter than other tolerances!
+ 'MSK_IPAR_NUM_THREADS': 0, # Use all available cores (0 = auto)
+ 'MSK_IPAR_PRESOLVE_USE': 2, # Aggressive presolve (match Gurobi Presolve=2)
+ 'MSK_IPAR_PRESOLVE_LINDEP_USE': 1, # Linear dependency check
+ 'MSK_DPAR_MIO_REL_GAP_CONST': 0.05, # Match Gurobi 5% MIP gap
+ 'MSK_IPAR_MIO_NODE_OPTIMIZER': 4, # Use interior-point for MIP nodes
+ 'MSK_IPAR_MIO_ROOT_OPTIMIZER': 4, # Use interior-point for MIP root
+ # Safe MIP performance improvements
+ 'MSK_IPAR_MIO_HEURISTIC_LEVEL': 4, # Aggressive heuristics (was 2, match Gurobi's aggressive approach)
+ 'MSK_IPAR_MIO_SYMMETRY_LEVEL': 2, # Moderate symmetry detection
+ 'MSK_IPAR_MIO_PRESOLVE_AGGREGATOR_USE': 1, # MIP presolve aggregator
+ 'MSK_DPAR_MIO_MAX_TIME': 3600, # Max time 1 hour
+ # Logging
+ 'MSK_IPAR_LOG': 4, # Moderate logging
  'MSK_IPAR_LOG_INTPNT': 1, # Log interior-point progress
- 'MSK_IPAR_LOG_MIO': 4, # Log MIP progress (4 = full)
+ 'MSK_IPAR_LOG_MIO': 2, # Reduced MIP logging
  'MSK_IPAR_LOG_MIO_FREQ': 10, # Log MIP every 10 seconds
- # Note: Don't force MSK_IPAR_OPTIMIZER - let Mosek choose based on problem type (LP vs MILP)
  }
  }
  if solver_options:
  mosek_barrier_options['solver_options'].update(solver_options)
- logger.info(f"Using Mosek Barrier (no crossover) configuration with verbose logging")
+ logger.info(f"Using Mosek Barrier with aggressive presolve and relaxed tolerances")
  return 'mosek', mosek_barrier_options

  elif solver_name == 'mosek (barrier+crossover)':
  mosek_barrier_crossover_options = {
  'solver_options': {
  'MSK_IPAR_INTPNT_BASIS': 1, # Always crossover (1 = MSK_BI_ALWAYS)
- 'MSK_DPAR_INTPNT_TOL_REL_GAP': 1e-6, # Tighter relative gap tolerance
- 'MSK_DPAR_INTPNT_TOL_PFEAS': 1e-6, # Primal feasibility tolerance
- 'MSK_DPAR_INTPNT_TOL_DFEAS': 1e-6, # Dual feasibility tolerance
- 'MSK_IPAR_NUM_THREADS': 4, # Number of threads
+ 'MSK_DPAR_INTPNT_TOL_REL_GAP': 1e-4, # Match Gurobi barrier tolerance (was 1e-6)
+ 'MSK_DPAR_INTPNT_TOL_PFEAS': 1e-5, # Match Gurobi (was 1e-6)
+ 'MSK_DPAR_INTPNT_TOL_DFEAS': 1e-5, # Match Gurobi (was 1e-6)
+ 'MSK_IPAR_NUM_THREADS': 0, # Use all available cores (0 = auto)
  'MSK_IPAR_PRESOLVE_USE': 1, # Force presolve
  'MSK_IPAR_PRESOLVE_LINDEP_USE': 1, # Linear dependency check
- 'MSK_DPAR_MIO_REL_GAP_CONST': 1e-6, # MIP relative gap tolerance
+ 'MSK_DPAR_MIO_REL_GAP_CONST': 0.05, # Match Gurobi 5% MIP gap (was 1e-6)
  'MSK_IPAR_MIO_NODE_OPTIMIZER': 4, # Use interior-point for MIP nodes
  'MSK_IPAR_MIO_ROOT_OPTIMIZER': 4, # Use interior-point for MIP root
- 'MSK_IPAR_LOG': 10, # Enable full logging (10 = verbose)
+ # Safe MIP performance improvements
+ 'MSK_IPAR_MIO_HEURISTIC_LEVEL': 2, # Moderate heuristics (safe, helps find good solutions faster)
+ 'MSK_IPAR_MIO_SYMMETRY_LEVEL': 2, # Moderate symmetry detection (safe, can dramatically speed up symmetric problems)
+ 'MSK_IPAR_MIO_PRESOLVE_AGGREGATOR_USE': 1, # MIP presolve aggregator (safe, helps reduce problem size)
+ 'MSK_DPAR_MIO_MAX_TIME': 3600, # Max time 1 hour (safety limit)
+ # Logging
+ 'MSK_IPAR_LOG': 4, # Moderate logging (was 10)
  'MSK_IPAR_LOG_INTPNT': 1, # Log interior-point progress
- 'MSK_IPAR_LOG_MIO': 4, # Log MIP progress (4 = full)
+ 'MSK_IPAR_LOG_MIO': 2, # Reduced MIP logging (was 4)
  'MSK_IPAR_LOG_MIO_FREQ': 10, # Log MIP every 10 seconds
  # Note: Don't force MSK_IPAR_OPTIMIZER - let Mosek choose based on problem type
  }
  }
  if solver_options:
  mosek_barrier_crossover_options['solver_options'].update(solver_options)
- logger.info(f"Using Mosek Barrier+Crossover configuration with verbose logging")
+ logger.info(f"Using Mosek Barrier+Crossover configuration with Gurobi-matched tolerances and moderate MIP strategies")
  return 'mosek', mosek_barrier_crossover_options

  elif solver_name == 'mosek (dual simplex)':
@@ -443,19 +488,25 @@ class NetworkSolver:
  'MSK_IPAR_NUM_THREADS': 0, # Use all available cores (0 = automatic)
  'MSK_IPAR_PRESOLVE_USE': 1, # Force presolve
  'MSK_IPAR_SIM_SCALING': 2, # Aggressive scaling (2 = MSK_SCALING_AGGRESSIVE)
- 'MSK_DPAR_MIO_REL_GAP_CONST': 1e-6, # MIP relative gap tolerance
+ 'MSK_DPAR_MIO_REL_GAP_CONST': 0.05, # Match Gurobi 5% MIP gap (was 1e-6)
  'MSK_IPAR_MIO_NODE_OPTIMIZER': 1, # Use dual simplex for MIP nodes (1 = MSK_OPTIMIZER_DUAL_SIMPLEX)
  'MSK_IPAR_MIO_ROOT_OPTIMIZER': 1, # Use dual simplex for MIP root
- 'MSK_IPAR_LOG': 10, # Enable full logging (10 = verbose)
- 'MSK_IPAR_LOG_SIM': 4, # Log simplex progress (4 = full)
- 'MSK_IPAR_LOG_MIO': 4, # Log MIP progress (4 = full)
+ # Safe MIP performance improvements
+ 'MSK_IPAR_MIO_HEURISTIC_LEVEL': 2, # Moderate heuristics (safe, helps find good solutions faster)
+ 'MSK_IPAR_MIO_SYMMETRY_LEVEL': 2, # Moderate symmetry detection (safe, can dramatically speed up symmetric problems)
+ 'MSK_IPAR_MIO_PRESOLVE_AGGREGATOR_USE': 1, # MIP presolve aggregator (safe, helps reduce problem size)
+ 'MSK_DPAR_MIO_MAX_TIME': 3600, # Max time 1 hour (safety limit)
+ # Logging
+ 'MSK_IPAR_LOG': 4, # Moderate logging (was 10)
+ 'MSK_IPAR_LOG_SIM': 4, # Log simplex progress
+ 'MSK_IPAR_LOG_MIO': 2, # Reduced MIP logging (was 4)
  'MSK_IPAR_LOG_MIO_FREQ': 10, # Log MIP every 10 seconds
  # Note: For pure LP, set optimizer; for MILP, only set node/root optimizers
  }
  }
  if solver_options:
  mosek_dual_options['solver_options'].update(solver_options)
- logger.info(f"Using Mosek Dual Simplex configuration with verbose logging")
+ logger.info(f"Using Mosek Dual Simplex configuration with Gurobi-matched tolerances and moderate MIP strategies")
  return 'mosek', mosek_dual_options

  # Check if this is a known valid solver name
@@ -463,16 +514,24 @@ class NetworkSolver:
  # Add default MILP-friendly settings for plain Mosek
  mosek_defaults = {
  'solver_options': {
- 'MSK_DPAR_MIO_REL_GAP_CONST': 1e-4, # MIP relative gap tolerance (10^-4 = 0.01%)
+ 'MSK_DPAR_MIO_REL_GAP_CONST': 0.05, # Match Gurobi 5% MIP gap (was 1e-4)
  'MSK_IPAR_MIO_MAX_TIME': 3600, # Max time 1 hour
  'MSK_IPAR_NUM_THREADS': 0, # Use all cores (0 = auto)
+ # CRITICAL: Use interior-point for MIP (much faster than simplex)
+ 'MSK_IPAR_MIO_NODE_OPTIMIZER': 4, # Use interior-point for MIP nodes (4 = MSK_OPTIMIZER_INTPNT)
+ 'MSK_IPAR_MIO_ROOT_OPTIMIZER': 4, # Use interior-point for MIP root (4 = MSK_OPTIMIZER_INTPNT)
+ # Safe MIP performance improvements
+ 'MSK_IPAR_MIO_HEURISTIC_LEVEL': 2, # Moderate heuristics (safe, helps find good solutions faster)
+ 'MSK_IPAR_MIO_SYMMETRY_LEVEL': 2, # Moderate symmetry detection (safe, can dramatically speed up symmetric problems)
+ 'MSK_IPAR_MIO_PRESOLVE_AGGREGATOR_USE': 1, # MIP presolve aggregator (safe, helps reduce problem size)
+ # Logging
  'MSK_IPAR_LOG': 4, # Moderate logging
  'MSK_IPAR_LOG_MIO': 2, # Log MIP occasionally
  }
  }
  if solver_options:
  mosek_defaults['solver_options'].update(solver_options)
- logger.info(f"Using Mosek with default MILP-friendly settings")
+ logger.info(f"Using Mosek with barrier method for MIP (interior-point for root/nodes)")
  return solver_name, mosek_defaults

  elif solver_name == 'gurobi':
@@ -490,6 +549,121 @@ class NetworkSolver:
  logger.info(f"Using Gurobi with default MILP-friendly settings")
  return solver_name, gurobi_defaults

+ # Handle special COPT configurations
+ elif solver_name == 'copt (barrier)':
+ copt_barrier_options = {
+ 'solver_options': {
+ 'LpMethod': 2, # Barrier method
+ 'Crossover': 0, # Skip crossover for speed
+ 'RelGap': 0.05, # 5% MIP gap (match Gurobi)
+ 'TimeLimit': 3600, # 1 hour time limit
+ 'Threads': 2, # 4 threads (memory-conscious)
+ 'Presolve': 3, # Aggressive presolve
+ 'Scaling': 1, # Enable scaling
+ 'FeasTol': 1e-5, # Match Gurobi feasibility
+ 'DualTol': 1e-5, # Match Gurobi dual tolerance
+ # MIP performance settings
+ 'CutLevel': 2, # Normal cut generation
+ 'HeurLevel': 3, # Aggressive heuristics
+ 'StrongBranching': 1, # Fast strong branching
+ }
+ }
+ if solver_options:
+ copt_barrier_options['solver_options'].update(solver_options)
+ logger.info(f"Using COPT Barrier configuration (fast interior-point method)")
+ return 'copt', copt_barrier_options
+
+ elif solver_name == 'copt (barrier homogeneous)':
+ copt_barrier_homogeneous_options = {
+ 'solver_options': {
+ 'LpMethod': 2, # Barrier method
+ 'Crossover': 0, # Skip crossover
+ 'BarHomogeneous': 1, # Use homogeneous self-dual form
+ 'RelGap': 0.05, # 5% MIP gap
+ 'TimeLimit': 3600, # 1 hour
+ 'Threads': 2, # 4 threads (memory-conscious)
+ 'Presolve': 3, # Aggressive presolve
+ 'Scaling': 1, # Enable scaling
+ 'FeasTol': 1e-5,
+ 'DualTol': 1e-5,
+ # MIP performance settings
+ 'CutLevel': 2, # Normal cuts
+ 'HeurLevel': 3, # Aggressive heuristics
+ 'StrongBranching': 1, # Fast strong branching
+ }
+ }
+ if solver_options:
+ copt_barrier_homogeneous_options['solver_options'].update(solver_options)
+ logger.info(f"Using COPT Barrier Homogeneous configuration")
+ return 'copt', copt_barrier_homogeneous_options
+
+ elif solver_name == 'copt (barrier+crossover)':
+ copt_barrier_crossover_options = {
+ 'solver_options': {
+ 'LpMethod': 2, # Barrier method
+ 'Crossover': 1, # Enable crossover for better solutions
+ 'RelGap': 0.05, # 5% MIP gap (relaxed for faster solves)
+ 'TimeLimit': 3600, # 1 hour
+ 'Threads': 2, # Use all cores
+ 'Presolve': 3, # Aggressive presolve
+ 'Scaling': 1, # Enable scaling
+ 'FeasTol': 1e-6, # Tighter feasibility
+ 'DualTol': 1e-6, # Tighter dual tolerance
+ # MIP performance settings
+ 'CutLevel': 3, # Aggressive cuts
+ 'HeurLevel': 3, # Aggressive heuristics
+ 'StrongBranching': 2, # Normal strong branching
+ }
+ }
+ if solver_options:
+ copt_barrier_crossover_options['solver_options'].update(solver_options)
+ logger.info(f"Using COPT Barrier+Crossover configuration (balanced performance)")
+ return 'copt', copt_barrier_crossover_options
+
+ elif solver_name == 'copt (dual simplex)':
+ copt_dual_simplex_options = {
+ 'solver_options': {
+ 'LpMethod': 1, # Dual simplex method
+ 'RelGap': 0.05, # 5% MIP gap
+ 'TimeLimit': 3600, # 1 hour
+ 'Threads': 2, # Use all cores
+ 'Presolve': 3, # Aggressive presolve
+ 'Scaling': 1, # Enable scaling
+ 'FeasTol': 1e-6,
+ 'DualTol': 1e-6,
+ # MIP performance settings
+ 'CutLevel': 2, # Normal cuts
+ 'HeurLevel': 2, # Normal heuristics
+ 'StrongBranching': 1, # Fast strong branching
+ }
+ }
+ if solver_options:
+ copt_dual_simplex_options['solver_options'].update(solver_options)
+ logger.info(f"Using COPT Dual Simplex configuration (robust method)")
+ return 'copt', copt_dual_simplex_options
+
+ elif solver_name == 'copt (concurrent)':
+ copt_concurrent_options = {
+ 'solver_options': {
+ 'LpMethod': 4, # Concurrent (simplex + barrier)
+ 'RelGap': 0.05, # 5% MIP gap
+ 'TimeLimit': 3600, # 1 hour
+ 'Threads': 2, # Use all cores
+ 'Presolve': 3, # Aggressive presolve
+ 'Scaling': 1, # Enable scaling
+ 'FeasTol': 1e-5,
+ 'DualTol': 1e-5,
+ # MIP performance settings
+ 'CutLevel': 2, # Normal cuts
+ 'HeurLevel': 3, # Aggressive heuristics
+ 'StrongBranching': 1, # Fast strong branching
+ }
+ }
+ if solver_options:
+ copt_concurrent_options['solver_options'].update(solver_options)
+ logger.info(f"Using COPT Concurrent configuration (parallel simplex + barrier)")
+ return 'copt', copt_concurrent_options
+
  elif solver_name in ['highs', 'cplex', 'glpk', 'cbc', 'scip', 'copt']:
  return solver_name, solver_options

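
Like the existing Gurobi and Mosek presets, the new COPT variants are selected purely by the solver_name string and resolved to the base 'copt' solver with a preconfigured option set; options passed explicitly are merged over the preset via dict.update(). A short sketch of selecting one of them through solve_network (placeholder path and ID; assumes COPT and its Python bindings are installed and licensed):

    result = solve_network(
        db_path="network.db",                # placeholder path
        network_id=1,                        # placeholder ID
        solver_name="copt (barrier)",
        solver_options={"TimeLimit": 7200},  # overrides the preset's 3600 s limit
    )
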

{pyconvexity-0.3.8.post3 → pyconvexity-0.3.8.post4}/src/pyconvexity.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: pyconvexity
- Version: 0.3.8.post3
+ Version: 0.3.8.post4
  Summary: Python library for energy system modeling and optimization with PyPSA
  Author-email: Convexity Team <info@convexity.com>
  License: MIT

pyconvexity-0.3.8.post3/src/pyconvexity/_version.py (removed)
@@ -1 +0,0 @@
- __version__ = "0.3.8.post3"