skfolio 0.0.1__py3-none-any.whl → 0.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. skfolio/cluster/_hierarchical.py +1 -0
  2. skfolio/datasets/_base.py +1 -1
  3. skfolio/measures/__init__.py +1 -1
  4. skfolio/model_selection/_combinatorial.py +12 -14
  5. skfolio/model_selection/_validation.py +6 -8
  6. skfolio/moments/covariance/_covariance.py +0 -1
  7. skfolio/moments/expected_returns/_expected_returns.py +1 -0
  8. skfolio/optimization/_base.py +9 -11
  9. skfolio/optimization/cluster/hierarchical/_base.py +0 -2
  10. skfolio/optimization/convex/__init__.py +1 -1
  11. skfolio/optimization/convex/_base.py +43 -26
  12. skfolio/optimization/convex/_distributionally_robust.py +22 -10
  13. skfolio/optimization/convex/_maximum_diversification.py +17 -7
  14. skfolio/optimization/convex/_mean_risk.py +25 -13
  15. skfolio/optimization/convex/_risk_budgeting.py +22 -10
  16. skfolio/optimization/ensemble/_stacking.py +4 -6
  17. skfolio/population/_population.py +18 -25
  18. skfolio/portfolio/_portfolio.py +11 -13
  19. skfolio/pre_selection/_pre_selection.py +6 -6
  20. skfolio/preprocessing/_returns.py +1 -1
  21. skfolio/prior/__init__.py +1 -1
  22. skfolio/prior/_base.py +1 -0
  23. skfolio/prior/_empirical.py +1 -1
  24. skfolio/prior/_factor_model.py +4 -6
  25. skfolio/uncertainty_set/_base.py +1 -0
  26. skfolio/uncertainty_set/_bootstrap.py +1 -0
  27. skfolio/uncertainty_set/_empirical.py +2 -0
  28. skfolio/utils/stats.py +1 -1
  29. {skfolio-0.0.1.dist-info → skfolio-0.0.3.dist-info}/METADATA +580 -568
  30. {skfolio-0.0.1.dist-info → skfolio-0.0.3.dist-info}/RECORD +33 -33
  31. {skfolio-0.0.1.dist-info → skfolio-0.0.3.dist-info}/LICENSE +0 -0
  32. {skfolio-0.0.1.dist-info → skfolio-0.0.3.dist-info}/WHEEL +0 -0
  33. {skfolio-0.0.1.dist-info → skfolio-0.0.3.dist-info}/top_level.txt +0 -0
skfolio/cluster/_hierarchical.py CHANGED
@@ -149,6 +149,7 @@ class HierarchicalClustering(skb.ClusterMixin, skb.BaseEstimator):
  .. [1] "Application of two-order difference to gap statistic".
  Yue, Wang & Wei (2009)
  """
+
  n_clusters_: int
  labels_: np.ndarray
  linkage_matrix_: np.ndarray
skfolio/datasets/_base.py CHANGED
@@ -136,7 +136,7 @@ def download_dataset(
  representing the asset price of a given observation.
  """
  url = (
- "https://github.com/HugoDelatte/portfolio-optimization/raw/main/datasets/"
+ "https://github.com/skfolio/skfolio/raw/main/datasets/"
  f"{data_filename}.csv.gz"
  )
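Only the download URL changes here, pointing at the new skfolio repository. As a rough illustration (not the package's actual loader internals; the filename below is a placeholder), the new location can be read directly with pandas:

    import pandas as pd

    # Hypothetical filename; the real names live under skfolio/datasets.
    data_filename = "sp500_dataset"
    url = (
        "https://github.com/skfolio/skfolio/raw/main/datasets/"
        f"{data_filename}.csv.gz"
    )
    # pandas infers gzip compression from the .gz suffix.
    prices = pd.read_csv(url, index_col=0, parse_dates=True)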
skfolio/measures/__init__.py CHANGED
@@ -25,9 +25,9 @@ from skfolio.measures._measures import (
  get_drawdowns,
  gini_mean_difference,
  kurtosis,
- mean_absolute_deviation,
  max_drawdown,
  mean,
+ mean_absolute_deviation,
  owa_gmd_weights,
  semi_deviation,
  semi_variance,
skfolio/model_selection/_combinatorial.py CHANGED
@@ -316,20 +316,18 @@ class CombinatorialPurgedCV(BaseCombinatorialCV):

  def summary(self, X) -> pd.Series:
  n_samples = X.shape[0]
- return pd.Series(
- {
- "Number of Observations": n_samples,
- "Total Number of Folds": self.n_folds,
- "Number of Test Folds": self.n_test_folds,
- "Purge Size": self.purged_size,
- "Embargo Size": self.embargo_size,
- "Average Training Size": int(
- n_samples / self.n_folds * (self.n_folds - self.n_test_folds)
- ),
- "Number of Test Paths": self.n_test_paths,
- "Number of Training Combinations": self.n_splits,
- }
- )
+ return pd.Series({
+ "Number of Observations": n_samples,
+ "Total Number of Folds": self.n_folds,
+ "Number of Test Folds": self.n_test_folds,
+ "Purge Size": self.purged_size,
+ "Embargo Size": self.embargo_size,
+ "Average Training Size": int(
+ n_samples / self.n_folds * (self.n_folds - self.n_test_folds)
+ ),
+ "Number of Test Paths": self.n_test_paths,
+ "Number of Training Combinations": self.n_splits,
+ })

  def plot_train_test_folds(self) -> skt.Figure:
  """Plot the train/test fold locations"""
skfolio/model_selection/_validation.py CHANGED
@@ -165,14 +165,12 @@ def cross_val_predict(
  path_id = path_ids[i, j]
  portfolios[path_id].append(p)
  name = portfolio_params.pop("name", "path")
- pred = Population(
- [
- MultiPeriodPortfolio(
- name=f"{name}_{i}", portfolios=portfolios[i], **portfolio_params
- )
- for i in range(path_nb)
- ]
- )
+ pred = Population([
+ MultiPeriodPortfolio(
+ name=f"{name}_{i}", portfolios=portfolios[i], **portfolio_params
+ )
+ for i in range(path_nb)
+ ])
  else:
  # We need to re-order the test folds in case they were un-ordered by the
  # CV generator.
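Again only reformatting, but it shows the behaviour: with a combinatorial CV, `cross_val_predict` returns a `Population` holding one `MultiPeriodPortfolio` per test path, named "path_0", "path_1", ... by default. A minimal sketch (the dataset helpers from `skfolio.datasets` and `skfolio.preprocessing` are assumed):

    from skfolio.datasets import load_sp500_dataset
    from skfolio.model_selection import CombinatorialPurgedCV, cross_val_predict
    from skfolio.optimization import MeanRisk
    from skfolio.preprocessing import prices_to_returns

    X = prices_to_returns(load_sp500_dataset())

    cv = CombinatorialPurgedCV(n_folds=10, n_test_folds=2)
    population = cross_val_predict(MeanRisk(), X, cv=cv)

    for multi_period_portfolio in population:
        print(multi_period_portfolio.name)  # "path_0", "path_1", ...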
skfolio/moments/covariance/_covariance.py CHANGED
@@ -1,6 +1,5 @@
  """Covariance Estimators."""

-
  import numpy as np
  import numpy.typing as npt
  import pandas as pd
skfolio/moments/expected_returns/_expected_returns.py CHANGED
@@ -315,6 +315,7 @@ class ShrunkMu(BaseMu):
  .. [3] "Optimal shrinkage estimator for high-dimensional mean vector"
  Bodnar, Okhrin and Parolya (2019)
  """
+
  covariance_estimator_: BaseCovariance
  mu_target_: np.ndarray
  alpha_: float
skfolio/optimization/_base.py CHANGED
@@ -89,17 +89,15 @@ class BaseOptimization(skb.BaseEstimator, ABC):
  # For a 2D array we return a population of portfolios.
  if self.weights_.ndim == 2:
  n_portfolios = self.weights_.shape[0]
- return Population(
- [
- Portfolio(
- X=X,
- weights=self.weights_[i],
- name=f"ptf{i} - {name}",
- **ptf_kwargs,
- )
- for i in range(n_portfolios)
- ]
- )
+ return Population([
+ Portfolio(
+ X=X,
+ weights=self.weights_[i],
+ name=f"ptf{i} - {name}",
+ **ptf_kwargs,
+ )
+ for i in range(n_portfolios)
+ ])
  return Portfolio(X=X, weights=self.weights_, name=name, **ptf_kwargs)

  def score(self, X: npt.ArrayLike, y: npt.ArrayLike = None) -> float:
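Only formatting changes here too, but the block documents `predict`: a 2D `weights_` (one row per optimization) yields a `Population` of `Portfolio` objects named `ptf{i} - {name}`, while a 1D `weights_` yields a single `Portfolio`. A minimal sketch, assuming the usual dataset helpers:

    from skfolio.datasets import load_sp500_dataset
    from skfolio.optimization import MeanRisk
    from skfolio.preprocessing import prices_to_returns

    X = prices_to_returns(load_sp500_dataset())

    model = MeanRisk().fit(X)
    prediction = model.predict(X)
    # weights_ is 1D for a single optimization, so predict returns one Portfolio;
    # estimators fitted with several optimizations (2D weights_) return a Population.
    print(type(prediction).__name__)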
skfolio/optimization/cluster/hierarchical/_base.py CHANGED
@@ -7,14 +7,12 @@ from abc import ABC, abstractmethod

  import numpy as np
  import numpy.typing as npt
- import pandas as pd

  import skfolio.typing as skt
  from skfolio.cluster import HierarchicalClustering
  from skfolio.distance import BaseDistance
  from skfolio.measures import ExtraRiskMeasure, RiskMeasure
  from skfolio.optimization._base import BaseOptimization
- from skfolio.population import Population
  from skfolio.portfolio import Portfolio
  from skfolio.prior import BasePrior, PriorModel
  from skfolio.utils.tools import input_to_array
skfolio/optimization/convex/__init__.py CHANGED
@@ -13,4 +13,4 @@ __all__ = [
  "RiskBudgeting",
  "DistributionallyRobustCVaR",
  "MaximumDiversification",
- ]
+ ]
skfolio/optimization/convex/_base.py CHANGED
@@ -46,6 +46,7 @@ class ObjectiveFunction(AutoEnum):
  MAXIMIZE_UTILITY : str
  Maximize the ratio :math:`\frac{w^T\mu - R_{f}}{risk(w)}`.
  """
+
  MINIMIZE_RISK = auto()
  MAXIMIZE_RETURN = auto()
  MAXIMIZE_UTILITY = auto()
@@ -364,14 +365,17 @@ class ConvexOptimization(BaseOptimization, ABC):
  It is a function that must take as argument the weights `w` and returns a
  CVPXY expression.

- solver : str, optional
- The solver to use. For example, "ECOS", "SCS", or "OSQP".
- The default (`None`) is set depending on the problem.
+ solver : str, default="CLARABEL"
+ The solver to use. The default is "CLARABEL" which is written in Rust and has
+ better numerical stability and performance than ECOS and SCS. Cvxpy will replace
+ its default solver "ECOS" by "CLARABEL" in future releases.
  For more details about available solvers, check the CVXPY documentation:
  https://www.cvxpy.org/tutorial/advanced/index.html#choosing-a-solver

  solver_params : dict, optional
  Solver parameters. For example, `solver_params=dict(verbose=True)`.
+ The default (`None`) is use `{"tol_gap_abs": 1e-9, "tol_gap_rel": 1e-9}`
+ for the solver "CLARABEL" and the CVXPY default otherwise.
  For more details about solver arguments, check the CVXPY documentation:
  https://www.cvxpy.org/tutorial/advanced/index.html#setting-solver-options

@@ -385,6 +389,10 @@ class ConvexOptimization(BaseOptimization, ABC):
  It can be used to increase the optimization accuracies in specific cases.
  The default (`None`) is set depending on the problem.

+ save_problem : bool, default=False
+ If this is set to True, the CVXPY Problem is saved in `problem_`.
+ The default is `False`.
+
  raise_on_failure : bool, default=True
  If this is set to True, an error is raised when the optimization fail otherwise
  it passes with a warning.
@@ -400,9 +408,6 @@ class ConvexOptimization(BaseOptimization, ABC):
  weights_ : ndarray of shape (n_assets,) or (n_optimizations, n_assets)
  Weights of the assets.

- problem_: cvxpy.Problem
- CVXPY problem used for the optimization.
-
  problem_values_ : dict[str, float] | list[dict[str, float]] of size n_optimizations
  Expression values retrieved from the CVXPY problem.

@@ -414,12 +419,17 @@ class ConvexOptimization(BaseOptimization, ABC):

  covariance_uncertainty_set_estimator_ : BaseCovarianceUncertaintySet
  Fitted `covariance_uncertainty_set_estimator` if provided.
+
+ problem_: cvxpy.Problem
+ CVXPY problem used for the optimization. Only when `save_problem` is set to
+ `True`.
  """
- _solver: str
+
+ _solver_params: dict
  _scale_objective: cp.Constant
  _scale_constraints: cp.Constant
+ _cvx_cache: dict

- _cvx_cache = dict
  problem_: cp.Problem
  problem_values_: dict[str, float] | list[dict[str, float]]
  prior_estimator_: BasePrior
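The user-visible changes in this file are the new "CLARABEL" default solver, the CLARABEL-specific `solver_params` defaults, and the opt-in `save_problem` flag that controls whether `problem_` is kept after `fit`. A minimal usage sketch based on those parameter names, shown on `MeanRisk` (which forwards them to `ConvexOptimization`); the dataset helpers `load_sp500_dataset` and `prices_to_returns` are assumed from skfolio's public API:

    from skfolio.datasets import load_sp500_dataset
    from skfolio.optimization import MeanRisk
    from skfolio.preprocessing import prices_to_returns

    X = prices_to_returns(load_sp500_dataset())

    model = MeanRisk(
        solver="CLARABEL",                                         # now the default
        solver_params={"tol_gap_abs": 1e-9, "tol_gap_rel": 1e-9},  # same as the new default
        save_problem=True,                                         # keep the CVXPY problem
    )
    model.fit(X)

    print(model.weights_.round(4))
    print(model.problem_.status)  # available only because save_problem=True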
@@ -448,18 +458,20 @@ class ConvexOptimization(BaseOptimization, ABC):
  l1_coef: float = 0.0,
  l2_coef: float = 0.0,
  mu_uncertainty_set_estimator: BaseMuUncertaintySet | None = None,
- covariance_uncertainty_set_estimator: BaseCovarianceUncertaintySet
- | None = None,
+ covariance_uncertainty_set_estimator: (
+ BaseCovarianceUncertaintySet | None
+ ) = None,
  risk_free_rate: float = 0.0,
  min_acceptable_return: skt.Target | None = None,
  cvar_beta: float = 0.95,
  evar_beta: float = 0.95,
  cdar_beta: float = 0.95,
  edar_beta: float = 0.95,
- solver: str | None = None,
+ solver: str = "CLARABEL",
  solver_params: dict | None = None,
  scale_objective: float | None = None,
  scale_constraints: float | None = None,
+ save_problem: bool = False,
  raise_on_failure: bool = True,
  add_objective: skt.ExpressionFunction | None = None,
  add_constraints: skt.ExpressionFunction | None = None,
@@ -501,6 +513,7 @@ class ConvexOptimization(BaseOptimization, ABC):
  self.overwrite_expected_return = overwrite_expected_return
  self.solver = solver
  self.solver_params = solver_params
+ self.save_problem = save_problem
  self.raise_on_failure = raise_on_failure
  self.scale_objective = scale_objective
  self.scale_constraints = scale_constraints
@@ -509,6 +522,8 @@ class ConvexOptimization(BaseOptimization, ABC):
  self.cdar_beta = cdar_beta
  self.edar_beta = edar_beta

+ self._clear_models_cache()
+
  def _call_custom_func(
  self, func: skt.ExpressionFunction, w: cp.Variable, name: str = "custom_func"
  ) -> cp.Expression | list[cp.Expression]:
@@ -757,21 +772,19 @@ class ConvexOptimization(BaseOptimization, ABC):

  return constraints

- def _set_solver(self, default: str) -> None:
- """Set solver by saving its value in `_solver`.
+ def _set_solver_params(self, default: dict | None) -> None:
+ """Set the solver params by saving its value in `_solver_params`.
  It uses `solver` if provided otherwise it uses the `default` solver.

  Parameters
  ----------
  default : str
- The default solver to use when `solver` is `None`.
+ The default solver params to use when `solver_params` is `None`.
  """
- if self.solver is None:
- self._solver = default
+ if self.solver_params is None:
+ self._solver_params = default if default is not None else {}
  else:
- self._solver = self.solver
- if self._solver not in INSTALLED_SOLVERS:
- raise ValueError(f"The solver {self._solver} is not installed.")
+ self._solver_params = self.solver_params

  def _set_scale_objective(self, default: float) -> None:
  """Set the objective scale by saving its value in `_scale_objective`.
@@ -894,6 +907,9 @@ class ConvexOptimization(BaseOptimization, ABC):
  factor: cvxpy Variable | cvxpy Constant
  CVXPY Variable or Constant used for RatioMeasure optimization problems.
  """
+ if self.solver not in INSTALLED_SOLVERS:
+ raise ValueError(f"The solver {self.solver} is not installed.")
+
  if parameters_values is None:
  parameters_values = []

@@ -918,9 +934,7 @@ class ConvexOptimization(BaseOptimization, ABC):
  for p, v in parameters_values
  ]

- solver_params = self.solver_params
- if solver_params is None:
- solver_params = {}
+
  all_weights = []
  all_problem_values = []
  optimal = True
@@ -932,7 +946,7 @@ class ConvexOptimization(BaseOptimization, ABC):
  # We suppress cvxpy warning as it is redundant with our warning
  with warnings.catch_warnings():
  warnings.simplefilter("ignore")
- problem.solve(solver=self._solver, **solver_params)
+ problem.solve(solver=self.solver, **self._solver_params)

  if w.value is None:
  raise cp.SolverError("No solution found")
@@ -965,7 +979,7 @@ class ConvexOptimization(BaseOptimization, ABC):
  if len(params_string) != 0:
  params_string = f" with parameters {params_string}"
  msg = (
- f"Solver '{self._solver}' failed{params_string}. Try another"
+ f"Solver '{self.solver}' failed for {params_string}. Try another"
  " solver, or solve with solver_params=dict(verbose=True) for more"
  " information"
  )
@@ -988,7 +1002,10 @@ class ConvexOptimization(BaseOptimization, ABC):
  self.weights_ = np.array(all_weights, dtype=float)
  self.problem_values_ = all_problem_values

- self.problem_ = problem
+ if self.save_problem:
+ self.problem_ = problem
+
+ self._clear_models_cache()

  @cache_method("_cvx_cache")
  def _cvx_mu_uncertainty_set(
@@ -1893,7 +1910,7 @@ class ConvexOptimization(BaseOptimization, ABC):
  The Gini mean difference (GMD) is a measure of dispersion introduced in the
  context of portfolio optimization by Yitzhaki (1982).
  The initial formulation was not used by practitioners due to the high number of
- variables that increases proportional to T(T1)/2 ,
+ variables that increases proportional to T(T-1)/2 ,

  Cajas (2021) proposed an alternative reformulation based on the ordered weighted
  averaging (OWA) operator for monotonic weights proposed by Chassein and
skfolio/optimization/convex/_distributionally_robust.py CHANGED
@@ -172,14 +172,17 @@ class DistributionallyRobustCVaR(ConvexOptimization):
  It is a function that must take as argument the weights `w` and returns a
  CVPXY expression.

- solver : str, optional
- The solver to use. For example, "ECOS", "SCS", or "OSQP".
- The default (`None`) is set depending on the problem.
+ solver : str, default="CLARABEL"
+ The solver to use. The default is "CLARABEL" which is written in Rust and has
+ better numerical stability and performance than ECOS and SCS. Cvxpy will replace
+ its default solver "ECOS" by "CLARABEL" in future releases.
  For more details about available solvers, check the CVXPY documentation:
  https://www.cvxpy.org/tutorial/advanced/index.html#choosing-a-solver

  solver_params : dict, optional
  Solver parameters. For example, `solver_params=dict(verbose=True)`.
+ The default (`None`) is use `{"tol_gap_abs": 1e-9, "tol_gap_rel": 1e-9}`
+ for the solver "CLARABEL" and the CVXPY default otherwise.
  For more details about solver arguments, check the CVXPY documentation:
  https://www.cvxpy.org/tutorial/advanced/index.html#setting-solver-options

@@ -193,6 +196,10 @@ class DistributionallyRobustCVaR(ConvexOptimization):
  It can be used to increase the optimization accuracies in specific cases.
  The default (`None`) is set depending on the problem.

+ save_problem : bool, default=False
+ If this is set to True, the CVXPY Problem is saved in `problem_`.
+ The default is `False`.
+
  raise_on_failure : bool, default=True
  If this is set to True, an error is raised when the optimization fail otherwise
  it passes with a warning.
@@ -208,15 +215,16 @@ class DistributionallyRobustCVaR(ConvexOptimization):
  weights_ : ndarray of shape (n_assets,) or (n_optimizations, n_assets)
  Weights of the assets.

- problem_: cvxpy.Problem
- CVXPY problem used for the optimization.
-
  problem_values_ : dict[str, float] | list[dict[str, float]] of size n_optimizations
  Expression values retrieved from the CVXPY problem.

  prior_estimator_ : BasePrior
  Fitted `prior_estimator`.

+ problem_: cvxpy.Problem
+ CVXPY problem used for the optimization. Only when `save_problem` is set to
+ `True`.
+
  n_features_in_ : int
  Number of assets seen during `fit`.

@@ -252,10 +260,11 @@ class DistributionallyRobustCVaR(ConvexOptimization):
  left_inequality: skt.Inequality | None = None,
  right_inequality: skt.Inequality | None = None,
  risk_free_rate: float = 0.0,
- solver: str | None = None,
+ solver: str = "CLARABEL",
  solver_params: dict | None = None,
  scale_objective: float | None = None,
  scale_constraints: float | None = None,
+ save_problem: bool = False,
  raise_on_failure: bool = True,
  add_objective: skt.ExpressionFunction | None = None,
  add_constraints: skt.ExpressionFunction | None = None,
@@ -282,6 +291,7 @@ class DistributionallyRobustCVaR(ConvexOptimization):
  solver_params=solver_params,
  scale_objective=scale_objective,
  scale_constraints=scale_constraints,
+ save_problem=save_problem,
  raise_on_failure=raise_on_failure,
  add_objective=add_objective,
  add_constraints=add_constraints,
@@ -313,7 +323,6 @@ class DistributionallyRobustCVaR(ConvexOptimization):
  self._check_feature_names(X, reset=True)
  # Used to avoid adding multiple times similar constrains linked to identical
  # risk models
- self._clear_models_cache()
  self.prior_estimator_ = check_estimator(
  self.prior_estimator,
  default=EmpiricalPrior(),
@@ -323,8 +332,11 @@ class DistributionallyRobustCVaR(ConvexOptimization):
  prior_model = self.prior_estimator_.prior_model_
  n_observations, n_assets = prior_model.returns.shape

- # set solvers
- self._set_solver(default="ECOS")
+ # set solvers params
+ if self.solver == "CLARABEL":
+ self._set_solver_params(default={"tol_gap_abs": 1e-9, "tol_gap_rel": 1e-9})
+ else:
+ self._set_solver_params(default=None)

  # set scale
  self._set_scale_objective(default=1)
skfolio/optimization/convex/_maximum_diversification.py CHANGED
@@ -267,14 +267,17 @@ class MaximumDiversification(MeanRisk):
  It is a function that must take as argument the weights `w` and returns a
  CVPXY expression or a list of CVPXY expressions.

- solver : str, optional
- The solver to use. For example, "ECOS", "SCS", or "OSQP".
- The default (`None`) is set depending on the problem.
+ solver : str, default="CLARABEL"
+ The solver to use. The default is "CLARABEL" which is written in Rust and has
+ better numerical stability and performance than ECOS and SCS. Cvxpy will replace
+ its default solver "ECOS" by "CLARABEL" in future releases.
  For more details about available solvers, check the CVXPY documentation:
  https://www.cvxpy.org/tutorial/advanced/index.html#choosing-a-solver

  solver_params : dict, optional
  Solver parameters. For example, `solver_params=dict(verbose=True)`.
+ The default (`None`) is use `{"tol_gap_abs": 1e-9, "tol_gap_rel": 1e-9}`
+ for the solver "CLARABEL" and the CVXPY default otherwise.
  For more details about solver arguments, check the CVXPY documentation:
  https://www.cvxpy.org/tutorial/advanced/index.html#setting-solver-options

@@ -288,6 +291,10 @@ class MaximumDiversification(MeanRisk):
  It can be used to increase the optimization accuracies in specific cases.
  The default (`None`) is set depending on the problem.

+ save_problem : bool, default=False
+ If this is set to True, the CVXPY Problem is saved in `problem_`.
+ The default is `False`.
+
  raise_on_failure : bool, default=True
  If this is set to True, an error is raised when the optimization fail otherwise
  it passes with a warning.
@@ -303,15 +310,16 @@ class MaximumDiversification(MeanRisk):
  weights_ : ndarray of shape (n_assets,) or (n_optimizations, n_assets)
  Weights of the assets.

- problem_: cvxpy.Problem
- CVXPY problem used for the optimization.
-
  problem_values_ : dict[str, float] | list[dict[str, float]] of size n_optimizations
  Expression values retrieved from the CVXPY problem.

  prior_estimator_ : BasePrior
  Fitted `prior_estimator`.

+ problem_: cvxpy.Problem
+ CVXPY problem used for the optimization. Only when `save_problem` is set to
+ `True`.
+
  n_features_in_ : int
  Number of assets seen during `fit`.

@@ -343,10 +351,11 @@ class MaximumDiversification(MeanRisk):
  min_return: skt.Target | None = None,
  max_tracking_error: skt.Target | None = None,
  max_turnover: skt.Target | None = None,
- solver: str | None = None,
+ solver: str = "CLARABEL",
  solver_params: dict | None = None,
  scale_objective: float | None = None,
  scale_constraints: float | None = None,
+ save_problem: bool = False,
  raise_on_failure: bool = True,
  add_objective: skt.ExpressionFunction | None = None,
  add_constraints: skt.ExpressionFunction | None = None,
@@ -380,6 +389,7 @@ class MaximumDiversification(MeanRisk):
  solver_params=solver_params,
  scale_objective=scale_objective,
  scale_constraints=scale_constraints,
+ save_problem=save_problem,
  raise_on_failure=raise_on_failure,
  add_objective=add_objective,
  add_constraints=add_constraints,
skfolio/optimization/convex/_mean_risk.py CHANGED
@@ -472,14 +472,17 @@ class MeanRisk(ConvexOptimization):
  It is a function that must take as argument the weights `w` and returns a
  CVPXY expression.

- solver : str, optional
- The solver to use. For example, "ECOS", "SCS", or "OSQP".
- The default (`None`) is set depending on the problem.
+ solver : str, default="CLARABEL"
+ The solver to use. The default is "CLARABEL" which is written in Rust and has
+ better numerical stability and performance than ECOS and SCS. Cvxpy will replace
+ its default solver "ECOS" by "CLARABEL" in future releases.
  For more details about available solvers, check the CVXPY documentation:
  https://www.cvxpy.org/tutorial/advanced/index.html#choosing-a-solver

  solver_params : dict, optional
  Solver parameters. For example, `solver_params=dict(verbose=True)`.
+ The default (`None`) is use `{"tol_gap_abs": 1e-9, "tol_gap_rel": 1e-9}`
+ for the solver "CLARABEL" and the CVXPY default otherwise.
  For more details about solver arguments, check the CVXPY documentation:
  https://www.cvxpy.org/tutorial/advanced/index.html#setting-solver-options

@@ -493,6 +496,10 @@ class MeanRisk(ConvexOptimization):
  It can be used to increase the optimization accuracies in specific cases.
  The default (`None`) is set depending on the problem.

+ save_problem : bool, default=False
+ If this is set to True, the CVXPY Problem is saved in `problem_`.
+ The default is `False`.
+
  raise_on_failure : bool, default=True
  If this is set to True, an error is raised when the optimization fail otherwise
  it passes with a warning.
@@ -508,9 +515,6 @@ class MeanRisk(ConvexOptimization):
  weights_ : ndarray of shape (n_assets,) or (n_optimizations, n_assets)
  Weights of the assets.

- problem_: cvxpy.Problem
- CVXPY problem used for the optimization.
-
  problem_values_ : dict[str, float] | list[dict[str, float]] of size n_optimizations
  Expression values retrieved from the CVXPY problem.

@@ -523,6 +527,10 @@ class MeanRisk(ConvexOptimization):

  covariance_uncertainty_set_estimator_ : BaseCovarianceUncertaintySet
  Fitted `covariance_uncertainty_set_estimator` if provided.

+ problem_: cvxpy.Problem
+ CVXPY problem used for the optimization. Only when `save_problem` is set to
+ `True`.
+
  n_features_in_ : int
  Number of assets seen during `fit`.
@@ -555,8 +563,9 @@ class MeanRisk(ConvexOptimization):
  l1_coef: float = 0.0,
  l2_coef: float = 0.0,
  mu_uncertainty_set_estimator: BaseMuUncertaintySet | None = None,
- covariance_uncertainty_set_estimator: BaseCovarianceUncertaintySet
- | None = None,
+ covariance_uncertainty_set_estimator: (
+ BaseCovarianceUncertaintySet | None
+ ) = None,
  risk_free_rate: float = 0.0,
  min_return: skt.Target | None = None,
  max_tracking_error: skt.Target | None = None,
@@ -581,10 +590,11 @@ class MeanRisk(ConvexOptimization):
  evar_beta: float = 0.95,
  cdar_beta: float = 0.95,
  edar_beta: float = 0.95,
- solver: str | None = None,
+ solver: str = "CLARABEL",
  solver_params: dict | None = None,
  scale_objective: float | None = None,
  scale_constraints: float | None = None,
+ save_problem: bool = False,
  raise_on_failure: bool = True,
  add_objective: skt.ExpressionFunction | None = None,
  add_constraints: skt.ExpressionFunction | None = None,
@@ -622,6 +632,7 @@ class MeanRisk(ConvexOptimization):
  solver_params=solver_params,
  scale_objective=scale_objective,
  scale_constraints=scale_constraints,
+ save_problem=save_problem,
  raise_on_failure=raise_on_failure,
  add_objective=add_objective,
  add_constraints=add_constraints,
@@ -689,7 +700,6 @@ class MeanRisk(ConvexOptimization):
  self._validation()
  # Used to avoid adding multiple times similar constrains linked to identical
  # risk models
- self._clear_models_cache()
  self.prior_estimator_ = check_estimator(
  self.prior_estimator,
  default=EmpiricalPrior(),
@@ -699,8 +709,11 @@ class MeanRisk(ConvexOptimization):
  prior_model = self.prior_estimator_.prior_model_
  n_observations, n_assets = prior_model.returns.shape

- # set solvers
- self._set_solver(default="ECOS")
+ # set solvers params
+ if self.solver == "CLARABEL":
+ self._set_solver_params(default={"tol_gap_abs": 1e-9, "tol_gap_rel": 1e-9})
+ else:
+ self._set_solver_params(default=None)

  # set scales
  if self.objective_function == ObjectiveFunction.MAXIMIZE_RATIO:
@@ -868,7 +881,6 @@ class MeanRisk(ConvexOptimization):
  elif arg_name == "factor":
  args[arg_name] = factor
  elif arg_name == "covariance_uncertainty_set":
- self._set_solver(default="CVXOPT")
  # noinspection PyTypeChecker
  self.covariance_uncertainty_set_estimator_ = sk.clone(
  self.covariance_uncertainty_set_estimator
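Dropping `self._set_solver(default="CVXOPT")` means the solver is no longer silently switched to "CVXOPT" when a covariance uncertainty set is supplied; the configured solver ("CLARABEL" by default) is used throughout. A hedged sketch, assuming `EmpiricalCovarianceUncertaintySet` is the estimator exposed by `skfolio.uncertainty_set` (the module appears in the file list above) and the usual dataset helpers:

    from skfolio.datasets import load_sp500_dataset
    from skfolio.optimization import MeanRisk
    from skfolio.preprocessing import prices_to_returns
    from skfolio.uncertainty_set import EmpiricalCovarianceUncertaintySet  # assumed class name

    X = prices_to_returns(load_sp500_dataset())

    model = MeanRisk(
        covariance_uncertainty_set_estimator=EmpiricalCovarianceUncertaintySet(),
        solver="CLARABEL",  # no longer overridden internally
    )
    model.fit(X)
    print(model.weights_)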