skfolio 0.10.0__tar.gz → 0.10.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (129)
  1. {skfolio-0.10.0/src/skfolio.egg-info → skfolio-0.10.2}/PKG-INFO +53 -21
  2. {skfolio-0.10.0 → skfolio-0.10.2}/README.rst +51 -19
  3. {skfolio-0.10.0 → skfolio-0.10.2}/pyproject.toml +3 -2
  4. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/measures/_measures.py +4 -2
  5. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/prior/_empirical.py +1 -0
  6. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/prior/_entropy_pooling.py +9 -9
  7. {skfolio-0.10.0 → skfolio-0.10.2/src/skfolio.egg-info}/PKG-INFO +53 -21
  8. {skfolio-0.10.0 → skfolio-0.10.2}/LICENSE +0 -0
  9. {skfolio-0.10.0 → skfolio-0.10.2}/MANIFEST.in +0 -0
  10. {skfolio-0.10.0 → skfolio-0.10.2}/setup.cfg +0 -0
  11. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/__init__.py +0 -0
  12. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/cluster/__init__.py +0 -0
  13. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/cluster/_hierarchical.py +0 -0
  14. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/datasets/__init__.py +0 -0
  15. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/datasets/_base.py +0 -0
  16. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/datasets/data/__init__.py +0 -0
  17. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/datasets/data/factors_dataset.csv.gz +0 -0
  18. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/datasets/data/sp500_dataset.csv.gz +0 -0
  19. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/datasets/data/sp500_index.csv.gz +0 -0
  20. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/distance/__init__.py +0 -0
  21. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/distance/_base.py +0 -0
  22. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/distance/_distance.py +0 -0
  23. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/distribution/__init__.py +0 -0
  24. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/distribution/_base.py +0 -0
  25. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/distribution/copula/__init__.py +0 -0
  26. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/distribution/copula/_base.py +0 -0
  27. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/distribution/copula/_clayton.py +0 -0
  28. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/distribution/copula/_gaussian.py +0 -0
  29. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/distribution/copula/_gumbel.py +0 -0
  30. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/distribution/copula/_independent.py +0 -0
  31. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/distribution/copula/_joe.py +0 -0
  32. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/distribution/copula/_selection.py +0 -0
  33. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/distribution/copula/_student_t.py +0 -0
  34. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/distribution/copula/_utils.py +0 -0
  35. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/distribution/multivariate/__init__.py +0 -0
  36. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/distribution/multivariate/_base.py +0 -0
  37. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/distribution/multivariate/_utils.py +0 -0
  38. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/distribution/multivariate/_vine_copula.py +0 -0
  39. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/distribution/univariate/__init__.py +0 -0
  40. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/distribution/univariate/_base.py +0 -0
  41. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/distribution/univariate/_gaussian.py +0 -0
  42. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/distribution/univariate/_johnson_su.py +0 -0
  43. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/distribution/univariate/_normal_inverse_gaussian.py +0 -0
  44. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/distribution/univariate/_selection.py +0 -0
  45. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/distribution/univariate/_student_t.py +0 -0
  46. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/exceptions.py +0 -0
  47. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/measures/__init__.py +0 -0
  48. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/measures/_enums.py +0 -0
  49. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/metrics/__init__.py +0 -0
  50. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/metrics/_scorer.py +0 -0
  51. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/model_selection/__init__.py +0 -0
  52. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/model_selection/_combinatorial.py +0 -0
  53. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/model_selection/_validation.py +0 -0
  54. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/model_selection/_walk_forward.py +0 -0
  55. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/moments/__init__.py +0 -0
  56. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/moments/covariance/__init__.py +0 -0
  57. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/moments/covariance/_base.py +0 -0
  58. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/moments/covariance/_denoise_covariance.py +0 -0
  59. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/moments/covariance/_detone_covariance.py +0 -0
  60. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/moments/covariance/_empirical_covariance.py +0 -0
  61. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/moments/covariance/_ew_covariance.py +0 -0
  62. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/moments/covariance/_gerber_covariance.py +0 -0
  63. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/moments/covariance/_graphical_lasso_cv.py +0 -0
  64. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/moments/covariance/_implied_covariance.py +0 -0
  65. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/moments/covariance/_ledoit_wolf.py +0 -0
  66. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/moments/covariance/_oas.py +0 -0
  67. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/moments/covariance/_shrunk_covariance.py +0 -0
  68. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/moments/expected_returns/__init__.py +0 -0
  69. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/moments/expected_returns/_base.py +0 -0
  70. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/moments/expected_returns/_empirical_mu.py +0 -0
  71. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/moments/expected_returns/_equilibrium_mu.py +0 -0
  72. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/moments/expected_returns/_ew_mu.py +0 -0
  73. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/moments/expected_returns/_shrunk_mu.py +0 -0
  74. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/optimization/__init__.py +0 -0
  75. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/optimization/_base.py +0 -0
  76. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/optimization/cluster/__init__.py +0 -0
  77. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/optimization/cluster/_nco.py +0 -0
  78. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/optimization/cluster/hierarchical/__init__.py +0 -0
  79. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/optimization/cluster/hierarchical/_base.py +0 -0
  80. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/optimization/cluster/hierarchical/_herc.py +0 -0
  81. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/optimization/cluster/hierarchical/_hrp.py +0 -0
  82. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/optimization/convex/__init__.py +0 -0
  83. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/optimization/convex/_base.py +0 -0
  84. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/optimization/convex/_distributionally_robust.py +0 -0
  85. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/optimization/convex/_maximum_diversification.py +0 -0
  86. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/optimization/convex/_mean_risk.py +0 -0
  87. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/optimization/convex/_risk_budgeting.py +0 -0
  88. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/optimization/ensemble/__init__.py +0 -0
  89. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/optimization/ensemble/_stacking.py +0 -0
  90. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/optimization/naive/__init__.py +0 -0
  91. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/optimization/naive/_naive.py +0 -0
  92. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/population/__init__.py +0 -0
  93. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/population/_population.py +0 -0
  94. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/portfolio/__init__.py +0 -0
  95. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/portfolio/_base.py +0 -0
  96. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/portfolio/_multi_period_portfolio.py +0 -0
  97. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/portfolio/_portfolio.py +0 -0
  98. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/pre_selection/__init__.py +0 -0
  99. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/pre_selection/_drop_correlated.py +0 -0
  100. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/pre_selection/_drop_zero_variance.py +0 -0
  101. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/pre_selection/_select_complete.py +0 -0
  102. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/pre_selection/_select_k_extremes.py +0 -0
  103. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/pre_selection/_select_non_dominated.py +0 -0
  104. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/pre_selection/_select_non_expiring.py +0 -0
  105. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/preprocessing/__init__.py +0 -0
  106. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/preprocessing/_returns.py +0 -0
  107. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/prior/__init__.py +0 -0
  108. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/prior/_base.py +0 -0
  109. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/prior/_black_litterman.py +0 -0
  110. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/prior/_factor_model.py +0 -0
  111. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/prior/_opinion_pooling.py +0 -0
  112. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/prior/_synthetic_data.py +0 -0
  113. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/typing.py +0 -0
  114. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/uncertainty_set/__init__.py +0 -0
  115. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/uncertainty_set/_base.py +0 -0
  116. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/uncertainty_set/_bootstrap.py +0 -0
  117. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/uncertainty_set/_empirical.py +0 -0
  118. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/utils/__init__.py +0 -0
  119. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/utils/bootstrap.py +0 -0
  120. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/utils/composition.py +0 -0
  121. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/utils/equations.py +0 -0
  122. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/utils/figure.py +0 -0
  123. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/utils/sorting.py +0 -0
  124. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/utils/stats.py +0 -0
  125. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/utils/tools.py +0 -0
  126. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio.egg-info/SOURCES.txt +0 -0
  127. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio.egg-info/dependency_links.txt +0 -0
  128. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio.egg-info/requires.txt +0 -0
  129. {skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio.egg-info/top_level.txt +0 -0
{skfolio-0.10.0/src/skfolio.egg-info → skfolio-0.10.2}/PKG-INFO

@@ -1,9 +1,9 @@
  Metadata-Version: 2.4
  Name: skfolio
- Version: 0.10.0
+ Version: 0.10.2
  Summary: Portfolio optimization built on top of scikit-learn
  Author-email: Hugo Delatte <delatte.hugo@gmail.com>
- Maintainer-email: Hugo Delatte <delatte.hugo@gmail.com>, Matteo Manzi <matteomanzi09@gmail.com>
+ Maintainer-email: Hugo Delatte <delatte.hugo@gmail.com>, Matteo Manzi <matteomanzi09@gmail.com>, Carlo Nicolini <c.nicolini@ipazia.com>
  License: BSD 3-Clause License

  Copyright (c) 2007-2023 The skfolio developers.
@@ -176,7 +176,6 @@ Installation
  pip install -U skfolio


-
  Dependencies
  ~~~~~~~~~~~~

@@ -192,6 +191,23 @@ Dependencies
  - joblib (>= |JoblibMinVersion|)
  - plotly (>= |PlotlyMinVersion|)

+ Docker
+ ~~~~~~
+
+ You can also spin up a reproducible JupyterLab environment using Docker:
+
+ Build the image::
+
+     docker build -t skfolio-jupyterlab .
+
+ Run the container::
+
+     docker run -p 8888:8888 -v <path-to-your-folder-containing-data>:/app/data -it skfolio-jupyterlab
+
+ Browse:
+
+ Open localhost:8888/lab and start using `skfolio`
+
  Key Concepts
  ~~~~~~~~~~~~
  Since the development of modern portfolio theory by Markowitz (1952), mean-variance
@@ -385,6 +401,7 @@ Imports
  )
  from skfolio.optimization import (
      MeanRisk,
+     HierarchicalRiskParity,
      NestedClustersOptimization,
      ObjectiveFunction,
      RiskBudgeting,
@@ -584,11 +601,13 @@ Factor Model

  factor_prices = load_factors_dataset()

- X, y = prices_to_returns(prices, factor_prices)
- X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33, shuffle=False)
+ X, factors = prices_to_returns(prices, factor_prices)
+ X_train, X_test, factors_train, factors_test = train_test_split(
+     X, factors, test_size=0.33, shuffle=False
+ )

  model = MeanRisk(prior_estimator=FactorModel())
- model.fit(X_train, y_train)
+ model.fit(X_train, factors_train)

  print(model.weights_)

@@ -597,7 +616,6 @@ Factor Model
  print(portfolio.calmar_ratio)
  print(portfolio.summary())

-
  Factor Model & Covariance Detoning
  ----------------------------------
  .. code-block:: python
@@ -658,7 +676,7 @@ Combinatorial Purged Cross-Validation

  cv = CombinatorialPurgedCV(n_folds=10, n_test_folds=2)

- print(cv.get_summary(X_train))
+ print(cv.summary(X_train))

  population = cross_val_predict(model, X_train, cv=cv)

@@ -674,7 +692,7 @@ Minimum CVaR Optimization on Synthetic Returns
  .. code-block:: python

  vine = VineCopula(log_transform=True, n_jobs=-1)
- prior = =SyntheticData(distribution_estimator=vine, n_samples=2000)
+ prior = SyntheticData(distribution_estimator=vine, n_samples=2000)
  model = MeanRisk(risk_measure=RiskMeasure.CVAR, prior_estimator=prior)
  model.fit(X)
  print(model.weights_)
@@ -684,7 +702,7 @@ Stress Test
  -----------
  .. code-block:: python

- vine = VineCopula(log_transform=True, central_assets=["BAC"] n_jobs=-1)
+ vine = VineCopula(log_transform=True, central_assets=["BAC"], n_jobs=-1)
  vine.fit(X)
  X_stressed = vine.sample(n_samples=10_000, conditioning = {"BAC": -0.2})
  ptf_stressed = model.predict(X_stressed)
@@ -702,7 +720,7 @@ Minimum CVaR Optimization on Synthetic Factors
  )
  factor_model = FactorModel(factor_prior_estimator=factor_prior)
  model = MeanRisk(risk_measure=RiskMeasure.CVAR, prior_estimator=factor_model)
- model.fit(X, y)
+ model.fit(X, factors)
  print(model.weights_)


@@ -713,7 +731,7 @@ Factor Stress Test
  factor_model.set_params(factor_prior_estimator__sample_args=dict(
      conditioning={"QUAL": -0.5}
  ))
- factor_model.fit(X,y)
+ factor_model.fit(X, factors)
  stressed_dist = factor_model.return_distribution_
  stressed_ptf = model.predict(stressed_dist)

@@ -798,7 +816,7 @@ Recognition
  ~~~~~~~~~~~

  We would like to thank all contributors to our direct dependencies, such as
- scikit-learn and cvxpy, as well as the contributors of the following resources that
+ `scikit-learn <https://github.com/scikit-learn/scikit-learn>`_ and `cvxpy <https://github.com/cvxpy/cvxpy>`_, as well as the contributors of the following resources that
  served as sources of inspiration:

  * PyPortfolioOpt
@@ -807,6 +825,7 @@ served as sources of inspiration:
  * microprediction
  * statsmodels
  * rsome
+ * danielppalomar.com
  * gautier.marti.ai


@@ -815,12 +834,25 @@ Citation

  If you use `skfolio` in a scientific publication, we would appreciate citations:

- Bibtex entry::
-
- @misc{skfolio,
-     author = {Delatte, Hugo and Nicolini, Carlo},
-     title = {skfolio},
-     year = {2023},
-     url = {https://github.com/skfolio/skfolio}
- }
+ **The library**::
+
+ @software{skfolio,
+     title = {skfolio},
+     author = {Delatte, Hugo and Nicolini, Carlo and Manzi, Matteo},
+     year = {2024},
+     doi = {TBD after next release},
+     url = {https://github.com/skfolio/skfolio}
+ }
+
+ **The paper**::
+
+ @article{nicolini2025skfolio,
+     title = {skfolio: Portfolio Optimization in Python},
+     author = {Nicolini, Carlo and Manzi, Matteo and Delatte, Hugo},
+     journal = {arXiv preprint arXiv:2507.04176},
+     year = {2025},
+     eprint = {2507.04176},
+     archivePrefix = {arXiv},
+     url = {https://arxiv.org/abs/2507.04176}
+ }

{skfolio-0.10.0 → skfolio-0.10.2}/README.rst

@@ -84,7 +84,6 @@ Installation
  pip install -U skfolio


-
  Dependencies
  ~~~~~~~~~~~~

@@ -100,6 +99,23 @@ Dependencies
  - joblib (>= |JoblibMinVersion|)
  - plotly (>= |PlotlyMinVersion|)

+ Docker
+ ~~~~~~
+
+ You can also spin up a reproducible JupyterLab environment using Docker:
+
+ Build the image::
+
+     docker build -t skfolio-jupyterlab .
+
+ Run the container::
+
+     docker run -p 8888:8888 -v <path-to-your-folder-containing-data>:/app/data -it skfolio-jupyterlab
+
+ Browse:
+
+ Open localhost:8888/lab and start using `skfolio`
+
  Key Concepts
  ~~~~~~~~~~~~
  Since the development of modern portfolio theory by Markowitz (1952), mean-variance
@@ -293,6 +309,7 @@ Imports
  )
  from skfolio.optimization import (
      MeanRisk,
+     HierarchicalRiskParity,
      NestedClustersOptimization,
      ObjectiveFunction,
      RiskBudgeting,
@@ -492,11 +509,13 @@ Factor Model

  factor_prices = load_factors_dataset()

- X, y = prices_to_returns(prices, factor_prices)
- X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33, shuffle=False)
+ X, factors = prices_to_returns(prices, factor_prices)
+ X_train, X_test, factors_train, factors_test = train_test_split(
+     X, factors, test_size=0.33, shuffle=False
+ )

  model = MeanRisk(prior_estimator=FactorModel())
- model.fit(X_train, y_train)
+ model.fit(X_train, factors_train)

  print(model.weights_)

@@ -505,7 +524,6 @@ Factor Model
  print(portfolio.calmar_ratio)
  print(portfolio.summary())

-
  Factor Model & Covariance Detoning
  ----------------------------------
  .. code-block:: python
@@ -566,7 +584,7 @@ Combinatorial Purged Cross-Validation

  cv = CombinatorialPurgedCV(n_folds=10, n_test_folds=2)

- print(cv.get_summary(X_train))
+ print(cv.summary(X_train))

  population = cross_val_predict(model, X_train, cv=cv)

@@ -582,7 +600,7 @@ Minimum CVaR Optimization on Synthetic Returns
  .. code-block:: python

  vine = VineCopula(log_transform=True, n_jobs=-1)
- prior = =SyntheticData(distribution_estimator=vine, n_samples=2000)
+ prior = SyntheticData(distribution_estimator=vine, n_samples=2000)
  model = MeanRisk(risk_measure=RiskMeasure.CVAR, prior_estimator=prior)
  model.fit(X)
  print(model.weights_)
@@ -592,7 +610,7 @@ Stress Test
  -----------
  .. code-block:: python

- vine = VineCopula(log_transform=True, central_assets=["BAC"] n_jobs=-1)
+ vine = VineCopula(log_transform=True, central_assets=["BAC"], n_jobs=-1)
  vine.fit(X)
  X_stressed = vine.sample(n_samples=10_000, conditioning = {"BAC": -0.2})
  ptf_stressed = model.predict(X_stressed)
@@ -610,7 +628,7 @@ Minimum CVaR Optimization on Synthetic Factors
  )
  factor_model = FactorModel(factor_prior_estimator=factor_prior)
  model = MeanRisk(risk_measure=RiskMeasure.CVAR, prior_estimator=factor_model)
- model.fit(X, y)
+ model.fit(X, factors)
  print(model.weights_)


@@ -621,7 +639,7 @@ Factor Stress Test
  factor_model.set_params(factor_prior_estimator__sample_args=dict(
      conditioning={"QUAL": -0.5}
  ))
- factor_model.fit(X,y)
+ factor_model.fit(X, factors)
  stressed_dist = factor_model.return_distribution_
  stressed_ptf = model.predict(stressed_dist)

@@ -706,7 +724,7 @@ Recognition
  ~~~~~~~~~~~

  We would like to thank all contributors to our direct dependencies, such as
- scikit-learn and cvxpy, as well as the contributors of the following resources that
+ `scikit-learn <https://github.com/scikit-learn/scikit-learn>`_ and `cvxpy <https://github.com/cvxpy/cvxpy>`_, as well as the contributors of the following resources that
  served as sources of inspiration:

  * PyPortfolioOpt
@@ -715,6 +733,7 @@ served as sources of inspiration:
  * microprediction
  * statsmodels
  * rsome
+ * danielppalomar.com
  * gautier.marti.ai


@@ -723,12 +742,25 @@ Citation

  If you use `skfolio` in a scientific publication, we would appreciate citations:

- Bibtex entry::
-
- @misc{skfolio,
-     author = {Delatte, Hugo and Nicolini, Carlo},
-     title = {skfolio},
-     year = {2023},
-     url = {https://github.com/skfolio/skfolio}
- }
+ **The library**::
+
+ @software{skfolio,
+     title = {skfolio},
+     author = {Delatte, Hugo and Nicolini, Carlo and Manzi, Matteo},
+     year = {2024},
+     doi = {TBD after next release},
+     url = {https://github.com/skfolio/skfolio}
+ }
+
+ **The paper**::
+
+ @article{nicolini2025skfolio,
+     title = {skfolio: Portfolio Optimization in Python},
+     author = {Nicolini, Carlo and Manzi, Matteo and Delatte, Hugo},
+     journal = {arXiv preprint arXiv:2507.04176},
+     year = {2025},
+     eprint = {2507.04176},
+     archivePrefix = {arXiv},
+     url = {https://arxiv.org/abs/2507.04176}
+ }

{skfolio-0.10.0 → skfolio-0.10.2}/pyproject.toml

@@ -4,10 +4,11 @@ build-backend = "setuptools.build_meta"

  [project]
  name = "skfolio"
- version = "0.10.0"
+ version = "0.10.2"
  maintainers = [
      { name = "Hugo Delatte", email = "delatte.hugo@gmail.com" },
-     { name = "Matteo Manzi", email = "matteomanzi09@gmail.com" }
+     { name = "Matteo Manzi", email = "matteomanzi09@gmail.com" },
+     { name = "Carlo Nicolini", email = "c.nicolini@ipazia.com" }
  ]
  authors = [
      { name = "Hugo Delatte", email = "delatte.hugo@gmail.com" },
{skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/measures/_measures.py

@@ -136,7 +136,9 @@ def variance(
  if sample_weight is None:
      return np.var(returns, ddof=0 if biased else 1, axis=0)

- biased_var = sample_weight @ (returns - mean(returns)) ** 2
+ biased_var = (
+     sample_weight @ (returns - mean(returns, sample_weight=sample_weight)) ** 2
+ )
  if biased:
      return biased_var
  n_eff = 1 / np.sum(sample_weight**2)
@@ -177,7 +179,7 @@ def semi_variance(
  If `returns` is a 2D-array, the result is a ndarray of shape (n_assets,).
  """
  if min_acceptable_return is None:
-     min_acceptable_return = mean(returns)
+     min_acceptable_return = mean(returns, sample_weight=sample_weight)

  biased_semi_var = mean(
      np.maximum(0, min_acceptable_return - returns) ** 2, sample_weight=sample_weight
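Context for the two `_measures.py` hunks above: both fixes make the sample-weighted branch center the returns on the weighted mean instead of the unweighted one. A minimal standalone sketch of the corrected computation follows; the closing `n_eff`-based unbiased correction is an assumption about how the function continues and is shown only for illustration::

    import numpy as np

    def weighted_variance(returns: np.ndarray, sample_weight: np.ndarray) -> float:
        """Biased (population) variance under observation weights summing to 1."""
        w_mean = sample_weight @ returns                # weighted mean (the fix)
        return sample_weight @ (returns - w_mean) ** 2  # weighted squared deviations

    rng = np.random.default_rng(0)
    x = rng.normal(size=500)
    w = rng.random(500)
    w /= w.sum()                                        # normalize the weights

    biased = weighted_variance(x, w)
    n_eff = 1.0 / np.sum(w**2)                          # Kish effective sample size, as in the hunk
    unbiased = biased * n_eff / (n_eff - 1)             # hypothetical small-sample correction
    print(biased, unbiased)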
{skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/prior/_empirical.py

@@ -191,6 +191,7 @@ class EmpiricalPrior(BasePrior):
  # horizon
  mu = np.exp(mu + 0.5 * np.diag(covariance))
  covariance = np.outer(mu, mu) * (np.exp(covariance) - 1)
+ mu -= 1

  # we validate and convert to numpy after all models have been fitted to keep
  # features names information.
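The `_empirical.py` hunk above adds the missing net-return conversion in the log-normal horizon projection. By the standard log-normal identities, if X are log returns with moments (mu, Sigma) at the horizon, then E[exp(X)] = exp(mu + diag(Sigma)/2) and Cov(exp(X)) = m mᵀ ∘ (exp(Sigma) − 1) with m = E[exp(X)]; the arithmetic return is exp(X) − 1, hence the added `mu -= 1`. A small illustration with made-up numbers::

    import numpy as np

    # Hypothetical log-return moments already scaled to the investment horizon
    log_mu = np.array([0.05, 0.02])
    log_cov = np.array([[0.04, 0.01],
                        [0.01, 0.09]])

    gross = np.exp(log_mu + 0.5 * np.diag(log_cov))           # E[exp(X)]: gross returns
    lin_cov = np.outer(gross, gross) * (np.exp(log_cov) - 1)  # covariance of exp(X)
    lin_mu = gross - 1                                        # the added `mu -= 1`: gross -> net

    print(lin_mu)
    print(lin_cov)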
{skfolio-0.10.0 → skfolio-0.10.2}/src/skfolio/prior/_entropy_pooling.py

@@ -589,10 +589,10 @@ class EntropyPooling(BasePrior):

  Parameters
  ----------
- a : ndarray of shape (n_observations, n_constrains)
+ a : ndarray of shape (n_observations, n_constraints)
      Left matrix in `x @ a == b` or `x @ a <= b`.

- a : ndarray of shape (n_observations, n_constrains)
+ a : ndarray of shape (n_observations, n_constraints)
      Right vector in `x @ a == b` or `x @ a <= b`.

  Returns
@@ -1029,16 +1029,16 @@ class EntropyPooling(BasePrior):

  """
  n_observations, _ = self._returns.shape
- # Init constrains with sum(p)==1, rescaled by its norm
+ # Init constraints with sum(p)==1, rescaled by its norm
  # Has better convergence than the normalized form done inside the dual.
  a = [np.ones(n_observations).reshape(-1, 1) / np.sqrt(n_observations)]
  b = [np.array([1.0]) / np.sqrt(n_observations)]
  bounds = [(None, None)]
- for name, constrains in self._constraints.items():
-     if constrains is not None:
-         a.append(constrains[0])
-         b.append(constrains[1])
-         s = constrains[1].size
+ for name, constraints in self._constraints.items():
+     if constraints is not None:
+         a.append(constraints[0])
+         b.append(constraints[1])
+         s = constraints[1].size
          match name:
              case "equality" | "cvar_equality":
                  bounds += [(None, None)] * s
@@ -1145,7 +1145,7 @@ class EntropyPooling(BasePrior):

  if self._constraints["fixed_equality"] is not None:
      a, b = self._constraints["fixed_equality"]
-     # Relaxe the problem with slack variables with a norm1 penalty to avoid
+     # Relax the problem with slack variables with a norm1 penalty to avoid
      # solver infeasibility that may arise from overly tight constraints from
      # fixing the moments.
      slack = cp.Variable(b.size)
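The last `_entropy_pooling.py` hunk refers to relaxing fixed-moment equality constraints with slack variables under a norm-1 penalty so the solver stays feasible. A generic sketch of that pattern in cvxpy; the objective, penalty weight, and data are illustrative and not skfolio's actual entropy-pooling formulation::

    import cvxpy as cp
    import numpy as np

    rng = np.random.default_rng(0)
    a = rng.normal(size=(3, 5))    # hypothetical, possibly over-tight equality system a @ x == b
    b = rng.normal(size=3)

    x = cp.Variable(5, nonneg=True)
    slack = cp.Variable(b.size)    # slack absorbs any infeasibility in the equalities
    penalty = 100.0                # illustrative norm-1 penalty weight
    objective = cp.Minimize(cp.sum_squares(x) + penalty * cp.norm1(slack))
    constraints = [a @ x == b + slack, cp.sum(x) == 1]
    cp.Problem(objective, constraints).solve()
    print(x.value, slack.value)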
{skfolio-0.10.0 → skfolio-0.10.2/src/skfolio.egg-info}/PKG-INFO

@@ -1,9 +1,9 @@
  Metadata-Version: 2.4
  Name: skfolio
- Version: 0.10.0
+ Version: 0.10.2
  Summary: Portfolio optimization built on top of scikit-learn
  Author-email: Hugo Delatte <delatte.hugo@gmail.com>
- Maintainer-email: Hugo Delatte <delatte.hugo@gmail.com>, Matteo Manzi <matteomanzi09@gmail.com>
+ Maintainer-email: Hugo Delatte <delatte.hugo@gmail.com>, Matteo Manzi <matteomanzi09@gmail.com>, Carlo Nicolini <c.nicolini@ipazia.com>
  License: BSD 3-Clause License

  Copyright (c) 2007-2023 The skfolio developers.
@@ -176,7 +176,6 @@ Installation
  pip install -U skfolio


-
  Dependencies
  ~~~~~~~~~~~~

@@ -192,6 +191,23 @@ Dependencies
  - joblib (>= |JoblibMinVersion|)
  - plotly (>= |PlotlyMinVersion|)

+ Docker
+ ~~~~~~
+
+ You can also spin up a reproducible JupyterLab environment using Docker:
+
+ Build the image::
+
+     docker build -t skfolio-jupyterlab .
+
+ Run the container::
+
+     docker run -p 8888:8888 -v <path-to-your-folder-containing-data>:/app/data -it skfolio-jupyterlab
+
+ Browse:
+
+ Open localhost:8888/lab and start using `skfolio`
+
  Key Concepts
  ~~~~~~~~~~~~
  Since the development of modern portfolio theory by Markowitz (1952), mean-variance
@@ -385,6 +401,7 @@ Imports
  )
  from skfolio.optimization import (
      MeanRisk,
+     HierarchicalRiskParity,
      NestedClustersOptimization,
      ObjectiveFunction,
      RiskBudgeting,
@@ -584,11 +601,13 @@ Factor Model

  factor_prices = load_factors_dataset()

- X, y = prices_to_returns(prices, factor_prices)
- X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33, shuffle=False)
+ X, factors = prices_to_returns(prices, factor_prices)
+ X_train, X_test, factors_train, factors_test = train_test_split(
+     X, factors, test_size=0.33, shuffle=False
+ )

  model = MeanRisk(prior_estimator=FactorModel())
- model.fit(X_train, y_train)
+ model.fit(X_train, factors_train)

  print(model.weights_)

@@ -597,7 +616,6 @@ Factor Model
  print(portfolio.calmar_ratio)
  print(portfolio.summary())

-
  Factor Model & Covariance Detoning
  ----------------------------------
  .. code-block:: python
@@ -658,7 +676,7 @@ Combinatorial Purged Cross-Validation

  cv = CombinatorialPurgedCV(n_folds=10, n_test_folds=2)

- print(cv.get_summary(X_train))
+ print(cv.summary(X_train))

  population = cross_val_predict(model, X_train, cv=cv)

@@ -674,7 +692,7 @@ Minimum CVaR Optimization on Synthetic Returns
  .. code-block:: python

  vine = VineCopula(log_transform=True, n_jobs=-1)
- prior = =SyntheticData(distribution_estimator=vine, n_samples=2000)
+ prior = SyntheticData(distribution_estimator=vine, n_samples=2000)
  model = MeanRisk(risk_measure=RiskMeasure.CVAR, prior_estimator=prior)
  model.fit(X)
  print(model.weights_)
@@ -684,7 +702,7 @@ Stress Test
  -----------
  .. code-block:: python

- vine = VineCopula(log_transform=True, central_assets=["BAC"] n_jobs=-1)
+ vine = VineCopula(log_transform=True, central_assets=["BAC"], n_jobs=-1)
  vine.fit(X)
  X_stressed = vine.sample(n_samples=10_000, conditioning = {"BAC": -0.2})
  ptf_stressed = model.predict(X_stressed)
@@ -702,7 +720,7 @@ Minimum CVaR Optimization on Synthetic Factors
  )
  factor_model = FactorModel(factor_prior_estimator=factor_prior)
  model = MeanRisk(risk_measure=RiskMeasure.CVAR, prior_estimator=factor_model)
- model.fit(X, y)
+ model.fit(X, factors)
  print(model.weights_)


@@ -713,7 +731,7 @@ Factor Stress Test
  factor_model.set_params(factor_prior_estimator__sample_args=dict(
      conditioning={"QUAL": -0.5}
  ))
- factor_model.fit(X,y)
+ factor_model.fit(X, factors)
  stressed_dist = factor_model.return_distribution_
  stressed_ptf = model.predict(stressed_dist)

@@ -798,7 +816,7 @@ Recognition
  ~~~~~~~~~~~

  We would like to thank all contributors to our direct dependencies, such as
- scikit-learn and cvxpy, as well as the contributors of the following resources that
+ `scikit-learn <https://github.com/scikit-learn/scikit-learn>`_ and `cvxpy <https://github.com/cvxpy/cvxpy>`_, as well as the contributors of the following resources that
  served as sources of inspiration:

  * PyPortfolioOpt
@@ -807,6 +825,7 @@ served as sources of inspiration:
  * microprediction
  * statsmodels
  * rsome
+ * danielppalomar.com
  * gautier.marti.ai


@@ -815,12 +834,25 @@ Citation

  If you use `skfolio` in a scientific publication, we would appreciate citations:

- Bibtex entry::
-
- @misc{skfolio,
-     author = {Delatte, Hugo and Nicolini, Carlo},
-     title = {skfolio},
-     year = {2023},
-     url = {https://github.com/skfolio/skfolio}
- }
+ **The library**::
+
+ @software{skfolio,
+     title = {skfolio},
+     author = {Delatte, Hugo and Nicolini, Carlo and Manzi, Matteo},
+     year = {2024},
+     doi = {TBD after next release},
+     url = {https://github.com/skfolio/skfolio}
+ }
+
+ **The paper**::
+
+ @article{nicolini2025skfolio,
+     title = {skfolio: Portfolio Optimization in Python},
+     author = {Nicolini, Carlo and Manzi, Matteo and Delatte, Hugo},
+     journal = {arXiv preprint arXiv:2507.04176},
+     year = {2025},
+     eprint = {2507.04176},
+     archivePrefix = {arXiv},
+     url = {https://arxiv.org/abs/2507.04176}
+ }
