qis 2.1.39.tar.gz → 2.1.41.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {qis-2.1.39 → qis-2.1.41}/PKG-INFO +11 -11
- {qis-2.1.39 → qis-2.1.41}/README.md +10 -10
- {qis-2.1.39 → qis-2.1.41}/pyproject.toml +1 -1
- {qis-2.1.39 → qis-2.1.41}/qis/examples/readme_performances.py +10 -10
- {qis-2.1.39 → qis-2.1.41}/qis/file_utils.py +4 -49
- {qis-2.1.39 → qis-2.1.41}/qis/plots/derived/perf_table.py +2 -6
- {qis-2.1.39 → qis-2.1.41}/qis/portfolio/multi_portfolio_data.py +3 -6
- {qis-2.1.39 → qis-2.1.41}/qis/portfolio/portfolio_data.py +15 -7
- {qis-2.1.39 → qis-2.1.41}/qis/portfolio/reports/strategy_benchmark_factsheet.py +38 -18
- qis-2.1.41/qis/settings.yaml +19 -0
- qis-2.1.39/qis/settings.yaml +0 -19
- {qis-2.1.39 → qis-2.1.41}/LICENSE.txt +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/__init__.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/best_returns.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/bond_futures_portfolio.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/bootstrap_analysis.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/boxplot_conditional_returns.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/btc_asset_corr.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/constant_notional.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/constant_weight_portfolios.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/core/perf_bbg_prices.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/core/price_plots.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/core/us_election.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/credit_spreads.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/credit_trackers.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/europe_futures.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/factsheets/multi_assets.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/factsheets/multi_strategy.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/factsheets/pyblogs_reports.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/factsheets/strategy.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/factsheets/strategy_benchmark.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/generate_option_rolls.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/interpolation_infrequent_returns.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/leveraged_strategies.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/long_short.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/momentum_indices.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/oakmark_analysis.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/ohlc_vol_analysis.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/overnight_returns.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/perf_external_assets.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/perp_pricing.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/risk_return_frontier.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/rolling_performance.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/seasonality.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/sharpe_vs_sortino.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/simulate_quant_strats.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/test_ewm.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/test_scatter.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/try_pybloqs.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/universe_corrs.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/vix_beta_to_equities_bonds.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/vix_conditional_returns.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/vix_spy_by_year.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/vix_tenor_analysis.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/examples/vol_without_weekends.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/local_path.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/models/README.md +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/models/__init__.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/models/linear/__init__.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/models/linear/auto_corr.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/models/linear/corr_cov_matrix.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/models/linear/ewm.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/models/linear/ewm_convolution.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/models/linear/ewm_factors.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/models/linear/ewm_winsor_outliers.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/models/linear/pca.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/models/linear/plot_correlations.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/models/linear/ra_returns.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/models/stats/__init__.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/models/stats/bootstrap.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/models/stats/ohlc_vol.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/models/stats/rolling_stats.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/models/stats/test_bootstrap.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/perfstats/README.md +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/perfstats/__init__.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/perfstats/cond_regression.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/perfstats/config.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/perfstats/desc_table.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/perfstats/fx_ops.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/perfstats/perf_stats.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/perfstats/regime_classifier.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/perfstats/returns.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/perfstats/timeseries_bfill.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/README.md +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/__init__.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/bars.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/boxplot.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/contour.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/derived/__init__.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/derived/data_timeseries.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/derived/desc_table.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/derived/drawdowns.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/derived/prices.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/derived/regime_class_table.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/derived/regime_data.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/derived/regime_pdf.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/derived/regime_scatter.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/derived/returns_heatmap.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/derived/returns_scatter.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/errorbar.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/heatmap.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/histogram.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/histplot2d.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/lineplot.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/pie.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/qqplot.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/reports/__init__.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/reports/econ_data_single.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/reports/gantt_data_history.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/reports/price_history.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/reports/utils.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/scatter.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/stackplot.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/table.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/time_series.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/plots/utils.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/portfolio/README.md +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/portfolio/__init__.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/portfolio/backtester.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/portfolio/ewm_portfolio_risk.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/portfolio/reports/__init__.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/portfolio/reports/brinson_attribution.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/portfolio/reports/config.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/portfolio/reports/multi_assets_factsheet.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/portfolio/reports/multi_strategy_factseet_pybloqs.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/portfolio/reports/multi_strategy_factsheet.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/portfolio/reports/strategy_benchmark_factsheet_pybloqs.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/portfolio/reports/strategy_factsheet.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/portfolio/reports/strategy_signal_factsheet.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/portfolio/strats/__init__.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/portfolio/strats/quant_strats_delta1.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/portfolio/strats/seasonal_strats.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/sql_engine.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/test_data.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/utils/README.md +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/utils/__init__.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/utils/dates.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/utils/df_agg.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/utils/df_cut.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/utils/df_freq.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/utils/df_groups.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/utils/df_melt.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/utils/df_ops.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/utils/df_str.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/utils/df_to_scores.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/utils/df_to_weights.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/utils/generic.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/utils/np_ops.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/utils/ols.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/utils/sampling.py +0 -0
- {qis-2.1.39 → qis-2.1.41}/qis/utils/struct_ops.py +0 -0
{qis-2.1.39 → qis-2.1.41}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: qis
-Version: 2.1.39
+Version: 2.1.41
 Summary: Implementation of visualisation and reporting analytics for Quantitative Investment Strategies
 License: LICENSE.txt
 Keywords: quantitative,investing,portfolio optimization,systematic strategies,volatility
{qis-2.1.39 → qis-2.1.41}/PKG-INFO (README example embedded in the package description)

@@ -174,16 +174,16 @@ fig = qis.plot_ra_perf_table(prices=prices,
 # add benchmark regression using excess returns for linear beta
 # regression frequency is specified using perf_params.freq_reg
 # regression alpha is multiplied using perf_params.alpha_an_factor
-fig = qis.plot_ra_perf_table_benchmark(prices=prices,
-    … (removed continuation lines 178-186 are not rendered in this view)
+fig, _ = qis.plot_ra_perf_table_benchmark(prices=prices,
+                                          benchmark='SPY',
+                                          perf_columns=[PerfStat.TOTAL_RETURN, PerfStat.PA_RETURN, PerfStat.PA_EXCESS_RETURN,
+                                                        PerfStat.VOL, PerfStat.SHARPE_RF0,
+                                                        PerfStat.SHARPE_EXCESS, PerfStat.SORTINO_RATIO, PerfStat.CALMAR_RATIO,
+                                                        PerfStat.MAX_DD, PerfStat.MAX_DD_VOL,
+                                                        PerfStat.SKEWNESS, PerfStat.KURTOSIS,
+                                                        PerfStat.ALPHA_AN, PerfStat.BETA, PerfStat.R2],
+                                          title=f"Risk-adjusted performance: {qis.get_time_period_label(prices, date_separator='-')} benchmarked with SPY",
+                                          perf_params=perf_params)
 ```
 ![image info](https://github.com/ArturSepp/QuantInvestStrats/blob/master/docs/pics/perf4.PNG?raw=true)

{qis-2.1.39 → qis-2.1.41}/README.md

@@ -124,16 +124,16 @@ fig = qis.plot_ra_perf_table(prices=prices,
 # add benchmark regression using excess returns for linear beta
 # regression frequency is specified using perf_params.freq_reg
 # regression alpha is multiplied using perf_params.alpha_an_factor
-fig = qis.plot_ra_perf_table_benchmark(prices=prices,
-    … (removed continuation lines 128-136 are not rendered in this view)
+fig, _ = qis.plot_ra_perf_table_benchmark(prices=prices,
+                                          benchmark='SPY',
+                                          perf_columns=[PerfStat.TOTAL_RETURN, PerfStat.PA_RETURN, PerfStat.PA_EXCESS_RETURN,
+                                                        PerfStat.VOL, PerfStat.SHARPE_RF0,
+                                                        PerfStat.SHARPE_EXCESS, PerfStat.SORTINO_RATIO, PerfStat.CALMAR_RATIO,
+                                                        PerfStat.MAX_DD, PerfStat.MAX_DD_VOL,
+                                                        PerfStat.SKEWNESS, PerfStat.KURTOSIS,
+                                                        PerfStat.ALPHA_AN, PerfStat.BETA, PerfStat.R2],
+                                          title=f"Risk-adjusted performance: {qis.get_time_period_label(prices, date_separator='-')} benchmarked with SPY",
+                                          perf_params=perf_params)
 ```
 ![image info](https://github.com/ArturSepp/QuantInvestStrats/blob/master/docs/pics/perf4.PNG?raw=true)

{qis-2.1.39 → qis-2.1.41}/qis/examples/readme_performances.py

@@ -48,16 +48,16 @@ qis.save_fig(fig, file_name='perf3', local_path="figures/")
 # add benchmark regression using excess returns for linear beta
 # regression frequency is specified using perf_params.freq_reg
 # regression alpha is multiplied using perf_params.alpha_an_factor
-fig = qis.plot_ra_perf_table_benchmark(prices=prices,
-    … (removed continuation lines 52-60 are not rendered in this view)
+fig, _ = qis.plot_ra_perf_table_benchmark(prices=prices,
+                                          benchmark='SPY',
+                                          perf_columns=[PerfStat.TOTAL_RETURN, PerfStat.PA_RETURN, PerfStat.PA_EXCESS_RETURN,
+                                                        PerfStat.VOL, PerfStat.SHARPE_RF0,
+                                                        PerfStat.SHARPE_EXCESS, PerfStat.SORTINO_RATIO, PerfStat.CALMAR_RATIO,
+                                                        PerfStat.MAX_DD, PerfStat.MAX_DD_VOL,
+                                                        PerfStat.SKEWNESS, PerfStat.KURTOSIS,
+                                                        PerfStat.ALPHA_AN, PerfStat.BETA, PerfStat.R2],
+                                          title=f"Risk-adjusted performance: {qis.get_time_period_label(prices, date_separator='-')} benchmarked with SPY",
+                                          perf_params=perf_params)
 # skip
 qis.save_fig(fig, file_name='perf4', local_path="figures/")

{qis-2.1.39 → qis-2.1.41}/qis/file_utils.py

@@ -118,20 +118,19 @@ def get_local_file_path(file_name: Optional[str],
                         file_type: Optional[FileTypes] = None,
                         local_path: Optional[str] = None,
                         folder_name: str = None,
-                        subfolder_name: str = None,
                         key: str = None,
                         is_output_file: bool = False
                         ) -> str:
     """
     file data management is organised as:
-    file_path = RESOURCE_PATH/folder_name/
+    file_path = RESOURCE_PATH/folder_name/file_name+file_type.value
     default value without optional arguments will be:
     file_path = RESOURCE_PATH/file_name.file_type.value

     for datasets, we can define datasets keys so the file paths are:
-    file_path = RESOURCE_PATH/folder_name/
+    file_path = RESOURCE_PATH/folder_name/file_name+_key+file_type.value
     or if file_name is None:
-    file_path = RESOURCE_PATH/folder_name/
+    file_path = RESOURCE_PATH/folder_name/key+file_type.value

     if local_path is not None: file_path=local_path
     if local_path in not None and file_name and file_type is passed: file_path=local_path//file_name+file_type.value
@@ -146,10 +145,7 @@ def get_local_file_path(file_name: Optional[str],
     local_path = RESOURCE_PATH

     if folder_name is not None:
-        … (removed line not rendered in this view)
-            local_path = join(local_path, folder_name, subfolder_name)
-        else:
-            local_path = join(local_path, folder_name)
+        local_path = join(local_path, folder_name)

     if file_name is not None:
         if key is not None:
{qis-2.1.39 → qis-2.1.41}/qis/file_utils.py (remaining hunks)

The remaining file_utils.py hunks repeat one change across the module's I/O helpers: the subfolder_name: str = None parameter is removed from each function signature, and the matching subfolder_name=subfolder_name argument is dropped from the internal get_local_file_path calls (and from the nested load/save calls inside the append helpers). Affected functions:
- Excel: save_df_to_excel, load_df_from_excel, save_df_dict_to_excel, load_df_dict_from_excel
- CSV: save_df_to_csv, load_df_from_csv, append_df_to_csv, save_df_dict_to_csv, load_df_dict_from_csv
- Feather: save_df_to_feather, append_df_to_feather, load_df_from_feather, save_df_dict_to_feather, load_df_dict_from_feather
- Parquet: save_df_to_parquet, load_df_from_parquet, save_df_dict_to_parquet, load_df_dict_from_parquet
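Since get_local_file_path now joins only local_path and folder_name, a caller that relied on subfolder_name can fold the extra directory level into folder_name. Below is a hypothetical migration sketch, not taken from the package docs: it assumes save_df_to_csv stays re-exported at the package top level (otherwise import it from qis.file_utils), that the configured resource path is set via settings.yaml, and that the target directory already exists.

```python
# Hypothetical migration sketch for the removed subfolder_name argument
# (the datasets/crypto folder names and the DataFrame are illustrative only).
from os.path import join
import pandas as pd
import qis

df = pd.DataFrame({'close': [100.0, 101.5, 99.8]})

# qis 2.1.39 (argument removed in 2.1.41):
# qis.save_df_to_csv(df=df, file_name='prices', folder_name='datasets', subfolder_name='crypto')

# qis 2.1.41: carry the sub-directory inside folder_name instead
qis.save_df_to_csv(df=df, file_name='prices', folder_name=join('datasets', 'crypto'))
```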
{qis-2.1.39 → qis-2.1.41}/qis/plots/derived/perf_table.py

@@ -125,10 +125,9 @@ def plot_ra_perf_table_benchmark(prices: pd.DataFrame,
                                  fontsize: int = 10,
                                  transpose: bool = False,
                                  alpha_an_factor: float = None,
-                                 is_df_out: bool = False,
                                  ax: plt.Subplot = None,
                                  **kwargs
-                                 ) ->
+                                 ) -> Tuple[Optional[plt.Figure], pd.DataFrame]:
     """
     plot ra perf table and get ra performance columns with data as string for tables
     """
@@ -149,10 +148,7 @@ def plot_ra_perf_table_benchmark(prices: pd.DataFrame,
                              fontsize=fontsize,
                              ax=ax,
                              **kwargs)
-    … (removed line not rendered in this view)
-        return fig, ra_perf_table
-    else:
-        return fig
+    return fig, ra_perf_table


 def plot_ra_perf_bars(prices: pd.DataFrame,
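As of 2.1.41 the function drops is_df_out and always returns the figure together with the formatted table, which is what drives the fig, _ = unpacking in the PKG-INFO, README and readme_performances.py hunks above. A minimal sketch of the new calling convention on synthetic prices follows; it assumes plot_ra_perf_table_benchmark and PerfStat are importable from the package top level as in the README example, and leaves other keyword arguments such as perf_params at their defaults.

```python
# Minimal sketch of the 2.1.41 calling convention; prices are synthetic and the
# top-level imports (qis.plot_ra_perf_table_benchmark, qis.PerfStat) are assumed.
import numpy as np
import pandas as pd
import qis
from qis import PerfStat

dates = pd.date_range('2015-01-01', '2024-12-31', freq='B')
rng = np.random.default_rng(42)
prices = pd.DataFrame(np.exp(np.cumsum(rng.normal(0.0003, 0.01, size=(len(dates), 2)), axis=0)),
                      index=dates, columns=['FUND', 'SPY'])

# is_df_out is gone: the figure and the table of formatted stats are always returned together
fig, ra_perf_table = qis.plot_ra_perf_table_benchmark(prices=prices,
                                                      benchmark='SPY',
                                                      perf_columns=[PerfStat.PA_RETURN, PerfStat.VOL, PerfStat.SHARPE_RF0])
print(ra_perf_table)
```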
{qis-2.1.39 → qis-2.1.41}/qis/portfolio/multi_portfolio_data.py

@@ -207,7 +207,7 @@ class MultiPortfolioData:
         for date, pd_covar in self.covar_dict.items():
             w = weight_diffs.loc[date]
             tracking_error[date] = np.sqrt(w @ pd_covar @ w.T)
-        tracking_error = pd.Series(tracking_error)
+        tracking_error = pd.Series(tracking_error, name='Tracking error')
         return tracking_error

     def compute_tracking_error_table(self,
@@ -427,10 +427,9 @@ class MultiPortfolioData:
                               time_period: TimePeriod = None,
                               perf_params: PerfParams = PERF_PARAMS,
                               perf_columns: List[PerfStat] = rpt.BENCHMARK_TABLE_COLUMNS,
-                              is_df_out: bool = False,
                               ax: plt.Subplot = None,
                               **kwargs
-                              ) ->
+                              ) -> pd.DataFrame:
         if benchmark is None:
             benchmark = self.benchmark_prices.columns[0]
         prices = self.get_navs(time_period=time_period, benchmark=benchmark, add_benchmarks_to_navs=add_benchmarks_to_navs)
@@ -445,11 +444,9 @@ class MultiPortfolioData:
                               drop_benchmark=drop_benchmark,
                               title=ra_perf_title,
                               rotation_for_columns_headers=0,
-                              is_df_out=is_df_out,
                               ax=ax,
                               **kwargs)
-
-            return ra_perf_table
+        return ra_perf_table

     def plot_ac_ra_perf_table(self,
                               benchmark_price: pd.Series,
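Besides propagating the perf_table.py return-type change, the only behavioural tweak here is that the tracking-error Series now carries a name. That matters once the Series is turned into a DataFrame column or plotted, because the label propagates automatically. A plain-pandas illustration (the values are made up, only the naming pattern is taken from the diff):

```python
# Plain-pandas illustration of the 2.1.39 vs 2.1.41 tracking-error Series (made-up values)
import pandas as pd

tracking_error = {pd.Timestamp('2024-01-31'): 0.021, pd.Timestamp('2024-02-29'): 0.019}

unnamed = pd.Series(tracking_error)                       # 2.1.39: anonymous series
named = pd.Series(tracking_error, name='Tracking error')  # 2.1.41: labelled series

print(unnamed.to_frame().columns.tolist())  # [0] -> column gets no meaningful label
print(named.to_frame().columns.tolist())    # ['Tracking error'] -> label carries through to tables and plot legends
```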
{qis-2.1.39 → qis-2.1.41}/qis/portfolio/portfolio_data.py

@@ -709,25 +709,33 @@ class PortfolioData:
     def compute_risk_contributions_implied_by_covar(self,
                                                     covar_dict: Dict[pd.Timestamp, pd.DataFrame],
                                                     group_data: pd.Series = None,
-                                                    group_order: List[str] = None
+                                                    group_order: List[str] = None,
+                                                    align_with_covar_dates: bool = True,
+                                                    freq: Optional[str] = None
                                                     ) -> pd.DataFrame:
         """
         compute risk contributions using covar_dict
         """
-        strategy_weights = self.get_weights(freq=
+        strategy_weights = self.get_weights(freq=freq, is_input_weights=True)
         covar_index = list(covar_dict.keys())
-        strategy_weights = strategy_weights.reindex(index=covar_index).ffill().fillna(0.0)
         strategy_rc = {}
-        … (removed lines 721-723 are not rendered in this view)
+        if align_with_covar_dates:
+            strategy_weights = strategy_weights.reindex(index=covar_index).ffill().fillna(0.0)
+            for date, pd_covar in covar_dict.items():
+                strategy_rc[date] = compute_portfolio_risk_contributions(w=strategy_weights.loc[date], covar=pd_covar)
+        else:
+            for date, weights in strategy_weights.to_dict(orient='index').items():
+                last_covar_update_date = qis.find_upto_date_from_datetime_index(index=covar_index, date=date)
+                if last_covar_update_date is not None:
+                    strategy_rc[date] = compute_portfolio_risk_contributions(w=pd.Series(weights).fillna(0.0),
+                                                                             covar=covar_dict[last_covar_update_date])

+        strategy_rc = pd.DataFrame.from_dict(strategy_rc, orient='index')
         if group_data is not None:
             strategy_rc = dfg.agg_df_by_groups_ax1(strategy_rc, group_data=group_data, group_order=group_order)

         return strategy_rc

-
     # """
     # plotting methods
     # """
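The new align_with_covar_dates flag keeps the previous behaviour by default (weights are re-indexed onto the covariance dates before the loop), while align_with_covar_dates=False computes a contribution on every weight date by looking up the most recent covariance matrix at or before that date. A small plain-pandas sketch of that date lookup; searchsorted stands in for qis.find_upto_date_from_datetime_index, whose internals are not shown in the diff, so treat it as an approximation.

```python
# Illustrative date alignment for align_with_covar_dates=False: map each weight date
# to the latest covariance date at or before it (dates are made up for the example).
import pandas as pd

covar_dates = pd.DatetimeIndex(['2024-01-31', '2024-02-29', '2024-03-29'])
weight_dates = pd.DatetimeIndex(['2024-02-15', '2024-03-05', '2024-04-01'])

for date in weight_dates:
    pos = covar_dates.searchsorted(date, side='right') - 1
    last_covar_update_date = covar_dates[pos] if pos >= 0 else None
    print(date.date(), '-> covar as of',
          None if last_covar_update_date is None else last_covar_update_date.date())
```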