mainsequence-2.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mainsequence/__init__.py +0 -0
- mainsequence/__main__.py +9 -0
- mainsequence/cli/__init__.py +1 -0
- mainsequence/cli/api.py +157 -0
- mainsequence/cli/cli.py +442 -0
- mainsequence/cli/config.py +78 -0
- mainsequence/cli/ssh_utils.py +126 -0
- mainsequence/client/__init__.py +17 -0
- mainsequence/client/base.py +431 -0
- mainsequence/client/data_sources_interfaces/__init__.py +0 -0
- mainsequence/client/data_sources_interfaces/duckdb.py +1468 -0
- mainsequence/client/data_sources_interfaces/timescale.py +479 -0
- mainsequence/client/models_helpers.py +113 -0
- mainsequence/client/models_report_studio.py +412 -0
- mainsequence/client/models_tdag.py +2276 -0
- mainsequence/client/models_vam.py +1983 -0
- mainsequence/client/utils.py +387 -0
- mainsequence/dashboards/__init__.py +0 -0
- mainsequence/dashboards/streamlit/__init__.py +0 -0
- mainsequence/dashboards/streamlit/assets/config.toml +12 -0
- mainsequence/dashboards/streamlit/assets/favicon.png +0 -0
- mainsequence/dashboards/streamlit/assets/logo.png +0 -0
- mainsequence/dashboards/streamlit/core/__init__.py +0 -0
- mainsequence/dashboards/streamlit/core/theme.py +212 -0
- mainsequence/dashboards/streamlit/pages/__init__.py +0 -0
- mainsequence/dashboards/streamlit/scaffold.py +220 -0
- mainsequence/instrumentation/__init__.py +7 -0
- mainsequence/instrumentation/utils.py +101 -0
- mainsequence/instruments/__init__.py +1 -0
- mainsequence/instruments/data_interface/__init__.py +10 -0
- mainsequence/instruments/data_interface/data_interface.py +361 -0
- mainsequence/instruments/instruments/__init__.py +3 -0
- mainsequence/instruments/instruments/base_instrument.py +85 -0
- mainsequence/instruments/instruments/bond.py +447 -0
- mainsequence/instruments/instruments/european_option.py +74 -0
- mainsequence/instruments/instruments/interest_rate_swap.py +217 -0
- mainsequence/instruments/instruments/json_codec.py +585 -0
- mainsequence/instruments/instruments/knockout_fx_option.py +146 -0
- mainsequence/instruments/instruments/position.py +475 -0
- mainsequence/instruments/instruments/ql_fields.py +239 -0
- mainsequence/instruments/instruments/vanilla_fx_option.py +107 -0
- mainsequence/instruments/pricing_models/__init__.py +0 -0
- mainsequence/instruments/pricing_models/black_scholes.py +49 -0
- mainsequence/instruments/pricing_models/bond_pricer.py +182 -0
- mainsequence/instruments/pricing_models/fx_option_pricer.py +90 -0
- mainsequence/instruments/pricing_models/indices.py +350 -0
- mainsequence/instruments/pricing_models/knockout_fx_pricer.py +209 -0
- mainsequence/instruments/pricing_models/swap_pricer.py +502 -0
- mainsequence/instruments/settings.py +175 -0
- mainsequence/instruments/utils.py +29 -0
- mainsequence/logconf.py +284 -0
- mainsequence/reportbuilder/__init__.py +0 -0
- mainsequence/reportbuilder/__main__.py +0 -0
- mainsequence/reportbuilder/examples/ms_template_report.py +706 -0
- mainsequence/reportbuilder/model.py +713 -0
- mainsequence/reportbuilder/slide_templates.py +532 -0
- mainsequence/tdag/__init__.py +8 -0
- mainsequence/tdag/__main__.py +0 -0
- mainsequence/tdag/config.py +129 -0
- mainsequence/tdag/data_nodes/__init__.py +12 -0
- mainsequence/tdag/data_nodes/build_operations.py +751 -0
- mainsequence/tdag/data_nodes/data_nodes.py +1292 -0
- mainsequence/tdag/data_nodes/persist_managers.py +812 -0
- mainsequence/tdag/data_nodes/run_operations.py +543 -0
- mainsequence/tdag/data_nodes/utils.py +24 -0
- mainsequence/tdag/future_registry.py +25 -0
- mainsequence/tdag/utils.py +40 -0
- mainsequence/virtualfundbuilder/__init__.py +45 -0
- mainsequence/virtualfundbuilder/__main__.py +235 -0
- mainsequence/virtualfundbuilder/agent_interface.py +77 -0
- mainsequence/virtualfundbuilder/config_handling.py +86 -0
- mainsequence/virtualfundbuilder/contrib/__init__.py +0 -0
- mainsequence/virtualfundbuilder/contrib/apps/__init__.py +8 -0
- mainsequence/virtualfundbuilder/contrib/apps/etf_replicator_app.py +164 -0
- mainsequence/virtualfundbuilder/contrib/apps/generate_report.py +292 -0
- mainsequence/virtualfundbuilder/contrib/apps/load_external_portfolio.py +107 -0
- mainsequence/virtualfundbuilder/contrib/apps/news_app.py +437 -0
- mainsequence/virtualfundbuilder/contrib/apps/portfolio_report_app.py +91 -0
- mainsequence/virtualfundbuilder/contrib/apps/portfolio_table.py +95 -0
- mainsequence/virtualfundbuilder/contrib/apps/run_named_portfolio.py +45 -0
- mainsequence/virtualfundbuilder/contrib/apps/run_portfolio.py +40 -0
- mainsequence/virtualfundbuilder/contrib/apps/templates/base.html +147 -0
- mainsequence/virtualfundbuilder/contrib/apps/templates/report.html +77 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/__init__.py +5 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/external_weights.py +61 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/intraday_trend.py +149 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/market_cap.py +310 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/mock_signal.py +78 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/portfolio_replicator.py +269 -0
- mainsequence/virtualfundbuilder/contrib/prices/__init__.py +1 -0
- mainsequence/virtualfundbuilder/contrib/prices/data_nodes.py +810 -0
- mainsequence/virtualfundbuilder/contrib/prices/utils.py +11 -0
- mainsequence/virtualfundbuilder/contrib/rebalance_strategies/__init__.py +1 -0
- mainsequence/virtualfundbuilder/contrib/rebalance_strategies/rebalance_strategies.py +313 -0
- mainsequence/virtualfundbuilder/data_nodes.py +637 -0
- mainsequence/virtualfundbuilder/enums.py +23 -0
- mainsequence/virtualfundbuilder/models.py +282 -0
- mainsequence/virtualfundbuilder/notebook_handling.py +42 -0
- mainsequence/virtualfundbuilder/portfolio_interface.py +272 -0
- mainsequence/virtualfundbuilder/resource_factory/__init__.py +0 -0
- mainsequence/virtualfundbuilder/resource_factory/app_factory.py +170 -0
- mainsequence/virtualfundbuilder/resource_factory/base_factory.py +238 -0
- mainsequence/virtualfundbuilder/resource_factory/rebalance_factory.py +101 -0
- mainsequence/virtualfundbuilder/resource_factory/signal_factory.py +183 -0
- mainsequence/virtualfundbuilder/utils.py +381 -0
- mainsequence-2.0.0.dist-info/METADATA +105 -0
- mainsequence-2.0.0.dist-info/RECORD +110 -0
- mainsequence-2.0.0.dist-info/WHEEL +5 -0
- mainsequence-2.0.0.dist-info/licenses/LICENSE +40 -0
- mainsequence-2.0.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,637 @@
import copy
import os

from mainsequence.client import UpdateStatistics, AssetCategory, Asset
from mainsequence.tdag.data_nodes import DataNode, WrapperDataNode
from datetime import datetime
import numpy as np
import pytz
import pandas as pd
from typing import Dict, Tuple, Union, Optional

from .models import PortfolioBuildConfiguration
from mainsequence.virtualfundbuilder.contrib.prices.data_nodes import get_interpolated_prices_timeseries
from mainsequence.virtualfundbuilder.resource_factory.rebalance_factory import RebalanceFactory
import ast  # needed for the literal_eval fallback in PortfolioFromDF.update
import json

from mainsequence.virtualfundbuilder.resource_factory.signal_factory import SignalWeightsFactory
from tqdm import tqdm

from .. import client as ms_client
import mainsequence.client as msc

def translate_to_pandas_freq(custom_freq):
    """
    Translate custom datetime frequency strings to Pandas frequency strings.

    Args:
        custom_freq (str): Custom frequency string (e.g., '1d', '1m', '1mo').

    Returns:
        str: Pandas frequency string (e.g., '1D', '1min', '1M').
    """
    # Mapping from the custom frequency units to pandas frequency aliases
    freq_mapping = {
        'd': 'D',    # days
        'm': 'min',  # minutes
        'mo': 'M',   # months
    }

    # Extract the numeric part and the unit part
    import re
    match = re.match(r"(\d+)([a-z]+)", custom_freq)
    if not match:
        raise ValueError(f"Invalid frequency format: {custom_freq}")

    number, unit = match.groups()

    # Map the unit to the corresponding pandas frequency
    if unit not in freq_mapping:
        raise ValueError(f"Unsupported frequency unit: {unit}")

    pandas_freq = freq_mapping[unit]

    # Combine the number with the pandas frequency
    return f"{number}{pandas_freq}"

WEIGHTS_TO_PORTFOLIO_COLUMNS = {
    "rebalance_weights": "weights_current",
    "rebalance_price": "price_current",
    "volume": "volume_current",
    "weights_at_last_rebalance": "weights_before",
    "price_at_last_rebalance": "price_before",
    "volume_at_last_rebalance": "volume_before"
}

All_PORTFOLIO_COLUMNS = []
All_PORTFOLIO_COLUMNS.extend(list(WEIGHTS_TO_PORTFOLIO_COLUMNS.keys()))
All_PORTFOLIO_COLUMNS.extend(["last_rebalance_date", "close", "return"])


class PortfolioFromDF(DataNode):

    def __init__(self, portfolio_name: str, calendar_name: str,
                 target_portfolio_about: str,
                 *args, **kwargs):
        self.portfolio_name = portfolio_name
        self.calendar_name = calendar_name
        self.target_portfolio_about = target_portfolio_about
        super().__init__(*args, **kwargs)

    def dependencies(self) -> Dict[str, Union["DataNode", "APIDataNode"]]:
        return {}

    def get_portfolio_df(self):
        raise NotImplementedError()

    def update(self):
        df = self.get_portfolio_df()
        if df.empty:
            return pd.DataFrame()

        assert all([c in All_PORTFOLIO_COLUMNS for c in df.columns])
        if self.update_statistics.max_time_index_value is not None:
            df = df[df.index >= self.update_statistics.max_time_index_value]
            if df.empty:
                return pd.DataFrame()

        for c in WEIGHTS_TO_PORTFOLIO_COLUMNS.keys():
            def _to_json_dict(v):
                # Normalize missing to empty dict
                if v is None or (isinstance(v, float) and np.isnan(v)):
                    v = {}

                # If already a JSON string, parse; if a Python-literal string, literal_eval
                if isinstance(v, str):
                    try:
                        v = json.loads(v)
                    except Exception:
                        try:
                            v = ast.literal_eval(v)
                        except Exception:
                            raise ValueError(f"Value in '{c}' is not JSON/dict: {v!r}")

                if not isinstance(v, dict):
                    raise ValueError(f"Value in '{c}' is not a dict after normalization (got {type(v)}).")

                # Dump to canonical JSON (and verify round-trip)
                s = json.dumps(v, ensure_ascii=False, sort_keys=True)
                json.loads(s)  # will raise if invalid
                return s

            df[c] = df[c].apply(_to_json_dict)

        return df


class PortfolioStrategy(DataNode):
    """
    Manages the rebalancing of asset weights within a portfolio over time, considering transaction fees
    and rebalancing strategies. Calculates portfolio values and returns while accounting for execution-specific fees.
    """

    def __init__(
            self,
            portfolio_build_configuration: PortfolioBuildConfiguration,
            *args, **kwargs
    ):
        """
        Initializes the PortfolioStrategy class with the necessary configurations.

        Args:
            portfolio_build_configuration (PortfolioBuildConfiguration): Configuration for building the portfolio,
                including assets, execution parameters, and backtesting weights.
        """

        self.execution_configuration = portfolio_build_configuration.execution_configuration
        self.backtesting_weights_config = portfolio_build_configuration.backtesting_weights_configuration

        self.commission_fee = self.execution_configuration.commission_fee

        self.portfolio_prices_frequency = portfolio_build_configuration.portfolio_prices_frequency

        self.assets_configuration = portfolio_build_configuration.assets_configuration

        self.portfolio_frequency = self.assets_configuration.prices_configuration.upsample_frequency_id

        self.full_signal_weight_config = copy.deepcopy(self.backtesting_weights_config.signal_weights_configuration)

        self.signal_weights_name = self.backtesting_weights_config.signal_weights_name
        SignalWeightClass = SignalWeightsFactory.get_signal_weights_strategy(
            signal_weights_name=self.signal_weights_name
        )

        self.signal_weights = SignalWeightClass.build_and_parse_from_configuration(**self.full_signal_weight_config)

        self.rebalance_strategy_name = self.backtesting_weights_config.rebalance_strategy_name
        RebalanceClass = RebalanceFactory.get_rebalance_strategy(rebalance_strategy_name=self.rebalance_strategy_name)
        self.rebalancer = RebalanceClass(**self.backtesting_weights_config.rebalance_strategy_configuration)

        self.rebalancer_explanation = ""  # TODO: Add rebalancer explanation

        asset_list = None
        if not self.assets_configuration.assets_category_unique_id:
            asset_list = self.signal_weights.get_asset_list()
            portfolio_asset_uid = self.signal_weights.get_asset_uid_to_override_portfolio_price()
            if portfolio_asset_uid is not None:
                asset = msc.Asset.get_or_none(unique_identifier=portfolio_asset_uid)
                if asset is None:
                    raise Exception(f"{portfolio_asset_uid} not found. Be sure it is in the price translation table")
                asset_list = asset_list + [asset]
                asset_list = list({a.id: a for a in asset_list}.values())

        self.bars_ts = get_interpolated_prices_timeseries(copy.deepcopy(self.assets_configuration), asset_list=asset_list)

        super().__init__(*args, **kwargs)

    def get_asset_list(self):
        """
        Creates mappings from symbols to IDs.
        """
        if self.assets_configuration.assets_category_unique_id:
            asset_category = AssetCategory.get(unique_identifier=self.assets_configuration.assets_category_unique_id)
            asset_list = Asset.filter(id__in=asset_category.assets)  # no need for specifics as only symbols are relevant
        else:
            # get all assets of the signal
            asset_list = self.signal_weights.get_asset_list()

        return asset_list

    def _calculate_start_end_dates(self):
        """
        Calculates the start and end dates for processing based on the latest stored value and the available data.
        The end date is calculated from the end dates of the prices of all assets involved, using the earliest
        to ensure that all assets have prices.

        Returns:
            Tuple[datetime, datetime]: A tuple containing the start date and end date for processing.
        """
        # Get last observations for each exchange
        update_statics_from_dependencies = self.bars_ts.update_statistics
        earliest_last_value = max(update_statics_from_dependencies.asset_time_statistics.values())

        if earliest_last_value is None:
            self.logger.warning(f"update_statics_from_dependencies {update_statics_from_dependencies}")
            raise Exception("Prices are empty")

        # Determine the last value where all assets have data
        if self.assets_configuration.prices_configuration.forward_fill_to_now:
            end_date = datetime.now(pytz.utc)
        else:
            end_date = earliest_last_value + self.bars_ts.maximum_forward_fill

        # Handle the case when the latest stored value is None
        start_date = self.update_statistics.max_time_index_value or self.OFFSET_START

        # Adjust end_date based on the max time difference variable if set
        max_td_env = os.getenv("MAX_TD_FROM_LATEST_VALUE", None)
        if max_td_env is not None:
            new_end_date = start_date + pd.Timedelta(max_td_env)
            end_date = new_end_date if new_end_date < end_date else end_date

        return start_date, end_date

    def _generate_new_index(self, start_date, end_date, rebalancer_calendar):
        """
        Generates a new index based on frequency and calendar.

        Args:
            start_date (datetime): Latest timestamp in series.
            end_date (datetime): Upper limit for date range.
            rebalancer_calendar: Calendar object from the rebalancer.

        Returns:
            pd.DatetimeIndex: New index for resampling.
        """
        upsample_freq = self.assets_configuration.prices_configuration.upsample_frequency_id

        if "d" in upsample_freq:
            assert upsample_freq == "1d", "Only '1d' frequency is implemented."
            upsample_freq = translate_to_pandas_freq(upsample_freq)
            freq = upsample_freq.replace("days", "d")
            schedule = rebalancer_calendar.schedule(start_date=start_date, end_date=end_date)
            new_index = schedule.set_index('market_close').index
            new_index.name = None
            new_index = new_index[new_index <= end_date]

        else:
            upsample_freq = translate_to_pandas_freq(upsample_freq)
            self.logger.warning("Matching new index with calendar")
            freq = upsample_freq

            new_index = pd.date_range(start=start_date, end=end_date, freq=freq)
        return new_index, freq

    def dependencies(self) -> Dict[str, Union["DataNode", "APIDataNode"]]:
        return {
            "bars_ts": self.bars_ts,
            "signal_weights": self.signal_weights
        }

    def _postprocess_weights(self, weights):
        """
        Prepares the backtesting weights DataFrame for storage and sends it to VAM if applicable.

        Args:
            weights (pd.DataFrame): DataFrame of backtesting weights.

        Returns:
            pd.DataFrame: Prepared backtesting weights.
        """
        # Filter for dates after the latest stored value
        if self.update_statistics.is_empty() == False:
            weights = weights[weights.index > self.update_statistics.max_time_index_value]
            if weights.empty:
                return pd.DataFrame()

        # Reshape and validate the DataFrame
        weights = weights.stack()
        required_columns = ["weights_before", "weights_current", "price_current", "price_before"]
        for col in required_columns:
            assert col in weights.columns, f"Column '{col}' is missing in weights"

        weights = weights.dropna(subset=["weights_current"])
        # Filter again for dates after the latest stored value
        if self.update_statistics.max_time_index_value is not None:
            weights = weights[
                weights.index.get_level_values("time_index") > self.update_statistics.max_time_index_value]

        # Prepare the weights before by combining the last weights used for the portfolio with the new weights
        if self.update_statistics.is_empty() == False:
            last_weights = self._get_last_weights()
            weights = pd.concat([last_weights, weights], axis=0).fillna(0)

        return weights

    def get_portfolio_about_text(self):
        """
        Constructs the portfolio about text.

        Returns:
            str: Portfolio description.
        """
        portfolio_about = f"""Portfolio created with Main Sequence VirtualFundBuilder engine with the following signal and
rebalance details:"""
        return json.dumps(portfolio_about)

    def build_prefix(self):
        reba_strat = self.rebalance_strategy_name
        signa_name = self.signal_weights_name
        return f"{reba_strat}_{signa_name}"

    def _calculate_portfolio_returns(self, weights: pd.DataFrame, prices: pd.DataFrame) -> pd.DataFrame:
        """
        Calculates the returns for the portfolio based on the asset prices and their respective weights,
        including the impact of transaction fees.

        Args:
            weights (pd.DataFrame): DataFrame containing weights of assets at different timestamps.
            prices (pd.DataFrame): DataFrame containing prices of assets.

        Returns:
            pd.DataFrame: DataFrame containing portfolio returns net of transaction fees.
        """
        weights = weights.reset_index().pivot(
            index="time_index",
            columns=["unique_identifier"],
            values=["price_current", "weights_before", "weights_current"]
        )

        price_current = weights.price_current
        weights_before = weights.weights_before.fillna(0)
        weights_current = weights.weights_current.fillna(0)

        prices = prices[self.assets_configuration.price_type.value].unstack()

        # get the first date for prices
        first_price_date = prices.stack().dropna().index.union(price_current.stack().dropna().index)[0][0]

        prices = price_current.combine_first(
            prices).sort_index().ffill()  # combine raw prices with signal prices for a continuous price ts
        prices = prices.reindex(weights.index)

        returns = (prices / prices.shift(1) - 1).fillna(0.0)
        returns.replace([np.inf, -np.inf], 0, inplace=True)

        # Calculate weighted returns per coin: R_c = w_past_c * r_c
        weights_before = weights_before.reindex(returns.index, method="ffill").dropna()
        weights_current = weights_current.reindex(returns.index, method="ffill").dropna()

        weighted_returns = (weights_before * returns).dropna()

        weights_diff = (weights_current - weights_before).fillna(0)
        # Fees = w_diff * fee%
        fees = (weights_diff.abs() * self.commission_fee).sum(axis=1)

        # Sum returns over assets
        portfolio_returns = pd.DataFrame({
            "return": weighted_returns.sum(axis=1) - fees,
        })
        portfolio_returns = portfolio_returns[portfolio_returns.index >= first_price_date]

        return portfolio_returns

    def _calculate_portfolio_values(self, portfolio: pd.DataFrame) -> pd.DataFrame:
        """
        Calculates and applies cumulative returns to get the current portfolio values.
        For re-executions, the last portfolio values are retrieved from the database.

        Args:
            portfolio (pd.DataFrame): DataFrame containing portfolio returns.

        Returns:
            pd.DataFrame: Updated portfolio values and returns.
        """
        last_portfolio = 1
        last_portfolio_minus_fees = 1
        if self.update_statistics.is_empty() == False:
            last_obs = self.get_df_between_dates(start_date=self.update_statistics.max_time_index_value)
            last_portfolio = last_obs["close"].iloc[0]

            # Keep only new returns
            portfolio = portfolio[portfolio.index > last_obs.index[0]]

        # Apply cumulative returns
        portfolio["close"] = last_portfolio * np.cumprod(portfolio["return"] + 1)
        return portfolio

    def _add_serialized_weights(self, portfolio, weights):
        # Reset index to get 'time_index' as a column
        weights_reset = weights.reset_index()

        # Identify the data columns to pivot
        data_columns = weights_reset.columns.difference(
            ['time_index', 'unique_identifier']
        )

        # Pivot the DataFrame to get a wide format
        weights_pivot = weights_reset.pivot(
            index='time_index', columns='unique_identifier', values=data_columns
        )

        # Serialize each wide row into a JSON string per portfolio column
        rebalance_weights_serialized = pd.DataFrame(index=weights_pivot.index)
        for portfolio_column, weights_column in WEIGHTS_TO_PORTFOLIO_COLUMNS.items():
            rebalance_weights_serialized[portfolio_column] = [json.dumps(r) for r in
                                                              weights_pivot[weights_column].to_dict(orient="records")]

        # Join the serialized weights to the portfolio DataFrame
        portfolio = portfolio.join(rebalance_weights_serialized, how='left')

        # Identify rebalance dates where weights are provided
        is_rebalance_date = portfolio['rebalance_weights'].notnull()
        portfolio.loc[is_rebalance_date, 'last_rebalance_date'] = (
            portfolio.index[is_rebalance_date].astype(str)
        )

        # Forward-fill the serialized weights and last rebalance dates
        rebalance_columns = list(WEIGHTS_TO_PORTFOLIO_COLUMNS.keys())
        portfolio[rebalance_columns] = portfolio[rebalance_columns].ffill()
        portfolio['last_rebalance_date'] = portfolio['last_rebalance_date'].ffill()

        # Drop rows with any remaining NaN values
        return portfolio.dropna()

    def _get_last_weights(self):
        """Deserialize the last rebalance weights."""

        last_obs = self.get_df_between_dates(start_date=self.update_statistics.max_time_index_value)
        if last_obs is None or last_obs.empty:
            return None

        last_weights = {}
        for portfolio_column, weights_column in WEIGHTS_TO_PORTFOLIO_COLUMNS.items():
            last_weights[weights_column] = json.loads(last_obs[portfolio_column].iloc[0])

        last_weights = pd.DataFrame(last_weights)
        last_weights.index.names = ["unique_identifier"]
        last_weights["time_index"] = last_obs.index[0]
        last_weights = last_weights.set_index("time_index", append=True)
        last_weights.index = last_weights.index.reorder_levels(["time_index", "unique_identifier"])
        return last_weights

    def _interpolate_bars_index(self, new_index: pd.DatetimeIndex, unique_identifier_list: list, index_freq: str,
                                bars_ts: WrapperDataNode
                                ):
        """
        Get interpolated prices for a time index.
        Optionally forward-fills prices to the present if configured.
        """
        prices_config = self.assets_configuration.prices_configuration

        # Determine the end_date for data fetching
        fetch_end_date = new_index.max()

        # If forward-filling is enabled, we still fetch up to the latest signal date,
        # but we will extend the index later.
        raw_prices = bars_ts.get_df_between_dates(
            start_date=new_index.min() - pd.Timedelta(index_freq),
            end_date=fetch_end_date,
            great_or_equal=True,
            less_or_equal=True,
            unique_identifier_list=unique_identifier_list
        )

        if len(raw_prices) == 0:
            self.logger.info(f"No prices data in index interpolation for node {bars_ts.storage_hash}")
            return pd.DataFrame(), pd.DataFrame()

        raw_prices.sort_values("time_index", inplace=True)

        final_index_for_interpolation = new_index
        if prices_config.forward_fill_to_now:
            fill_end_date = datetime.now(pytz.utc)
            last_ts_in_df = raw_prices.index.get_level_values("time_index").max()

            self.logger.info(f"Forward-filling prices from {last_ts_in_df} to {fill_end_date}")
            # Extend the `new_index` to the current time for the fill operation
            pandas_freq = translate_to_pandas_freq(self.portfolio_prices_frequency)
            final_index_for_interpolation = pd.date_range(start=new_index.min(), end=fill_end_date, freq=pandas_freq)

        interpolated_prices = raw_prices.unstack(["unique_identifier"])

        # Use the potentially extended index for reindexing
        interpolated_prices = interpolated_prices.reindex(final_index_for_interpolation, method="ffill")
        interpolated_prices.index.names = ["time_index"]
        interpolated_prices = interpolated_prices.stack(["unique_identifier"])

        return raw_prices, interpolated_prices

    def update(self):
        """
        Updates the portfolio weights based on the latest available data.

        Returns:
            pd.DataFrame: Updated portfolio values and returns.
        """
        self.logger.debug("Starting update of portfolio weights.")
        start_date, end_date = self._calculate_start_end_dates()
        self.logger.debug(f"Update from {start_date} to {end_date}")

        if start_date is None:
            self.logger.info("Start date is None, no update is done")
            return pd.DataFrame()

        # Generate new index for resampling
        new_index, index_freq = self._generate_new_index(start_date, end_date, self.rebalancer.calendar)

        if len(new_index) == 0:
            self.logger.info("No new portfolio weights to update")
            return pd.DataFrame()

        # Interpolate signal weights to the new index; times where the signal is not valid are NaN
        signal_weights = self.signal_weights.interpolate_index(new_index).dropna()

        if len(signal_weights) == 0:
            self.logger.info("No signal weights found, no update is done")
            return pd.DataFrame()

        # Limit the index to the last valid signal_weights value, as new signal_weights might be created afterwards
        # (especially important for backtesting)
        new_index = new_index[new_index <= signal_weights.index.max() + self.signal_weights.maximum_forward_fill()]

        # Verify the format of signal_weights columns
        expected_columns = ["unique_identifier"]
        assert signal_weights.columns.names == expected_columns, (
            f"signal_weights must have columns named {expected_columns}"
        )

        # Get prices for the portfolio, interpolated to new_index
        raw_prices, interpolated_prices = self._interpolate_bars_index(
            new_index=new_index,
            bars_ts=self.bars_ts,
            index_freq=index_freq,
            unique_identifier_list=list(
                signal_weights.columns.get_level_values("unique_identifier")
            )
        )

        if self.update_statistics.is_empty() == False:
            interpolated_prices = interpolated_prices[
                interpolated_prices.index.get_level_values(
                    "time_index") > self.update_statistics.max_time_index_value
            ]
            signal_weights = signal_weights[signal_weights.index > self.update_statistics.max_time_index_value]

        if interpolated_prices.empty:
            raise ValueError("Interpolated prices are empty. Check if asset prices exist for the time window")

        # Calculate rebalanced weights
        weights = self.rebalancer.apply_rebalance_logic(
            signal_weights=signal_weights,
            start_date=start_date,
            prices_df=interpolated_prices,
            end_date=end_date,
            last_rebalance_weights=self._get_last_weights(),
            price_type=self.assets_configuration.price_type,
        )

        weights = self._postprocess_weights(weights)
        if len(weights) == 0:
            self.logger.info("No portfolio weights to update")
            return pd.DataFrame()

        # Calculate portfolio returns
        portfolio_returns = self._calculate_portfolio_returns(weights, raw_prices)
        portfolio = self._calculate_portfolio_values(portfolio_returns)

        # Prepare for storage
        if len(portfolio) > 0 and self.update_statistics.is_empty() == False:
            portfolio = portfolio[portfolio.index > self.update_statistics.max_time_index_value]

        portfolio = self._add_serialized_weights(portfolio, weights)
        portfolio = self._resample_portfolio_with_calendar(portfolio)

        # If the price comes from the signal, then override the calculated close
        asset_uid_to_override_portfolio_price = self.signal_weights.get_asset_uid_to_override_portfolio_price()
        if asset_uid_to_override_portfolio_price is not None:
            new_portfolio_price = self.bars_ts.get_ranged_data_per_asset(
                range_descriptor={asset_uid_to_override_portfolio_price: {"start_date": portfolio.index.min(),
                                                                          "start_date_operand": ">="
                                                                          }})
            if new_portfolio_price.empty:
                self.logger.error("No prices on portfolio target asset")
                return pd.DataFrame()

            new_portfolio_price = new_portfolio_price.reset_index("unique_identifier", drop=True)
            union_index = new_portfolio_price.index.union(portfolio.index.unique()).unique()
            new_portfolio_price = new_portfolio_price.reindex(union_index).ffill().bfill()
            new_portfolio_price = new_portfolio_price.reindex(portfolio.index)
            portfolio["calculated_close"] = portfolio["close"]
            portfolio["close"] = new_portfolio_price["close"]
            portfolio["return"] = portfolio["close"].pct_change().fillna(0.0)  # TODO: get the correct return from the previous price

        self.logger.info(f"{len(portfolio)} new portfolio values have been calculated.")
        return portfolio

    def get_table_metadata(self) -> Optional[ms_client.TableMetaData]:
        asset = ms_client.PortfolioIndexAsset.get_or_none(
            reference_portfolio__local_time_serie__update_hash=self.local_time_serie.update_hash)
        if asset is not None:
            identifier = asset.unique_identifier
            return ms_client.TableMetaData(
                identifier=identifier,
                description=f"Portfolio strategy for asset {asset.unique_identifier}",
                data_frequency_id=ms_client.DataFrequency.one_d,
            )

    def _resample_portfolio_with_calendar(self, portfolio: pd.DataFrame) -> pd.DataFrame:
        if len(portfolio) == 0:
            return portfolio

        calendar_schedule = self.rebalancer.calendar.schedule(portfolio.index.min(), portfolio.index.max())
        portfolio.index = pd.to_datetime(portfolio.index)
        portfolio["close_time"] = portfolio.index.strftime("%Y-%m-%d %H:%M:%S")
        portfolio = portfolio.resample(pd.to_timedelta(self.portfolio_frequency_to_pandas())).last().ffill()
        # TODO: solve cases of portfolio_frequency
        return portfolio

    def portfolio_frequency_to_pandas(self):
        return translate_to_pandas_freq(self.portfolio_prices_frequency)
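
For orientation, the standalone sketch below mirrors the fee treatment in PortfolioStrategy._calculate_portfolio_returns and the cumulative close in _calculate_portfolio_values: weighted asset returns minus turnover times the commission fee, then compounded from a starting value of 1. The assets, weights, returns, and fee level are invented for the illustration; this is not code from the package.

import pandas as pd

commission_fee = 0.001  # illustrative fee per unit of turnover

# Two days of per-asset returns and the weights held before/after rebalancing
returns = pd.DataFrame(
    {"ASSET_A": [0.01, -0.02], "ASSET_B": [0.00, 0.03]},
    index=pd.to_datetime(["2024-01-02", "2024-01-03"]),
)
weights_before = pd.DataFrame({"ASSET_A": [0.6, 0.5], "ASSET_B": [0.4, 0.5]}, index=returns.index)
weights_current = pd.DataFrame({"ASSET_A": [0.5, 0.5], "ASSET_B": [0.5, 0.5]}, index=returns.index)

# return_t = sum(w_before * r) - sum(|w_current - w_before|) * fee
weighted_returns = (weights_before * returns).sum(axis=1)
fees = ((weights_current - weights_before).abs() * commission_fee).sum(axis=1)
portfolio_return = weighted_returns - fees

# Cumulative "close" series starting from 1, as in _calculate_portfolio_values
close = (portfolio_return + 1).cumprod()
print(portfolio_return)
print(close)
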
@@ -0,0 +1,23 @@
from enum import Enum
from mainsequence.client import MARKETS_CONSTANTS as CONSTANTS

class RebalanceFrequencyStrategyName(Enum):
    DAILY = "daily"
    MONTHLY = "monthly"
    QUARTERLY = "quarterly"

class PriceTypeNames(Enum):
    VWAP = "vwap"
    OPEN = "open"
    CLOSE = "close"

class RunStrategy(Enum):
    BACKTEST = "backtest"
    LIVE = "live"
    ALL = "all"

class ResourceType(Enum):
    SIGNAL_WEIGHTS_STRATEGY = "signal_weights_strategy"
    REBALANCE_STRATEGY = "rebalance_strategy"
    APP = "app"
    HTML_APP = "html_app"