mainsequence 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mainsequence/__init__.py +0 -0
- mainsequence/__main__.py +9 -0
- mainsequence/cli/__init__.py +1 -0
- mainsequence/cli/api.py +157 -0
- mainsequence/cli/cli.py +442 -0
- mainsequence/cli/config.py +78 -0
- mainsequence/cli/ssh_utils.py +126 -0
- mainsequence/client/__init__.py +17 -0
- mainsequence/client/base.py +431 -0
- mainsequence/client/data_sources_interfaces/__init__.py +0 -0
- mainsequence/client/data_sources_interfaces/duckdb.py +1468 -0
- mainsequence/client/data_sources_interfaces/timescale.py +479 -0
- mainsequence/client/models_helpers.py +113 -0
- mainsequence/client/models_report_studio.py +412 -0
- mainsequence/client/models_tdag.py +2276 -0
- mainsequence/client/models_vam.py +1983 -0
- mainsequence/client/utils.py +387 -0
- mainsequence/dashboards/__init__.py +0 -0
- mainsequence/dashboards/streamlit/__init__.py +0 -0
- mainsequence/dashboards/streamlit/assets/config.toml +12 -0
- mainsequence/dashboards/streamlit/assets/favicon.png +0 -0
- mainsequence/dashboards/streamlit/assets/logo.png +0 -0
- mainsequence/dashboards/streamlit/core/__init__.py +0 -0
- mainsequence/dashboards/streamlit/core/theme.py +212 -0
- mainsequence/dashboards/streamlit/pages/__init__.py +0 -0
- mainsequence/dashboards/streamlit/scaffold.py +220 -0
- mainsequence/instrumentation/__init__.py +7 -0
- mainsequence/instrumentation/utils.py +101 -0
- mainsequence/instruments/__init__.py +1 -0
- mainsequence/instruments/data_interface/__init__.py +10 -0
- mainsequence/instruments/data_interface/data_interface.py +361 -0
- mainsequence/instruments/instruments/__init__.py +3 -0
- mainsequence/instruments/instruments/base_instrument.py +85 -0
- mainsequence/instruments/instruments/bond.py +447 -0
- mainsequence/instruments/instruments/european_option.py +74 -0
- mainsequence/instruments/instruments/interest_rate_swap.py +217 -0
- mainsequence/instruments/instruments/json_codec.py +585 -0
- mainsequence/instruments/instruments/knockout_fx_option.py +146 -0
- mainsequence/instruments/instruments/position.py +475 -0
- mainsequence/instruments/instruments/ql_fields.py +239 -0
- mainsequence/instruments/instruments/vanilla_fx_option.py +107 -0
- mainsequence/instruments/pricing_models/__init__.py +0 -0
- mainsequence/instruments/pricing_models/black_scholes.py +49 -0
- mainsequence/instruments/pricing_models/bond_pricer.py +182 -0
- mainsequence/instruments/pricing_models/fx_option_pricer.py +90 -0
- mainsequence/instruments/pricing_models/indices.py +350 -0
- mainsequence/instruments/pricing_models/knockout_fx_pricer.py +209 -0
- mainsequence/instruments/pricing_models/swap_pricer.py +502 -0
- mainsequence/instruments/settings.py +175 -0
- mainsequence/instruments/utils.py +29 -0
- mainsequence/logconf.py +284 -0
- mainsequence/reportbuilder/__init__.py +0 -0
- mainsequence/reportbuilder/__main__.py +0 -0
- mainsequence/reportbuilder/examples/ms_template_report.py +706 -0
- mainsequence/reportbuilder/model.py +713 -0
- mainsequence/reportbuilder/slide_templates.py +532 -0
- mainsequence/tdag/__init__.py +8 -0
- mainsequence/tdag/__main__.py +0 -0
- mainsequence/tdag/config.py +129 -0
- mainsequence/tdag/data_nodes/__init__.py +12 -0
- mainsequence/tdag/data_nodes/build_operations.py +751 -0
- mainsequence/tdag/data_nodes/data_nodes.py +1292 -0
- mainsequence/tdag/data_nodes/persist_managers.py +812 -0
- mainsequence/tdag/data_nodes/run_operations.py +543 -0
- mainsequence/tdag/data_nodes/utils.py +24 -0
- mainsequence/tdag/future_registry.py +25 -0
- mainsequence/tdag/utils.py +40 -0
- mainsequence/virtualfundbuilder/__init__.py +45 -0
- mainsequence/virtualfundbuilder/__main__.py +235 -0
- mainsequence/virtualfundbuilder/agent_interface.py +77 -0
- mainsequence/virtualfundbuilder/config_handling.py +86 -0
- mainsequence/virtualfundbuilder/contrib/__init__.py +0 -0
- mainsequence/virtualfundbuilder/contrib/apps/__init__.py +8 -0
- mainsequence/virtualfundbuilder/contrib/apps/etf_replicator_app.py +164 -0
- mainsequence/virtualfundbuilder/contrib/apps/generate_report.py +292 -0
- mainsequence/virtualfundbuilder/contrib/apps/load_external_portfolio.py +107 -0
- mainsequence/virtualfundbuilder/contrib/apps/news_app.py +437 -0
- mainsequence/virtualfundbuilder/contrib/apps/portfolio_report_app.py +91 -0
- mainsequence/virtualfundbuilder/contrib/apps/portfolio_table.py +95 -0
- mainsequence/virtualfundbuilder/contrib/apps/run_named_portfolio.py +45 -0
- mainsequence/virtualfundbuilder/contrib/apps/run_portfolio.py +40 -0
- mainsequence/virtualfundbuilder/contrib/apps/templates/base.html +147 -0
- mainsequence/virtualfundbuilder/contrib/apps/templates/report.html +77 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/__init__.py +5 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/external_weights.py +61 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/intraday_trend.py +149 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/market_cap.py +310 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/mock_signal.py +78 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/portfolio_replicator.py +269 -0
- mainsequence/virtualfundbuilder/contrib/prices/__init__.py +1 -0
- mainsequence/virtualfundbuilder/contrib/prices/data_nodes.py +810 -0
- mainsequence/virtualfundbuilder/contrib/prices/utils.py +11 -0
- mainsequence/virtualfundbuilder/contrib/rebalance_strategies/__init__.py +1 -0
- mainsequence/virtualfundbuilder/contrib/rebalance_strategies/rebalance_strategies.py +313 -0
- mainsequence/virtualfundbuilder/data_nodes.py +637 -0
- mainsequence/virtualfundbuilder/enums.py +23 -0
- mainsequence/virtualfundbuilder/models.py +282 -0
- mainsequence/virtualfundbuilder/notebook_handling.py +42 -0
- mainsequence/virtualfundbuilder/portfolio_interface.py +272 -0
- mainsequence/virtualfundbuilder/resource_factory/__init__.py +0 -0
- mainsequence/virtualfundbuilder/resource_factory/app_factory.py +170 -0
- mainsequence/virtualfundbuilder/resource_factory/base_factory.py +238 -0
- mainsequence/virtualfundbuilder/resource_factory/rebalance_factory.py +101 -0
- mainsequence/virtualfundbuilder/resource_factory/signal_factory.py +183 -0
- mainsequence/virtualfundbuilder/utils.py +381 -0
- mainsequence-2.0.0.dist-info/METADATA +105 -0
- mainsequence-2.0.0.dist-info/RECORD +110 -0
- mainsequence-2.0.0.dist-info/WHEEL +5 -0
- mainsequence-2.0.0.dist-info/licenses/LICENSE +40 -0
- mainsequence-2.0.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,1983 @@
|
|
1
|
+
import copy
|
2
|
+
import datetime
|
3
|
+
from multiprocessing.managers import BaseManager
|
4
|
+
|
5
|
+
import pytz
|
6
|
+
import requests
|
7
|
+
from functools import wraps
|
8
|
+
import pandas as pd
|
9
|
+
from typing import Union,Literal
|
10
|
+
from types import SimpleNamespace
|
11
|
+
import requests
|
12
|
+
import os
|
13
|
+
import json
|
14
|
+
import time
|
15
|
+
|
16
|
+
from enum import IntEnum, Enum
|
17
|
+
from decimal import Decimal
|
18
|
+
from mainsequence.client import LocalTimeSerie
|
19
|
+
|
20
|
+
from .base import BasePydanticModel, BaseObjectOrm, MARKETS_CONSTANTS as CONSTANTS, TDAG_ENDPOINT, API_ENDPOINT, HtmlSaveException
|
21
|
+
from .utils import AuthLoaders, make_request, DoesNotExist, request_to_datetime, DATE_FORMAT
|
22
|
+
from typing import List, Optional, Dict, Any, Tuple
|
23
|
+
from pydantic import BaseModel, Field, validator,root_validator,constr,model_validator
|
24
|
+
|
25
|
+
from mainsequence.logconf import logger
|
26
|
+
from urllib.parse import urlsplit, urlunsplit, parse_qsl, urlencode
|
27
|
+
|
28
|
+
|
29
|
+
|
30
|
+
# Four-letter venue codes identifying crypto exchanges (OpenFIGI-style codes,
# e.g. "bnce" Binance, "cbse" Coinbase — presumably; verify against the FIGI
# exchange-code list). Used to recognise crypto listings.
CRYPTO_EXCHANGE_CODE = ["abts","acxi","alcn","bbit","bbox","bbsp","bcex","bequ","bfly","bfnx","bfrx","bgon",
                        "binc","bitc","bitz","bjex","bl3p","blc2","blcr","bnbd","bnce","bndx","bnf8","bnus",
                        "bopt","bpnd","bt38","btba","btbu","btby","btca","btcb","btcc","bthb","btma","btmx",
                        "btrk","btrx","btsh","btso","bull","bxth","bybt","cbse","ccck","ccex","cexi","cflr",
                        "cflx","cnex","cngg","cnhd","cnmt","cone","crco","crfl","crtw","crv2","cucy","curv",
                        "delt","drbt","dydx","eris","ethx","etrx","exxa","ftxu","ftxx","gacn","gate","gmni",
                        "hbdm","hitb","huob","inch","indr","itbi","kcon","korb","krkn","lclb","lgom","lmax",
                        "merc","mexc","mtgx","ngcs","nova","nvdx","okcn","okex","oslx","pksp","polo","qsp2",
                        "qsp3","quon","sghd","stmp","sush"]

# Map of two-letter composite exchange codes to ISO-10383 MIC codes.
# The values are consumed as Calendar names by AssetMixin.get_calendar.
COMPOSITE_TO_ISO = {
    'AR': 'XBUE', 'AU': 'XASX', 'BZ': 'BVMF', 'CN': 'XTSE', 'CB': 'XBOG',
    'CH': 'XSHG', 'CI': 'XSGO', 'CP': 'XPRA', 'DC': 'XCSE', 'FH': 'XHEL',
    'FP': 'XPAR', 'GA': 'ASEX', 'GR': 'XFRA', 'HK': 'XHKG', 'IE': 'XDUB',
    'IM': 'XMIL', 'IN': 'XBOM', 'IT': 'XTAE', 'JP': 'XTKS', 'KS': 'XKRX',
    'KZ': 'AIXK', 'LN': 'XLON', 'MM': 'XMEX', 'MK': 'XKLS', 'NA': 'XAMS',
    'PL': 'XLIS', 'PM': 'XPHS', 'PW': 'XWAR', 'RO': 'XBSE', 'SA': 'XSAU',
    'SM': 'XMAD', 'SS': 'XSTO', 'SW': 'XSWX', 'TH': 'XBKK', 'TI': 'XIST',
    'TT': 'XTAI', 'US': 'XNYS', 'AT': 'XWBO', 'BB': 'XBRU',
}
|
50
|
+
def validator_for_string(value):
    """Coerce an ISO-8601 'Z'-suffixed string into a naive datetime.

    Args:
        value: a string in 'YYYY-MM-DDTHH:MM:SSZ' form, or any other object.

    Returns:
        The parsed ``datetime.datetime`` for strings; any non-string input is
        returned unchanged (e.g. an already-parsed datetime).

    Raises:
        ValueError: when a string does not match the expected format.
    """
    if isinstance(value, str):
        # Parse the string to a datetime object
        try:
            return datetime.datetime.strptime(value, "%Y-%m-%dT%H:%M:%SZ")
        except ValueError:
            raise ValueError(f"Invalid datetime format: {value}. Expected format is 'YYYY-MM-DDTHH:MM:SSZ'.")
    # Bug fix: the function previously fell through and implicitly returned
    # None for non-string input, silently discarding valid datetime values.
    return value
|
57
|
+
|
58
|
+
|
59
|
+
def get_model_class(model_class: str):
    """Resolve a model class from its string name (reverse lookup).

    Raises:
        KeyError: when *model_class* is not a known model name.
    """
    return {
        "Asset": Asset,
        "AssetCurrencyPair": AssetCurrencyPair,
        "AssetFutureUSDM": AssetFutureUSDM,
        "PortfolioIndexAsset": PortfolioIndexAsset,
        "Calendar": Calendar,
        "ExecutionVenue": ExecutionVenue,
        "PortfolioGroup": PortfolioGroup,
    }[model_class]
|
74
|
+
|
75
|
+
|
76
|
+
def create_from_serializer_with_class(asset_list: List[dict]):
    """Instantiate each serialized asset with its concrete model class.

    Each dict must carry an "AssetClass" key naming the target class
    (resolved via ``get_model_class``); the remaining keys become the
    constructor kwargs.

    Bug fix: the original popped "AssetClass" from the input dicts, mutating
    the caller's data; the dicts are now left untouched.

    Args:
        asset_list: serialized asset payloads.

    Returns:
        A list of model instances, in input order.
    """
    new_list = []
    for payload in asset_list:
        AssetClass = get_model_class(payload["AssetClass"])
        kwargs = {k: v for k, v in payload.items() if k != "AssetClass"}
        new_list.append(AssetClass(**kwargs))
    return new_list
|
83
|
+
|
84
|
+
|
85
|
+
def resolve_asset(asset_dict: dict):
    """Instantiate a single serialized asset with its concrete model class."""
    return create_from_serializer_with_class([asset_dict])[0]
|
88
|
+
|
89
|
+
|
90
|
+
|
91
|
+
|
92
|
+
class Calendar(BaseObjectOrm, BasePydanticModel):
    """Trading calendar referenced by name (e.g. an MIC such as 'XNYS', or '24/7')."""

    id: Optional[int] = None
    name: str
    # Optional mapping of calendar dates; schema defined server-side — TODO confirm.
    calendar_dates: Optional[dict] = None

    def __str__(self):
        return self.name

    def __repr__(self) -> str:
        return self.name
|
102
|
+
|
103
|
+
class Organization(BaseModel):
    """Owning organization as returned by the user endpoint."""

    id: int
    uid: str
    name: str
    url: Optional[str]  # URL can be None
|
108
|
+
|
109
|
+
class Group(BaseModel):
    """Permission group attached to a User."""

    id: int
    name: str
    permissions: List[Any]  # Adjust the type for permissions as needed
|
113
|
+
|
114
|
+
class User(BaseObjectOrm, BasePydanticModel):
    """Authenticated platform user, including organization and group info."""

    first_name: str
    last_name: str
    is_active: bool
    date_joined: datetime.datetime
    role: str
    username: str
    email: str
    last_login: datetime.datetime
    api_request_limit: int
    mfa_enabled: bool
    organization: Organization
    plan: Optional[Any]  # Use a specific model if plan details are available
    groups: List[Group]
    user_permissions: List[Any]  # Adjust as necessary for permission structure
    phone_number: Optional[str] = None

    @classmethod
    def get_object_url(cls):
        # TODO should be also orm/api
        # User endpoints live under 'user/api' rather than the default 'orm/api'.
        url = f"{cls.ROOT_URL.replace('orm/api', 'user/api')}/{cls.END_POINTS[cls.class_name()]}"
        return url

    @classmethod
    def get_authenticated_user_details(cls):
        """Fetch the calling user's profile from the backend.

        Returns:
            A ``User`` built from the endpoint's JSON response.

        Raises:
            Exception: when the backend responds with a non-2xx status.
        """
        url = f"{cls.get_object_url()}/get_user_details/"
        r = make_request(s=cls.build_session(), loaders=cls.LOADERS, r_type="GET", url=url,)
        if r.status_code not in [200, 201]:
            # Bug fix: requests' Response.text is a property, not a method;
            # calling r.text() raised TypeError and masked the real error body.
            raise Exception(f" {r.text}")

        return cls(**r.json())
|
146
|
+
|
147
|
+
|
148
|
+
class AssetSnapshot(BaseObjectOrm, BasePydanticModel):
    """Point-in-time record of an asset's mutable identification fields.

    Snapshots form a validity chain per asset; the active snapshot is the one
    whose ``effective_to`` is null.
    """

    id: Optional[int] = None
    # Owning asset; may arrive as a nested object or a bare primary key.
    asset: Union["AssetMixin", int]

    # Validity window
    effective_from: datetime.datetime = Field(
        description="Date at which this snapshot became effective"
    )
    effective_to: Optional[datetime.datetime] = Field(
        None,
        description="Date at which this snapshot was superseded (null if current)"
    )

    # Mutable fields
    name: constr(max_length=255) = Field(
        ..., description="Security name as recorded in the FIGI database"
    )
    ticker: Optional[constr(max_length=50)] = Field(
        None,
        description="FIGI ticker field (often shorter symbol used by OpenFIGI)"
    )
    exchange_code: Optional[constr(max_length=50)] = Field(
        None,
        description="Exchange/market MIC code (e.g. XNYS, XNAS) or composite code"
    )
    asset_ticker_group_id: Optional[constr(max_length=12)] = Field(
        None,
        description="Highest aggregation level for share class grouping"
    )
    venue_specific_properties: Optional[Dict[str, Any]] = Field(
        None,
        description="Exchange-specific metadata"
    )
|
182
|
+
def _set_query_param_on_url(url: str, key: str, value) -> str:
|
183
|
+
"""
|
184
|
+
Add or replace a query parameter in a URL without disturbing others (e.g., offset/page).
|
185
|
+
Works with absolute or relative URLs.
|
186
|
+
"""
|
187
|
+
parts = urlsplit(url)
|
188
|
+
q = dict(parse_qsl(parts.query, keep_blank_values=True))
|
189
|
+
q[key] = str(value)
|
190
|
+
new_query = urlencode(q, doseq=True)
|
191
|
+
return urlunsplit((parts.scheme, parts.netloc, parts.path, new_query, parts.fragment))
|
192
|
+
|
193
|
+
class AssetPricingDetail(BasePydanticModel):
    """Instrument pricing dump together with the date it was produced."""

    instrument_dump: dict
    pricing_details_date: datetime.datetime
|
196
|
+
|
197
|
+
class AssetMixin(BaseObjectOrm, BasePydanticModel):
    """Base asset model: immutable identifiers plus the current snapshot
    (mutable naming fields) and the current pricing detail, with
    snapshot-aware query helpers.
    """

    id: Optional[int] = None

    # Immutable identifiers
    unique_identifier: constr(max_length=255)
    figi: Optional[constr(max_length=12)] = Field(
        None,
        description="FIGI identifier (unique to a specific instrument on a particular market/exchange)"
    )
    composite: Optional[constr(max_length=12)] = Field(
        None,
        description="Composite FIGI identifier (aggregates multiple local listings within one market)"
    )
    share_class: Optional[constr(max_length=12)] = Field(
        None,
        description="Share class designation (e.g. 'Common', 'Class A', 'Preferred') as per FIGI"
    )

    isin: Optional[constr(max_length=12)] = Field(
        None,
        description="International Securities Identification Number"
    )

    security_type: Optional[constr(max_length=50)] = Field(
        None,
        description="Instrument type (e.g. 'CS' for common stock, 'PS' for preferred)"
    )
    security_type_2: Optional[constr(max_length=50)] = Field(
        None,
        description="OpenFIGI Security Type 2"
    )
    security_market_sector: Optional[constr(max_length=50)] = Field(
        None,
        description="High-level sector classification (e.g. 'Equity', 'Corporate Bond') as per FIGI"
    )

    is_custom_by_organization: bool = Field(
        default=False,
        description="Flag indicating if this asset was custom-created by the organization"
    )

    # Snapshot relationship
    current_snapshot: Optional[AssetSnapshot] = Field(
        None,
        description="Latest active snapshot (effective_to is null)"
    )
    current_pricing_detail: Optional[AssetPricingDetail] = Field(
        None,
        description="details for instrument pricing"
    )
|
248
|
+
|
249
|
+
def __repr__(self) -> str:
|
250
|
+
return f"{self.class_name()}: {self.unique_identifier}"
|
251
|
+
|
252
|
+
@model_validator(mode="after")
|
253
|
+
def _inject_main_sequence_asset_id(self) -> "AssetMixin":
|
254
|
+
"""
|
255
|
+
After model construction, if instrument_pricing_detail is present,
|
256
|
+
ensure it contains {'main_sequence_asset_id': self.id}.
|
257
|
+
"""
|
258
|
+
ipd = self.current_pricing_detail
|
259
|
+
if ipd is not None:
|
260
|
+
# Be tolerant: coerce to a dict if necessary.
|
261
|
+
try:
|
262
|
+
ipd.instrument_dump["instrument"]['main_sequence_asset_id'] = self.id
|
263
|
+
except Exception as e:
|
264
|
+
self.clear_asset_pricing_details()
|
265
|
+
raise e
|
266
|
+
self.current_pricing_detail = ipd
|
267
|
+
return self
|
268
|
+
|
269
|
+
    # Read-only proxies for the mutable identification fields, which live on
    # the latest snapshot rather than on the asset record itself.
    # NOTE(review): each assumes current_snapshot is populated; when it is
    # None these raise AttributeError — confirm callers guarantee a snapshot.
    @property
    def ticker(self):
        return self.current_snapshot.ticker

    @property
    def name(self):
        return self.current_snapshot.name

    @property
    def exchange_code(self):
        return self.current_snapshot.exchange_code

    @property
    def asset_ticker_group_id(self):
        return self.current_snapshot.asset_ticker_group_id
|
284
|
+
|
285
|
+
@classmethod
|
286
|
+
def _translate_query_params(cls, query_params: Dict[str, Any]):
|
287
|
+
translation_map = {
|
288
|
+
"ticker": "current_snapshot__ticker",
|
289
|
+
"name": "current_snapshot__name",
|
290
|
+
"exchange_code": "current_snapshot__exchange_code",
|
291
|
+
"asset_ticker_group_id": "current_snapshot__asset_ticker_group_id"
|
292
|
+
}
|
293
|
+
|
294
|
+
translated_params = {}
|
295
|
+
for key, value in query_params.items():
|
296
|
+
# django search uses '__' for nested objects
|
297
|
+
full_query = key.split("__")
|
298
|
+
asset_query = full_query[0]
|
299
|
+
|
300
|
+
if asset_query in translation_map:
|
301
|
+
# Reconstruct the key using the translated base and the original suffix
|
302
|
+
translated_base = translation_map[asset_query]
|
303
|
+
# Join the translated base with the rest of the query parts
|
304
|
+
new_key_parts = [translated_base] + full_query[1:]
|
305
|
+
new_key = "__".join(new_key_parts)
|
306
|
+
translated_params[new_key] = value
|
307
|
+
else:
|
308
|
+
# If no translation is needed, use the original key
|
309
|
+
translated_params[key] = value
|
310
|
+
|
311
|
+
return translated_params
|
312
|
+
|
313
|
+
@classmethod
|
314
|
+
def query(cls, timeout=None, per_page: int = None, **kwargs):
|
315
|
+
"""
|
316
|
+
POST-based filtering for large requests that don't fit in the URL.
|
317
|
+
|
318
|
+
- per_page: desired number of items per page (client-side).
|
319
|
+
|
320
|
+
|
321
|
+
Follows DRF pagination and accumulates ALL pages. Returns raw dict items.
|
322
|
+
"""
|
323
|
+
base_url = cls.get_object_url() # e.g. "https://api.example.com/assets"
|
324
|
+
body = cls._parse_parameters_filter(kwargs) # same filters as GET
|
325
|
+
accumulated = []
|
326
|
+
|
327
|
+
# Start at the collection action
|
328
|
+
next_url = f"{base_url}/query/"
|
329
|
+
|
330
|
+
# Choose which page-size param(s) to set
|
331
|
+
# If not specified, we try the common ones in order.
|
332
|
+
page_size_params = ["limit", "page_size"]
|
333
|
+
|
334
|
+
only_fields = "fields" in body # your existing flag
|
335
|
+
|
336
|
+
while next_url:
|
337
|
+
# Inject per_page into the URL (NOT the JSON body), preserving offset/page/cursor.
|
338
|
+
if per_page:
|
339
|
+
for pname in page_size_params:
|
340
|
+
if pname: # skip None if passed
|
341
|
+
next_url = _set_query_param_on_url(next_url, pname, per_page)
|
342
|
+
|
343
|
+
r = make_request(
|
344
|
+
s=cls.build_session(),
|
345
|
+
loaders=cls.LOADERS,
|
346
|
+
r_type="POST",
|
347
|
+
url=next_url,
|
348
|
+
payload={"json": body}, # filters stay in body
|
349
|
+
time_out=timeout,
|
350
|
+
)
|
351
|
+
|
352
|
+
if r.status_code != 200:
|
353
|
+
if r.status_code == 401:
|
354
|
+
raise Exception("Unauthorized. Please add credentials to environment.")
|
355
|
+
elif r.status_code == 500:
|
356
|
+
raise Exception("Server Error.")
|
357
|
+
elif r.status_code == 404:
|
358
|
+
raise DoesNotExist("Not Found.")
|
359
|
+
elif r.status_code == 405:
|
360
|
+
raise Exception("Method Not Allowed. Ensure the 'query' endpoint accepts POST.")
|
361
|
+
else:
|
362
|
+
raise Exception(f"{r.status_code} - {r.text}")
|
363
|
+
|
364
|
+
data = r.json()
|
365
|
+
next_url = data.get("next") # DRF-provided next URL (may be relative or absolute)
|
366
|
+
|
367
|
+
# Collect results
|
368
|
+
for item in data.get("results", []):
|
369
|
+
if only_fields:
|
370
|
+
accumulated.append(item)
|
371
|
+
else:
|
372
|
+
item["orm_class"] = cls.__name__
|
373
|
+
try:
|
374
|
+
|
375
|
+
accumulated.append(cls(**item) if issubclass(cls, BasePydanticModel) else item)
|
376
|
+
except Exception as e:
|
377
|
+
print(item)
|
378
|
+
print(cls)
|
379
|
+
print(cls(**item))
|
380
|
+
import traceback
|
381
|
+
traceback.print_exc()
|
382
|
+
raise e
|
383
|
+
|
384
|
+
return accumulated
|
385
|
+
|
386
|
+
@classmethod
|
387
|
+
def filter(cls, *args, **kwargs):
|
388
|
+
"""
|
389
|
+
Overrides the default filter to remap 'ticker' and 'name' lookup keys
|
390
|
+
to the corresponding fields on the related current_snapshot.
|
391
|
+
"""
|
392
|
+
transformed_kwargs = cls._translate_query_params(kwargs)
|
393
|
+
return super().filter(*args, **transformed_kwargs)
|
394
|
+
|
395
|
+
@classmethod
|
396
|
+
def get(cls, *args, **kwargs):
|
397
|
+
"""
|
398
|
+
Overrides the default get to remap lookup keys
|
399
|
+
to the corresponding fields on the related current_snapshot.
|
400
|
+
"""
|
401
|
+
transformed_kwargs = cls._translate_query_params(kwargs)
|
402
|
+
return super().get(*args, **transformed_kwargs)
|
403
|
+
|
404
|
+
|
405
|
+
|
406
|
+
|
407
|
+
def get_calendar(self):
|
408
|
+
if self.current_snapshot.exchange_code in COMPOSITE_TO_ISO.keys():
|
409
|
+
return Calendar(name=COMPOSITE_TO_ISO[self.current_snapshot.exchange_code])
|
410
|
+
elif self.security_type == CONSTANTS.FIGI_SECURITY_TYPE_CRYPTO:
|
411
|
+
return Calendar(name="24/7")
|
412
|
+
elif self.security_type_2 == CONSTANTS.FIGI_SECURITY_TYPE_2_CRYPTO:
|
413
|
+
return Calendar(name="24/7")
|
414
|
+
elif self.security_type_2 == CONSTANTS.FIGI_SECURITY_TYPE_2_PERPETUAL:
|
415
|
+
return Calendar(name="24/7")
|
416
|
+
else:
|
417
|
+
return Calendar(name="XNYS")
|
418
|
+
|
419
|
+
|
420
|
+
def pretty_print(self) -> None:
|
421
|
+
"""
|
422
|
+
Print all asset properties in a neat, aligned table.
|
423
|
+
"""
|
424
|
+
# Gather (field_name, value) pairs
|
425
|
+
rows = []
|
426
|
+
for field_name in self.__fields__:
|
427
|
+
value = getattr(self, field_name)
|
428
|
+
rows.append((field_name, value))
|
429
|
+
|
430
|
+
# Compute column widths
|
431
|
+
max_name_len = max(len(name) for name, _ in rows)
|
432
|
+
max_val_len = max(len(str(val)) for _, val in rows)
|
433
|
+
|
434
|
+
# Header
|
435
|
+
header = f"{'Property':<{max_name_len}} | {'Value':<{max_val_len}}"
|
436
|
+
separator = "-" * len(header)
|
437
|
+
print(header)
|
438
|
+
print(separator)
|
439
|
+
|
440
|
+
# Rows
|
441
|
+
for name, val in rows:
|
442
|
+
print(f"{name:<{max_name_len}} | {val}")
|
443
|
+
|
444
|
+
@classmethod
|
445
|
+
def register_asset_from_figi(cls,figi:str,timeout=None):
|
446
|
+
base_url = cls.get_object_url()+"/register_asset_from_figi/"
|
447
|
+
payload = {"json": {"figi":figi} }
|
448
|
+
s = cls.build_session()
|
449
|
+
|
450
|
+
r = make_request(
|
451
|
+
s=s,
|
452
|
+
loaders=cls.LOADERS,
|
453
|
+
r_type="POST",
|
454
|
+
url=base_url,
|
455
|
+
payload=payload,
|
456
|
+
time_out=timeout
|
457
|
+
)
|
458
|
+
|
459
|
+
if r.status_code not in [200,201]:
|
460
|
+
raise Exception(r.text)
|
461
|
+
|
462
|
+
return cls(**r.json())
|
463
|
+
    @classmethod
    def filter_with_asset_class(
        cls,
        timeout=None,
        include_relationship_details_depth=None,
        *args,
        **kwargs
    ):
        """
        Filters assets and returns instances with their correct asset class,
        following DRF pagination until every page has been consumed.

        Args:
            timeout: optional per-request timeout (seconds).
            include_relationship_details_depth: currently unused in the body —
                TODO confirm whether the backend still accepts it.
            **kwargs: asset filters; snapshot-backed keys are translated first.
        """
        # NOTE(review): shadows the module-level function of the same name
        # defined in this file — presumably kept local on purpose; verify.
        from .models_helpers import create_from_serializer_with_class

        base_url = cls.get_object_url()
        # Convert `kwargs` to query parameters
        # kwargs["include_relationship_details_depth"]=include_details
        transformed_kwargs = cls._translate_query_params(kwargs)
        params = cls._parse_parameters_filter(parameters=transformed_kwargs)

        # We'll call the custom action endpoint
        url = f"{base_url}/list_with_asset_class/"
        all_results = []

        # Build a single requests session
        s = cls.build_session()

        while url:
            # Make the request to the current page URL
            request_kwargs = {"params": params} if params else {}
            r = make_request(
                s=s,
                loaders=cls.LOADERS,
                r_type="GET",
                url=url,
                payload=request_kwargs,
                time_out=timeout
            )

            if r.status_code != 200:
                raise Exception(f"Error getting assets (status code: {r.status_code})")

            data = r.json()

            # Check if it's a DRF paginated response by looking for "results"
            if isinstance(data, dict) and "results" in data:
                # Paginated response
                results = data["results"]
                next_url = data["next"]
            else:
                # Either not paginated or no "results" key
                # It's possible your endpoint returns a plain list or other structure
                # Adjust accordingly if needed
                results = data
                next_url = None

            # Accumulate the results
            all_results.extend(results)

            # Prepare for the next loop iteration
            url = next_url
            # After the first request, DRF's `next` link is a full URL that already includes
            # appropriate query params, so we set `params=None` to avoid conflicts.
            params = None

        # Convert the accumulated raw data into asset instances with correct classes
        return create_from_serializer_with_class(all_results)
|
530
|
+
|
531
|
+
|
532
|
+
def clear_asset_pricing_details(self,timeout=None):
|
533
|
+
base_url = self.get_object_url() # e.g., https://api.example.com/assets
|
534
|
+
url = f"{base_url}/{self.id}/clear-asset-pricing-details/"
|
535
|
+
r = make_request(
|
536
|
+
s=self.build_session(),
|
537
|
+
loaders=self.LOADERS,
|
538
|
+
r_type="PATCH",
|
539
|
+
url=url,
|
540
|
+
time_out=timeout,
|
541
|
+
)
|
542
|
+
|
543
|
+
if r.status_code not in (200, 201):
|
544
|
+
raise Exception(r.text)
|
545
|
+
|
546
|
+
|
547
|
+
def add_instrument_pricing_details_from_ms_instrument(self,instrument,
|
548
|
+
pricing_details_date:datetime.datetime,
|
549
|
+
timeout=None):
|
550
|
+
|
551
|
+
|
552
|
+
data=instrument.serialize_for_backend()
|
553
|
+
data=json.loads(data)
|
554
|
+
data["instrument"]["main_sequence_asset_id"]=self.id
|
555
|
+
data["pricing_details_date"]=pricing_details_date.timestamp()
|
556
|
+
|
557
|
+
return self.add_instrument_pricing_details(instrument_pricing_details=data,timeout=timeout)
|
558
|
+
|
559
|
+
|
560
|
+
def add_instrument_pricing_details(
|
561
|
+
self,
|
562
|
+
instrument_pricing_details: Dict[str, Any],
|
563
|
+
timeout: Optional[float] = None,
|
564
|
+
) -> Dict[str, Any]:
|
565
|
+
"""
|
566
|
+
POST /assets/{self.id}/set-asset-pricing-detail/
|
567
|
+
|
568
|
+
Sends the pricing details as a RAW JSON object (no wrapper keys).
|
569
|
+
The backend action treats the entire body as the pricing dump and
|
570
|
+
associates it to (asset, organization_owner).
|
571
|
+
|
572
|
+
Args:
|
573
|
+
instrument_pricing_details: JSON object to store.
|
574
|
+
timeout: optional request timeout (seconds).
|
575
|
+
|
576
|
+
Returns:
|
577
|
+
The server's JSON response (dict).
|
578
|
+
"""
|
579
|
+
if not getattr(self, "id", None):
|
580
|
+
raise ValueError("This object has no 'id'; cannot POST to detail action.")
|
581
|
+
if not isinstance(instrument_pricing_details, dict):
|
582
|
+
raise ValueError("instrument_pricing_details must be a JSON object (dict).")
|
583
|
+
|
584
|
+
base_url = self.get_object_url() # e.g., https://api.example.com/assets
|
585
|
+
url = f"{base_url}/{self.id}/set-asset-pricing-detail/"
|
586
|
+
|
587
|
+
r = make_request(
|
588
|
+
s=self.build_session(),
|
589
|
+
loaders=self.LOADERS,
|
590
|
+
r_type="POST",
|
591
|
+
url=url,
|
592
|
+
payload={"json": instrument_pricing_details}, # raw body (no 'dump', no 'organization_id')
|
593
|
+
time_out=timeout,
|
594
|
+
)
|
595
|
+
|
596
|
+
if r.status_code not in (200, 201):
|
597
|
+
if r.status_code == 401:
|
598
|
+
raise Exception("Unauthorized. Please add credentials to environment.")
|
599
|
+
elif r.status_code == 404:
|
600
|
+
raise DoesNotExist("Asset not found.")
|
601
|
+
elif r.status_code == 405:
|
602
|
+
raise Exception("Method Not Allowed. Ensure the custom action is enabled.")
|
603
|
+
elif r.status_code == 413:
|
604
|
+
raise Exception("Payload Too Large. Consider compressing or splitting.")
|
605
|
+
elif r.status_code >= 500:
|
606
|
+
raise Exception("Server Error.")
|
607
|
+
else:
|
608
|
+
raise Exception(f"{r.status_code} - {r.text}")
|
609
|
+
|
610
|
+
data = r.json()
|
611
|
+
|
612
|
+
data.get("instrument_pricing_detail")
|
613
|
+
when=data["pricing_details_date"]
|
614
|
+
self.current_pricing_detail = AssetPricingDetail(instrument_dump=data["instrument_dump"],
|
615
|
+
pricing_details_date=datetime.datetime.utcfromtimestamp(when).replace(tzinfo=pytz.utc)
|
616
|
+
)
|
617
|
+
|
618
|
+
|
619
|
+
|
620
|
+
|
621
|
+
|
622
|
+
|
623
|
+
class AssetCategory(BaseObjectOrm, BasePydanticModel):
    """Named, organization-owned grouping of assets (held as ids or instances)."""

    id: int
    unique_identifier: str
    display_name: str
    source: str
    assets: List[Union[int, "Asset"]]
    organization_owner_uid: str
    description: Optional[str] = None

    def __repr__(self):
        return f"{self.display_name} source: {self.source}, {len(self.assets)} assets"

    def get_assets(self):
        """Resolve the stored asset ids into Asset instances.

        Raises:
            ValueError: when the category holds no assets.
        """
        if not self.assets:
            raise ValueError(f"No assets in Asset Category {self.display_name}")
        return Asset.filter(id__in=self.assets)

    def update_assets(self, asset_ids: List[int]):
        """Replace the category's membership with *asset_ids*."""
        self.remove_assets(self.assets)
        self.append_assets(asset_ids)

    def append_assets(self, asset_ids: List[int]) -> "AssetCategory":
        """
        Append the given asset IDs to this category.
        Expects a payload: {"assets": [<asset_id1>, <asset_id2>, ...]}
        """
        url = f"{self.get_object_url()}/{self.id}/append-assets/"
        payload = {"assets": asset_ids}
        r = make_request(
            s=self.build_session(),
            loaders=self.LOADERS,
            r_type="POST",
            url=url,
            payload={"json": payload}
        )
        if r.status_code not in [200, 201]:
            # Bug fix: requests' Response.text is a property, not a method;
            # r.text() raised TypeError and masked the backend error message.
            raise Exception(f"Error appending assets: {r.text}")
        # Return a new instance of AssetCategory built from the response JSON.
        return AssetCategory(**r.json())

    def remove_assets(self, asset_ids: List[int]) -> "AssetCategory":
        """
        Remove the given asset IDs from this category.
        Expects a payload: {"assets": [<asset_id1>, <asset_id2>, ...]}
        """
        url = f"{self.get_object_url()}/{self.id}/remove-assets/"
        payload = {"assets": asset_ids}
        r = make_request(
            s=self.build_session(),
            loaders=self.LOADERS,
            r_type="POST",
            url=url,
            payload={"json": payload}
        )
        if r.status_code not in [200, 201]:
            # Bug fix: same property-vs-method fix as append_assets.
            raise Exception(f"Error removing assets: {r.text}")
        # Return a new instance of AssetCategory built from the response JSON.
        return AssetCategory(**r.json())

    @classmethod
    def get_or_create(cls, *args, **kwargs):
        """Fetch the category matching **kwargs, creating it if missing."""
        url = f"{cls.get_object_url()}/get-or-create/"
        payload = {"json": kwargs}
        r = make_request(
            s=cls.build_session(),
            loaders=cls.LOADERS,
            r_type="POST",
            url=url,
            payload=payload
        )
        if r.status_code not in [200, 201]:
            # Message fix: previously read the garbled "Error appending creating".
            raise Exception(f"Error creating category: {r.text}")
        # Return a new instance of AssetCategory built from the response JSON.
        return AssetCategory(**r.json())
|
697
|
+
|
698
|
+
|
699
|
+
|
700
|
+
# Domain-specific error for the asset-translation rule engine below.
class TranslationError(RuntimeError):
    """Raised when no translation rule (or more than one) matches an asset."""
|
702
|
+
|
703
|
+
class AssetFilter(BaseModel):
    """Attribute filter used to match assets against translation rules.

    A ``None`` (or empty) attribute is treated as "don't care".
    """
    security_type: Optional[str] = None
    security_market_sector: Optional[str] = None

    def filter_triggered(self, asset: "Asset") -> bool:
        """Return True when every configured attribute equals the asset's."""
        constraints = (
            (self.security_type, asset.security_type),
            (self.security_market_sector, asset.security_market_sector),
        )
        # Only truthy expectations participate, mirroring the original guards.
        return all(actual == expected
                   for expected, actual in constraints if expected)
|
713
|
+
|
714
|
+
class AssetTranslationRule(BaseModel):
    """Maps assets that pass ``asset_filter`` to a market time serie / exchange."""
    asset_filter: AssetFilter
    markets_time_serie_unique_identifier: str
    target_exchange_code: Optional[str] = None

    def is_asset_in_rule(self, asset: "Asset") -> bool:
        """Delegate matching to the attached filter."""
        triggered = self.asset_filter.filter_triggered(asset)
        return triggered
|
721
|
+
|
722
|
+
class AssetTranslationTable(BaseObjectOrm, BasePydanticModel):
    """
    Mirrors the Django model 'AssetTranslationTableModel' in the backend.
    """
    id: int = None
    unique_identifier: str
    rules: List[AssetTranslationRule] = Field(default_factory=list)

    @staticmethod
    def _rule_key(rule: AssetTranslationRule):
        # Identity triple used for local duplicate detection on add/remove.
        return (rule.asset_filter,
                rule.markets_time_serie_unique_identifier,
                rule.target_exchange_code)

    def evaluate_asset(self, asset):
        """Return translation info from the first rule matching `asset`.

        Raises:
            TranslationError: when no rule matches.
        """
        for rule in self.rules:
            if rule.is_asset_in_rule(asset):
                return {
                    "markets_time_serie_unique_identifier": rule.markets_time_serie_unique_identifier,
                    "exchange_code": rule.target_exchange_code,
                }
        raise TranslationError(f"No rules for asset {asset} found")

    def add_rules(self, rules: List[AssetTranslationRule], open_for_everyone=False) -> None:
        """
        Add each rule to the translation table by calling the backend's 'add_rule' endpoint.
        Prevents local duplication. If the server also rejects a duplicate,
        it returns an error which we silently ignore.
        """
        base_url = self.get_object_url()
        for new_rule in rules:
            # 1) Skip rules we already hold locally.
            if any(self._rule_key(r) == self._rule_key(new_rule) for r in self.rules):
                logger.debug(f"Rule {new_rule} already present - skipping")
                continue

            # 2) Post to backend's "add_rule"
            url = f"{base_url}/{self.id}/add_rule/"
            payload = new_rule.model_dump()
            if open_for_everyone:
                payload["open_for_everyone"] = True
                payload["asset_filter"]["open_for_everyone"] = True

            r = make_request(
                s=self.build_session(),
                loaders=self.LOADERS,
                r_type="POST",
                url=url,
                payload={"json": payload},
            )

            if r.status_code == 201:
                # Created on the server -> mirror locally.
                self.rules.append(new_rule)
            elif r.status_code not in (200, 201):
                raise Exception(f"Error adding rule: {r.text}")

    def remove_rules(self, rules: List[AssetTranslationRule]) -> None:
        """
        Remove each rule from the translation table by calling the backend's 'remove_rule' endpoint.
        Once successfully removed on the server, remove it from the local list `self.rules`.
        If a rule is not found on the server, we skip silently.
        """
        base_url = self.get_object_url()
        for rule_to_remove in rules:
            # 1) Only contact the server for rules we actually hold locally.
            matching_local = [
                r for r in self.rules
                if self._rule_key(r) == self._rule_key(rule_to_remove)
            ]
            if not matching_local:
                continue

            # 2) Post to backend's "remove_rule"
            url = f"{base_url}/{self.id}/remove_rule/"
            r = make_request(
                s=self.build_session(),
                loaders=self.LOADERS,
                r_type="POST",
                url=url,
                payload={"json": rule_to_remove.model_dump()},
            )

            if r.status_code == 200:
                # Successfully removed from server => remove from local
                for matched in matching_local:
                    self.rules.remove(matched)
            elif r.status_code not in (200, 204):
                # BUGFIX: was `r.text()` -- response.text is a property; calling
                # it raised TypeError and masked the real server error.
                raise Exception(f"Error removing rule: {r.text}")
|
816
|
+
|
817
|
+
|
818
|
+
class Asset(AssetMixin, BaseObjectOrm):
    """Concrete ORM-backed asset; its own identifier is the spot reference."""

    def get_spot_reference_asset_unique_identifier(self):
        return self.unique_identifier

    @classmethod
    def _post(cls, endpoint: str, json_payload, timeout=None):
        # Shared helper: POST `json_payload` to `<object-url>/<endpoint>/`.
        # Factored out of four near-identical method bodies; behavior unchanged.
        return make_request(
            s=cls.build_session(),
            loaders=cls.LOADERS,
            r_type="POST",
            url=f"{cls.get_object_url()}/{endpoint}/",
            payload={"json": json_payload},
            time_out=timeout,
        )

    @classmethod
    def create_or_update_index_asset_from_portfolios(
            cls,
            reference_portfolio: int,
            timeout=None
    ) -> "PortfolioIndexAsset":
        """Create/update the index asset backing `reference_portfolio`."""
        r = cls._post("create_or_update_index_asset_from_portfolios",
                      dict(reference_portfolio=reference_portfolio),
                      timeout=timeout)
        if r.status_code not in [200, 201]:
            raise Exception(f"{r.text}")
        return PortfolioIndexAsset(**r.json())

    @classmethod
    def get_or_register_from_isin(cls, isin: str, exchange_code: str, timeout=None) -> "Asset":
        """Look up (or register) an asset by ISIN + exchange code."""
        r = cls._post("get_or_register_from_isin",
                      {"isin": isin, "exchange_code": exchange_code},
                      timeout=timeout)
        if r.status_code not in (200, 201):
            raise Exception(f"Error registering asset: {r.text}")
        return cls(**r.json())

    @classmethod
    def get_or_register_custom_asset(cls, timeout=None, **kwargs):
        """Look up (or register) a custom asset described by `kwargs`."""
        r = cls._post("get_or_register_custom_asset", kwargs, timeout=timeout)
        if r.status_code not in (200, 201):
            raise Exception(f"Error registering asset: {r.text}")
        return cls(**r.json())

    @classmethod
    def batch_get_or_register_custom_assets(cls, assets_data: List[Dict], timeout=None) -> List["Asset"]:
        """
        Calls the batch endpoint to get or register multiple custom assets.

        Args:
            assets_data: A list of dictionaries, where each dictionary
                         represents the data for one asset.
            timeout: Optional request timeout in seconds.

        Returns:
            A list of Asset objects.
        """
        r = cls._post("batch_get_or_register_custom_assets", assets_data, timeout=timeout)
        # NOTE: unlike the single-asset endpoints, only 200 is accepted here,
        # matching the original behavior.
        if r.status_code != 200:
            raise Exception(f"Error in batch asset registration: {r.text}")
        return [cls(**data) for data in r.json()]
|
916
|
+
|
917
|
+
class PortfolioIndexAsset(Asset):
    # Backing portfolio: hydrated object or bare primary key.
    reference_portfolio: Union["Portfolio", int]

    @property
    def reference_portfolio_details_url(self):
        """Dashboard URL for the backing portfolio.

        BUGFIX: previously read ``self.reference_portfolios`` (no such
        attribute -- the declared field is ``reference_portfolio``), which
        raised AttributeError on every access. Also tolerates a
        non-hydrated (int) portfolio reference.
        """
        ref = self.reference_portfolio
        portfolio_id = ref if isinstance(ref, int) else ref.id
        return f"{TDAG_ENDPOINT}/dashboards/portfolio-detail/?target_portfolio_id={portfolio_id}"
|
923
|
+
|
924
|
+
class AssetCurrencyPair(AssetMixin, BasePydanticModel):
    """A base/quote currency pair; pair-level queries delegate to the base leg."""
    base_asset: Union[AssetMixin, int]
    quote_asset: Union[AssetMixin, int]

    def get_spot_reference_asset_unique_identifier(self):
        """The spot reference is the base leg's unique identifier."""
        base = self.base_asset
        return base.unique_identifier

    def get_ms_share_class(self):
        """Share-class lookup is delegated to the base asset."""
        base = self.base_asset
        return base.get_ms_share_class()
|
933
|
+
|
934
|
+
class FutureUSDMMixin(AssetMixin, BasePydanticModel):
    """Fields/behavior shared by USD-margined future assets."""
    maturity_code: str = Field(..., max_length=50)
    last_trade_time: Optional[datetime.datetime] = None
    currency_pair: AssetCurrencyPair

    def get_spot_reference_asset_unique_identifier(self):
        """Spot reference of the future's base leg, normalized per venue."""
        symbol = self.currency_pair.base_asset.unique_identifier
        if self.execution_venue_symbol != CONSTANTS.BINANCE_FUTURES_EV_SYMBOL:
            return symbol
        # Binance USDM lists some assets with a "1000" multiplier prefix;
        # replace() is a no-op when "1000SHIB" is not present.
        return symbol.replace("1000SHIB", "SHIB")
|
947
|
+
|
948
|
+
class AssetFutureUSDM(FutureUSDMMixin, BaseObjectOrm):
    # ORM-backed USD-M future; all behavior lives in FutureUSDMMixin.
    pass
|
950
|
+
|
951
|
+
|
952
|
+
class AccountPortfolioScheduledRebalance(BaseObjectOrm, BasePydanticModel):
    """A scheduled rebalance job and its execution-lifecycle flags."""
    id: int
    target_account_portfolio: Optional[dict] = None
    # NOTE(review): scheduled_time/execution_start are strings while
    # execution_end is a datetime -- presumably ISO-8601 strings from the
    # API; confirm before parsing.
    scheduled_time: str = None
    received_in_execution_engine: bool = False
    executed: bool = False
    execution_start: Optional[str] = None
    execution_end: Optional[datetime.datetime] = None
    execution_message: Optional[str] = None
|
961
|
+
|
962
|
+
|
963
|
+
|
964
|
+
class AccountExecutionConfiguration(BasePydanticModel):
    """Per-account execution and rebalance tuning parameters."""
    related_account: int  # Assuming related_account is represented by its ID
    rebalance_tolerance_percent: float = Field(0.02, ge=0)
    minimum_notional_for_a_rebalance: float = Field(15.00, ge=0)
    max_latency_in_cdc_seconds: float = Field(60.00, ge=0)
    force_market_order_on_execution_remaining_balances: bool = Field(False)
    # Free-form dicts; schemas are defined by the execution engine -- confirm there.
    orders_execution_configuration: Dict[str, Any]
    cooldown_configuration: Dict[str, Any]
|
972
|
+
|
973
|
+
class AccountPortfolioPosition(BasePydanticModel):
    """One target-position row inside a portfolio positions snapshot."""
    id: Optional[int]
    parent_positions: Optional[int]  # presumably FK id of the owning snapshot -- confirm
    target_asset: int  # asset primary key
    # The three exposure encodings all default to 0.0; which one applies is
    # decided server-side -- confirm with the backend model.
    weight_notional_exposure: Optional[float] = 0.0
    constant_notional_exposure: Optional[float] = 0.0
    single_asset_quantity: Optional[float] = 0.0
|
980
|
+
|
981
|
+
class AccountPortfolioHistoricalPositions(BaseObjectOrm, BasePydanticModel):
    """Dated snapshot of target positions for an account portfolio."""
    id: Optional[int]
    positions_date: datetime.datetime
    comments: Optional[str]
    positions: list[AccountPortfolioPosition]
|
986
|
+
|
987
|
+
|
988
|
+
|
989
|
+
class AccountPortfolio(BaseObjectOrm, BasePydanticModel):
    """Target portfolio attached to an account."""
    id: int
    related_account: Optional[int]
    latest_positions: Optional[AccountPortfolioHistoricalPositions] = None
    model_portfolio_name: Optional[str] = None
    model_portfolio_description: Optional[str] = None

    @property
    def unique_identifier(self):
        # BUGFIX: previously returned `self.related_account_id`, which is not
        # a declared field (the field is `related_account`) and raised
        # AttributeError on every access.
        return self.related_account
|
998
|
+
|
999
|
+
|
1000
|
+
|
1001
|
+
class AccountMixin(BasePydanticModel):
    """Shared fields and rebalance/holdings helpers for account models."""
    id: Optional[int] = None
    uuid: str
    execution_venue: Union["ExecutionVenue", int]
    account_is_active: bool
    account_name: Optional[str] = None
    is_paper: bool
    account_target_portfolio: Optional[AccountPortfolio] = None  # can be none on creation without holdings
    latest_holdings: Union["AccountLatestHoldings", None] = None

    def build_rebalance(
            self,
            latest_holdings: "AccountHistoricalHoldings",
            tolerance: float,
            change_cash_asset_to_currency_asset: Union[Asset, None] = None,
    ):
        """Build a rebalance plan from expected-vs-actual asset exposure.

        Returns a (new_rebalance, new_implicit_holdings,
        not_rebalanced_by_tolerance) triple keyed by (possibly
        cash-switched) asset ids.

        NOTE(review): despite the Optional default,
        `change_cash_asset_to_currency_asset` is dereferenced (.id)
        unconditionally below -- passing None will fail; confirm callers
        always supply it.
        """
        nav = self.get_nav()
        nav, nav_date = nav["nav"], nav["nav_date"]
        related_expected_asset_exposure_df = latest_holdings.related_expected_asset_exposure_df
        # extract Target Rebalance

        # extract expected holdings: per-asset sums of actual and expected
        # holdings plus the last observed price.
        try:
            implicit_holdings_df = related_expected_asset_exposure_df.groupby("aid") \
                .aggregate({"holding": "sum", "price": "last", "expected_holding_in_fund": "sum"}) \
                .rename(columns={"expected_holding_in_fund": "expected_holding"})
        except Exception as e:
            raise e
        # Gap between expected and actual, then its weight relative to NAV.
        implicit_holdings_df["difference"] = (
            implicit_holdings_df["expected_holding"] - implicit_holdings_df["holding"])
        implicit_holdings_df["relative_w"] = (implicit_holdings_df["difference"] * implicit_holdings_df["price"]) / nav
        # Zero-out differences whose relative weight is below `tolerance`.
        implicit_holdings_df["tolerance_flag"] = implicit_holdings_df["relative_w"].apply(
            lambda x: 1 if x >= tolerance else 0)
        implicit_holdings_df["difference"] = implicit_holdings_df["difference"] * implicit_holdings_df[
            "tolerance_flag"]
        implicit_holdings_df["expected_holding"] = implicit_holdings_df["holding"] + implicit_holdings_df[
            "difference"]

        implicit_holdings = implicit_holdings_df[["expected_holding", "price"]] \
            .rename(columns={"expected_holding": "holding"}).T.to_dict()

        implicit_holdings_df["reference_notional"] = implicit_holdings_df["price"] * implicit_holdings_df["difference"]
        rebalance = implicit_holdings_df[["difference", "reference_notional", "price"]] \
            .rename(columns={"difference": "quantity", "price": "reference_price"}).T.to_dict()

        all_assets = implicit_holdings.keys()
        new_rebalance, new_implicit_holdings = {}, {}
        # build_asset_switch: map cash-like assets to the target currency asset.
        asset_switch_map = Asset.switch_cash_in_asset_list(
            asset_id_list=[c for c in all_assets if c != change_cash_asset_to_currency_asset.id],
            target_currency_asset_id=int(change_cash_asset_to_currency_asset.id))
        asset_switch_map[
            str(change_cash_asset_to_currency_asset.id)] = change_cash_asset_to_currency_asset.serialized_config

        for a_id in all_assets:
            try:
                new_a = Asset(**asset_switch_map[str(a_id)])
            except Exception as e:
                raise e
            # Only non-zero quantities enter the rebalance plan.
            if rebalance[a_id]["quantity"] != 0.0:
                new_rebalance[new_a.id] = {"rebalance": rebalance[a_id],
                                           "asset": new_a}
            # Implicit holdings are recorded for every asset (switched id).
            try:
                new_implicit_holdings[new_a.id] = implicit_holdings[a_id]
            except Exception as e:
                raise e
        # Assets that had a gap but fell below the tolerance threshold.
        not_rebalanced_by_tolerance = implicit_holdings_df[implicit_holdings_df["difference"] != 0]
        not_rebalanced_by_tolerance = not_rebalanced_by_tolerance[not_rebalanced_by_tolerance["tolerance_flag"] == 0][
            "relative_w"]
        not_rebalanced_by_tolerance = {"tolerance": not_rebalanced_by_tolerance.to_dict()}
        return new_rebalance, new_implicit_holdings, not_rebalanced_by_tolerance

    def get_latest_holdings(self):
        """Fetch this account's most recent holdings snapshot from the backend."""
        base_url = self.get_object_url()
        url = f"{base_url}/{self.id}/latest_holdings/"
        r = make_request(s=self.build_session(), loaders=self.LOADERS, r_type="GET", url=url)
        if r.status_code != 200:
            raise Exception("Error Syncing funds in account")
        return AccountHistoricalHoldings(**r.json())

    def get_missing_assets_in_exposure(self, asset_list_ids, timeout=None) -> list[Asset]:
        """Return assets from `asset_list_ids` absent from this account's exposure."""
        base_url = self.get_object_url()
        url = f"{base_url}/{self.id}/get_missing_assets_in_exposure/"
        # NOTE(review): a JSON body on a GET request is unusual -- confirm the
        # backend actually reads it.
        payload = {"json": {"asset_list_ids": asset_list_ids, }}

        r = make_request(s=self.build_session(), payload=payload, loaders=self.LOADERS, r_type="GET", url=url, time_out=timeout)
        if r.status_code != 200:
            raise Exception(r.text)

        asset_list = []
        for a in r.json():
            asset_list.append(resolve_asset(a))

        return asset_list
|
1097
|
+
|
1098
|
+
class RebalanceTargetPosition(BasePydanticModel):
    """Target weight for one portfolio inside an Account.rebalance call."""
    target_portfolio_id: int
    weight_notional_exposure: float
|
1101
|
+
|
1102
|
+
class Account(AccountMixin, BaseObjectOrm, BasePydanticModel):
    """Trading account bound to an execution venue (fields on AccountMixin)."""

    @classmethod
    def get_or_create(cls,
                      create_without_holdings=False,
                      timeout=None, **kwargs):
        """Get or create an account on the backend described by `kwargs`."""
        base_url = cls.get_object_url()
        url = f"{base_url}/get-or-create/"
        kwargs["create_without_holdings"] = create_without_holdings
        payload = {"json": kwargs}

        r = make_request(s=cls.build_session(), loaders=cls.LOADERS, r_type="POST", url=url,
                         payload=payload,
                         time_out=timeout)
        if r.status_code not in [200, 201]:
            # Typo fixed: message previously read "geting".
            raise Exception(f"Error getting or creating account {r.text}")
        return cls(**r.json())

    def set_account_target_portfolio_from_asset_holdings(self, timeout=None):
        """Ask the backend to derive the target portfolio from current holdings."""
        base_url = self.get_object_url()
        url = f"{base_url}/{self.id}/set_account_target_portfolio_from_asset_holdings/"
        r = make_request(s=self.build_session(), loaders=self.LOADERS, r_type="GET", url=url,
                         time_out=timeout)
        if r.status_code != 200:
            raise Exception(f"Error set_account_target_portfolio_from_asset_holdings in account {r.text}")

    def snapshot_account(self, timeout=None):
        """Trigger a holdings snapshot of this account on the backend."""
        base_url = self.get_object_url()
        url = f"{base_url}/{self.id}/snapshot_account/"
        r = make_request(s=self.build_session(), loaders=self.LOADERS, r_type="GET", url=url,
                         time_out=timeout)
        if r.status_code != 200:
            # Message fixed: previously a copy-paste claimed a NAV error.
            raise Exception(f"Error snapshotting account {r.text}")

    def get_tracking_error_details(self, timeout=None):
        """Return (fund_summary, account_tracking_error) from the backend."""
        base_url = self.get_object_url()
        url = f"{base_url}/{self.id}/get_tracking_error_details/"
        r = make_request(s=self.build_session(), loaders=self.LOADERS, r_type="GET", url=url,
                         time_out=timeout)
        if r.status_code != 200:
            # Message fixed: previously a copy-paste claimed a NAV error.
            raise Exception(f"Error getting tracking error details in account {r.text}")
        result = r.json()
        return result['fund_summary'], result['account_tracking_error']

    def rebalance(
            self,
            target_positions: List[RebalanceTargetPosition],
            scheduled_time: Optional[datetime.datetime] = None,
            timeout=None
    ) -> AccountPortfolioScheduledRebalance:
        """Schedule a rebalance toward `target_positions`.

        Raises:
            ValueError: when the same target portfolio id appears twice.
        """
        parsed_target_positions = {}
        for target_position in target_positions:
            if target_position.target_portfolio_id in parsed_target_positions:
                raise ValueError(f"Duplicate target portfolio id: {target_position.target_portfolio_id} not allowed")

            parsed_target_positions[target_position.target_portfolio_id] = {
                "weight_notional_exposure": target_position.weight_notional_exposure,
            }

        return AccountPortfolioScheduledRebalance.create(
            timeout=timeout,
            target_positions=parsed_target_positions,
            target_account_portfolio=self.id,
            scheduled_time=scheduled_time,
        )

    def get_historical_holdings(self, start_date: Optional[datetime.datetime] = None,
                                end_date: Optional[datetime.datetime] = None, timeout=None) -> pd.DataFrame:
        """
        Retrieves historical holdings data for the account over a specified date range.

        Args:
            start_date (datetime, optional): Start datetime (UTC) for filtering holdings.
                Naive datetimes are assumed to be UTC.
            end_date (datetime, optional): End datetime (UTC) for filtering holdings.
                Naive datetimes are assumed to be UTC.
            timeout (int, optional): Optional timeout parameter (currently unused).

        Returns:
            pd.DataFrame: Indexed by (`time_index` UTC datetime, `asset_id` int)
            with columns:

            - **missing_price (bool)**: whether the asset price was missing that date.
            - **price (float)**: recorded price of the asset at holding time.
            - **quantity (float)**: quantity held at that time.

            Empty DataFrame when no holdings fall in the range.

        Example Output:
                                                  missing_price  price   quantity
            time_index                asset_id
            2025-06-23 17:59:57+00:00 62376       False          1.0     1000000.0
        """
        filter_search = dict(related_account__id=self.id)
        if start_date is not None:
            if isinstance(start_date, datetime.datetime):
                # BUGFIX: the filter previously used a `start_date_timestamp`
                # local that was only bound when start_date was naive, so a
                # tz-aware start_date raised NameError. Mirror the end_date
                # branch: normalize, then always apply the filter.
                if start_date.tzinfo is None:
                    start_date = start_date.replace(tzinfo=pytz.utc)
                filter_search['holdings_date__gte'] = start_date.isoformat()

        if end_date is not None:
            if isinstance(end_date, datetime.datetime):
                if end_date.tzinfo is None:
                    end_date = end_date.replace(tzinfo=pytz.utc)
                filter_search['holdings_date__lte'] = end_date.isoformat()

        holdings = AccountHistoricalHoldings.filter(**filter_search)
        if len(holdings) == 0:
            return pd.DataFrame()
        positions_df = []
        for holding in holdings:
            holding_date = holding.holdings_date
            for position in holding.holdings:
                pos = position.model_dump()
                # Drop ORM bookkeeping fields before tabulating.
                pos.pop("orm_class", None)
                pos.pop("parents_holdings", None)
                pos.pop("id", None)
                pos.pop("extra_details", None)
                pos["time_index"] = holding_date

                positions_df.append(pos)

        positions_df = pd.DataFrame(positions_df).rename(
            columns={"asset": "asset_id"}).set_index(["time_index", "asset_id"])
        return positions_df
|
1234
|
+
|
1235
|
+
class AccountPositionDetail(BaseObjectOrm, BasePydanticModel):
    """One asset line inside an account holdings snapshot."""
    id: Optional[int] = None
    asset: Union[Asset, int] = None  # hydrated asset or bare primary key
    missing_price: bool = False  # presumably set when no price was available -- confirm server-side
    price: float
    quantity: float
    parents_holdings: Optional[int] = None  # presumably FK id of the owning holdings record
    extra_details: Optional[dict] = None
|
1243
|
+
|
1244
|
+
class AccountHistoricalHoldingsMixin:
    """Shared fields/behavior for latest and historical account holdings."""
    # NOTE(review): `primary_key=True` is not a standard pydantic Field kwarg;
    # presumably consumed by the ORM layer -- confirm before relying on it.
    id: Optional[int] = Field(None, primary_key=True)
    holdings_date: datetime.datetime
    comments: Optional[str] = Field(None, max_length=150)
    nav: Optional[float] = None

    is_trade_snapshot: bool = Field(default=False)
    target_trade_time: Optional[datetime.datetime] = None
    related_expected_asset_exposure_df: Optional[List[Dict[str, Any]]] = None

    holdings: List[AccountPositionDetail]

    def get_nav(self):
        """GET `<url>/<id>/get_nav/`; callers read the "nav"/"nav_date" keys."""
        base_url = self.get_object_url()
        url = f"{base_url}/{self.id}/get_nav/"
        r = make_request(s=self.build_session(), loaders=self.LOADERS, r_type="GET", url=url)
        if r.status_code != 200:
            raise Exception(f"Error Getting NAV in account {r.text}")
        return r.json()
|
1264
|
+
|
1265
|
+
|
1266
|
+
|
1267
|
+
class AccountLatestHoldings(AccountHistoricalHoldingsMixin, BaseObjectOrm, BasePydanticModel):
    """
    Same as AccountHistoricalHoldings but does not include the related account.
    """
    ...
|
1273
|
+
|
1274
|
+
|
1275
|
+
|
1276
|
+
class AccountHistoricalHoldings(AccountHistoricalHoldingsMixin, BaseObjectOrm, BasePydanticModel):
    """Dated holdings snapshot tied to a specific account."""

    related_account: Union[int, "Account"]

    @classmethod
    def destroy_holdings_before_date(cls, target_date: datetime.datetime,
                                     keep_trade_snapshots: bool):
        """Delete holdings older than `target_date` (expects HTTP 204).

        Args:
            target_date: cutoff; serialized with DATE_FORMAT.
            keep_trade_snapshots: when True the server keeps trade snapshots.
        """
        base_url = cls.get_object_url()
        payload = {"json": {"target_date": target_date.strftime(DATE_FORMAT),
                            "keep_trade_snapshots": keep_trade_snapshots}}

        r = make_request(
            s=cls.build_session(),
            loaders=cls.LOADERS,
            r_type="POST",
            url=f"{base_url}/destroy_holdings_before_date/",
            payload=payload
        )
        if r.status_code != 204:
            raise Exception(r.text)

    @classmethod
    def create_with_holdings(cls, position_list: List[AccountPositionDetail],
                             holdings_date: int,
                             related_account: int,
                             extra_details: dict = None,
                             timeout=None
                             ):
        """Create a holdings record plus its position rows in one call.

        NOTE(review): `holdings_date` is typed int -- presumably an epoch
        timestamp; confirm against the endpoint. `extra_details` is accepted
        but never sent to the server -- confirm whether that is intentional.
        """
        base_url = cls.get_object_url()
        # Strip ORM bookkeeping keys from each position before posting.
        payload = {"json": {"position_list": [{k: v for k, v in p.model_dump().items() if k not in ["orm_class", "id", "parents_holdings"]} for p in position_list],
                            "holdings_date": holdings_date,
                            "related_account": related_account,
                            }}

        r = make_request(
            s=cls.build_session(),
            loaders=cls.LOADERS,
            r_type="POST",
            url=f"{base_url}/create_with_holdings/",
            payload=payload, time_out=timeout
        )
        if r.status_code != 201:
            raise Exception(r.text)
        return cls(**r.json())
|
1323
|
+
|
1324
|
+
|
1325
|
+
|
1326
|
+
class AccountRiskFactors(BaseObjectOrm, BasePydanticModel):
    """Risk-factor record linked to a specific holdings snapshot."""
    related_holdings: Union[int, AccountHistoricalHoldings]
    account_balance: float
|
1329
|
+
|
1330
|
+
|
1331
|
+
class FundingFeeTransaction(BaseObjectOrm):
    # Endpoint-only model: no local fields beyond what BaseObjectOrm provides.
    pass
|
1333
|
+
|
1334
|
+
class AccountPortfolioHistoricalWeights(BaseObjectOrm):
    # Endpoint-only model: no local fields beyond what BaseObjectOrm provides.
    pass
|
1336
|
+
|
1337
|
+
class WeightPosition(BaseObjectOrm, BasePydanticModel):
    """Asset weight inside a portfolio weights snapshot."""
    # id: Optional[int] = None
    # parent_weights: int
    asset: Union[AssetMixin, int]
    weight_notional_exposure: float

    @property
    def asset_id(self):
        # Works whether `asset` is hydrated or a bare primary key.
        return self.asset if isinstance(self.asset, int) else self.asset.id

    @root_validator(pre=True)
    def resolve_assets(cls, values):
        # Check if 'asset' is a dict and determine its type; hydrate it into
        # the proper asset model via resolve_asset before validation.
        if isinstance(values.get('asset'), dict):
            asset = values.get('asset')
            asset = resolve_asset(asset_dict=asset)
            values['asset'] = asset

        return values
|
1356
|
+
|
1357
|
+
|
1358
|
+
|
1359
|
+
class ExecutionVenue(BaseObjectOrm, BasePydanticModel):
    """A trading venue; `symbol` doubles as its unique identifier."""
    id: Optional[int] = None
    symbol: str
    name: str

    @property
    def unique_identifier(self):
        return f"{self.symbol}"
|
1367
|
+
|
1368
|
+
|
1369
|
+
|
1370
|
+
class TradeSide(IntEnum):
    # Signed convention: SELL is -1, BUY is +1.
    SELL = -1
    BUY = 1
|
1373
|
+
|
1374
|
+
class Trade(BaseObjectOrm, BasePydanticModel):
    """A single executed trade, optionally linked to a fund/account/order."""
    id: Optional[int] = None

    # Use a default_factory to set the default trade_time to now (with UTC timezone)
    trade_time: datetime.datetime
    trade_side: TradeSide
    asset: Optional[Union[AssetMixin, int]]
    quantity: float
    price: float
    commission: Optional[float]
    commission_asset: Optional[Union[AssetMixin, int]]

    related_fund: Optional[Union["VirtualFund", int]]
    related_account: Optional[Union[Account, int]]
    related_order: Optional[Union["Order", int]]

    settlement_cost: Optional[float]
    settlement_asset: Optional[Union[AssetMixin, int]]

    comments: Optional[str]
    venue_specific_properties: Optional[Dict]

    @classmethod
    def create_or_update(cls, trade_kwargs, timeout=None) -> "Trade":
        """Create or update a trade on the backend and return the parsed instance.

        Return annotation corrected (was ``-> None``; the method returns cls(...)).
        """
        url = f"{cls.get_object_url()}/create_or_update/"
        data = cls.serialize_for_json(trade_kwargs)
        payload = {"json": data}
        r = make_request(s=cls.build_session(), loaders=cls.LOADERS, r_type="POST", url=url, payload=payload,
                         time_out=timeout)
        # BUGFIX: the original check `if r.status_code in [200] == False:` is a
        # chained comparison -- `(r.status_code in [200]) and ([200] == False)` --
        # which is always False, so HTTP errors were never raised. Accept
        # 200/201 like the module's other create-or-update endpoints.
        if r.status_code not in (200, 201):
            # BUGFIX: `r.text` is a property, not a method.
            raise Exception(f" {r.text}")
        return cls(**r.json())
|
1406
|
+
|
1407
|
+
class OrdersExecutionConfiguration(BaseModel):
    """Order-routing configuration: broker class name plus its config dict."""
    broker_class: str  # presumably a dotted import path -- confirm with the execution engine
    broker_configuration: dict
|
1410
|
+
|
1411
|
+
|
1412
|
+
|
1413
|
+
class PortfolioTags(BasePydanticModel):
    """Display tag attached to a portfolio."""
    id: Optional[int] = None
    name: str
    color: str  # presumably a CSS/hex color string -- confirm with the UI
|
1417
|
+
|
1418
|
+
from typing import TypedDict
|
1419
|
+
|
1420
|
+
class PortfolioAbout(TypedDict):
    """Descriptive metadata dict passed when creating a portfolio."""
    description: str
    signal_name: str
    signal_description: str
    rebalance_strategy_name: str
|
1425
|
+
|
1426
|
+
|
1427
|
+
class PortfolioMixin:
    """Shared fields and helpers for portfolio models."""
    id: Optional[int] = None
    is_active: bool = False
    local_time_serie: Optional['LocalTimeSerie']
    signal_local_time_serie: Optional['LocalTimeSerie']
    follow_account_rebalance: bool = False
    comparable_portfolios: Optional[List[int]] = None
    backtest_table_price_column_name: Optional[str] = Field(None, max_length=20)
    tags: Optional[List['PortfolioTags']] = None
    calendar: Optional['Calendar']
    index_asset: PortfolioIndexAsset  # index asset backing this portfolio
|
1438
|
+
|
1439
|
+
def pretty_print(self) -> str:
|
1440
|
+
def format_field(name, value):
|
1441
|
+
if isinstance(value, list):
|
1442
|
+
val = ', '.join(str(v) for v in value)
|
1443
|
+
elif hasattr(value, '__str__'):
|
1444
|
+
val = str(value)
|
1445
|
+
else:
|
1446
|
+
val = repr(value)
|
1447
|
+
return f"{name:35}: {val}"
|
1448
|
+
|
1449
|
+
fields = self.__fields__
|
1450
|
+
lines = [format_field(name, getattr(self, name, None)) for name in fields]
|
1451
|
+
return "\n".join(lines)
|
1452
|
+
|
1453
|
+
@classmethod
|
1454
|
+
def create_from_time_series(
|
1455
|
+
cls,
|
1456
|
+
portfolio_name: str,
|
1457
|
+
local_time_serie_id: int,
|
1458
|
+
signal_local_time_serie_id: int,
|
1459
|
+
is_active: bool,
|
1460
|
+
calendar_name: str,
|
1461
|
+
target_portfolio_about: PortfolioAbout,
|
1462
|
+
backtest_table_price_column_name: str,
|
1463
|
+
tags: Optional[list] = None,
|
1464
|
+
timeout=None
|
1465
|
+
) -> "Portfolio":
|
1466
|
+
url = f"{cls.get_object_url()}/create_from_time_series/"
|
1467
|
+
# Build the payload with the required arguments.
|
1468
|
+
payload_data = {
|
1469
|
+
"portfolio_name": portfolio_name,
|
1470
|
+
"is_active": is_active,
|
1471
|
+
"local_time_serie_id": local_time_serie_id,
|
1472
|
+
"signal_local_time_serie_id": signal_local_time_serie_id,
|
1473
|
+
# Using the same ID for local_signal_time_serie_id as specified.
|
1474
|
+
"calendar_name": calendar_name,
|
1475
|
+
"target_portfolio_about": target_portfolio_about,
|
1476
|
+
"backtest_table_price_column_name": backtest_table_price_column_name,
|
1477
|
+
"tags": tags,
|
1478
|
+
}
|
1479
|
+
|
1480
|
+
r = make_request(s=cls.build_session(), loaders=cls.LOADERS, r_type="POST", url=url,
|
1481
|
+
payload={"json": payload_data}, time_out=timeout)
|
1482
|
+
if r.status_code not in [201]:
|
1483
|
+
raise Exception(f" {r.text}")
|
1484
|
+
response = r.json()
|
1485
|
+
|
1486
|
+
return cls(**response["portfolio"]), PortfolioIndexAsset(**response["portfolio_index_asset"])
|
1487
|
+
|
1488
|
+
@property
|
1489
|
+
def portfolio_name(self) -> str:
|
1490
|
+
return self.index_asset.current_snapshot.name
|
1491
|
+
@property
|
1492
|
+
def portfolio_ticker(self)->str:
|
1493
|
+
return self.index_asset.current_snapshot.ticker
|
1494
|
+
|
1495
|
+
def add_venue(self, venue_id) -> None:
|
1496
|
+
url = f"{self.get_object_url()}/{self.id}/add_venue/"
|
1497
|
+
payload = {"json": {"venue_id": venue_id}}
|
1498
|
+
r = make_request(s=self.build_session(), loaders=self.LOADERS, r_type="PATCH", url=url, payload=payload)
|
1499
|
+
if r.status_code in [200] == False:
|
1500
|
+
raise Exception(f" {r.text()}")
|
1501
|
+
|
1502
|
+
def get_latest_weights(self,timeout=None)->Dict[str, float]:
|
1503
|
+
url = f"{self.get_object_url()}/{self.id}/get_latest_weights/"
|
1504
|
+
r = make_request(s=self.build_session(), loaders=self.LOADERS, r_type="GET", url=url,
|
1505
|
+
time_out=timeout
|
1506
|
+
)
|
1507
|
+
if r.status_code in [200] == False:
|
1508
|
+
raise Exception(f" {r.text()}")
|
1509
|
+
results=r.json()
|
1510
|
+
return results["weights"],datetime.datetime.utcfromtimestamp(results["weights_date"]).replace(tzinfo=pytz.utc)
|
1511
|
+
|
1512
|
+
def get_historical_weights(self,
|
1513
|
+
start_date_timestamp:float,end_date_timestamp:float,
|
1514
|
+
timeout=None)->Dict[str, float]:
|
1515
|
+
if self.local_time_serie is None:
|
1516
|
+
print("this portfolio does not have a weights table")
|
1517
|
+
self.local_time_serie
|
1518
|
+
|
1519
|
+
|
1520
|
+
class Portfolio(PortfolioMixin, BaseObjectOrm, BasePydanticModel):
    """Concrete portfolio model; all behavior comes from PortfolioMixin plus the
    ORM/pydantic base classes."""
    pass
|
1522
|
+
|
1523
|
+
|
1524
|
+
class PortfolioGroup(BaseObjectOrm, BasePydanticModel):
    """A named collection of portfolios, managed through custom append/remove
    API actions."""
    id: int
    unique_identifier: str
    display_name: str
    source: str
    # Either bare PKs or fully resolved Portfolio objects, depending on the endpoint.
    portfolios: List[Union[int, "Portfolio"]]
    description: Optional[str] = None

    def __repr__(self):
        return f"{self.display_name} ({self.unique_identifier}), {len(self.portfolios)} portfolios"

    def _update_membership(self, action: str, error_prefix: str,
                           portfolio_ids: List[int]) -> "PortfolioGroup":
        """Shared implementation for the append/remove custom actions.

        POSTs the portfolio ids to ``.../{action}/`` and refreshes this instance
        in-place from the server response. (Extracted from two copy-pasted
        method bodies; behavior unchanged.)
        """
        url = f"{self.get_object_url()}/{self.id}/{action}/"
        payload = {"portfolios": portfolio_ids}

        r = make_request(
            s=self.build_session(),
            loaders=self.LOADERS,
            r_type="POST",
            url=url,
            payload={"json": payload}
        )

        if r.status_code != 200:
            raise Exception(f"{error_prefix} {r.text}")

        # Update the current instance in-place with the response from the server
        updated_data = r.json()
        for key, value in updated_data.items():
            setattr(self, key, value)

        return self

    def append_portfolios(self, portfolio_ids: List[int]) -> "PortfolioGroup":
        """
        Appends portfolios to the group by calling the custom API action.

        Args:
            portfolio_ids: A list of portfolio primary keys to add to the group.

        Returns:
            The updated PortfolioGroup instance.
        """
        if not self.id:
            raise ValueError("Cannot append portfolios to an unsaved PortfolioGroup.")
        return self._update_membership("append-portfolios",
                                       "Error appending portfolios:", portfolio_ids)

    def remove_portfolios(self, portfolio_ids: List[int]) -> "PortfolioGroup":
        """
        Removes portfolios from the group by calling the custom API action.

        Args:
            portfolio_ids: A list of portfolio primary keys to remove from the group.

        Returns:
            The updated PortfolioGroup instance.
        """
        if not self.id:
            raise ValueError("Cannot remove portfolios from an unsaved PortfolioGroup.")
        return self._update_membership("remove-portfolios",
                                       "Error removing portfolios:", portfolio_ids)
|
1602
|
+
|
1603
|
+
class ExecutionPrediction(BaseObjectOrm):
    """Client-side handle for registering model predictions against a time serie."""

    @classmethod
    def add_prediction_from_time_serie(
            cls,
            time_serie_hash_id: str,
            prediction_time: datetime.datetime,
            symbol_to_search_map,
            predictions: dict,
            human_readable_name: Union[None, str] = None,
            timeout=None
    ):
        """Register predictions produced by a time serie.

        Args:
            time_serie_hash_id: Hash id of the originating time serie.
            prediction_time: When the prediction was made (formatted with DATE_FORMAT).
            symbol_to_search_map: Mapping from prediction symbols to search keys.
            predictions: The prediction payload.
            human_readable_name: NOTE(review) — accepted but never sent to the
                API in the original code; kept as-is, confirm intent.
            timeout: Optional request timeout.

        Returns:
            The decoded JSON response.

        Raises:
            Exception: if the server does not answer 201/204.
        """
        url = f"{cls.get_object_url()}/add_prediction_from_time_serie/"
        payload = {"json": {"time_serie_hash_id": time_serie_hash_id,
                            "prediction_time": prediction_time.strftime(DATE_FORMAT),
                            "symbol_to_search_map": symbol_to_search_map,
                            "predictions": predictions,
                            }, }

        r = make_request(s=cls.build_session(),
                         loaders=cls.LOADERS, r_type="POST", url=url, payload=payload, time_out=timeout)
        # Fix: the original `in [201, 204] == False` chained comparison was always
        # False, so HTTP errors were silently ignored.
        if r.status_code not in [201, 204]:
            raise Exception(f"Error inserting new prediction{r.text}")
        return r.json()
|
1626
|
+
|
1627
|
+
class VirtualFundPositionDetail(BaseObjectOrm, BasePydanticModel):
    """A single position (asset, price, quantity) inside a fund's holdings snapshot."""
    id: Optional[int] = None
    asset: Union[Asset, AssetFutureUSDM, int]
    price: float
    quantity: float
    parents_holdings: Union[int, "VirtualFundHistoricalHoldings"]

    @property
    def asset_id(self):
        """Primary key of the asset, whether it is stored as a bare id or a full object."""
        if isinstance(self.asset, int):
            return self.asset
        return self.asset.id

    @root_validator(pre=True)
    def resolve_assets(cls, values):
        """Turn a raw asset dict coming from the API into a concrete asset instance."""
        raw_asset = values.get('asset')
        if isinstance(raw_asset, dict):
            values['asset'] = resolve_asset(asset_dict=raw_asset)
        return values
|
1647
|
+
|
1648
|
+
class VirtualFundHistoricalHoldings(BaseObjectOrm, BasePydanticModel):
    """Snapshot of a virtual fund's holdings at a point in time."""
    # Owning fund, as a full object or a bare PK.
    related_fund: Union["VirtualFund",int]  # assuming VirtualFund is another Pydantic model
    # When the trade targeting these holdings was scheduled, if any.
    target_trade_time: Optional[datetime.datetime] = None
    # Target weights for the rebalance that produced this snapshot, if any.
    target_weights: Optional[dict] = Field(default=None)
    # True when this snapshot was taken at trade time (vs. a periodic snapshot) — presumed; TODO confirm.
    is_trade_snapshot: bool = Field(default=False)
    # Target notional exposure of the fund inside the account.
    fund_account_target_exposure: float = Field(default=0)
    # Exposure expressed in units, when available.
    fund_account_units_exposure: Optional[float] = Field(default=None)
    # Per-asset position details making up the snapshot.
    holdings:list[VirtualFundPositionDetail]
|
1656
|
+
|
1657
|
+
class ExecutionQuantity(BaseModel):
    """A quantity of an asset to execute, with an optional reference price."""
    asset: Union[Asset, AssetFutureUSDM, int]
    quantity: float
    reference_price: Union[None, float]

    def __repr__(self):
        return "{}(asset={}, quantity={})".format(type(self).__name__, self.asset, self.quantity)

    @root_validator(pre=True)
    def resolve_assets(cls, values):
        """Resolve a raw asset dict from the API into a concrete asset instance."""
        raw_asset = values.get('asset')
        if isinstance(raw_asset, dict):
            values['asset'] = resolve_asset(asset_dict=raw_asset)
        return values
|
1674
|
+
|
1675
|
+
class TargetRebalance(BaseModel):
    """The set of execution quantities making up a target rebalance."""
    # target_execution_positions: ExecutionPositions
    execution_target: List[ExecutionQuantity]

    @property
    def rebalance_asset_map(self):
        """Map each targeted asset's id to its asset object."""
        asset_map = {}
        for entry in self.execution_target:
            asset_map[entry.asset.id] = entry.asset
        return asset_map
|
1682
|
+
|
1683
|
+
class VirtualFund(BaseObjectOrm, BasePydanticModel):
    """A virtual fund: a target portfolio mapped onto a trading account with a
    given notional exposure, tracking NAV and holdings snapshots.

    (Two large blocks of commented-out rebalance-construction code were removed
    as dead code; see VCS history if they need to be resurrected.)
    """
    # NOTE(review): float for a primary key is unusual (other models use int) — confirm.
    id: Optional[float] = None
    target_portfolio: Union[int, "Portfolio"]
    target_account: AccountMixin
    notional_exposure_in_account: float
    latest_holdings: "VirtualFundHistoricalHoldings" = None
    latest_rebalance: Optional[datetime.datetime] = None
    fund_nav: float = Field(default=0)
    fund_nav_date: Optional[datetime.datetime] = None
    requires_nav_adjustment: bool = Field(default=False)
    target_portfolio_weight_in_account: Optional[float] = None
    last_trade_time: Optional[datetime.datetime] = None

    # The three original per-field validators were byte-identical; a single
    # multi-field validator replaces them with unchanged behavior.
    @validator('last_trade_time', 'fund_nav_date', 'latest_rebalance', pre=True, always=True)
    def _parse_datetime_fields(cls, value):
        """Coerce API string timestamps into datetimes."""
        return validator_for_string(value)

    def get_account(self):
        """Fetch the fund's account from the API.

        NOTE(review): passes ``self.target_account`` directly as the id, which
        assumes it holds a bare PK rather than an AccountMixin instance — confirm.
        """
        a, r = Account.get(id=self.target_account)
        return a

    def get_latest_trade_snapshot_holdings(self):
        """Fetch the most recent trade-snapshot holdings for this fund.

        Returns:
            A VirtualFundHistoricalHoldings instance, or None when the server
            returns an empty body.

        Raises:
            HtmlSaveException: if the server does not answer 200.
        """
        url = f"{self.get_object_url()}/{int(self.id)}/get_latest_trade_snapshot_holdings/"
        r = make_request(s=self.build_session(),
                         loaders=self.LOADERS, r_type="GET", url=url)

        if r.status_code != 200:
            raise HtmlSaveException(r.text)
        # Parse the body once instead of re-decoding it on every access.
        data = r.json()
        if len(data) == 0:
            return None
        return VirtualFundHistoricalHoldings(**data)
|
1806
|
+
|
1807
|
+
class OrderStatus(str, Enum):
    """Lifecycle states of an Order; values are the wire strings used by the API."""
    LIVE = "live"
    FILLED = "filled"
    PARTIALLY_FILLED = "partially_filled"
    CANCELED = "canceled"
    # Default state before the order reaches the venue.
    NOT_PLACED = "not_placed"
|
1813
|
+
class OrderTimeInForce(str, Enum):
    """Supported time-in-force policies; only GTC is defined so far."""
    GOOD_TILL_CANCELED = "gtc"
|
1815
|
+
class OrderSide(IntEnum):
    """Order direction encoded as a signed int so quantity * side gives signed exposure."""
    SELL = -1
    BUY = 1
|
1818
|
+
|
1819
|
+
class OrderType(str, Enum):
    """Kinds of orders; values are the wire strings used by the API."""
    MARKET = "market"
    LIMIT = "limit"
    # Placeholder type for orders that were never sent to a venue.
    NOT_PLACED = "not_placed"
|
1823
|
+
|
1824
|
+
class Order(BaseObjectOrm, BasePydanticModel):
    """A trading order tracked by the backend, with venue ids, fill state and
    foreign keys to its manager, asset, fund and account."""
    id: Optional[int] = Field(None, primary_key=True)
    # Id assigned by the venue/exchange.
    order_remote_id: str
    # Client-side id sent with the order.
    client_order_id: str
    order_type: OrderType
    order_time: datetime.datetime
    expires_time: Optional[datetime.datetime] = None
    order_side: OrderSide  # Use int for choices (-1: SELL, 1: BUY)
    quantity: float
    status: OrderStatus = OrderStatus.NOT_PLACED
    filled_quantity: Optional[float] = 0.0
    filled_price: Optional[float] = None
    order_manager: Union[int, "OrderManager"] = None  # Assuming foreign key ID is used
    asset: int  # Assuming foreign key ID is used
    related_fund: Optional[int] = None  # Assuming foreign key ID is used
    related_account: int  # Assuming foreign key ID is used
    time_in_force: str
    comments: Optional[str] = None

    class Config:
        use_enum_values = True  # This allows using enum values directly

    @classmethod
    def create_or_update(cls, order_time_stamp: float, *args, **kwargs):
        """Create or update an order server-side.

        Args:
            order_time_stamp: POSIX-style timestamp used as the order's
                ``order_time`` field.
            **kwargs: Remaining order fields, forwarded verbatim as JSON.

        Returns:
            The created/updated Order instance.

        Raises:
            Exception: if the server does not answer 200/201.
        """
        url = f"{cls.get_object_url()}/create_or_update/"
        kwargs['order_time'] = order_time_stamp
        payload = {"json": kwargs}

        r = make_request(
            s=cls.build_session(),
            loaders=cls.LOADERS,
            r_type="POST",
            url=url,
            payload=payload
        )

        if r.status_code not in [200, 201]:
            # Fix: the original `raise r.text` raised a plain string, which is a
            # TypeError in Python 3; wrap the body in an Exception instead.
            raise Exception(r.text)
        return cls(**r.json())
|
1872
|
+
class MarketOrder(Order):
    """An Order executed at market price; no extra fields beyond the base Order."""
    pass
|
1874
|
+
|
1875
|
+
class LimitOrder(Order):
    """An Order constrained by a limit price."""
    # Price beyond which the order must not execute.
    limit_price: float
|
1877
|
+
|
1878
|
+
class OrderManagerTargetQuantity(BaseModel):
    """A per-asset target quantity inside an OrderManager rebalance."""
    # Asset as a bare PK or a full Asset object.
    asset: Union[int, Asset]
    # Target quantity; Decimal to preserve exchange precision.
    quantity: Decimal
|
1881
|
+
|
1882
|
+
class OrderManager(BaseObjectOrm, BasePydanticModel):
    """Coordinates a rebalance for an account: target quantities, timing, and
    bulk cleanup of historical managers."""
    id: Optional[int] = None
    # When the rebalance should take effect.
    target_time: datetime.datetime
    target_rebalance: list[OrderManagerTargetQuantity]
    order_received_time: Optional[datetime.datetime] = None
    execution_end: Optional[datetime.datetime] = None
    related_account: Union[Account, int]  # Representing the ForeignKey field with the related account ID

    @staticmethod
    def serialize_for_json(kwargs):
        """Return a JSON-safe copy of *kwargs*: datetimes become strings and
        ``target_rebalance`` models are dumped to plain dicts; everything else
        is deep-copied so the result never aliases the caller's data."""
        new_data = {}
        for key, value in kwargs.items():
            if isinstance(value, datetime.datetime):
                new_data[key] = str(value)
            elif key == "target_rebalance":
                new_data[key] = [json.loads(c.model_dump_json()) for c in value]
            else:
                # Only deep-copy when the value is actually kept; the original
                # deep-copied every value and then discarded the copy on the
                # two branches above (identical output, less work).
                new_data[key] = copy.deepcopy(value)
        return new_data

    @classmethod
    def destroy_before_date(cls, target_date: datetime.datetime):
        """Delete all order managers older than *target_date* server-side.

        Raises:
            Exception: if the server does not answer 204.
        """
        base_url = cls.get_object_url()
        payload = {
            "json": {
                "target_date": target_date.strftime(DATE_FORMAT),
            },
        }

        r = make_request(
            s=cls.build_session(),
            loaders=cls.LOADERS,
            r_type="POST",
            url=f"{base_url}/destroy_before_date/",
            payload=payload
        )

        if r.status_code != 204:
            raise Exception(r.text)
|
1922
|
+
|
1923
|
+
|
1924
|
+
# ------------------------------
|
1925
|
+
# ALPACA
|
1926
|
+
# ------------------------------
|
1927
|
+
|
1928
|
+
class AlpacaAccountRiskFactors(AccountRiskFactors):
    """Risk/margin figures reported by Alpaca for an account.

    Field names mirror the Alpaca account API (snake_cased) — presumed 1:1
    mapping; TODO confirm against the Alpaca docs.
    """
    total_initial_margin: float
    total_maintenance_margin: float
    last_equity: float
    buying_power: float
    cash: float
    last_maintenance_margin: float
    long_market_value: float
    non_marginable_buying_power: float
    options_buying_power: float
    portfolio_value:float
    regt_buying_power: float
    sma: float
|
1941
|
+
|
1942
|
+
class AlpacaAccount(AccountMixin,):
    """An Alpaca trading account: API credentials plus the status flags the
    Alpaca account endpoint reports."""
    # Alpaca API credentials.
    api_key: str
    secret_key: str

    account_number: str
    # Alpaca account id in hex form.
    id_hex: str
    account_blocked: bool
    # Buying-power multiplier (margin leverage).
    multiplier: float
    options_approved_level: int
    options_trading_level: int
    pattern_day_trader: bool
    trade_suspended_by_user: bool
    trading_blocked: bool
    transfers_blocked: bool
    shorting_enabled: bool
|
1957
|
+
|
1958
|
+
|
1959
|
+
|
1960
|
+
# ------------------------------
|
1961
|
+
# BINANCE
|
1962
|
+
# ------------------------------
|
1963
|
+
|
1964
|
+
|
1965
|
+
class BinanceFuturesAccountRiskFactors(AccountRiskFactors):
    """Risk/margin figures reported by Binance for a futures account.

    Field names mirror the Binance futures account endpoint (snake_cased) —
    presumed 1:1 mapping; TODO confirm against the Binance docs.
    """
    total_initial_margin: float
    total_maintenance_margin: float
    total_margin_balance: float
    total_unrealized_profit: float
    total_cross_wallet_balance: float
    total_cross_unrealized_pnl: float
    available_balance: float
    max_withdraw_amount: float
|
1974
|
+
|
1975
|
+
class BaseFuturesAccount(Account):
|
1976
|
+
api_key :str
|
1977
|
+
secret_key :str
|
1978
|
+
|
1979
|
+
multi_assets_margin: bool = False
|
1980
|
+
fee_burn: bool = False
|
1981
|
+
can_deposit: bool = False
|
1982
|
+
can_withdraw: bool = False
|
1983
|
+
|