mainsequence 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mainsequence/__init__.py +0 -0
- mainsequence/__main__.py +9 -0
- mainsequence/cli/__init__.py +1 -0
- mainsequence/cli/api.py +157 -0
- mainsequence/cli/cli.py +442 -0
- mainsequence/cli/config.py +78 -0
- mainsequence/cli/ssh_utils.py +126 -0
- mainsequence/client/__init__.py +17 -0
- mainsequence/client/base.py +431 -0
- mainsequence/client/data_sources_interfaces/__init__.py +0 -0
- mainsequence/client/data_sources_interfaces/duckdb.py +1468 -0
- mainsequence/client/data_sources_interfaces/timescale.py +479 -0
- mainsequence/client/models_helpers.py +113 -0
- mainsequence/client/models_report_studio.py +412 -0
- mainsequence/client/models_tdag.py +2276 -0
- mainsequence/client/models_vam.py +1983 -0
- mainsequence/client/utils.py +387 -0
- mainsequence/dashboards/__init__.py +0 -0
- mainsequence/dashboards/streamlit/__init__.py +0 -0
- mainsequence/dashboards/streamlit/assets/config.toml +12 -0
- mainsequence/dashboards/streamlit/assets/favicon.png +0 -0
- mainsequence/dashboards/streamlit/assets/logo.png +0 -0
- mainsequence/dashboards/streamlit/core/__init__.py +0 -0
- mainsequence/dashboards/streamlit/core/theme.py +212 -0
- mainsequence/dashboards/streamlit/pages/__init__.py +0 -0
- mainsequence/dashboards/streamlit/scaffold.py +220 -0
- mainsequence/instrumentation/__init__.py +7 -0
- mainsequence/instrumentation/utils.py +101 -0
- mainsequence/instruments/__init__.py +1 -0
- mainsequence/instruments/data_interface/__init__.py +10 -0
- mainsequence/instruments/data_interface/data_interface.py +361 -0
- mainsequence/instruments/instruments/__init__.py +3 -0
- mainsequence/instruments/instruments/base_instrument.py +85 -0
- mainsequence/instruments/instruments/bond.py +447 -0
- mainsequence/instruments/instruments/european_option.py +74 -0
- mainsequence/instruments/instruments/interest_rate_swap.py +217 -0
- mainsequence/instruments/instruments/json_codec.py +585 -0
- mainsequence/instruments/instruments/knockout_fx_option.py +146 -0
- mainsequence/instruments/instruments/position.py +475 -0
- mainsequence/instruments/instruments/ql_fields.py +239 -0
- mainsequence/instruments/instruments/vanilla_fx_option.py +107 -0
- mainsequence/instruments/pricing_models/__init__.py +0 -0
- mainsequence/instruments/pricing_models/black_scholes.py +49 -0
- mainsequence/instruments/pricing_models/bond_pricer.py +182 -0
- mainsequence/instruments/pricing_models/fx_option_pricer.py +90 -0
- mainsequence/instruments/pricing_models/indices.py +350 -0
- mainsequence/instruments/pricing_models/knockout_fx_pricer.py +209 -0
- mainsequence/instruments/pricing_models/swap_pricer.py +502 -0
- mainsequence/instruments/settings.py +175 -0
- mainsequence/instruments/utils.py +29 -0
- mainsequence/logconf.py +284 -0
- mainsequence/reportbuilder/__init__.py +0 -0
- mainsequence/reportbuilder/__main__.py +0 -0
- mainsequence/reportbuilder/examples/ms_template_report.py +706 -0
- mainsequence/reportbuilder/model.py +713 -0
- mainsequence/reportbuilder/slide_templates.py +532 -0
- mainsequence/tdag/__init__.py +8 -0
- mainsequence/tdag/__main__.py +0 -0
- mainsequence/tdag/config.py +129 -0
- mainsequence/tdag/data_nodes/__init__.py +12 -0
- mainsequence/tdag/data_nodes/build_operations.py +751 -0
- mainsequence/tdag/data_nodes/data_nodes.py +1292 -0
- mainsequence/tdag/data_nodes/persist_managers.py +812 -0
- mainsequence/tdag/data_nodes/run_operations.py +543 -0
- mainsequence/tdag/data_nodes/utils.py +24 -0
- mainsequence/tdag/future_registry.py +25 -0
- mainsequence/tdag/utils.py +40 -0
- mainsequence/virtualfundbuilder/__init__.py +45 -0
- mainsequence/virtualfundbuilder/__main__.py +235 -0
- mainsequence/virtualfundbuilder/agent_interface.py +77 -0
- mainsequence/virtualfundbuilder/config_handling.py +86 -0
- mainsequence/virtualfundbuilder/contrib/__init__.py +0 -0
- mainsequence/virtualfundbuilder/contrib/apps/__init__.py +8 -0
- mainsequence/virtualfundbuilder/contrib/apps/etf_replicator_app.py +164 -0
- mainsequence/virtualfundbuilder/contrib/apps/generate_report.py +292 -0
- mainsequence/virtualfundbuilder/contrib/apps/load_external_portfolio.py +107 -0
- mainsequence/virtualfundbuilder/contrib/apps/news_app.py +437 -0
- mainsequence/virtualfundbuilder/contrib/apps/portfolio_report_app.py +91 -0
- mainsequence/virtualfundbuilder/contrib/apps/portfolio_table.py +95 -0
- mainsequence/virtualfundbuilder/contrib/apps/run_named_portfolio.py +45 -0
- mainsequence/virtualfundbuilder/contrib/apps/run_portfolio.py +40 -0
- mainsequence/virtualfundbuilder/contrib/apps/templates/base.html +147 -0
- mainsequence/virtualfundbuilder/contrib/apps/templates/report.html +77 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/__init__.py +5 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/external_weights.py +61 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/intraday_trend.py +149 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/market_cap.py +310 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/mock_signal.py +78 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/portfolio_replicator.py +269 -0
- mainsequence/virtualfundbuilder/contrib/prices/__init__.py +1 -0
- mainsequence/virtualfundbuilder/contrib/prices/data_nodes.py +810 -0
- mainsequence/virtualfundbuilder/contrib/prices/utils.py +11 -0
- mainsequence/virtualfundbuilder/contrib/rebalance_strategies/__init__.py +1 -0
- mainsequence/virtualfundbuilder/contrib/rebalance_strategies/rebalance_strategies.py +313 -0
- mainsequence/virtualfundbuilder/data_nodes.py +637 -0
- mainsequence/virtualfundbuilder/enums.py +23 -0
- mainsequence/virtualfundbuilder/models.py +282 -0
- mainsequence/virtualfundbuilder/notebook_handling.py +42 -0
- mainsequence/virtualfundbuilder/portfolio_interface.py +272 -0
- mainsequence/virtualfundbuilder/resource_factory/__init__.py +0 -0
- mainsequence/virtualfundbuilder/resource_factory/app_factory.py +170 -0
- mainsequence/virtualfundbuilder/resource_factory/base_factory.py +238 -0
- mainsequence/virtualfundbuilder/resource_factory/rebalance_factory.py +101 -0
- mainsequence/virtualfundbuilder/resource_factory/signal_factory.py +183 -0
- mainsequence/virtualfundbuilder/utils.py +381 -0
- mainsequence-2.0.0.dist-info/METADATA +105 -0
- mainsequence-2.0.0.dist-info/RECORD +110 -0
- mainsequence-2.0.0.dist-info/WHEEL +5 -0
- mainsequence-2.0.0.dist-info/licenses/LICENSE +40 -0
- mainsequence-2.0.0.dist-info/top_level.txt +1 -0
mainsequence/instruments/data_interface/data_interface.py
@@ -0,0 +1,361 @@
import datetime
from typing import Dict, Optional, TypedDict, Any
import random
from mainsequence.instruments.utils import to_ql_date
import QuantLib as ql
import os
import pandas as pd
from pathlib import Path


class DateInfo(TypedDict, total=False):
    """Defines the date range for a data query."""
    start_date: Optional[datetime.datetime]
    start_date_operand: Optional[str]
    end_date: Optional[datetime.datetime]
    end_date_operand: Optional[str]


UniqueIdentifierRangeMap = Dict[str, DateInfo]

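# --- Editor's note (illustration, not part of the package source) ---
# A UniqueIdentifierRangeMap is keyed by an asset/curve identifier and maps to a
# DateInfo describing the query window; the identifier below is hypothetical:
#
#   range_descriptor: UniqueIdentifierRangeMap = {
#       "TIIE28": {
#           "start_date": datetime.datetime(2024, 9, 10),
#           "start_date_operand": ">=",
#           "end_date": datetime.datetime(2024, 9, 17),
#           "end_date_operand": "<=",
#       }
#   }
#
# This is the shape passed as `range_descriptor` to get_ranged_data_per_asset further below.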

class MockDataInterface:
    """
    A mock class to simulate fetching financial time series data from an API.

    In a real-world scenario, this class would contain logic to connect to a
    financial data provider (e.g., Bloomberg, Refinitiv, a database).
    """

    @staticmethod
    def get_historical_fixings(index_name: str, start_date: datetime.date, end_date: datetime.date) -> Dict[
        datetime.date, float]:
        """
        Simulates fetching historical index fixings from a database.

        Dynamically selects the appropriate calendar based on the index name.
        """
        # Dynamically select the calendar based on the index name
        calendar = ql.TARGET()  # Default calendar
        if 'USDLibor' in index_name:
            calendar = ql.UnitedKingdom()
            print("Using UnitedKingdom calendar for LIBOR.")
        elif 'Euribor' in index_name:
            calendar = ql.TARGET()  # TARGET is the standard for EUR rates
            print("Using TARGET calendar for Euribor.")
        elif 'SOFR' in index_name:
            calendar = ql.UnitedStates(ql.UnitedStates.SOFR)
            print("Using UnitedStates.SOFR calendar for SOFR.")
        elif index_name == "TIIE28":
            DEFAULT_TIIE_CSV = Path(__file__).resolve().parents[2] / "data" / "TIIE28_FIXINGS.csv"
            csv_path = os.getenv("TIIE28_FIXINGS_CSV") or str(DEFAULT_TIIE_CSV)

            fixings = pd.read_csv(csv_path)
            fixings.columns = ["date", "rate"]
            fixings["date"] = pd.to_datetime(fixings["date"], format="%m/%d/%Y")
            fixings["date"] = fixings["date"].dt.date
            if end_date > fixings["date"].max():
                raise Exception("Fixing not existent")
            fixings = fixings[fixings.date <= end_date]
            fixings["rate"] = fixings["rate"] / 100
            return fixings.set_index("date")["rate"].to_dict()

        elif 'TIIE' in index_name or 'F-TIIE' in index_name:
            raise Exception("Unrecognized index name")

        print("---------------------\n")

        fixings = {}
        current_date = start_date
        base_rate = 0.05

        while current_date <= end_date:
            ql_date = to_ql_date(current_date)
            # Only generate a fixing if the date is a business day for the selected calendar
            if calendar.isBusinessDay(ql_date):
                random_factor = (random.random() - 0.5) * 0.001
                fixings[current_date] = base_rate + random_factor

            current_date += datetime.timedelta(days=1)

        return fixings

    @staticmethod
    def get_historical_discount_curve(table_name: str, asset_range_map: UniqueIdentifierRangeMap) -> Dict[str, Any]:
        """
        Simulates fetching historical data for a given asset or data type.

        Args:
            table_name: The name of the data table to query.
            asset_range_map: A dictionary mapping identifiers to date ranges.

        Returns:
            A dictionary containing mock market data.
        """
        print("--- MOCK DATA API ---")
        print(f"Fetching data from table '{table_name}' for assets: {list(asset_range_map.keys())}")
        print("---------------------\n")

        if table_name == "equities_daily":
            asset_ticker = list(asset_range_map.keys())[0]
            mock_data = {
                asset_ticker: {
                    "spot_price": 175.50,
                    "volatility": 0.20,
                    "dividend_yield": 0.015,
                    "risk_free_rate": 0.04
                }
            }
            if asset_ticker in mock_data:
                return mock_data[asset_ticker]
            else:
                raise ValueError(f"No mock data available for asset: {asset_ticker}")

        elif table_name == "interest_rate_swaps":
            # A more realistic set of market rates for curve bootstrapping.
            # This includes short-term deposit rates and longer-term swap rates.
            return {
                "curve_nodes": [
                    {'type': 'deposit', 'tenor': '3M', 'rate': 0.048},
                    {'type': 'deposit', 'tenor': '6M', 'rate': 0.050},
                    {'type': 'swap', 'tenor': '1Y', 'rate': 0.052},
                    {'type': 'swap', 'tenor': '2Y', 'rate': 0.054},
                    {'type': 'swap', 'tenor': '3Y', 'rate': 0.055},
                    {'type': 'swap', 'tenor': '5Y', 'rate': 0.056},
                    {'type': 'swap', 'tenor': '10Y', 'rate': 0.057},
                ]
            }
        elif table_name == "discount_bond_curve":
            # Zero rates for discounting bond cashflows (simple upward-sloping curve).
            # Tenors are parsed by QuantLib (e.g., "6M", "5Y").
            return {
                "curve_nodes": [
                    # --- Zero-coupon section (<= 1Y) ---
                    {"type": "zcb", "days_to_maturity": 30, "yield": 0.0370},
                    {"type": "zcb", "days_to_maturity": 90, "yield": 0.0385},
                    {"type": "zcb", "days_to_maturity": 180, "yield": 0.0395},
                    {"type": "zcb", "days_to_maturity": 270, "yield": 0.0405},
                    {"type": "zcb", "days_to_maturity": 360, "yield": 0.0410},

                    # --- Coupon bond section (>= 2Y) ---
                    {"type": "bond", "days_to_maturity": 730, "coupon": 0.0425, "clean_price": 99.20,
                     "dirty_price": 99.45, "frequency": "6M", "day_count": "30/360"},
                    {"type": "bond", "days_to_maturity": 1095, "coupon": 0.0440, "clean_price": 98.85,
                     "dirty_price": 99.10, "frequency": "6M", "day_count": "30/360"},
                    {"type": "bond", "days_to_maturity": 1825, "coupon": 0.0475, "clean_price": 98.10,
                     "dirty_price": 98.40, "frequency": "6M", "day_count": "30/360"},
                    {"type": "bond", "days_to_maturity": 2555, "coupon": 0.0490, "clean_price": 97.25,
                     "dirty_price": 97.60, "frequency": "6M", "day_count": "30/360"},
                    {"type": "bond", "days_to_maturity": 3650, "coupon": 0.0500, "clean_price": 96.80,
                     "dirty_price": 97.20, "frequency": "6M", "day_count": "30/360"},
                ]
            }
        elif table_name == "fx_options":
            # Mock FX options market data
            currency_pair = list(asset_range_map.keys())[0]

            # Mock data for common currency pairs
            fx_mock_data = {
                "EURUSD": {
                    "spot_fx_rate": 1.0850,
                    "volatility": 0.12,
                    "domestic_rate": 0.045,  # USD rate
                    "foreign_rate": 0.035    # EUR rate
                },
                "GBPUSD": {
                    "spot_fx_rate": 1.2650,
                    "volatility": 0.15,
                    "domestic_rate": 0.045,  # USD rate
                    "foreign_rate": 0.040    # GBP rate
                },
                "USDJPY": {
                    "spot_fx_rate": 148.50,
                    "volatility": 0.11,
                    "domestic_rate": 0.005,  # JPY rate
                    "foreign_rate": 0.045    # USD rate
                },
                "USDCHF": {
                    "spot_fx_rate": 0.8950,
                    "volatility": 0.13,
                    "domestic_rate": 0.015,  # CHF rate
                    "foreign_rate": 0.045    # USD rate
                }
            }

            if currency_pair in fx_mock_data:
                return fx_mock_data[currency_pair]
            else:
                # Default mock data for unknown pairs
                return {
                    "spot_fx_rate": 1.0000,
                    "volatility": 0.15,
                    "domestic_rate": 0.040,
                    "foreign_rate": 0.040
                }

        elif table_name == "tiie_zero_valmer":
            """
            Return a pre-built MXN TIIE zero curve parsed from a CSV.

            Expected CSV columns (case-insensitive; flexible):
              - Either 'maturity_date' (YYYY-MM-DD) OR 'days_to_maturity' OR a 'tenor' like '28D','3M','2Y'
              - One rate column among: ['zero','rate','yield','tiie'] as a decimal (e.g., 0.095 for 9.5%)
                (if the file holds percents like 9.50, we'll auto-convert to 0.095)
            """

            # You can override this path in your env; default points to the uploaded file
            DEFAULT_TIIE_CSV = Path(__file__).resolve().parents[2] / "data" / "MEXDERSWAP_IRSTIIEPR.csv"
            csv_path = os.getenv("TIIE_ZERO_CSV") or str(DEFAULT_TIIE_CSV)
            if not os.path.exists(csv_path):
                raise FileNotFoundError(f"TIIE zero curve CSV not found at: {csv_path}")

            names = ["id", "curve_name", "asof_yyMMdd", "idx", "zero_rate"]
            # STRICT: comma-separated, headerless, exactly these five columns
            df = pd.read_csv(csv_path, header=None, names=names, sep=",", engine="c", dtype=str)

            df["asof_yyMMdd"] = pd.to_datetime(df["asof_yyMMdd"], format="%y%m%d")

            df["idx"] = df["idx"].astype(int)
            df["days_to_maturity"] = (df["asof_yyMMdd"] - df["asof_yyMMdd"].iloc[0]).dt.days
            df["zero_rate"] = df["zero_rate"].astype(float) / 100
            base_dt = df["asof_yyMMdd"].iloc[0].date()
            nodes = [
                {"days_to_maturity": d, "zero": z}
                for d, z in zip(df["days_to_maturity"], df["zero_rate"])
                if d > 0
            ]
            return {"curve_nodes": nodes}

        else:
            raise ValueError(f"Table '{table_name}' not found in mock data API.")

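# --- Editor's note (usage illustration, not part of the package source) ---
# A minimal sketch of how the mock interface above might be exercised; the dates,
# the "SOFR" index name, and the "USD_SWAP_CURVE" key are illustrative:
#
#   fixings = MockDataInterface.get_historical_fixings(
#       index_name="SOFR",
#       start_date=datetime.date(2024, 9, 10),
#       end_date=datetime.date(2024, 9, 17),
#   )   # -> {date: simulated_rate, ...} for business days on the SOFR calendar
#
#   swap_curve = MockDataInterface.get_historical_discount_curve(
#       "interest_rate_swaps", {"USD_SWAP_CURVE": {}}
#   )   # -> {"curve_nodes": [{'type': 'deposit', 'tenor': '3M', ...}, ...]}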

import json
import base64
import gzip
from cachetools import cachedmethod, LRUCache
from operator import attrgetter
from threading import RLock


class MSInterface:

    # ---- bounded, shared caches (class-level) ----
    _curve_cache = LRUCache(maxsize=1024)
    _curve_cache_lock = RLock()

    _fixings_cache = LRUCache(maxsize=4096)
    _fixings_cache_lock = RLock()

    @staticmethod
    def decompress_string_to_curve(b64_string: str) -> Dict[Any, Any]:
        """
        Decodes, decompresses, and deserializes a string back into a curve dictionary.

        Pipeline: Base64 (text) -> Gzip (binary) -> JSON -> Dict

        Args:
            b64_string: The Base64-encoded string from the database or API.

        Returns:
            The reconstructed Python dictionary.
        """
        # 1. Encode the ASCII string back into Base64 bytes
        base64_bytes = b64_string.encode('ascii')

        # 2. Decode the Base64 to get the compressed Gzip bytes
        compressed_bytes = base64.b64decode(base64_bytes)

        # 3. Decompress the Gzip bytes to get the original JSON bytes
        json_bytes = gzip.decompress(compressed_bytes)

        # 4. Decode the JSON bytes to a string and parse back into a dictionary
        return json.loads(json_bytes.decode('utf-8'))

    # NOTE: caching is applied at the method boundary; the body is unchanged.
    @cachedmethod(cache=attrgetter("_curve_cache"), lock=attrgetter("_curve_cache_lock"))
    def get_historical_discount_curve(self, curve_name, target_date):
        from mainsequence.tdag import APIDataNode
        from mainsequence.instruments.settings import DISCOUNT_CURVES_TABLE

        data_node = APIDataNode.build_from_identifier(identifier=DISCOUNT_CURVES_TABLE)

        # For test purposes only: use the latest available observation for this curve.
        update_statistics = data_node.get_update_statistics()
        target_date = update_statistics.asset_time_statistics[curve_name]
        print("REMOVE ABOVE, ONLY FOR TESTING")

        limit = target_date + datetime.timedelta(days=1)

        curve = data_node.get_ranged_data_per_asset(
            range_descriptor={curve_name: {"start_date": target_date, "start_date_operand": ">=",
                                           "end_date": limit, "end_date_operand": "<"}}
        )

        if curve.empty:
            raise Exception(f"{target_date} is empty.")
        zeros = self.decompress_string_to_curve(curve["curve"].iloc[0])
        zeros = pd.Series(zeros).reset_index()
        zeros["index"] = pd.to_numeric(zeros["index"])
        zeros = zeros.set_index("index")[0]

        nodes = [
            {"days_to_maturity": d, "zero": z}
            for d, z in zeros.to_dict().items()
            if d > 0
        ]

        return nodes

    @cachedmethod(cache=attrgetter("_fixings_cache"), lock=attrgetter("_fixings_cache_lock"))
    def get_historical_fixings(self, reference_rate_uid: str, start_date: datetime.datetime,
                               end_date: datetime.datetime):
        """
        Fetch historical fixings for a reference rate from the fixings table.

        :param reference_rate_uid: Unique identifier of the reference rate.
        :param start_date: Start of the requested fixing window.
        :param end_date: End of the requested fixing window.
        :return: Dict mapping fixing date to rate.
        """
        from mainsequence.tdag import APIDataNode
        from mainsequence.instruments.settings import REFERENCE_RATES_FIXING_TABLE

        data_node = APIDataNode.build_from_identifier(identifier=REFERENCE_RATES_FIXING_TABLE)

        import pytz  # patch: the requested range is overridden with a fixed test window
        start_date = datetime.datetime(2024, 9, 10, tzinfo=pytz.utc)
        end_date = datetime.datetime(2025, 9, 17, tzinfo=pytz.utc)

        fixings_df = data_node.get_ranged_data_per_asset(
            range_descriptor={reference_rate_uid: {"start_date": start_date, "start_date_operand": ">=",
                                                   "end_date": end_date, "end_date_operand": "<="}}
        )
        if fixings_df.empty:
            raise Exception(f"{reference_rate_uid} has no data between {start_date} and {end_date}.")
        fixings_df = fixings_df.reset_index().rename(columns={"time_index": "date"})
        fixings_df["date"] = fixings_df["date"].dt.date
        return fixings_df.set_index("date")["rate"].to_dict()

    # optional helpers
    @classmethod
    def clear_caches(cls) -> None:
        cls._curve_cache.clear()
        cls._fixings_cache.clear()

    @classmethod
    def cache_info(cls) -> dict:
        return {
            "discount_curve_cache": {"size": cls._curve_cache.currsize, "max": cls._curve_cache.maxsize},
            "fixings_cache": {"size": cls._fixings_cache.currsize, "max": cls._fixings_cache.maxsize},
        }
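Editor's note: decompress_string_to_curve above expects curve payloads stored as Base64(Gzip(JSON)) text, but the package only ships the reading side. The sketch below shows the matching writer, assuming the same pipeline run in reverse; the helper name compress_curve_to_string and the example node layout are illustrative, not part of the package.

    import base64
    import gzip
    import json
    from typing import Any, Dict

    def compress_curve_to_string(curve: Dict[Any, Any]) -> str:
        """Dict -> JSON -> Gzip -> Base64 text, the inverse of decompress_string_to_curve."""
        json_bytes = json.dumps(curve).encode("utf-8")
        compressed_bytes = gzip.compress(json_bytes)
        return base64.b64encode(compressed_bytes).decode("ascii")

    # Round trip: days-to-maturity -> zero rate, as consumed by get_historical_discount_curve
    curve = {"28": 0.095, "91": 0.097, "182": 0.098}
    wire = compress_curve_to_string(curve)
    assert MSInterface.decompress_string_to_curve(wire) == curve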
mainsequence/instruments/instruments/base_instrument.py
@@ -0,0 +1,85 @@
# src/instruments/base_instrument.py
from typing import Protocol, runtime_checkable, Optional, Union, Dict, Any, Mapping, Type, ClassVar
from pydantic import BaseModel, Field, PrivateAttr
from .json_codec import JSONMixin
import datetime
import json


class InstrumentModel(BaseModel, JSONMixin):
    """
    Common base for all Pydantic instrument models.
    Adds a shared optional 'main_sequence_asset_id' field and shared config.
    """
    main_sequence_asset_id: Optional[int] = Field(
        default=None,
        description="Optional UID linking this instrument to a main sequence record."
    )

    # Allow QuantLib and other arbitrary types on subclasses.
    model_config = {"arbitrary_types_allowed": True}

    _valuation_date: Optional[datetime.datetime] = PrivateAttr(default=None)

    _DEFAULT_REGISTRY: ClassVar[Dict[str, Type["InstrumentModel"]]] = {}

    # Public read access (the private attribute is still not serialized).
    @property
    def valuation_date(self) -> Optional[datetime.datetime]:
        return self._valuation_date

    # Explicit setter, kept separate from the read-only property.
    def set_valuation_date(self, value: Optional[datetime.datetime]) -> None:
        self._valuation_date = value

    def serialize_for_backend(self):
        serialized = {}
        data = self.model_dump_json()
        data = json.loads(data)
        serialized["instrument_type"] = type(self).__name__
        serialized["instrument"] = data

        return json.dumps(serialized)

    @classmethod
    def rebuild(
        cls,
        data: Union[str, Dict[str, Any]],
        registry: Optional[Mapping[str, Type["InstrumentModel"]]] = None,
    ) -> "InstrumentModel":
        """
        Rebuild a single instrument from its wire format.

        Accepts either:
          - a dict: {"instrument_type": "FixedRateBond", "instrument": {...}}
          - a JSON string of the same shape

        Optional `registry` maps instrument_type -> InstrumentModel subclass.
        Falls back to InstrumentModel._DEFAULT_REGISTRY.
        """
        # Parse JSON if needed
        if isinstance(data, str):
            try:
                data = json.loads(data)
            except Exception as e:
                raise ValueError(f"Invalid JSON for instrument: {e}")

        if not isinstance(data, dict):
            raise ValueError("Instrument payload must be dict or JSON string.")

        t = data.get("instrument_type")
        payload = data.get("instrument", {})
        if not t or not isinstance(payload, dict):
            raise ValueError(
                "Expected {'instrument_type': <str>, 'instrument': <dict>}."
            )

        # Merge registries (explicit registry overrides defaults)
        effective_registry: Dict[str, Type[InstrumentModel]] = dict(cls._DEFAULT_REGISTRY)
        if registry:
            effective_registry.update(registry)

        target_cls = effective_registry.get(t)
        if target_cls is None:
            raise ValueError(f"Unknown instrument type: {t}")
        if not hasattr(target_cls, "from_json"):
            raise TypeError(f"Instrument type {t} is not JSON-rebuildable (missing from_json).")

        return target_cls.from_json(payload)
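Editor's note: a minimal sketch of the serialize/rebuild round trip described in the docstrings above. The FixedRateNote subclass, its fields, and the registry key are hypothetical, and the sketch assumes JSONMixin.from_json accepts the payload produced by model_dump_json; it is illustrative rather than a confirmed usage of the package API.

    from mainsequence.instruments.instruments.base_instrument import InstrumentModel

    class FixedRateNote(InstrumentModel):  # hypothetical example instrument
        notional: float
        coupon: float

    note = FixedRateNote(notional=1_000_000, coupon=0.05)
    wire = note.serialize_for_backend()
    # wire == '{"instrument_type": "FixedRateNote", "instrument": {...}}'

    # rebuild() resolves the class from _DEFAULT_REGISTRY or from an explicit registry
    restored = InstrumentModel.rebuild(wire, registry={"FixedRateNote": FixedRateNote})
    assert isinstance(restored, FixedRateNote)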