mainsequence-2.0.4rc0-py3-none-any.whl → mainsequence-3.0.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mainsequence/cli/cli.py +4 -7
- mainsequence/cli/ssh_utils.py +17 -2
- mainsequence/client/__init__.py +3 -3
- mainsequence/client/base.py +3 -3
- mainsequence/client/data_sources_interfaces/timescale.py +20 -19
- mainsequence/client/exceptions.py +11 -0
- mainsequence/client/models_helpers.py +2 -2
- mainsequence/client/models_tdag.py +104 -87
- mainsequence/client/models_vam.py +9 -9
- mainsequence/dashboards/streamlit/core/theme.py +128 -109
- mainsequence/dashboards/streamlit/scaffold.py +3 -0
- mainsequence/instruments/__init__.py +1 -1
- mainsequence/instruments/data_interface/__init__.py +1 -1
- mainsequence/instruments/data_interface/data_interface.py +31 -11
- mainsequence/instruments/instruments/bond.py +8 -0
- mainsequence/instruments/pricing_models/indices.py +26 -14
- mainsequence/instruments/settings.py +2 -162
- mainsequence/tdag/config.py +2 -2
- mainsequence/tdag/data_nodes/build_operations.py +3 -3
- mainsequence/tdag/data_nodes/data_nodes.py +23 -23
- mainsequence/tdag/data_nodes/persist_managers.py +121 -121
- mainsequence/tdag/data_nodes/run_operations.py +25 -25
- mainsequence/virtualfundbuilder/contrib/apps/portfolio_report_app.py +1 -1
- mainsequence/virtualfundbuilder/contrib/prices/data_nodes.py +2 -2
- mainsequence/virtualfundbuilder/data_nodes.py +1 -1
- mainsequence/virtualfundbuilder/portfolio_interface.py +7 -7
- mainsequence/virtualfundbuilder/utils.py +2 -2
- {mainsequence-2.0.4rc0.dist-info → mainsequence-3.0.2.dist-info}/METADATA +1 -1
- {mainsequence-2.0.4rc0.dist-info → mainsequence-3.0.2.dist-info}/RECORD +33 -32
- {mainsequence-2.0.4rc0.dist-info → mainsequence-3.0.2.dist-info}/WHEEL +0 -0
- {mainsequence-2.0.4rc0.dist-info → mainsequence-3.0.2.dist-info}/entry_points.txt +0 -0
- {mainsequence-2.0.4rc0.dist-info → mainsequence-3.0.2.dist-info}/licenses/LICENSE +0 -0
- {mainsequence-2.0.4rc0.dist-info → mainsequence-3.0.2.dist-info}/top_level.txt +0 -0
mainsequence/instruments/settings.py CHANGED

```diff
@@ -7,168 +7,8 @@ import sys
 from pathlib import Path
 from types import SimpleNamespace
 
+ENV_PREFIX = "MSI"
 
-HERE = Path(__file__).resolve().parent
-# ---------------- App identity & app dirs ----------------
-APP_VENDOR = "mainsequence"
-APP_NAME = "instruments"
-APP_ID = f"{APP_VENDOR}/{APP_NAME}"
 
-
-ENV_PREFIX = "MSI"  # e.g., MSI_CONFIG_FILE, MSI_DATA_BACKEND
-ENV_CONFIG_FILE = f"{ENV_PREFIX}_CONFIG_FILE"
-ENV_DEFAULT_TOML_FILE = f"{ENV_PREFIX}_DEFAULT_TOML_FILE"
-
-
-def _user_config_root() -> Path:
-    if sys.platform == "win32":
-        base = Path(os.environ.get("APPDATA", Path.home() / "AppData" / "Roaming"))
-    elif sys.platform == "darwin":
-        base = Path.home() / "Library" / "Application Support"
-    else:
-        base = Path(os.environ.get("XDG_CONFIG_HOME", Path.home() / ".config"))
-    return (base / APP_VENDOR / APP_NAME).resolve()
-
-APP_ROOT = _user_config_root()
-# (No POSITIONS_DIR / BUILDS_DIR / DATA_DIR and no bulk mkdir here.)
-
-# ---------------- tiny config loader (stdlib only) ----------------
-def _load_toml(text: str) -> dict:
-    try:
-        import tomllib  # py311+
-        return tomllib.loads(text)
-    except Exception:
-        return {}
-
-def _load_file_config() -> dict:
-    candidates: list[Path] = []
-    # 1) explicit path via MSI_CONFIG_FILE
-    env_cfg = os.getenv(ENV_CONFIG_FILE)
-    if env_cfg:
-        candidates.append(Path(env_cfg).expanduser())
-
-    # 2) project-local
-    candidates += [Path("./instruments.toml"), Path("./instruments.json")]
-
-    # 3) user config root
-    candidates += [APP_ROOT / "config.toml", APP_ROOT / "config.json"]
-
-    for p in candidates:
-        try:
-            if not p.exists():
-                continue
-            s = p.read_text(encoding="utf-8")
-            if p.suffix.lower() == ".toml":
-                return _load_toml(s) or {}
-            if p.suffix.lower() == ".json":
-                return json.loads(s)
-        except Exception:
-            pass
-    return {}
-
-# ---------------- default TOML (read from a real file next to this module) -----
-def _read_default_toml_text() -> str | None:
-    """
-    Returns the text of the default TOML configuration.
-    Order of precedence:
-      1) Path from MSI_DEFAULT_TOML_FILE (if set)
-      2) instruments.default.toml next to this file
-    Returns None if no default file is found/readable.
-    """
-    candidate = Path(os.getenv(ENV_DEFAULT_TOML_FILE, HERE / "instruments.default.toml")).expanduser()
-    try:
-        if candidate.exists():
-            return candidate.read_text(encoding="utf-8")
-    except Exception:
-        pass
-    return None
-
-def _existing_config_path() -> Path | None:
-    env_cfg = os.getenv(ENV_CONFIG_FILE)
-    if env_cfg:
-        p = Path(env_cfg).expanduser()
-        if p.exists():
-            return p
-    for p in (
-        Path("./instruments.toml"),
-        Path("./instruments.json"),
-        APP_ROOT / "config.toml",
-        APP_ROOT / "config.json",
-    ):
-        if p.exists():
-            return p
-    return None
-
-def _ensure_default_config_file() -> Path | None:
-    """If no config exists anywhere, create one. Never overwrites existing."""
-    if _existing_config_path() is not None:
-        return None
-    default_text = _read_default_toml_text()
-    if default_text is None:
-        return None
-
-    target = Path(os.getenv(ENV_CONFIG_FILE, APP_ROOT / "config.toml")).expanduser()
-    try:
-        target.parent.mkdir(parents=True, exist_ok=True)  # ensure parent dir only
-        if not target.exists():
-            target.write_text(default_text, encoding="utf-8")
-    except Exception:
-        return None
-    return target
-
-# Create a default config file if none is present anywhere.
-_ensure_default_config_file()
-
-# Now load the config (env still overrides)
-_CFG = _load_file_config()
-
-def _get(key: str, default: str) -> str:
-    # Env overrides config file (MSI_<KEY>)
-    v = os.getenv(f"{ENV_PREFIX}_{key}")
-    if v is not None:
-        return v
-    try:
-        section, leaf = key.lower().split(".", 1)
-        return _CFG.get(section, {}).get(leaf, default)
-    except Exception:
-        return _CFG.get(key, default)
-
-# ---------------- Your existing constants (with overrides) ----------------
-# Tables
-DISCOUNT_CURVES_TABLE = _get("DISCOUNT_CURVES_TABLE", "discount_curves")
-REFERENCE_RATES_FIXING_TABLE = _get("REFERENCE_RATES_FIXING_TABLE", "fixing_rates_1d")
-
-# Curve identifiers
-TIIE_28_ZERO_CURVE = _get("TIIE_28_ZERO_CURVE", "F_TIIE_28_VALMER")
-M_BONOS_ZERO_CURVE = _get("M_BONOS_ZERO_CURVE", "M_BONOS_ZERO_OTR")
-
-# Index UIDs
-TIIE_28_UID = _get("TIIE_28_UID", "TIIE_28")
-TIIE_91_UID = _get("TIIE_91_UID", "TIIE_91")
-TIIE_182_UID = _get("TIIE_182_UID", "TIIE_182")
-TIIE_OVERNIGHT_UID = _get("TIIE_OVERNIGHT_UID", "TIIE_OVERNIGHT")
-
-CETE_28_UID = _get("CETE_28_UID", "CETE_28")
-CETE_91_UID = _get("CETE_91_UID", "CETE_91")
-CETE_182_UID = _get("CETE_182_UID", "CETE_182")
-
-# Optional file locations (let your code decide how to use them)
-TIIE_ZERO_CSV = (_CFG.get("files", {}) or {}).get("tiie_zero_csv")
-TIIE28_FIXINGS_CSV = (_CFG.get("files", {}) or {}).get("tiie28_fixings_csv")
-
-# ---------------- Convenience namespaces for legacy import sites ------------
-indices = SimpleNamespace(
-    TIIE_28_UID=TIIE_28_UID,
-    TIIE_91_UID=TIIE_91_UID,
-    TIIE_182_UID=TIIE_182_UID,
-    TIIE_OVERNIGHT_UID=TIIE_OVERNIGHT_UID,
-    CETE_28_UID=CETE_28_UID,
-    CETE_91_UID=CETE_91_UID,
-    CETE_182_UID=CETE_182_UID,
-)
-curves = SimpleNamespace(
-    TIIE_28_ZERO_CURVE=TIIE_28_ZERO_CURVE,
-    M_BONOS_ZERO_CURVE=M_BONOS_ZERO_CURVE,
-)
-DATA_BACKEND = os.getenv(f"{ENV_PREFIX}_DATA_BACKEND", (_CFG.get("data", {}) or {}).get("backend", "mainsequence"))
+DATA_BACKEND = os.getenv(f"{ENV_PREFIX}_DATA_BACKEND","mainsequence")
 data = SimpleNamespace(backend=DATA_BACKEND)
```
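The net effect of this hunk: 3.0.2 drops the entire TOML/JSON config-file loader, the `_get` env-then-file precedence chain, the table/curve/UID constants, and the `indices`/`curves` namespaces, and resolves the backend from a single environment variable. A minimal sketch of the remaining behavior; the shell override shown in the comment is illustrative:

```python
import os
from types import SimpleNamespace

ENV_PREFIX = "MSI"

# 3.0.2: one env-var lookup with a hard-coded default, replacing the old
# precedence chain of env var > ./instruments.toml|.json > user config dir.
DATA_BACKEND = os.getenv(f"{ENV_PREFIX}_DATA_BACKEND", "mainsequence")
data = SimpleNamespace(backend=DATA_BACKEND)

# Override by exporting the variable before the module is imported, e.g.:
#   export MSI_DATA_BACKEND=my_backend
print(data.backend)
```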
mainsequence/tdag/config.py CHANGED
```diff
@@ -112,8 +112,8 @@ class TimeSeriesOGM:
         return target_path
 
     @property
-    def
-    target_path = os.path.join(f"{self.time_series_folder}", "
+    def data_node_update_path(self):
+        target_path = os.path.join(f"{self.time_series_folder}", "data_node_update")
         self.verify_exist(target_path=target_path)
         return target_path
 
```
mainsequence/tdag/data_nodes/build_operations.py CHANGED

```diff
@@ -211,10 +211,10 @@ def prepare_config_kwargs(kwargs: Dict[str, Any]) -> Tuple[Dict[str, Any], Dict[
 def verify_backend_git_hash_with_pickle(local_persist_manager:PersistManager,
                                         time_serie_class: "DataNode") -> None:
     """Verifies if the git hash in the backend matches the one from the pickled object."""
-    if local_persist_manager.
+    if local_persist_manager.data_node_storage is not None:
         load_git_hash = get_data_node_source_code_git_hash(time_serie_class)
 
-        persisted_pickle_hash = local_persist_manager.
+        persisted_pickle_hash = local_persist_manager.data_node_storage.time_serie_source_code_git_hash
         if load_git_hash != persisted_pickle_hash:
            local_persist_manager.logger.warning(
                f"{bcolors.WARNING}Source code does not match with pickle rebuilding{bcolors.ENDC}")
@@ -228,7 +228,7 @@ def verify_backend_git_hash_with_pickle(local_persist_manager:PersistManager,
            rebuild_time_serie.persist_to_pickle()
        else:
            # if no need to rebuild, just sync the metadata
-            local_persist_manager.
+            local_persist_manager.synchronize_data_node_storage(data_node_update=None)
 
 def hash_signature(dictionary: Dict[str, Any]) -> Tuple[str, str]:
     """
```
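Stitched together, the two `verify_backend_git_hash_with_pickle` hunks give the following control flow. This is a paraphrase for readability, not the actual function body: the rebuild branch is elided in the diff, so the `rebuild(...)` call below is a hypothetical stand-in for whatever the package does there.

```python
def verify_backend_git_hash_with_pickle(local_persist_manager, time_serie_class):
    """Paraphrased 3.x flow: compare the current source hash against the one
    recorded on data_node_storage and rebuild the pickle only on mismatch."""
    if local_persist_manager.data_node_storage is None:
        return  # nothing persisted yet, nothing to verify (assumed)
    load_git_hash = get_data_node_source_code_git_hash(time_serie_class)
    persisted = local_persist_manager.data_node_storage.time_serie_source_code_git_hash
    if load_git_hash != persisted:
        rebuild_time_serie = rebuild(time_serie_class)  # elided in the diff
        rebuild_time_serie.persist_to_pickle()
    else:
        # no rebuild needed: just refresh the backend metadata
        local_persist_manager.synchronize_data_node_storage(data_node_update=None)
```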
mainsequence/tdag/data_nodes/data_nodes.py CHANGED

```diff
@@ -34,7 +34,7 @@ from abc import ABC
 
 from typing import Union
 
-from mainsequence.client import
+from mainsequence.client import DataNodeUpdate, CONSTANTS, \
     DynamicTableDataSource, AssetTranslationTable
 
 from functools import wraps
@@ -106,7 +106,7 @@ class DataAccessMixin:
 
     def __repr__(self) -> str:
         try:
-            local_id = self.
+            local_id = self.data_node_update.id
         except:
             local_id = 0
         repr = self.__class__.__name__ + f" {os.environ['TDAG_ENDPOINT']}/local-time-series/details/?local_time_serie_id={local_id}"
@@ -350,14 +350,14 @@ class APIDataNode(DataAccessMixin):
 
 
     @classmethod
-    def build_from_local_time_serie(cls, source_table: "
+    def build_from_local_time_serie(cls, source_table: "DataNodeUpdate") -> "APIDataNode":
         return cls(data_source_id=source_table.data_source.id,
                    storage_hash=source_table.storage_hash
                    )
 
     @classmethod
     def build_from_table_id(cls, table_id: str) -> "APIDataNode":
-        table = ms_client.
+        table = ms_client.DataNodeStorage.get(id=table_id)
         ts = cls(
             data_source_id=table.data_source.id,
             storage_hash=table.storage_hash
@@ -367,7 +367,7 @@ class APIDataNode(DataAccessMixin):
     @classmethod
     def build_from_identifier(cls, identifier: str) -> "APIDataNode":
 
-        table = ms_client.
+        table = ms_client.DataNodeStorage.get(identifier=identifier)
         ts = cls(
             data_source_id=table.data_source.id,
             storage_hash=table.storage_hash
```
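These hunks route every table lookup through `ms_client.DataNodeStorage.get`, keyed either by primary key or by human-readable identifier. A hedged usage sketch; the import path for `APIDataNode` and the id/identifier values are placeholders, not confirmed by the diff:

```python
import mainsequence.client as ms_client
from mainsequence.tdag import APIDataNode  # import path assumed

# by primary key, as in build_from_table_id
node_a = APIDataNode.build_from_table_id(table_id="123")

# by identifier, as in build_from_identifier
node_b = APIDataNode.build_from_identifier(identifier="discount_curves")

# both classmethods resolve a DataNodeStorage record first, then build the
# node from its data_source.id and storage_hash, as the diff shows
record = ms_client.DataNodeStorage.get(identifier="discount_curves")
print(record.data_source.id, record.storage_hash)
```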
```diff
@@ -462,9 +462,9 @@ class APIDataNode(DataAccessMixin):
     def _set_local_persist_manager(self) -> None:
         self._verify_local_data_source()
         self._local_persist_manager = APIPersistManager(storage_hash=self.storage_hash, data_source_id=self.data_source_id)
-
+        data_node_storage = self._local_persist_manager.data_node_storage
 
-        assert
+        assert data_node_storage is not None, f"Verify that the table {self.storage_hash} exists "
 
 
 
@@ -480,7 +480,7 @@ class APIDataNode(DataAccessMixin):
         A tuple containing the last update time for the table and a dictionary of last update times per asset.
         """
 
-        return self.local_persist_manager.
+        return self.local_persist_manager.data_node_storage.sourcetableconfiguration.get_data_updates()
 
     def get_earliest_updated_asset_filter(self, unique_identifier_list: list,
                                           last_update_per_asset: dict) -> datetime.datetime:
@@ -537,7 +537,7 @@ class DataNode(DataAccessMixin,ABC):
                  *args,
                  **kwargs):
         """
-        Initializes the DataNode object with the provided
+        Initializes the DataNode object with the provided data_node_storage and configurations. For extension of the method
 
         This method sets up the time series object, loading the necessary configurations
         and metadata.
@@ -548,7 +548,7 @@ class DataNode(DataAccessMixin,ABC):
         - init_meta
         - build_meta_data
 
-        Each DataNode instance will create a update_hash and a
+        Each DataNode instance will create a update_hash and a DataNodeUpdate instance in the Data Engine by uniquely hashing
         the same arguments as the table but excluding the arguments inside _LOCAL_KWARGS_TO_IGNORE
 
 
@@ -715,12 +715,12 @@ class DataNode(DataAccessMixin,ABC):
 
 
     @property
-    def
+    def data_node_update(self) -> DataNodeUpdate:
         """The local time series metadata object."""
-        return self.local_persist_manager.
+        return self.local_persist_manager.data_node_update
 
     @property
-    def metadata(self) -> "
+    def metadata(self) -> "DataNodeStorage":
         return self.local_persist_manager.metadata
 
 
```
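The property hunk pins down the 3.x accessor names: the per-node update record is exposed as `data_node_update` (a `DataNodeUpdate`), while `metadata` keeps its name but is now annotated as `DataNodeStorage`. A small migration-style sketch; `SomeDataNode` stands in for any concrete `DataNode` subclass:

```python
my_node = SomeDataNode()  # hypothetical concrete DataNode subclass instance

update = my_node.data_node_update   # DataNodeUpdate: local update record
storage = my_node.metadata          # DataNodeStorage: table-level record

# attributes used elsewhere in this diff
print(update.id)              # e.g. in __repr__ and dependencies_df filtering
print(storage.storage_hash)   # e.g. when building an APIDataNode
```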
```diff
@@ -768,7 +768,7 @@ class DataNode(DataAccessMixin,ABC):
         if graph_depth <= graph_depth_limit and self.data_source.related_resource_class_type:
             self._set_local_persist_manager(
                 update_hash=self.update_hash,
-
+                data_node_update=None,
             )
 
         deserializer = build_operations.DeserializerManager()
@@ -782,7 +782,7 @@ class DataNode(DataAccessMixin,ABC):
 
         self.__dict__.update(state)
 
-        self.local_persist_manager.synchronize_metadata(
+        self.local_persist_manager.synchronize_metadata(data_node_update=None)
 
     def _prepare_state_for_pickle(self, state: Dict[str, Any]) -> Dict[str, Any]:
         """
@@ -803,8 +803,8 @@ class DataNode(DataAccessMixin,ABC):
             "local_persist_manager",
             "logger",
             "init_meta",
-            "
-            "
+            "_data_node_update_future",
+            "_data_node_update_lock",
             "_local_persist_manager",
             "update_tracker",
         ]:
```
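The renamed exclusion keys (`_data_node_update_future`, `_data_node_update_lock`) follow the standard pattern of dropping unpicklable runtime attributes such as locks and futures before serialization and recreating them on load. A generic sketch of that pattern, not the library's actual method bodies:

```python
import threading

class PicklableNode:
    def __init__(self) -> None:
        self.value = 42
        self._data_node_update_lock = threading.Lock()  # locks cannot be pickled

    def __getstate__(self):
        state = self.__dict__.copy()
        # drop runtime-only attributes, mirroring the exclusion list above
        for key in ("_data_node_update_future", "_data_node_update_lock"):
            state.pop(key, None)
        return state

    def __setstate__(self, state):
        self.__dict__.update(state)
        self._data_node_update_lock = threading.Lock()  # recreate after unpickling
```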
```diff
@@ -822,7 +822,7 @@ class DataNode(DataAccessMixin,ABC):
 
         return properties
     def _set_local_persist_manager(self, update_hash: str,
-
+                                   data_node_update: Union[None, dict] = None,
 
                                    ) -> None:
         """
@@ -835,13 +835,13 @@ class DataNode(DataAccessMixin,ABC):
             The local hash ID for the time series.
         storage_hash : str
             The remote table hash name for the time series.
-
+        data_node_update : Union[None, dict], optional
             Local metadata for the time series, if available.
         """
         self._local_persist_manager = PersistManager.get_from_data_type(
             update_hash=update_hash,
             class_name=self.__class__.__name__,
-
+            data_node_update=data_node_update,
             data_source=self.data_source
         )
 
@@ -883,7 +883,7 @@ class DataNode(DataAccessMixin,ABC):
 
         """Sets the node relationships in the backend by calling the dependencies() method."""
 
-        if self.local_persist_manager.
+        if self.local_persist_manager.data_node_update is None:
             self.verify_and_build_remote_objects() #
         if self.local_persist_manager.is_local_relation_tree_set():
             return
@@ -911,7 +911,7 @@ class DataNode(DataAccessMixin,ABC):
         self.depth_df = depth_df
         if not depth_df.empty:
             self.dependencies_df = depth_df[
-                depth_df["
+                depth_df["data_node_update_id"] != self.data_node_update.id].copy()
         else:
             self.dependencies_df = pd.DataFrame()
 
@@ -1108,7 +1108,7 @@ class WrapperDataNode(DataNode):
         """
         from mainsequence.client import DoesNotExist
         try:
-            metadata = ms_client.
+            metadata = ms_client.DataNodeStorage.get(identifier=table_identifier)
 
         except DoesNotExist as e:
             raise e
```
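The same `DataNodeStorage.get(identifier=...)` lookup closes out the diff in `WrapperDataNode`, guarded by `DoesNotExist`. A hedged sketch of the pattern; `resolve_table` is an illustrative helper, not part of the package:

```python
import mainsequence.client as ms_client
from mainsequence.client import DoesNotExist

def resolve_table(table_identifier: str):
    """Fetch the DataNodeStorage record for an identifier, letting DoesNotExist
    propagate as WrapperDataNode does (illustrative helper)."""
    try:
        return ms_client.DataNodeStorage.get(identifier=table_identifier)
    except DoesNotExist:
        raise  # callers may map this to a clearer error message
```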