mainsequence-2.0.4b0-py3-none-any.whl → mainsequence-3.0.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. mainsequence/cli/cli.py +17 -10
  2. mainsequence/cli/ssh_utils.py +17 -2
  3. mainsequence/client/__init__.py +3 -3
  4. mainsequence/client/base.py +3 -3
  5. mainsequence/client/data_sources_interfaces/timescale.py +20 -19
  6. mainsequence/client/models_helpers.py +2 -2
  7. mainsequence/client/models_tdag.py +96 -86
  8. mainsequence/client/models_vam.py +9 -9
  9. mainsequence/dashboards/streamlit/assets/image_1_base64.txt +1 -0
  10. mainsequence/dashboards/streamlit/assets/image_2_base64.txt +1 -0
  11. mainsequence/dashboards/streamlit/assets/image_3_base64.txt +1 -0
  12. mainsequence/dashboards/streamlit/assets/image_4_base64.txt +1 -0
  13. mainsequence/dashboards/streamlit/assets/image_5_base64.txt +1 -0
  14. mainsequence/dashboards/streamlit/core/theme.py +2 -2
  15. mainsequence/instruments/__init__.py +1 -1
  16. mainsequence/instruments/data_interface/__init__.py +1 -1
  17. mainsequence/instruments/data_interface/data_interface.py +3 -4
  18. mainsequence/instruments/pricing_models/indices.py +29 -14
  19. mainsequence/instruments/settings.py +2 -162
  20. mainsequence/tdag/config.py +2 -2
  21. mainsequence/tdag/data_nodes/build_operations.py +3 -3
  22. mainsequence/tdag/data_nodes/data_nodes.py +23 -23
  23. mainsequence/tdag/data_nodes/persist_managers.py +121 -121
  24. mainsequence/tdag/data_nodes/run_operations.py +25 -25
  25. mainsequence/virtualfundbuilder/contrib/apps/portfolio_report_app.py +1 -1
  26. mainsequence/virtualfundbuilder/contrib/prices/data_nodes.py +2 -2
  27. mainsequence/virtualfundbuilder/data_nodes.py +1 -1
  28. mainsequence/virtualfundbuilder/portfolio_interface.py +7 -7
  29. mainsequence/virtualfundbuilder/utils.py +2 -2
  30. {mainsequence-2.0.4b0.dist-info → mainsequence-3.0.1.dist-info}/METADATA +1 -1
  31. {mainsequence-2.0.4b0.dist-info → mainsequence-3.0.1.dist-info}/RECORD +35 -30
  32. {mainsequence-2.0.4b0.dist-info → mainsequence-3.0.1.dist-info}/WHEEL +0 -0
  33. {mainsequence-2.0.4b0.dist-info → mainsequence-3.0.1.dist-info}/entry_points.txt +0 -0
  34. {mainsequence-2.0.4b0.dist-info → mainsequence-3.0.1.dist-info}/licenses/LICENSE +0 -0
  35. {mainsequence-2.0.4b0.dist-info → mainsequence-3.0.1.dist-info}/top_level.txt +0 -0
mainsequence/dashboards/streamlit/core/theme.py
@@ -26,10 +26,10 @@ def explain_theming():
 
 # --- Load spinner frames ONCE from two levels above, files: image_1_base64.txt ... image_5_base64.txt ---
 def _load_spinner_frames_for_this_template() -> list[str]:
-    base_dir = Path(__file__).resolve().parent.parent.parent.parent # two levels above this module
+    base_dir = Path(__file__).resolve().parent.parent
     frames: list[str] = []
     for i in range(1, 6):
-        p = base_dir / f"image_{i}_base64.txt"
+        p = base_dir / f"assets/image_{i}_base64.txt"
         if not p.exists():
             raise FileNotFoundError(f"Missing spinner frame file: {p}")
         frames.append(p.read_text(encoding="utf-8").strip())
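
Note: the spinner frames now resolve against the streamlit dashboard package itself instead of four directory levels up, matching the five new assets/image_*_base64.txt files listed above (entries 9-13). A minimal sketch of the new lookup, assuming this module lives at mainsequence/dashboards/streamlit/core/theme.py (the path is taken from the file list, not stated in the hunk):

    # Sketch: where the frames are expected to live after this change.
    from pathlib import Path
    theme_py = Path("mainsequence/dashboards/streamlit/core/theme.py")
    base_dir = theme_py.resolve().parent.parent              # -> mainsequence/dashboards/streamlit
    first_frame = base_dir / "assets/image_1_base64.txt"     # one of the five new frame files
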
mainsequence/instruments/__init__.py
@@ -1,2 +1,2 @@
-from .constants import *
+
 from .instruments import *
mainsequence/instruments/data_interface/__init__.py
@@ -4,7 +4,7 @@ from mainsequence.client import Constant as _C
 import os
 
 def _make_backend():
-    backend = os.getenv("MSI_DATA_BACKEND", "mock").lower()
+    backend = os.getenv("MSI_DATA_BACKEND", "mainsequence").lower()
     return MSInterface() if backend == "mainsequence" else MockDataInterface()
 
 # export a single, uniform instance
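
Note: the default backend flips from "mock" to "mainsequence", so fresh installs now talk to the live data interface unless MSI_DATA_BACKEND is set explicitly. A hedged sketch of the selection behavior after this change:

    import os
    # "mainsequence" is the new default; any other value falls through to the mock.
    backend = os.getenv("MSI_DATA_BACKEND", "mainsequence").lower()
    use_mock = backend != "mainsequence"
    # To keep the 2.x default behavior: export MSI_DATA_BACKEND=mock
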
mainsequence/instruments/data_interface/data_interface.py
@@ -9,8 +9,8 @@ import pandas as pd
 from pathlib import Path
 
 
-DISCOUNT_CURVES_TABLE=msc.Constant.get_or_none(name="DISCOUNT_CURVES_TABLE")
-REFERENCE_RATES_FIXING_TABLE = msc.Constant.get_or_none(name="REFERENCE_RATES_FIXING_TABLE")
+DISCOUNT_CURVES_TABLE=msc.Constant.get_value(name="DISCOUNT_CURVES_TABLE")
+REFERENCE_RATES_FIXING_TABLE = msc.Constant.get_value(name="REFERENCE_RATES_FIXING_TABLE")
 
 assert DISCOUNT_CURVES_TABLE is not None, "DISCOUNT_CURVES_TABLE not found in constants"
 assert REFERENCE_RATES_FIXING_TABLE is not None, "REFERENCE_RATES_FIXING_TABLE not found in constants"
@@ -294,7 +294,7 @@ class MSInterface():
         # for test purposes only get lats observations
         update_statistics = data_node.get_update_statistics()
         target_date = update_statistics.asset_time_statistics[curve_name]
-        print("REMOVE ABOCVE ONLU FOR TESTING")
+        print("REMOVE ABOCVE ONLY FOR TESTING")
 
 
         try:
@@ -336,7 +336,6 @@ class MSInterface():
        :return:
        """
        from mainsequence.tdag import APIDataNode
-       from mainsequence.instruments.settings import REFERENCE_RATES_FIXING_TABLE
 
        data_node = APIDataNode.build_from_identifier(identifier=REFERENCE_RATES_FIXING_TABLE)
 
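
Note: table identifiers are now read from backend-managed Constants at import time (get_or_none becomes get_value), and the in-function import of REFERENCE_RATES_FIXING_TABLE is dropped because the module-level constant already carries the value. Whether get_value raises or returns None for a missing constant is not visible in this diff; the retained asserts suggest None is still possible. The test-only print is merely typo-corrected, not removed, so it still ships in 3.0.1. A short sketch of the new resolution path, using only call forms that appear in the hunks:

    import mainsequence.client as msc
    from mainsequence.tdag import APIDataNode

    # Constants are resolved from the Data Engine, not from local settings.
    fixing_table = msc.Constant.get_value(name="REFERENCE_RATES_FIXING_TABLE")
    data_node = APIDataNode.build_from_identifier(identifier=fixing_table)
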
mainsequence/instruments/pricing_models/indices.py
@@ -42,14 +42,18 @@ _INDEX_CACHE: Dict[_IndexCacheKey, ql.Index] = {}
 def clear_index_cache() -> None:
     _INDEX_CACHE.clear()
 
+constants_to_create = dict(
+    UST="UST",
+)
 
+_C.create_constants_if_not_exist(constants_to_create)
 # ----------------------------- Config ----------------------------- #
 # Put every supported identifier here with its curve + index construction config.
 # No tenor tokens; we store the QuantLib Period directly.
 
 INDEX_CONFIGS: Dict[str, Dict] = {
-    _C.get_value(name="TIIE_28_UID"): dict(
-        curve_uid=_C.get_value(name="TIIE_28_ZERO_CURVE"),
+    _C.get_value(name="REFERENCE_RATE__TIIE_28"): dict(
+        curve_uid=_C.get_value(name="ZERO_CURVE__VALMER_TIIE_28"),
         calendar=(ql.Mexico() if hasattr(ql, "Mexico") else ql.TARGET()),
         day_counter=ql.Actual360(),
         currency=(ql.MXNCurrency() if hasattr(ql, "MXNCurrency") else ql.USDCurrency()),
@@ -58,8 +62,8 @@ INDEX_CONFIGS: Dict[str, Dict] = {
         bdc=ql.ModifiedFollowing,
         end_of_month=False,
     ),
-    _C.get_value(name="TIIE_91_UID"): dict(
-        curve_uid=_C.get_value(name="TIIE_28_ZERO_CURVE"),
+    _C.get_value(name="REFERENCE_RATE__TIIE_91"): dict(
+        curve_uid=_C.get_value(name="ZERO_CURVE__VALMER_TIIE_28"),
         calendar=(ql.Mexico() if hasattr(ql, "Mexico") else ql.TARGET()),
         day_counter=ql.Actual360(),
         currency=ql.MXNCurrency(),
@@ -68,8 +72,8 @@ INDEX_CONFIGS: Dict[str, Dict] = {
         bdc=ql.ModifiedFollowing,
         end_of_month=False,
     ),
-    _C.get_value(name="TIIE_182_UID"): dict(
-        curve_uid=_C.get_value(name="TIIE_28_ZERO_CURVE"),
+    _C.get_value(name="REFERENCE_RATE__TIIE_182"): dict(
+        curve_uid=_C.get_value(name="ZERO_CURVE__VALMER_TIIE_28"),
         calendar=(ql.Mexico() if hasattr(ql, "Mexico") else ql.TARGET()),
         day_counter=ql.Actual360(),
         currency=ql.MXNCurrency(),
@@ -79,8 +83,8 @@ INDEX_CONFIGS: Dict[str, Dict] = {
         end_of_month=False,
     ),
     # Add more identifiers here as needed.
-    _C.get_value(name="TIIE_OVERNIGHT_UID"): dict(
-        curve_uid=_C.get_value(name="TIIE_28_ZERO_CURVE"),
+    _C.get_value(name="REFERENCE_RATE__TIIE_OVERNIGHT"): dict(
+        curve_uid=_C.get_value(name="ZERO_CURVE__VALMER_TIIE_28"),
         calendar=(ql.Mexico() if hasattr(ql, "Mexico") else ql.TARGET()),
         day_counter=ql.Actual360(),
         currency=ql.MXNCurrency(),
@@ -89,8 +93,8 @@ INDEX_CONFIGS: Dict[str, Dict] = {
         bdc=ql.ModifiedFollowing,
         end_of_month=False,
     ),
-    _C.get_value(name="CETE_28_UID"): dict(
-        curve_uid=_C.get_value(name="M_BONOS_ZERO_CURVE"),
+    _C.get_value(name="REFERENCE_RATE__CETE_28"): dict(
+        curve_uid=_C.get_value(name="ZERO_CURVE__BANXICO_M_BONOS_OTR"),
         calendar=(ql.Mexico() if hasattr(ql, "Mexico") else ql.TARGET()),
         day_counter=ql.Actual360(), # BONOS accrue on Act/360
         currency=ql.MXNCurrency(),
@@ -99,8 +103,8 @@ INDEX_CONFIGS: Dict[str, Dict] = {
         bdc=ql.Following, # “next banking business day” => Following
         end_of_month=False, # Irrelevant when scheduling by days
     ),
-    _C.get_value(name="CETE_91_UID"): dict(
-        curve_uid=_C.get_value(name="M_BONOS_ZERO_CURVE"),
+    _C.get_value(name="REFERENCE_RATE__CETE_91"): dict(
+        curve_uid=_C.get_value(name="ZERO_CURVE__BANXICO_M_BONOS_OTR"),
         calendar=(ql.Mexico() if hasattr(ql, "Mexico") else ql.TARGET()),
         day_counter=ql.Actual360(), # BONOS accrue on Act/360
         currency=ql.MXNCurrency(),
@@ -110,8 +114,8 @@ INDEX_CONFIGS: Dict[str, Dict] = {
         end_of_month=False, # Irrelevant when scheduling by days
     ),
 
-    _C.get_value(name="CETE_182_UID"): dict(
-        curve_uid=_C.get_value(name="M_BONOS_ZERO_CURVE"),
+    _C.get_value(name="REFERENCE_RATE__CETE_182"): dict(
+        curve_uid=_C.get_value(name="ZERO_CURVE__BANXICO_M_BONOS_OTR"),
         calendar=(ql.Mexico() if hasattr(ql, "Mexico") else ql.TARGET()),
         day_counter=ql.Actual360(), # BONOS accrue on Act/360
         currency=ql.MXNCurrency(),
@@ -120,6 +124,17 @@ INDEX_CONFIGS: Dict[str, Dict] = {
         bdc=ql.Following, # “next banking business day” => Following
         end_of_month=False, # Irrelevant when scheduling by days
     ),
+
+    _C.get_value(name="REFERENCE_RATE__UST"): dict(
+        curve_uid=_C.get_value(name="ZERO_CURVE__UST_CMT_ZERO_CURVE_UID"),
+        calendar=ql.UnitedStates(ql.UnitedStates.GovernmentBond),
+        day_counter=ql.ActualActual(ql.ActualActual.Bond), # Treasuries accrue Act/Act (Bond/ICMA)
+        currency=ql.USDCurrency(),
+        period=ql.Period(6, ql.Months), # Semiannual coupons
+        settlement_days=1, # T+1
+        bdc=ql.Following, # “next banking business day” => Following
+        end_of_month=False, # Irrelevant when scheduling by days
+    ),
 }
 
 
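
Note: every index and curve constant moves to a namespaced scheme (REFERENCE_RATE__* for fixings, ZERO_CURVE__* for curves), the module now registers a UST constant via create_constants_if_not_exist, and a new UST entry is added with the US GovernmentBond calendar, Act/Act (Bond) accrual, semiannual coupons, and T+1 settlement. A hypothetical migration map for 2.x code that looked constants up by the old names; both columns are read verbatim from the replacements above:

    # Old constant name -> new constant name (per this hunk).
    # REFERENCE_RATE__UST is new in 3.x and has no 2.x counterpart.
    CONSTANT_RENAMES = {
        "TIIE_28_UID": "REFERENCE_RATE__TIIE_28",
        "TIIE_91_UID": "REFERENCE_RATE__TIIE_91",
        "TIIE_182_UID": "REFERENCE_RATE__TIIE_182",
        "TIIE_OVERNIGHT_UID": "REFERENCE_RATE__TIIE_OVERNIGHT",
        "CETE_28_UID": "REFERENCE_RATE__CETE_28",
        "CETE_91_UID": "REFERENCE_RATE__CETE_91",
        "CETE_182_UID": "REFERENCE_RATE__CETE_182",
        "TIIE_28_ZERO_CURVE": "ZERO_CURVE__VALMER_TIIE_28",
        "M_BONOS_ZERO_CURVE": "ZERO_CURVE__BANXICO_M_BONOS_OTR",
    }
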
mainsequence/instruments/settings.py
@@ -7,168 +7,8 @@ import sys
 from pathlib import Path
 from types import SimpleNamespace
 
+ENV_PREFIX = "MSI"
 
-HERE = Path(__file__).resolve().parent
-# ---------------- App identity & app dirs ----------------
-APP_VENDOR = "mainsequence"
-APP_NAME = "instruments"
-APP_ID = f"{APP_VENDOR}/{APP_NAME}"
 
-# All environment variables use this prefix now.
-ENV_PREFIX = "MSI" # e.g., MSI_CONFIG_FILE, MSI_DATA_BACKEND
-ENV_CONFIG_FILE = f"{ENV_PREFIX}_CONFIG_FILE"
-ENV_DEFAULT_TOML_FILE = f"{ENV_PREFIX}_DEFAULT_TOML_FILE"
-
-
-def _user_config_root() -> Path:
-    if sys.platform == "win32":
-        base = Path(os.environ.get("APPDATA", Path.home() / "AppData" / "Roaming"))
-    elif sys.platform == "darwin":
-        base = Path.home() / "Library" / "Application Support"
-    else:
-        base = Path(os.environ.get("XDG_CONFIG_HOME", Path.home() / ".config"))
-    return (base / APP_VENDOR / APP_NAME).resolve()
-
-APP_ROOT = _user_config_root()
-# (No POSITIONS_DIR / BUILDS_DIR / DATA_DIR and no bulk mkdir here.)
-
-# ---------------- tiny config loader (stdlib only) ----------------
-def _load_toml(text: str) -> dict:
-    try:
-        import tomllib  # py311+
-        return tomllib.loads(text)
-    except Exception:
-        return {}
-
-def _load_file_config() -> dict:
-    candidates: list[Path] = []
-    # 1) explicit path via MSI_CONFIG_FILE
-    env_cfg = os.getenv(ENV_CONFIG_FILE)
-    if env_cfg:
-        candidates.append(Path(env_cfg).expanduser())
-
-    # 2) project-local
-    candidates += [Path("./instruments.toml"), Path("./instruments.json")]
-
-    # 3) user config root
-    candidates += [APP_ROOT / "config.toml", APP_ROOT / "config.json"]
-
-    for p in candidates:
-        try:
-            if not p.exists():
-                continue
-            s = p.read_text(encoding="utf-8")
-            if p.suffix.lower() == ".toml":
-                return _load_toml(s) or {}
-            if p.suffix.lower() == ".json":
-                return json.loads(s)
-        except Exception:
-            pass
-    return {}
-
-# ---------------- default TOML (read from a real file next to this module) -----
-def _read_default_toml_text() -> str | None:
-    """
-    Returns the text of the default TOML configuration.
-    Order of precedence:
-      1) Path from MSI_DEFAULT_TOML_FILE (if set)
-      2) instruments.default.toml next to this file
-    Returns None if no default file is found/readable.
-    """
-    candidate = Path(os.getenv(ENV_DEFAULT_TOML_FILE, HERE / "instruments.default.toml")).expanduser()
-    try:
-        if candidate.exists():
-            return candidate.read_text(encoding="utf-8")
-    except Exception:
-        pass
-    return None
-
-def _existing_config_path() -> Path | None:
-    env_cfg = os.getenv(ENV_CONFIG_FILE)
-    if env_cfg:
-        p = Path(env_cfg).expanduser()
-        if p.exists():
-            return p
-    for p in (
-        Path("./instruments.toml"),
-        Path("./instruments.json"),
-        APP_ROOT / "config.toml",
-        APP_ROOT / "config.json",
-    ):
-        if p.exists():
-            return p
-    return None
-
-def _ensure_default_config_file() -> Path | None:
-    """If no config exists anywhere, create one. Never overwrites existing."""
-    if _existing_config_path() is not None:
-        return None
-    default_text = _read_default_toml_text()
-    if default_text is None:
-        return None
-
-    target = Path(os.getenv(ENV_CONFIG_FILE, APP_ROOT / "config.toml")).expanduser()
-    try:
-        target.parent.mkdir(parents=True, exist_ok=True)  # ensure parent dir only
-        if not target.exists():
-            target.write_text(default_text, encoding="utf-8")
-    except Exception:
-        return None
-    return target
-
-# Create a default config file if none is present anywhere.
-_ensure_default_config_file()
-
-# Now load the config (env still overrides)
-_CFG = _load_file_config()
-
-def _get(key: str, default: str) -> str:
-    # Env overrides config file (MSI_<KEY>)
-    v = os.getenv(f"{ENV_PREFIX}_{key}")
-    if v is not None:
-        return v
-    try:
-        section, leaf = key.lower().split(".", 1)
-        return _CFG.get(section, {}).get(leaf, default)
-    except Exception:
-        return _CFG.get(key, default)
-
-# ---------------- Your existing constants (with overrides) ----------------
-# Tables
-DISCOUNT_CURVES_TABLE = _get("DISCOUNT_CURVES_TABLE", "discount_curves")
-REFERENCE_RATES_FIXING_TABLE = _get("REFERENCE_RATES_FIXING_TABLE", "fixing_rates_1d")
-
-# Curve identifiers
-TIIE_28_ZERO_CURVE = _get("TIIE_28_ZERO_CURVE", "F_TIIE_28_VALMER")
-M_BONOS_ZERO_CURVE = _get("M_BONOS_ZERO_CURVE", "M_BONOS_ZERO_OTR")
-
-# Index UIDs
-TIIE_28_UID = _get("TIIE_28_UID", "TIIE_28")
-TIIE_91_UID = _get("TIIE_91_UID", "TIIE_91")
-TIIE_182_UID = _get("TIIE_182_UID", "TIIE_182")
-TIIE_OVERNIGHT_UID = _get("TIIE_OVERNIGHT_UID", "TIIE_OVERNIGHT")
-
-CETE_28_UID = _get("CETE_28_UID", "CETE_28")
-CETE_91_UID = _get("CETE_91_UID", "CETE_91")
-CETE_182_UID = _get("CETE_182_UID", "CETE_182")
-
-# Optional file locations (let your code decide how to use them)
-TIIE_ZERO_CSV = (_CFG.get("files", {}) or {}).get("tiie_zero_csv")
-TIIE28_FIXINGS_CSV = (_CFG.get("files", {}) or {}).get("tiie28_fixings_csv")
-
-# ---------------- Convenience namespaces for legacy import sites ------------
-indices = SimpleNamespace(
-    TIIE_28_UID=TIIE_28_UID,
-    TIIE_91_UID=TIIE_91_UID,
-    TIIE_182_UID=TIIE_182_UID,
-    TIIE_OVERNIGHT_UID=TIIE_OVERNIGHT_UID,
-    CETE_28_UID=CETE_28_UID,
-    CETE_91_UID=CETE_91_UID,
-    CETE_182_UID=CETE_182_UID,
-)
-curves = SimpleNamespace(
-    TIIE_28_ZERO_CURVE=TIIE_28_ZERO_CURVE,
-    M_BONOS_ZERO_CURVE=M_BONOS_ZERO_CURVE,
-)
-DATA_BACKEND = os.getenv(f"{ENV_PREFIX}_DATA_BACKEND", (_CFG.get("data", {}) or {}).get("backend", "mainsequence"))
+DATA_BACKEND = os.getenv(f"{ENV_PREFIX}_DATA_BACKEND","mainsequence")
 data = SimpleNamespace(backend=DATA_BACKEND)
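
Note: this deletes the entire local configuration layer (~160 lines: per-OS config roots, TOML/JSON loaders, default-config bootstrap, env-over-file precedence, and the legacy indices/curves SimpleNamespace exports). In 3.x those values come from backend Constants instead (see data_interface.py above), and MSI_CONFIG_FILE / MSI_DEFAULT_TOML_FILE appear to no longer have any effect. The surviving module is small enough to quote; this is a reconstruction from the context and + lines of the hunk, not the full file:

    import os
    from types import SimpleNamespace

    ENV_PREFIX = "MSI"
    DATA_BACKEND = os.getenv(f"{ENV_PREFIX}_DATA_BACKEND", "mainsequence")
    data = SimpleNamespace(backend=DATA_BACKEND)
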
mainsequence/tdag/config.py
@@ -112,8 +112,8 @@ class TimeSeriesOGM:
         return target_path
 
     @property
-    def local_metadata_path(self):
-        target_path = os.path.join(f"{self.time_series_folder}", "metadata")
+    def data_node_update_path(self):
+        target_path = os.path.join(f"{self.time_series_folder}", "data_node_update")
         self.verify_exist(target_path=target_path)
         return target_path
 
mainsequence/tdag/data_nodes/build_operations.py
@@ -211,10 +211,10 @@ def prepare_config_kwargs(kwargs: Dict[str, Any]) -> Tuple[Dict[str, Any], Dict[
 def verify_backend_git_hash_with_pickle(local_persist_manager:PersistManager,
                                         time_serie_class: "DataNode") -> None:
     """Verifies if the git hash in the backend matches the one from the pickled object."""
-    if local_persist_manager.metadata is not None:
+    if local_persist_manager.data_node_storage is not None:
         load_git_hash = get_data_node_source_code_git_hash(time_serie_class)
 
-        persisted_pickle_hash = local_persist_manager.metadata.time_serie_source_code_git_hash
+        persisted_pickle_hash = local_persist_manager.data_node_storage.time_serie_source_code_git_hash
         if load_git_hash != persisted_pickle_hash:
             local_persist_manager.logger.warning(
                 f"{bcolors.WARNING}Source code does not match with pickle rebuilding{bcolors.ENDC}")
@@ -228,7 +228,7 @@ def verify_backend_git_hash_with_pickle(local_persist_manager:PersistManager,
             rebuild_time_serie.persist_to_pickle()
     else:
         # if no need to rebuild, just sync the metadata
-        local_persist_manager.synchronize_metadata(local_metadata=None)
+        local_persist_manager.synchronize_data_node_storage(data_node_update=None)
 
 def hash_signature(dictionary: Dict[str, Any]) -> Tuple[str, str]:
     """
mainsequence/tdag/data_nodes/data_nodes.py
@@ -34,7 +34,7 @@ from abc import ABC
 
 from typing import Union
 
-from mainsequence.client import LocalTimeSerie, CONSTANTS, \
+from mainsequence.client import DataNodeUpdate, CONSTANTS, \
     DynamicTableDataSource, AssetTranslationTable
 
 from functools import wraps
@@ -106,7 +106,7 @@ class DataAccessMixin:
 
     def __repr__(self) -> str:
         try:
-            local_id = self.local_time_serie.id
+            local_id = self.data_node_update.id
         except:
             local_id = 0
         repr = self.__class__.__name__ + f" {os.environ['TDAG_ENDPOINT']}/local-time-series/details/?local_time_serie_id={local_id}"
@@ -350,14 +350,14 @@ class APIDataNode(DataAccessMixin):
 
 
     @classmethod
-    def build_from_local_time_serie(cls, source_table: "LocalTimeSerie") -> "APIDataNode":
+    def build_from_local_time_serie(cls, source_table: "DataNodeUpdate") -> "APIDataNode":
         return cls(data_source_id=source_table.data_source.id,
                    storage_hash=source_table.storage_hash
                    )
 
     @classmethod
     def build_from_table_id(cls, table_id: str) -> "APIDataNode":
-        table = ms_client.DynamicTableMetaData.get(id=table_id)
+        table = ms_client.DataNodeStorage.get(id=table_id)
         ts = cls(
             data_source_id=table.data_source.id,
             storage_hash=table.storage_hash
@@ -367,7 +367,7 @@ class APIDataNode(DataAccessMixin):
     @classmethod
     def build_from_identifier(cls, identifier: str) -> "APIDataNode":
 
-        table = ms_client.DynamicTableMetaData.get(identifier=identifier)
+        table = ms_client.DataNodeStorage.get(identifier=identifier)
         ts = cls(
             data_source_id=table.data_source.id,
             storage_hash=table.storage_hash
@@ -462,9 +462,9 @@ class APIDataNode(DataAccessMixin):
     def _set_local_persist_manager(self) -> None:
         self._verify_local_data_source()
         self._local_persist_manager = APIPersistManager(storage_hash=self.storage_hash, data_source_id=self.data_source_id)
-        metadata = self._local_persist_manager.metadata
+        data_node_storage = self._local_persist_manager.data_node_storage
 
-        assert metadata is not None, f"Verify that the table {self.storage_hash} exists "
+        assert data_node_storage is not None, f"Verify that the table {self.storage_hash} exists "
 
 
 
@@ -480,7 +480,7 @@ class APIDataNode(DataAccessMixin):
             A tuple containing the last update time for the table and a dictionary of last update times per asset.
         """
 
-        return self.local_persist_manager.metadata.sourcetableconfiguration.get_data_updates()
+        return self.local_persist_manager.data_node_storage.sourcetableconfiguration.get_data_updates()
 
     def get_earliest_updated_asset_filter(self, unique_identifier_list: list,
                                           last_update_per_asset: dict) -> datetime.datetime:
@@ -537,7 +537,7 @@ class DataNode(DataAccessMixin,ABC):
                  *args,
                  **kwargs):
         """
-        Initializes the DataNode object with the provided metadata and configurations. For extension of the method
+        Initializes the DataNode object with the provided data_node_storage and configurations. For extension of the method
 
         This method sets up the time series object, loading the necessary configurations
         and metadata.
@@ -548,7 +548,7 @@ class DataNode(DataAccessMixin,ABC):
         - init_meta
         - build_meta_data
 
-        Each DataNode instance will create a update_hash and a LocalTimeSerie instance in the Data Engine by uniquely hashing
+        Each DataNode instance will create a update_hash and a DataNodeUpdate instance in the Data Engine by uniquely hashing
         the same arguments as the table but excluding the arguments inside _LOCAL_KWARGS_TO_IGNORE
 
 
@@ -715,12 +715,12 @@ class DataNode(DataAccessMixin,ABC):
 
 
     @property
-    def local_time_serie(self) -> LocalTimeSerie:
+    def data_node_update(self) -> DataNodeUpdate:
         """The local time series metadata object."""
-        return self.local_persist_manager.local_metadata
+        return self.local_persist_manager.data_node_update
 
     @property
-    def metadata(self) -> "DynamicTableMetaData":
+    def metadata(self) -> "DataNodeStorage":
         return self.local_persist_manager.metadata
 
 
@@ -768,7 +768,7 @@ class DataNode(DataAccessMixin,ABC):
         if graph_depth <= graph_depth_limit and self.data_source.related_resource_class_type:
             self._set_local_persist_manager(
                 update_hash=self.update_hash,
-                local_metadata=None,
+                data_node_update=None,
             )
 
         deserializer = build_operations.DeserializerManager()
@@ -782,7 +782,7 @@ class DataNode(DataAccessMixin,ABC):
 
         self.__dict__.update(state)
 
-        self.local_persist_manager.synchronize_metadata(local_metadata=None)
+        self.local_persist_manager.synchronize_metadata(data_node_update=None)
 
     def _prepare_state_for_pickle(self, state: Dict[str, Any]) -> Dict[str, Any]:
         """
@@ -803,8 +803,8 @@ class DataNode(DataAccessMixin,ABC):
             "local_persist_manager",
             "logger",
             "init_meta",
-            "_local_metadata_future",
-            "_local_metadata_lock",
+            "_data_node_update_future",
+            "_data_node_update_lock",
             "_local_persist_manager",
             "update_tracker",
         ]:
@@ -822,7 +822,7 @@ class DataNode(DataAccessMixin,ABC):
 
         return properties
     def _set_local_persist_manager(self, update_hash: str,
-                                   local_metadata: Union[None, dict] = None,
+                                   data_node_update: Union[None, dict] = None,
 
                                    ) -> None:
         """
@@ -835,13 +835,13 @@ class DataNode(DataAccessMixin,ABC):
             The local hash ID for the time series.
         storage_hash : str
             The remote table hash name for the time series.
-        local_metadata : Union[None, dict], optional
+        data_node_update : Union[None, dict], optional
             Local metadata for the time series, if available.
         """
         self._local_persist_manager = PersistManager.get_from_data_type(
             update_hash=update_hash,
             class_name=self.__class__.__name__,
-            local_metadata=local_metadata,
+            data_node_update=data_node_update,
             data_source=self.data_source
         )
 
@@ -883,7 +883,7 @@ class DataNode(DataAccessMixin,ABC):
 
         """Sets the node relationships in the backend by calling the dependencies() method."""
 
-        if self.local_persist_manager.local_metadata is None:
+        if self.local_persist_manager.data_node_update is None:
             self.verify_and_build_remote_objects() #
         if self.local_persist_manager.is_local_relation_tree_set():
             return
@@ -911,7 +911,7 @@ class DataNode(DataAccessMixin,ABC):
         self.depth_df = depth_df
         if not depth_df.empty:
             self.dependencies_df = depth_df[
-                depth_df["local_time_serie_id"] != self.local_time_serie.id].copy()
+                depth_df["data_node_update_id"] != self.data_node_update.id].copy()
         else:
             self.dependencies_df = pd.DataFrame()
 
@@ -1108,7 +1108,7 @@ class WrapperDataNode(DataNode):
         """
         from mainsequence.client import DoesNotExist
         try:
-            metadata = ms_client.DynamicTableMetaData.get(identifier=table_identifier)
+            metadata = ms_client.DataNodeStorage.get(identifier=table_identifier)
 
         except DoesNotExist as e:
             raise e
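
Note: data_nodes.py carries the same rename through the client layer: LocalTimeSerie becomes DataNodeUpdate, DynamicTableMetaData becomes DataNodeStorage, and DataNode.local_time_serie becomes DataNode.data_node_update (its pickled future/lock attributes are renamed to match). One detail worth flagging: DataNode.__setstate__ still calls synchronize_metadata (with the new data_node_update kwarg) while build_operations.py calls synchronize_data_node_storage; whether both methods exist on PersistManager is not visible in this diff. A hedged migration sketch for downstream code, using only call forms that appear verbatim above (the identifier string is a placeholder):

    import mainsequence.client as ms_client
    from mainsequence.client import DataNodeUpdate  # replaces LocalTimeSerie

    # Table lookup (was ms_client.DynamicTableMetaData.get):
    table = ms_client.DataNodeStorage.get(identifier="some_table_identifier")
    # On a DataNode instance (was my_node.local_time_serie.id):
    # update_id = my_node.data_node_update.id
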