mainsequence 2.0.4rc0__py3-none-any.whl → 3.0.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. mainsequence/cli/cli.py +4 -7
  2. mainsequence/cli/ssh_utils.py +17 -2
  3. mainsequence/client/__init__.py +3 -3
  4. mainsequence/client/base.py +3 -3
  5. mainsequence/client/data_sources_interfaces/timescale.py +20 -19
  6. mainsequence/client/exceptions.py +11 -0
  7. mainsequence/client/models_helpers.py +2 -2
  8. mainsequence/client/models_tdag.py +104 -87
  9. mainsequence/client/models_vam.py +9 -9
  10. mainsequence/dashboards/streamlit/core/theme.py +128 -109
  11. mainsequence/dashboards/streamlit/scaffold.py +3 -0
  12. mainsequence/instruments/__init__.py +1 -1
  13. mainsequence/instruments/data_interface/__init__.py +1 -1
  14. mainsequence/instruments/data_interface/data_interface.py +31 -11
  15. mainsequence/instruments/instruments/bond.py +8 -0
  16. mainsequence/instruments/pricing_models/indices.py +26 -14
  17. mainsequence/instruments/settings.py +2 -162
  18. mainsequence/tdag/config.py +2 -2
  19. mainsequence/tdag/data_nodes/build_operations.py +3 -3
  20. mainsequence/tdag/data_nodes/data_nodes.py +23 -23
  21. mainsequence/tdag/data_nodes/persist_managers.py +121 -121
  22. mainsequence/tdag/data_nodes/run_operations.py +25 -25
  23. mainsequence/virtualfundbuilder/contrib/apps/portfolio_report_app.py +1 -1
  24. mainsequence/virtualfundbuilder/contrib/prices/data_nodes.py +2 -2
  25. mainsequence/virtualfundbuilder/data_nodes.py +1 -1
  26. mainsequence/virtualfundbuilder/portfolio_interface.py +7 -7
  27. mainsequence/virtualfundbuilder/utils.py +2 -2
  28. {mainsequence-2.0.4rc0.dist-info → mainsequence-3.0.2.dist-info}/METADATA +1 -1
  29. {mainsequence-2.0.4rc0.dist-info → mainsequence-3.0.2.dist-info}/RECORD +33 -32
  30. {mainsequence-2.0.4rc0.dist-info → mainsequence-3.0.2.dist-info}/WHEEL +0 -0
  31. {mainsequence-2.0.4rc0.dist-info → mainsequence-3.0.2.dist-info}/entry_points.txt +0 -0
  32. {mainsequence-2.0.4rc0.dist-info → mainsequence-3.0.2.dist-info}/licenses/LICENSE +0 -0
  33. {mainsequence-2.0.4rc0.dist-info → mainsequence-3.0.2.dist-info}/top_level.txt +0 -0
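The bulk of the models_tdag.py change below is a consistent rename of the client ORM models and of the fields and keyword arguments that reference them. A minimal sketch of that mapping, read directly off the hunks that follow (names only; not an official migration guide):

```python
# Rename map inferred from the 2.0.4rc0 -> 3.0.2 diff of mainsequence/client/models_tdag.py:
#   LocalTimeSerie                 -> DataNodeUpdate
#   LocalTimeSerieUpdateDetails    -> DataNodeUpdateDetails
#   DynamicTableMetaData           -> DataNodeStorage
#   .remote_table                  -> .data_node_storage
#   .localtimeserieupdatedetails   -> .update_details
#   local_metadata=... kwargs      -> data_node_update=... kwargs
# The import path itself is unchanged; only the exported names differ:
from mainsequence.client import DataNodeUpdate  # 2.0.4rc0: from mainsequence.client import LocalTimeSerie
```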
mainsequence/client/models_tdag.py

@@ -1,4 +1,6 @@
- from importlib.metadata import metadata
+ from __future__ import annotations
+
+ from asyncio import exceptions
 
  import yaml
 
@@ -32,6 +34,9 @@ import concurrent.futures
 
  from cachetools import TTLCache, cachedmethod
  from operator import attrgetter
+ from mainsequence.client import exceptions
+
+
 
  _default_data_source = None # Module-level cache
 
@@ -105,7 +110,7 @@ class ColumnMetaData(BasePydanticModel, BaseObjectOrm):
 
  class SourceTableConfiguration(BasePydanticModel, BaseObjectOrm):
  id: Optional[int] = Field(None, description="Primary key, auto-incremented ID")
- related_table: Union[int, "DynamicTableMetaData"]
+ related_table: Union[int, "DataNodeStorage"]
  time_index_name: str = Field(..., max_length=100, description="Time index name")
  column_dtypes_map: Dict[str, Any] = Field(..., description="Column data types map")
  index_names: List
@@ -191,25 +196,25 @@ class ColumnMetaData(BasePydanticModel):
  description: str = Field(..., description="Detailed description")
 
 
- class LocalTimeSerie(BasePydanticModel, BaseObjectOrm):
+ class DataNodeUpdate(BasePydanticModel, BaseObjectOrm):
  id: Optional[int] = Field(None, description="Primary key, auto-incremented ID")
  update_hash: str = Field(..., max_length=63, description="Max length of PostgreSQL table name")
- remote_table: Union[int, "DynamicTableMetaData"]
+ data_node_storage: Union[int, "DataNodeStorage"]
  build_configuration: Dict[str, Any] = Field(..., description="Configuration in JSON format")
  build_meta_data: Optional[Dict[str, Any]] = Field(None, description="Optional YAML metadata")
  ogm_dependencies_linked: bool = Field(default=False, description="OGM dependencies linked flag")
  tags: Optional[list[str]] = Field(default=[], description="List of tags")
  description: Optional[str] = Field(None, description="Optional HTML description")
- localtimeserieupdatedetails: Optional[Union["LocalTimeSerieUpdateDetails", int]] = None
+ update_details: Optional[Union["DataNodeUpdateDetails", int]] = None
  run_configuration: Optional["RunConfiguration"] = None
  open_for_everyone: bool = Field(default=False, description="Whether the ts is open for everyone")
 
  @property
  def data_source_id(self):
- if isinstance(self.remote_table.data_source, int):
- return self.remote_table.data_source
+ if isinstance(self.data_node_storage.data_source, int):
+ return self.data_node_storage.data_source
  else:
- return self.remote_table.data_source.id
+ return self.data_node_storage.data_source.id
 
  @classmethod
  def get_or_create(cls, **kwargs):
@@ -247,8 +252,8 @@ class LocalTimeSerie(BasePydanticModel, BaseObjectOrm):
  r = make_request(s=s, loaders=cls.LOADERS, r_type="POST", url=url, payload=payload, time_out=timeout)
  if r.status_code != 200:
  raise Exception(f"{r.text}")
- all_metadatas = {m["update_hash"]: m for m in r.json()}
- return all_metadatas
+ all_data_node_storage = {m["update_hash"]: m for m in r.json()}
+ return all_data_node_storage
 
  def set_start_of_execution(self, **kwargs):
  s = self.build_session()
@@ -332,16 +337,16 @@ class LocalTimeSerie(BasePydanticModel, BaseObjectOrm):
  raise Exception(f"Error in request ")
 
  @classmethod
- def set_last_update_index_time(cls, metadata, timeout=None):
+ def set_last_update_index_time(cls, data_node_storage, timeout=None):
  s = cls.build_session()
- url = cls.get_object_url() + f"/{metadata['id']}/set_last_update_index_time/"
+ url = cls.get_object_url() + f"/{data_node_storage['id']}/set_last_update_index_time/"
  r = make_request(s=s, loaders=cls.LOADERS, r_type="GET", url=url, time_out=timeout)
 
  if r.status_code == 404:
  raise SourceTableConfigurationDoesNotExist
 
  if r.status_code != 200:
- raise Exception(f"{metadata['update_hash']}{r.text}")
+ raise Exception(f"{data_node_storage['update_hash']}{r.text}")
  return r
 
  def set_last_update_index_time_from_update_stats(
@@ -350,7 +355,7 @@ class LocalTimeSerie(BasePydanticModel, BaseObjectOrm):
  max_per_asset_symbol,
  multi_index_column_stats,
  timeout=None
- ) -> "LocalTimeSerie":
+ ) -> "DataNodeUpdate":
  s = self.build_session()
  url = self.get_object_url() + f"/{self.id}/set_last_update_index_time_from_update_stats/"
 
@@ -373,7 +378,7 @@ class LocalTimeSerie(BasePydanticModel, BaseObjectOrm):
 
  if r.status_code != 200:
  raise Exception(f"{self.update_hash}{r.text}")
- return LocalTimeSerie(**r.json())
+ return DataNodeUpdate(**r.json())
 
  @classmethod
  def create_historical_update(cls, *args, **kwargs):
@@ -448,13 +453,13 @@ class LocalTimeSerie(BasePydanticModel, BaseObjectOrm):
  *args, **kwargs
  ):
 
- return self.remote_table.get_data_between_dates_from_api(*args, **kwargs)
+ return self.data_node_storage.get_data_between_dates_from_api(*args, **kwargs)
 
  @classmethod
- def insert_data_into_table(cls, local_metadata_id, records: List[dict],
+ def insert_data_into_table(cls, data_node_update_id, records: List[dict],
  overwrite=True, add_insertion_time=False):
  s = cls.build_session()
- url = cls.get_object_url() + f"/{local_metadata_id}/insert_data_into_table/"
+ url = cls.get_object_url() + f"/{data_node_update_id}/insert_data_into_table/"
 
  chunk_json_str = json.dumps(records)
  compressed = gzip.compress(chunk_json_str.encode('utf-8'))
@@ -485,7 +490,7 @@ class LocalTimeSerie(BasePydanticModel, BaseObjectOrm):
  cls,
  serialized_data_frame: pd.DataFrame,
  chunk_size: int = 50_000,
- local_metadata: dict = None,
+ data_node_update: "DataNodeUpdate" = None,
  data_source: str = None,
  index_names: list = None,
  time_index_name: str = 'timestamp',
@@ -496,7 +501,7 @@ class LocalTimeSerie(BasePydanticModel, BaseObjectOrm):
  If a chunk is too large (HTTP 413), it's automatically split in half and retried.
  """
  s = cls.build_session()
- url = cls.get_object_url() + f"/{local_metadata.id}/insert_data_into_table/"
+ url = cls.get_object_url() + f"/{data_node_update.id}/insert_data_into_table/"
 
  def _send_chunk_recursively(df_chunk: pd.DataFrame, chunk_idx: int, total_chunks: int,
  is_sub_chunk: bool = False):
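Call sites that used to pass `local_metadata=` to `post_data_frame_in_chunks` now pass the `DataNodeUpdate` instance as `data_node_update=`, as the two hunks above show. A hedged sketch of the new call shape (the DataFrame, the node object, and the index layout are placeholders; `data_source` is left at its default here):

```python
import pandas as pd

from mainsequence.client import DataNodeUpdate

def push_frame(node_update: DataNodeUpdate, df: pd.DataFrame) -> None:
    # 3.0.2 keyword is data_node_update (2.0.4rc0 used local_metadata);
    # chunking and the HTTP 413 split-and-retry behaviour are unchanged.
    DataNodeUpdate.post_data_frame_in_chunks(
        serialized_data_frame=df,
        data_node_update=node_update,
        index_names=["timestamp"],      # placeholder index layout
        time_index_name="timestamp",
    )
```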
@@ -575,7 +580,7 @@ class LocalTimeSerie(BasePydanticModel, BaseObjectOrm):
  _send_chunk_recursively(chunk_df, i, total_chunks)
 
  @classmethod
- def get_metadatas_and_set_updates(
+ def get_data_nodes_and_set_updates(
  cls,
  local_time_series_ids: list,
  update_details_kwargs,
@@ -603,9 +608,9 @@ class LocalTimeSerie(BasePydanticModel, BaseObjectOrm):
  r = r.json()
  r["source_table_config_map"] = {int(k): SourceTableConfiguration(**v) if v is not None else v for k, v in
  r["source_table_config_map"].items()}
- r["state_data"] = {int(k): LocalTimeSerieUpdateDetails(**v) for k, v in r["state_data"].items()}
+ r["state_data"] = {int(k): DataNodeUpdateDetails(**v) for k, v in r["state_data"].items()}
  r["all_index_stats"] = {int(k): v for k, v in r["all_index_stats"].items()}
- r["local_metadatas"] = [LocalTimeSerie(**v) for v in r["local_metadatas"]]
+ r["data_node_updates"] = [DataNodeUpdate(**v) for v in r["local_metadatas"]]
  return r
 
  def depends_on_connect(self, target_time_serie_id
@@ -662,7 +667,7 @@ class LocalTimeSerie(BasePydanticModel, BaseObjectOrm):
  ):
 
  overwrite = True # ALWAYS OVERWRITE
- metadata = self.remote_table
+ metadata = self.data_node_storage
 
  data, index_names, column_dtypes_map, time_index_name = self._break_pandas_dataframe(
  data)
@@ -695,7 +700,7 @@ class LocalTimeSerie(BasePydanticModel, BaseObjectOrm):
 
  data_source.insert_data_into_table(
  serialized_data_frame=data,
- local_metadata=self,
+ data_node_update=self,
  overwrite=overwrite,
  time_index_name=time_index_name,
  index_names=index_names,
@@ -717,16 +722,16 @@ class LocalTimeSerie(BasePydanticModel, BaseObjectOrm):
  uid: extract_max(stats)
  for uid, stats in global_stats["_PER_ASSET_"].items()
  }
- local_metadata = self.set_last_update_index_time_from_update_stats(
+ data_node_update = self.set_last_update_index_time_from_update_stats(
  max_per_asset_symbol=max_per_asset_symbol,
  last_time_index_value=last_time_index_value,
  multi_index_column_stats=multi_index_column_stats
  )
- return local_metadata
+ return data_node_update
 
  def get_node_time_to_wait(self):
 
- next_update = self.localtimeserieupdatedetails.next_update
+ next_update = self.update_details.next_update
  time_to_wait = 0.0
  if next_update is not None:
  time_to_wait = (pd.to_datetime(next_update) - datetime.datetime.now(pytz.utc)).total_seconds()
@@ -745,13 +750,40 @@ class LocalTimeSerie(BasePydanticModel, BaseObjectOrm):
  time.sleep(time_to_wait)
 
 
+
+ class DataNodeUpdateDetails(BasePydanticModel, BaseObjectOrm):
+ related_table: Union[int, DataNodeUpdate]
+ active_update: bool = Field(default=False, description="Flag to indicate if update is active")
+ update_pid: int = Field(default=0, description="Process ID of the update")
+ error_on_last_update: bool = Field(default=False,
+ description="Flag to indicate if there was an error in the last update")
+ last_update: Optional[datetime.datetime] = Field(None, description="Timestamp of the last update")
+ next_update: Optional[datetime.datetime] = Field(None, description="Timestamp of the next update")
+ update_statistics: Optional[Dict[str, Any]] = Field(None, description="JSON field for update statistics")
+ active_update_status: str = Field(default="Q", max_length=20, description="Current update status")
+ active_update_scheduler: Optional[Union[int, Scheduler]] = Field(None,
+ description="Scheduler for active update")
+ update_priority: int = Field(default=0, description="Priority level of the update")
+ last_updated_by_user: Optional[int] = Field(None, description="Foreign key reference to User")
+
+ run_configuration: Optional["RunConfiguration"] = None
+
+ @staticmethod
+ def _parse_parameters_filter(parameters):
+ for key, value in parameters.items():
+ if "__in" in key:
+ assert isinstance(value, list)
+ parameters[key] = ",".join(value)
+ return parameters
+
+
  class TableMetaData(BaseModel):
  identifier: str = None
  description: Optional[str] = None
  data_frequency_id: Optional[DataFrequency] = None
 
 
- class DynamicTableMetaData(BasePydanticModel, BaseObjectOrm):
+ class DataNodeStorage(BasePydanticModel, BaseObjectOrm):
  id: int = Field(None, description="Primary key, auto-incremented ID")
  storage_hash: str = Field(..., max_length=63, description="Max length of PostgreSQL table name")
  table_name: Optional[str] = Field(None, max_length=63, description="Max length of PostgreSQL table name")
@@ -790,7 +822,10 @@ class DynamicTableMetaData(BasePydanticModel, BaseObjectOrm):
  s = self.build_session()
  r = make_request(s=s, loaders=self.LOADERS, r_type="PATCH", url=url, payload=payload, time_out=time_out)
  if r.status_code != 200:
- raise Exception(f"Error in request {r.text}")
+ data = r.json() # guaranteed JSON from your backend
+ if r.status_code == 409:
+ raise exceptions.ConflictError(data["error"])
+ raise exceptions.ApiError(data["error"])
  return self.__class__(**r.json())
 
  @classmethod
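Instead of a bare `Exception("Error in request ...")`, the PATCH path above now maps backend errors onto the new `mainsequence.client.exceptions` module (added in this release: `exceptions.py`, +11 lines), raising `ConflictError` for HTTP 409 and `ApiError` for other non-200 responses. A minimal sketch of how a caller might handle both; the method name `patch` and its keyword argument are placeholders, since the enclosing method's signature is not visible in this hunk:

```python
from mainsequence.client import exceptions

def rename_storage(storage, new_description: str):
    # `storage` stands for a DataNodeStorage instance obtained elsewhere;
    # `patch` is a placeholder name for the update method shown in the hunk above.
    try:
        return storage.patch(description=new_description)
    except exceptions.ConflictError as exc:
        # backend answered 409; the JSON "error" field becomes the message
        print(f"conflict: {exc}")
    except exceptions.ApiError as exc:
        # any other non-200 response
        print(f"api error: {exc}")
```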
@@ -1000,9 +1035,9 @@ class Scheduler(BasePydanticModel, BaseObjectOrm):
  api_address: Optional[str]
  api_port: Optional[int]
  last_heart_beat: Optional[datetime.datetime] = None
- pre_loads_in_tree: Optional[List[LocalTimeSerie]] = None # Assuming this is a list of strings
- in_active_tree: Optional[List[LocalTimeSerie]] = None # Assuming this is a list of strings
- schedules_to: Optional[List[LocalTimeSerie]] = None
+ pre_loads_in_tree: Optional[List[DataNodeUpdate]] = None # Assuming this is a list of strings
+ in_active_tree: Optional[List[DataNodeUpdate]] = None # Assuming this is a list of strings
+ schedules_to: Optional[List[DataNodeUpdate]] = None
  # for heartbeat
  _stop_heart_beat: bool = False
  _executor: Optional[object] = None
@@ -1010,7 +1045,7 @@ class Scheduler(BasePydanticModel, BaseObjectOrm):
  @classmethod
  def get_scheduler_for_ts(cls, ts_id: int):
  """
- GET /schedulers/for-ts/?ts_id=<LocalTimeSerie PK>
+ GET /schedulers/for-ts/?ts_id=<DataNodeUpdate PK>
  """
  s = cls.build_session()
  url = cls.get_object_url() + "/for-ts/"
@@ -1198,31 +1233,6 @@ class RunConfiguration(BasePydanticModel, BaseObjectOrm):
  return None
 
 
- class LocalTimeSerieUpdateDetails(BasePydanticModel, BaseObjectOrm):
- related_table: Union[int, LocalTimeSerie]
- active_update: bool = Field(default=False, description="Flag to indicate if update is active")
- update_pid: int = Field(default=0, description="Process ID of the update")
- error_on_last_update: bool = Field(default=False,
- description="Flag to indicate if there was an error in the last update")
- last_update: Optional[datetime.datetime] = Field(None, description="Timestamp of the last update")
- next_update: Optional[datetime.datetime] = Field(None, description="Timestamp of the next update")
- update_statistics: Optional[Dict[str, Any]] = Field(None, description="JSON field for update statistics")
- active_update_status: str = Field(default="Q", max_length=20, description="Current update status")
- active_update_scheduler: Optional[Union[int, Scheduler]] = Field(None,
- description="Scheduler for active update")
- update_priority: int = Field(default=0, description="Priority level of the update")
- last_updated_by_user: Optional[int] = Field(None, description="Foreign key reference to User")
-
- run_configuration: Optional["RunConfiguration"] = None
-
- @staticmethod
- def _parse_parameters_filter(parameters):
- for key, value in parameters.items():
- if "__in" in key:
- assert isinstance(value, list)
- parameters[key] = ",".join(value)
- return parameters
-
 
  class UpdateStatistics(BaseModel):
  """
@@ -1746,7 +1756,7 @@ class DataSource(BasePydanticModel, BaseObjectOrm):
  def insert_data_into_table(
  self,
  serialized_data_frame: pd.DataFrame,
- local_metadata: LocalTimeSerie,
+ data_node_update: DataNodeUpdate,
  overwrite: bool,
  time_index_name: str,
  index_names: list,
@@ -1756,12 +1766,12 @@ class DataSource(BasePydanticModel, BaseObjectOrm):
  if self.class_type == DUCK_DB:
  DuckDBInterface().upsert(
  df=serialized_data_frame,
- table=local_metadata.remote_table.table_name
+ table=data_node_update.data_node_storage.table_name
  )
  else:
- LocalTimeSerie.post_data_frame_in_chunks(
+ DataNodeUpdate.post_data_frame_in_chunks(
  serialized_data_frame=serialized_data_frame,
- local_metadata=local_metadata,
+ data_node_update=data_node_update,
  data_source=self,
  index_names=index_names,
  time_index_name=time_index_name,
@@ -1771,16 +1781,16 @@ class DataSource(BasePydanticModel, BaseObjectOrm):
  def insert_data_into_local_table(
  self,
  serialized_data_frame: pd.DataFrame,
- local_metadata: LocalTimeSerie,
+ data_node_update: DataNodeUpdate,
  overwrite: bool,
  time_index_name: str,
  index_names: list,
  grouped_dates: dict,
  ):
 
- # LocalTimeSerie.post_data_frame_in_chunks(
+ # DataNodeUpdate.post_data_frame_in_chunks(
  # serialized_data_frame=serialized_data_frame,
- # local_metadata=local_metadata,
+ # data_node_update=data_node_update,
  # data_source=self,
  # index_names=index_names,
  # time_index_name=time_index_name,
@@ -1790,7 +1800,7 @@ class DataSource(BasePydanticModel, BaseObjectOrm):
 
  def get_data_by_time_index(
  self,
- local_metadata: dict,
+ data_node_update: "DataNodeUpdate",
  start_date: Optional[datetime.datetime] = None,
  end_date: Optional[datetime.datetime] = None,
  great_or_equal: bool = True,
@@ -1801,10 +1811,9 @@ class DataSource(BasePydanticModel, BaseObjectOrm):
  column_range_descriptor: Optional[Dict[str, UniqueIdentifierRangeMap]] = None,
  ) -> pd.DataFrame:
 
- logger.warning("EXTEND THE CONSTRAIN READ HERE!!")
  if self.class_type == DUCK_DB:
  db_interface = DuckDBInterface()
- table_name = local_metadata.remote_table.table_name
+ table_name = data_node_update.data_node_storage.table_name
 
  adjusted_start, adjusted_end, adjusted_uirm, _ = db_interface.constrain_read(
  table=table_name,
@@ -1835,7 +1844,7 @@ class DataSource(BasePydanticModel, BaseObjectOrm):
  else:
  if column_range_descriptor is not None:
  raise Exception("On this data source do not use column_range_descriptor")
- df = local_metadata.get_data_between_dates_from_api(
+ df = data_node_update.get_data_between_dates_from_api(
  start_date=start_date,
  end_date=end_date,
  great_or_equal=great_or_equal,
@@ -1846,11 +1855,11 @@ class DataSource(BasePydanticModel, BaseObjectOrm):
  )
  if len(df) == 0:
  logger.warning(
- f"No data returned from remote API for {local_metadata.update_hash}"
+ f"No data returned from remote API for {data_node_update.update_hash}"
  )
  return df
 
- stc = local_metadata.remote_table.sourcetableconfiguration
+ stc = data_node_update.data_node_storage.sourcetableconfiguration
  try:
  df[stc.time_index_name] = pd.to_datetime(df[stc.time_index_name], format='ISO8601')
  except Exception as e:
@@ -1867,11 +1876,11 @@ class DataSource(BasePydanticModel, BaseObjectOrm):
  return df
 
  def get_earliest_value(self,
- local_metadata: LocalTimeSerie,
+ data_node_update: DataNodeUpdate,
  ) -> Tuple[Optional[pd.Timestamp], Dict[Any, Optional[pd.Timestamp]]]:
  if self.class_type == DUCK_DB:
  db_interface = DuckDBInterface()
- table_name = local_metadata.remote_table.table_name
+ table_name = data_node_update.data_node_storage.table_name
  return db_interface.time_index_minima(table=table_name)
 
 
@@ -1935,7 +1944,7 @@ class DynamicTableDataSource(BasePydanticModel, BaseObjectOrm):
 
  def get_data_by_time_index(self, *args, **kwargs):
  if self.has_direct_postgres_connection():
- stc = kwargs["local_metadata"].remote_table.sourcetableconfiguration
+ stc = kwargs["data_node_update"].data_node_storage.sourcetableconfiguration
 
  df = TimeScaleInterface.direct_data_from_db(
  connection_uri=self.related_resource.get_connection_uri(),
@@ -1998,16 +2007,16 @@ class TimeScaleDB(DataSource):
  def insert_data_into_table(
  self,
  serialized_data_frame: pd.DataFrame,
- local_metadata: dict,
+ data_node_update: "DataNodeUpdate",
  overwrite: bool,
  time_index_name: str,
  index_names: list,
  grouped_dates: dict,
  ):
 
- LocalTimeSerie.post_data_frame_in_chunks(
+ DataNodeUpdate.post_data_frame_in_chunks(
  serialized_data_frame=serialized_data_frame,
- local_metadata=local_metadata,
+ data_node_update=data_node_update,
  data_source=self,
  index_names=index_names,
  time_index_name=time_index_name,
@@ -2033,7 +2042,7 @@ class TimeScaleDB(DataSource):
  column_types=column_types
  )
  else:
- df = LocalTimeSerie.get_data_between_dates_from_api(
+ df = DataNodeUpdate.get_data_between_dates_from_api(
  update_hash=update_hash,
  data_source_id=self.id,
  start_date=None,
@@ -2049,7 +2058,7 @@ class TimeScaleDB(DataSource):
 
  def get_data_by_time_index(
  self,
- local_metadata: dict,
+ data_node_update: DataNodeUpdate,
  start_date: Optional[datetime.datetime] = None,
  end_date: Optional[datetime.datetime] = None,
  great_or_equal: bool = True,
@@ -2059,9 +2068,9 @@ class TimeScaleDB(DataSource):
 
  ) -> pd.DataFrame:
 
- metadata = local_metadata.remote_table
+ metadata = data_node_update.data_node_storage
 
- df = local_metadata.get_data_between_dates_from_api(
+ df = data_node_update.get_data_between_dates_from_api(
 
  start_date=start_date,
  end_date=end_date,
@@ -2073,11 +2082,11 @@ class TimeScaleDB(DataSource):
  if len(df) == 0:
  if logger:
  logger.warning(
- f"No data returned from remote API for {local_metadata.update_hash}"
+ f"No data returned from remote API for {data_node_update.update_hash}"
  )
  return df
 
- stc = local_metadata.remote_table.sourcetableconfiguration
+ stc = data_node_update.data_node_storage.sourcetableconfiguration
  df[stc.time_index_name] = pd.to_datetime(df[stc.time_index_name])
  for c, c_type in stc.column_dtypes_map.items():
  if c != stc.time_index_name:
@@ -2236,8 +2245,8 @@ class PodDataSource:
  )
 
  # drop local tables that are not in registered in the backend anymore (probably have been deleted)
- remote_tables = DynamicTableMetaData.filter(data_source__id=duckdb_dynamic_data_source.id, list_tables=True)
- remote_table_names = [t.table_name for t in remote_tables]
+ remote_node_storages = DataNodeStorage.filter(data_source__id=duckdb_dynamic_data_source.id, list_tables=True)
+ remote_table_names = [t.table_name for t in remote_node_storages]
  from mainsequence.client.data_sources_interfaces.duckdb import DuckDBInterface
  from mainsequence.client.utils import DataFrequency
  db_interface = DuckDBInterface()
@@ -2249,7 +2258,7 @@ class PodDataSource:
  db_interface.drop_table(table_name)
 
  tables_to_delete_remotely = set(remote_table_names) - set(local_table_names)
- for remote_table in remote_tables:
+ for remote_table in remote_node_storages:
  if remote_table.table_name in tables_to_delete_remotely:
  logger.debug(f"Deleting table remotely {remote_table.table_name}")
  if remote_table.protect_from_deletion:
@@ -2402,3 +2411,11 @@ class Constant(BasePydanticModel, BaseObjectOrm):
 
  SessionDataSource = PodDataSource()
  SessionDataSource.set_remote_db()
+
+ DataNodeUpdateDetails.model_rebuild()
+ DataNodeUpdate.model_rebuild()
+ RunConfiguration.model_rebuild()
+ SourceTableConfiguration.model_rebuild()
+ DataNodeStorage.model_rebuild()
+ DynamicTableDataSource.model_rebuild()
+ DataSource.model_rebuild()
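The trailing `model_rebuild()` calls are the standard Pydantic v2 way to resolve the string forward references these models use for one another (for example `Union[int, "DataNodeStorage"]` on `SourceTableConfiguration` and `DataNodeUpdate`); they have to run after every class in the module is defined. A self-contained sketch of the same pattern with toy models:

```python
from typing import Optional, Union

from pydantic import BaseModel

class NodeUpdate(BaseModel):
    # Forward reference: NodeStorage is only defined further down the module.
    storage: Union[int, "NodeStorage"]

class NodeStorage(BaseModel):
    table_name: Optional[str] = None

# Without this call, instantiating NodeUpdate raises a
# "`NodeUpdate` is not fully defined" error, because "NodeStorage"
# was still an unresolved string when the class was created.
NodeUpdate.model_rebuild()

print(NodeUpdate(storage=NodeStorage(table_name="prices")))
```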
mainsequence/client/models_vam.py

@@ -15,7 +15,7 @@ import time
 
  from enum import IntEnum, Enum
  from decimal import Decimal
- from mainsequence.client import LocalTimeSerie
+ from mainsequence.client import DataNodeUpdate
 
  from .base import BasePydanticModel, BaseObjectOrm, MARKETS_CONSTANTS as CONSTANTS, TDAG_ENDPOINT, API_ENDPOINT, HtmlSaveException
  from .utils import AuthLoaders, make_request, DoesNotExist, request_to_datetime, DATE_FORMAT
@@ -1427,8 +1427,8 @@ class PortfolioAbout(TypedDict):
  class PortfolioMixin:
  id: Optional[int] = None
  is_active: bool = False
- local_time_serie: Optional['LocalTimeSerie']
- signal_local_time_serie: Optional['LocalTimeSerie']
+ data_node_update: Optional['DataNodeUpdate']
+ signal_data_node_update: Optional['DataNodeUpdate']
  follow_account_rebalance: bool = False
  comparable_portfolios: Optional[List[int]] = None
  backtest_table_price_column_name: Optional[str] = Field(None, max_length=20)
@@ -1454,8 +1454,8 @@ class PortfolioMixin:
  def create_from_time_series(
  cls,
  portfolio_name: str,
- local_time_serie_id: int,
- signal_local_time_serie_id: int,
+ data_node_update_id: int,
+ signal_data_node_update_id: int,
  is_active: bool,
  calendar_name: str,
  target_portfolio_about: PortfolioAbout,
@@ -1468,8 +1468,8 @@ class PortfolioMixin:
  payload_data = {
  "portfolio_name": portfolio_name,
  "is_active": is_active,
- "local_time_serie_id": local_time_serie_id,
- "signal_local_time_serie_id": signal_local_time_serie_id,
+ "data_node_update_id": data_node_update_id,
+ "signal_data_node_update_id": signal_data_node_update_id,
  # Using the same ID for local_signal_time_serie_id as specified.
  "calendar_name": calendar_name,
  "target_portfolio_about": target_portfolio_about,
@@ -1512,9 +1512,9 @@ class PortfolioMixin:
  def get_historical_weights(self,
  start_date_timestamp:float,end_date_timestamp:float,
  timeout=None)->Dict[str, float]:
- if self.local_time_serie is None:
+ if self.data_node_update is None:
  print("this portfolio does not have a weights table")
- self.local_time_serie
+ self.data_node_update
 
 
  class Portfolio(PortfolioMixin, BaseObjectOrm, BasePydanticModel):