mainsequence 2.0.4b0-py3-none-any.whl → 3.0.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. mainsequence/cli/cli.py +17 -10
  2. mainsequence/cli/ssh_utils.py +17 -2
  3. mainsequence/client/__init__.py +3 -3
  4. mainsequence/client/base.py +3 -3
  5. mainsequence/client/data_sources_interfaces/timescale.py +20 -19
  6. mainsequence/client/models_helpers.py +2 -2
  7. mainsequence/client/models_tdag.py +96 -86
  8. mainsequence/client/models_vam.py +9 -9
  9. mainsequence/dashboards/streamlit/assets/image_1_base64.txt +1 -0
  10. mainsequence/dashboards/streamlit/assets/image_2_base64.txt +1 -0
  11. mainsequence/dashboards/streamlit/assets/image_3_base64.txt +1 -0
  12. mainsequence/dashboards/streamlit/assets/image_4_base64.txt +1 -0
  13. mainsequence/dashboards/streamlit/assets/image_5_base64.txt +1 -0
  14. mainsequence/dashboards/streamlit/core/theme.py +2 -2
  15. mainsequence/instruments/__init__.py +1 -1
  16. mainsequence/instruments/data_interface/__init__.py +1 -1
  17. mainsequence/instruments/data_interface/data_interface.py +3 -4
  18. mainsequence/instruments/pricing_models/indices.py +29 -14
  19. mainsequence/instruments/settings.py +2 -162
  20. mainsequence/tdag/config.py +2 -2
  21. mainsequence/tdag/data_nodes/build_operations.py +3 -3
  22. mainsequence/tdag/data_nodes/data_nodes.py +23 -23
  23. mainsequence/tdag/data_nodes/persist_managers.py +121 -121
  24. mainsequence/tdag/data_nodes/run_operations.py +25 -25
  25. mainsequence/virtualfundbuilder/contrib/apps/portfolio_report_app.py +1 -1
  26. mainsequence/virtualfundbuilder/contrib/prices/data_nodes.py +2 -2
  27. mainsequence/virtualfundbuilder/data_nodes.py +1 -1
  28. mainsequence/virtualfundbuilder/portfolio_interface.py +7 -7
  29. mainsequence/virtualfundbuilder/utils.py +2 -2
  30. {mainsequence-2.0.4b0.dist-info → mainsequence-3.0.1.dist-info}/METADATA +1 -1
  31. {mainsequence-2.0.4b0.dist-info → mainsequence-3.0.1.dist-info}/RECORD +35 -30
  32. {mainsequence-2.0.4b0.dist-info → mainsequence-3.0.1.dist-info}/WHEEL +0 -0
  33. {mainsequence-2.0.4b0.dist-info → mainsequence-3.0.1.dist-info}/entry_points.txt +0 -0
  34. {mainsequence-2.0.4b0.dist-info → mainsequence-3.0.1.dist-info}/licenses/LICENSE +0 -0
  35. {mainsequence-2.0.4b0.dist-info → mainsequence-3.0.1.dist-info}/top_level.txt +0 -0
@@ -1,4 +1,5 @@
- from importlib.metadata import metadata
+ from __future__ import annotations
+
 
  import yaml
 
@@ -105,7 +106,7 @@ class ColumnMetaData(BasePydanticModel, BaseObjectOrm):
 
  class SourceTableConfiguration(BasePydanticModel, BaseObjectOrm):
  id: Optional[int] = Field(None, description="Primary key, auto-incremented ID")
- related_table: Union[int, "DynamicTableMetaData"]
+ related_table: Union[int, "DataNodeStorage"]
  time_index_name: str = Field(..., max_length=100, description="Time index name")
  column_dtypes_map: Dict[str, Any] = Field(..., description="Column data types map")
  index_names: List
@@ -191,25 +192,25 @@ class ColumnMetaData(BasePydanticModel):
  description: str = Field(..., description="Detailed description")
 
 
- class LocalTimeSerie(BasePydanticModel, BaseObjectOrm):
+ class DataNodeUpdate(BasePydanticModel, BaseObjectOrm):
  id: Optional[int] = Field(None, description="Primary key, auto-incremented ID")
  update_hash: str = Field(..., max_length=63, description="Max length of PostgreSQL table name")
- remote_table: Union[int, "DynamicTableMetaData"]
+ data_node_storage: Union[int, "DataNodeStorage"]
  build_configuration: Dict[str, Any] = Field(..., description="Configuration in JSON format")
  build_meta_data: Optional[Dict[str, Any]] = Field(None, description="Optional YAML metadata")
  ogm_dependencies_linked: bool = Field(default=False, description="OGM dependencies linked flag")
  tags: Optional[list[str]] = Field(default=[], description="List of tags")
  description: Optional[str] = Field(None, description="Optional HTML description")
- localtimeserieupdatedetails: Optional[Union["LocalTimeSerieUpdateDetails", int]] = None
+ update_details: Optional[Union["DataNodeUpdateDetails", int]] = None
  run_configuration: Optional["RunConfiguration"] = None
  open_for_everyone: bool = Field(default=False, description="Whether the ts is open for everyone")
 
  @property
  def data_source_id(self):
- if isinstance(self.remote_table.data_source, int):
- return self.remote_table.data_source
+ if isinstance(self.data_node_storage.data_source, int):
+ return self.data_node_storage.data_source
  else:
- return self.remote_table.data_source.id
+ return self.data_node_storage.data_source.id
 
  @classmethod
  def get_or_create(cls, **kwargs):
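
Note: the hunk above is the central rename in this diff: LocalTimeSerie becomes DataNodeUpdate, its remote_table field becomes data_node_storage, and localtimeserieupdatedetails becomes update_details. A minimal, hypothetical migration sketch for downstream code, assuming an authenticated mainsequence client session (the update_hash and attribute chain are illustrative):

    # Hypothetical caller-side migration; names follow the renames shown in the diff.
    from mainsequence.client import DataNodeUpdate  # 2.x: from mainsequence.client import LocalTimeSerie

    node_update = DataNodeUpdate.get_or_create(update_hash="example_update_hash")  # illustrative kwargs

    storage = node_update.data_node_storage   # 2.x: node_update.remote_table
    details = node_update.update_details      # 2.x: node_update.localtimeserieupdatedetails
    print(node_update.data_source_id, storage, details)
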
@@ -247,8 +248,8 @@ class LocalTimeSerie(BasePydanticModel, BaseObjectOrm):
  r = make_request(s=s, loaders=cls.LOADERS, r_type="POST", url=url, payload=payload, time_out=timeout)
  if r.status_code != 200:
  raise Exception(f"{r.text}")
- all_metadatas = {m["update_hash"]: m for m in r.json()}
- return all_metadatas
+ all_data_node_storage = {m["update_hash"]: m for m in r.json()}
+ return all_data_node_storage
 
  def set_start_of_execution(self, **kwargs):
  s = self.build_session()
@@ -332,16 +333,16 @@ class LocalTimeSerie(BasePydanticModel, BaseObjectOrm):
  raise Exception(f"Error in request ")
 
  @classmethod
- def set_last_update_index_time(cls, metadata, timeout=None):
+ def set_last_update_index_time(cls, data_node_storage, timeout=None):
  s = cls.build_session()
- url = cls.get_object_url() + f"/{metadata['id']}/set_last_update_index_time/"
+ url = cls.get_object_url() + f"/{data_node_storage['id']}/set_last_update_index_time/"
  r = make_request(s=s, loaders=cls.LOADERS, r_type="GET", url=url, time_out=timeout)
 
  if r.status_code == 404:
  raise SourceTableConfigurationDoesNotExist
 
  if r.status_code != 200:
- raise Exception(f"{metadata['update_hash']}{r.text}")
+ raise Exception(f"{data_node_storage['update_hash']}{r.text}")
  return r
 
  def set_last_update_index_time_from_update_stats(
@@ -350,7 +351,7 @@ class LocalTimeSerie(BasePydanticModel, BaseObjectOrm):
  max_per_asset_symbol,
  multi_index_column_stats,
  timeout=None
- ) -> "LocalTimeSerie":
+ ) -> "DataNodeUpdate":
  s = self.build_session()
  url = self.get_object_url() + f"/{self.id}/set_last_update_index_time_from_update_stats/"
 
@@ -373,7 +374,7 @@ class LocalTimeSerie(BasePydanticModel, BaseObjectOrm):
 
  if r.status_code != 200:
  raise Exception(f"{self.update_hash}{r.text}")
- return LocalTimeSerie(**r.json())
+ return DataNodeUpdate(**r.json())
 
  @classmethod
  def create_historical_update(cls, *args, **kwargs):
@@ -448,13 +449,13 @@ class LocalTimeSerie(BasePydanticModel, BaseObjectOrm):
  *args, **kwargs
  ):
 
- return self.remote_table.get_data_between_dates_from_api(*args, **kwargs)
+ return self.data_node_storage.get_data_between_dates_from_api(*args, **kwargs)
 
  @classmethod
- def insert_data_into_table(cls, local_metadata_id, records: List[dict],
+ def insert_data_into_table(cls, data_node_update_id, records: List[dict],
  overwrite=True, add_insertion_time=False):
  s = cls.build_session()
- url = cls.get_object_url() + f"/{local_metadata_id}/insert_data_into_table/"
+ url = cls.get_object_url() + f"/{data_node_update_id}/insert_data_into_table/"
 
  chunk_json_str = json.dumps(records)
  compressed = gzip.compress(chunk_json_str.encode('utf-8'))
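
Note: insert_data_into_table keeps its shape but renames the id argument from local_metadata_id to data_node_update_id; the records list is still JSON-serialized and gzip-compressed before being posted to the /{id}/insert_data_into_table/ endpoint, as the context lines show. A hedged caller-side sketch, assuming an authenticated client session (the id and rows are illustrative):

    from mainsequence.client import DataNodeUpdate

    rows = [{"timestamp": "2024-01-02T00:00:00Z", "close": 101.5}]  # illustrative records

    DataNodeUpdate.insert_data_into_table(
        data_node_update_id=123,   # 2.x: local_metadata_id
        records=rows,
        overwrite=True,            # defaults shown in the signature above
        add_insertion_time=False,
    )
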
@@ -485,7 +486,7 @@ class LocalTimeSerie(BasePydanticModel, BaseObjectOrm):
  cls,
  serialized_data_frame: pd.DataFrame,
  chunk_size: int = 50_000,
- local_metadata: dict = None,
+ data_node_update: "DataNodeUpdate" = None,
  data_source: str = None,
  index_names: list = None,
  time_index_name: str = 'timestamp',
@@ -496,7 +497,7 @@ class LocalTimeSerie(BasePydanticModel, BaseObjectOrm):
  If a chunk is too large (HTTP 413), it's automatically split in half and retried.
  """
  s = cls.build_session()
- url = cls.get_object_url() + f"/{local_metadata.id}/insert_data_into_table/"
+ url = cls.get_object_url() + f"/{data_node_update.id}/insert_data_into_table/"
 
  def _send_chunk_recursively(df_chunk: pd.DataFrame, chunk_idx: int, total_chunks: int,
  is_sub_chunk: bool = False):
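
Note: the docstring above describes the chunked upload strategy: each chunk is posted to /{data_node_update.id}/insert_data_into_table/, and a chunk rejected with HTTP 413 is split in half and retried. A generic sketch of that split-on-413 rule, with post_chunk standing in for the actual HTTP call (a hypothetical helper, not part of the package API):

    import pandas as pd

    def send_chunk_recursively(df_chunk: pd.DataFrame, post_chunk) -> None:
        # post_chunk(df_chunk) is assumed to return an object with .status_code
        response = post_chunk(df_chunk)
        if response.status_code == 413 and len(df_chunk) > 1:
            midpoint = len(df_chunk) // 2
            # Payload too large: split in half and retry each half.
            send_chunk_recursively(df_chunk.iloc[:midpoint], post_chunk)
            send_chunk_recursively(df_chunk.iloc[midpoint:], post_chunk)
        elif response.status_code >= 400:
            raise RuntimeError(f"chunk upload failed with status {response.status_code}")
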
@@ -575,7 +576,7 @@ class LocalTimeSerie(BasePydanticModel, BaseObjectOrm):
  _send_chunk_recursively(chunk_df, i, total_chunks)
 
  @classmethod
- def get_metadatas_and_set_updates(
+ def get_data_nodes_and_set_updates(
  cls,
  local_time_series_ids: list,
  update_details_kwargs,
@@ -603,9 +604,9 @@ class LocalTimeSerie(BasePydanticModel, BaseObjectOrm):
  r = r.json()
  r["source_table_config_map"] = {int(k): SourceTableConfiguration(**v) if v is not None else v for k, v in
  r["source_table_config_map"].items()}
- r["state_data"] = {int(k): LocalTimeSerieUpdateDetails(**v) for k, v in r["state_data"].items()}
+ r["state_data"] = {int(k): DataNodeUpdateDetails(**v) for k, v in r["state_data"].items()}
  r["all_index_stats"] = {int(k): v for k, v in r["all_index_stats"].items()}
- r["local_metadatas"] = [LocalTimeSerie(**v) for v in r["local_metadatas"]]
+ r["data_node_updates"] = [DataNodeUpdate(**v) for v in r["local_metadatas"]]
  return r
 
  def depends_on_connect(self, target_time_serie_id
@@ -662,7 +663,7 @@ class LocalTimeSerie(BasePydanticModel, BaseObjectOrm):
  ):
 
  overwrite = True # ALWAYS OVERWRITE
- metadata = self.remote_table
+ metadata = self.data_node_storage
 
  data, index_names, column_dtypes_map, time_index_name = self._break_pandas_dataframe(
  data)
@@ -695,7 +696,7 @@ class LocalTimeSerie(BasePydanticModel, BaseObjectOrm):
 
  data_source.insert_data_into_table(
  serialized_data_frame=data,
- local_metadata=self,
+ data_node_update=self,
  overwrite=overwrite,
  time_index_name=time_index_name,
  index_names=index_names,
@@ -717,16 +718,16 @@ class LocalTimeSerie(BasePydanticModel, BaseObjectOrm):
  uid: extract_max(stats)
  for uid, stats in global_stats["_PER_ASSET_"].items()
  }
- local_metadata = self.set_last_update_index_time_from_update_stats(
+ data_node_update = self.set_last_update_index_time_from_update_stats(
  max_per_asset_symbol=max_per_asset_symbol,
  last_time_index_value=last_time_index_value,
  multi_index_column_stats=multi_index_column_stats
  )
- return local_metadata
+ return data_node_update
 
  def get_node_time_to_wait(self):
 
- next_update = self.localtimeserieupdatedetails.next_update
+ next_update = self.update_details.next_update
  time_to_wait = 0.0
  if next_update is not None:
  time_to_wait = (pd.to_datetime(next_update) - datetime.datetime.now(pytz.utc)).total_seconds()
@@ -745,13 +746,40 @@
  time.sleep(time_to_wait)
 
 
+
+ class DataNodeUpdateDetails(BasePydanticModel, BaseObjectOrm):
+ related_table: Union[int, DataNodeUpdate]
+ active_update: bool = Field(default=False, description="Flag to indicate if update is active")
+ update_pid: int = Field(default=0, description="Process ID of the update")
+ error_on_last_update: bool = Field(default=False,
+ description="Flag to indicate if there was an error in the last update")
+ last_update: Optional[datetime.datetime] = Field(None, description="Timestamp of the last update")
+ next_update: Optional[datetime.datetime] = Field(None, description="Timestamp of the next update")
+ update_statistics: Optional[Dict[str, Any]] = Field(None, description="JSON field for update statistics")
+ active_update_status: str = Field(default="Q", max_length=20, description="Current update status")
+ active_update_scheduler: Optional[Union[int, Scheduler]] = Field(None,
+ description="Scheduler for active update")
+ update_priority: int = Field(default=0, description="Priority level of the update")
+ last_updated_by_user: Optional[int] = Field(None, description="Foreign key reference to User")
+
+ run_configuration: Optional["RunConfiguration"] = None
+
+ @staticmethod
+ def _parse_parameters_filter(parameters):
+ for key, value in parameters.items():
+ if "__in" in key:
+ assert isinstance(value, list)
+ parameters[key] = ",".join(value)
+ return parameters
+
+
  class TableMetaData(BaseModel):
  identifier: str = None
  description: Optional[str] = None
  data_frequency_id: Optional[DataFrequency] = None
 
 
- class DynamicTableMetaData(BasePydanticModel, BaseObjectOrm):
+ class DataNodeStorage(BasePydanticModel, BaseObjectOrm):
  id: int = Field(None, description="Primary key, auto-incremented ID")
  storage_hash: str = Field(..., max_length=63, description="Max length of PostgreSQL table name")
  table_name: Optional[str] = Field(None, max_length=63, description="Max length of PostgreSQL table name")
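
Note: DataNodeUpdateDetails is added here (the old LocalTimeSerieUpdateDetails definition is removed in a later hunk), including the _parse_parameters_filter helper that normalizes list-valued "__in" filters before they are sent as query parameters. A plain illustration of the transformation it performs:

    # Illustration of the "__in" normalization in _parse_parameters_filter above.
    params = {"update_hash__in": ["hash_a", "hash_b"], "active_update": True}

    for key, value in list(params.items()):
        if "__in" in key:
            assert isinstance(value, list)
            params[key] = ",".join(value)

    print(params)  # {'update_hash__in': 'hash_a,hash_b', 'active_update': True}
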
@@ -1000,9 +1028,9 @@ class Scheduler(BasePydanticModel, BaseObjectOrm):
  api_address: Optional[str]
  api_port: Optional[int]
  last_heart_beat: Optional[datetime.datetime] = None
- pre_loads_in_tree: Optional[List[LocalTimeSerie]] = None # Assuming this is a list of strings
- in_active_tree: Optional[List[LocalTimeSerie]] = None # Assuming this is a list of strings
- schedules_to: Optional[List[LocalTimeSerie]] = None
+ pre_loads_in_tree: Optional[List[DataNodeUpdate]] = None # Assuming this is a list of strings
+ in_active_tree: Optional[List[DataNodeUpdate]] = None # Assuming this is a list of strings
+ schedules_to: Optional[List[DataNodeUpdate]] = None
  # for heartbeat
  _stop_heart_beat: bool = False
  _executor: Optional[object] = None
@@ -1010,7 +1038,7 @@ class Scheduler(BasePydanticModel, BaseObjectOrm):
  @classmethod
  def get_scheduler_for_ts(cls, ts_id: int):
  """
- GET /schedulers/for-ts/?ts_id=<LocalTimeSerie PK>
+ GET /schedulers/for-ts/?ts_id=<DataNodeUpdate PK>
  """
  s = cls.build_session()
  url = cls.get_object_url() + "/for-ts/"
@@ -1198,31 +1226,6 @@ class RunConfiguration(BasePydanticModel, BaseObjectOrm):
  return None
 
 
- class LocalTimeSerieUpdateDetails(BasePydanticModel, BaseObjectOrm):
- related_table: Union[int, LocalTimeSerie]
- active_update: bool = Field(default=False, description="Flag to indicate if update is active")
- update_pid: int = Field(default=0, description="Process ID of the update")
- error_on_last_update: bool = Field(default=False,
- description="Flag to indicate if there was an error in the last update")
- last_update: Optional[datetime.datetime] = Field(None, description="Timestamp of the last update")
- next_update: Optional[datetime.datetime] = Field(None, description="Timestamp of the next update")
- update_statistics: Optional[Dict[str, Any]] = Field(None, description="JSON field for update statistics")
- active_update_status: str = Field(default="Q", max_length=20, description="Current update status")
- active_update_scheduler: Optional[Union[int, Scheduler]] = Field(None,
- description="Scheduler for active update")
- update_priority: int = Field(default=0, description="Priority level of the update")
- last_updated_by_user: Optional[int] = Field(None, description="Foreign key reference to User")
-
- run_configuration: Optional["RunConfiguration"] = None
-
- @staticmethod
- def _parse_parameters_filter(parameters):
- for key, value in parameters.items():
- if "__in" in key:
- assert isinstance(value, list)
- parameters[key] = ",".join(value)
- return parameters
-
 
  class UpdateStatistics(BaseModel):
  """
@@ -1746,7 +1749,7 @@ class DataSource(BasePydanticModel, BaseObjectOrm):
  def insert_data_into_table(
  self,
  serialized_data_frame: pd.DataFrame,
- local_metadata: LocalTimeSerie,
+ data_node_update: DataNodeUpdate,
  overwrite: bool,
  time_index_name: str,
  index_names: list,
@@ -1756,12 +1759,12 @@ class DataSource(BasePydanticModel, BaseObjectOrm):
  if self.class_type == DUCK_DB:
  DuckDBInterface().upsert(
  df=serialized_data_frame,
- table=local_metadata.remote_table.table_name
+ table=data_node_update.data_node_storage.table_name
  )
  else:
- LocalTimeSerie.post_data_frame_in_chunks(
+ DataNodeUpdate.post_data_frame_in_chunks(
  serialized_data_frame=serialized_data_frame,
- local_metadata=local_metadata,
+ data_node_update=data_node_update,
  data_source=self,
  index_names=index_names,
  time_index_name=time_index_name,
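
Note: DataSource.insert_data_into_table keeps the same dispatch, now keyed on the renamed argument: DuckDB-backed sources upsert the frame directly, everything else goes through DataNodeUpdate.post_data_frame_in_chunks. A hedged caller-side sketch, where data_source and node_update stand for an already-configured DataSource and DataNodeUpdate; grouped_dates appears in the subclass signatures and its expected shape is not specified in this diff:

    import pandas as pd

    df = pd.DataFrame(
        {"timestamp": pd.to_datetime(["2024-01-02", "2024-01-03"]), "close": [101.5, 102.0]}
    )

    data_source.insert_data_into_table(
        serialized_data_frame=df,
        data_node_update=node_update,   # 2.x: local_metadata=...
        overwrite=True,
        time_index_name="timestamp",
        index_names=["timestamp"],
        grouped_dates={},               # assumed placeholder
    )
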
@@ -1771,16 +1774,16 @@ class DataSource(BasePydanticModel, BaseObjectOrm):
  def insert_data_into_local_table(
  self,
  serialized_data_frame: pd.DataFrame,
- local_metadata: LocalTimeSerie,
+ data_node_update: DataNodeUpdate,
  overwrite: bool,
  time_index_name: str,
  index_names: list,
  grouped_dates: dict,
  ):
 
- # LocalTimeSerie.post_data_frame_in_chunks(
+ # DataNodeUpdate.post_data_frame_in_chunks(
  # serialized_data_frame=serialized_data_frame,
- # local_metadata=local_metadata,
+ # data_node_update=data_node_update,
  # data_source=self,
  # index_names=index_names,
  # time_index_name=time_index_name,
@@ -1790,7 +1793,7 @@ class DataSource(BasePydanticModel, BaseObjectOrm):
 
  def get_data_by_time_index(
  self,
- local_metadata: dict,
+ data_node_update: "DataNodeUpdate",
  start_date: Optional[datetime.datetime] = None,
  end_date: Optional[datetime.datetime] = None,
  great_or_equal: bool = True,
@@ -1801,10 +1804,9 @@ class DataSource(BasePydanticModel, BaseObjectOrm):
  column_range_descriptor: Optional[Dict[str, UniqueIdentifierRangeMap]] = None,
  ) -> pd.DataFrame:
 
- logger.warning("EXTEND THE CONSTRAIN READ HERE!!")
  if self.class_type == DUCK_DB:
  db_interface = DuckDBInterface()
- table_name = local_metadata.remote_table.table_name
+ table_name = data_node_update.data_node_storage.table_name
 
  adjusted_start, adjusted_end, adjusted_uirm, _ = db_interface.constrain_read(
  table=table_name,
@@ -1835,7 +1837,7 @@ class DataSource(BasePydanticModel, BaseObjectOrm):
  else:
  if column_range_descriptor is not None:
  raise Exception("On this data source do not use column_range_descriptor")
- df = local_metadata.get_data_between_dates_from_api(
+ df = data_node_update.get_data_between_dates_from_api(
  start_date=start_date,
  end_date=end_date,
  great_or_equal=great_or_equal,
@@ -1846,11 +1848,11 @@ class DataSource(BasePydanticModel, BaseObjectOrm):
  )
  if len(df) == 0:
  logger.warning(
- f"No data returned from remote API for {local_metadata.update_hash}"
+ f"No data returned from remote API for {data_node_update.update_hash}"
  )
  return df
 
- stc = local_metadata.remote_table.sourcetableconfiguration
+ stc = data_node_update.data_node_storage.sourcetableconfiguration
  try:
  df[stc.time_index_name] = pd.to_datetime(df[stc.time_index_name], format='ISO8601')
  except Exception as e:
@@ -1867,11 +1869,11 @@ class DataSource(BasePydanticModel, BaseObjectOrm):
  return df
 
  def get_earliest_value(self,
- local_metadata: LocalTimeSerie,
+ data_node_update: DataNodeUpdate,
  ) -> Tuple[Optional[pd.Timestamp], Dict[Any, Optional[pd.Timestamp]]]:
  if self.class_type == DUCK_DB:
  db_interface = DuckDBInterface()
- table_name = local_metadata.remote_table.table_name
+ table_name = data_node_update.data_node_storage.table_name
  return db_interface.time_index_minima(table=table_name)
 
 
@@ -1935,7 +1937,7 @@ class DynamicTableDataSource(BasePydanticModel, BaseObjectOrm):
 
  def get_data_by_time_index(self, *args, **kwargs):
  if self.has_direct_postgres_connection():
- stc = kwargs["local_metadata"].remote_table.sourcetableconfiguration
+ stc = kwargs["data_node_update"].data_node_storage.sourcetableconfiguration
 
  df = TimeScaleInterface.direct_data_from_db(
  connection_uri=self.related_resource.get_connection_uri(),
@@ -1998,16 +2000,16 @@ class TimeScaleDB(DataSource):
  def insert_data_into_table(
  self,
  serialized_data_frame: pd.DataFrame,
- local_metadata: dict,
+ data_node_update: "DataNodeUpdate",
  overwrite: bool,
  time_index_name: str,
  index_names: list,
  grouped_dates: dict,
  ):
 
- LocalTimeSerie.post_data_frame_in_chunks(
+ DataNodeUpdate.post_data_frame_in_chunks(
  serialized_data_frame=serialized_data_frame,
- local_metadata=local_metadata,
+ data_node_update=data_node_update,
  data_source=self,
  index_names=index_names,
  time_index_name=time_index_name,
@@ -2033,7 +2035,7 @@ class TimeScaleDB(DataSource):
  column_types=column_types
  )
  else:
- df = LocalTimeSerie.get_data_between_dates_from_api(
+ df = DataNodeUpdate.get_data_between_dates_from_api(
  update_hash=update_hash,
  data_source_id=self.id,
  start_date=None,
@@ -2049,7 +2051,7 @@ class TimeScaleDB(DataSource):
 
  def get_data_by_time_index(
  self,
- local_metadata: dict,
+ data_node_update: DataNodeUpdate,
  start_date: Optional[datetime.datetime] = None,
  end_date: Optional[datetime.datetime] = None,
  great_or_equal: bool = True,
@@ -2059,9 +2061,9 @@ class TimeScaleDB(DataSource):
 
  ) -> pd.DataFrame:
 
- metadata = local_metadata.remote_table
+ metadata = data_node_update.data_node_storage
 
- df = local_metadata.get_data_between_dates_from_api(
+ df = data_node_update.get_data_between_dates_from_api(
 
  start_date=start_date,
  end_date=end_date,
@@ -2073,11 +2075,11 @@ class TimeScaleDB(DataSource):
  if len(df) == 0:
  if logger:
  logger.warning(
- f"No data returned from remote API for {local_metadata.update_hash}"
+ f"No data returned from remote API for {data_node_update.update_hash}"
  )
  return df
 
- stc = local_metadata.remote_table.sourcetableconfiguration
+ stc = data_node_update.data_node_storage.sourcetableconfiguration
  df[stc.time_index_name] = pd.to_datetime(df[stc.time_index_name])
  for c, c_type in stc.column_dtypes_map.items():
  if c != stc.time_index_name:
@@ -2236,8 +2238,8 @@ class PodDataSource:
  )
 
  # drop local tables that are not in registered in the backend anymore (probably have been deleted)
- remote_tables = DynamicTableMetaData.filter(data_source__id=duckdb_dynamic_data_source.id, list_tables=True)
- remote_table_names = [t.table_name for t in remote_tables]
+ remote_node_storages = DataNodeStorage.filter(data_source__id=duckdb_dynamic_data_source.id, list_tables=True)
+ remote_table_names = [t.table_name for t in remote_node_storages]
  from mainsequence.client.data_sources_interfaces.duckdb import DuckDBInterface
  from mainsequence.client.utils import DataFrequency
  db_interface = DuckDBInterface()
@@ -2249,7 +2251,7 @@ class PodDataSource:
  db_interface.drop_table(table_name)
 
  tables_to_delete_remotely = set(remote_table_names) - set(local_table_names)
- for remote_table in remote_tables:
+ for remote_table in remote_node_storages:
  if remote_table.table_name in tables_to_delete_remotely:
  logger.debug(f"Deleting table remotely {remote_table.table_name}")
  if remote_table.protect_from_deletion:
@@ -2402,3 +2404,11 @@ class Constant(BasePydanticModel, BaseObjectOrm):
 
  SessionDataSource = PodDataSource()
  SessionDataSource.set_remote_db()
+
+ DataNodeUpdateDetails.model_rebuild()
+ DataNodeUpdate.model_rebuild()
+ RunConfiguration.model_rebuild()
+ SourceTableConfiguration.model_rebuild()
+ DataNodeStorage.model_rebuild()
+ DynamicTableDataSource.model_rebuild()
+ DataSource.model_rebuild()
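
Note: the new module-level model_rebuild() calls resolve the string forward references used throughout these models ("DataNodeStorage", "DataNodeUpdateDetails", "RunConfiguration", and so on). Together with the from __future__ import annotations added in the first hunk, every annotation in the module becomes a deferred string, so each class must be rebuilt once its referenced classes exist. A minimal standalone illustration of the same Pydantic v2 pattern, with generic stand-in names:

    from typing import Union
    from pydantic import BaseModel

    class Update(BaseModel):
        data_node_storage: Union[int, "Storage"]  # "Storage" is not defined yet

    class Storage(BaseModel):
        table_name: str

    Update.model_rebuild()  # resolve the forward reference now that Storage exists
    print(Update(data_node_storage=Storage(table_name="prices")))
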
@@ -15,7 +15,7 @@ import time
 
  from enum import IntEnum, Enum
  from decimal import Decimal
- from mainsequence.client import LocalTimeSerie
+ from mainsequence.client import DataNodeUpdate
 
  from .base import BasePydanticModel, BaseObjectOrm, MARKETS_CONSTANTS as CONSTANTS, TDAG_ENDPOINT, API_ENDPOINT, HtmlSaveException
  from .utils import AuthLoaders, make_request, DoesNotExist, request_to_datetime, DATE_FORMAT
@@ -1427,8 +1427,8 @@ class PortfolioAbout(TypedDict):
  class PortfolioMixin:
  id: Optional[int] = None
  is_active: bool = False
- local_time_serie: Optional['LocalTimeSerie']
- signal_local_time_serie: Optional['LocalTimeSerie']
+ data_node_update: Optional['DataNodeUpdate']
+ signal_data_node_update: Optional['DataNodeUpdate']
  follow_account_rebalance: bool = False
  comparable_portfolios: Optional[List[int]] = None
  backtest_table_price_column_name: Optional[str] = Field(None, max_length=20)
@@ -1454,8 +1454,8 @@ class PortfolioMixin:
  def create_from_time_series(
  cls,
  portfolio_name: str,
- local_time_serie_id: int,
- signal_local_time_serie_id: int,
+ data_node_update_id: int,
+ signal_data_node_update_id: int,
  is_active: bool,
  calendar_name: str,
  target_portfolio_about: PortfolioAbout,
@@ -1468,8 +1468,8 @@ class PortfolioMixin:
  payload_data = {
  "portfolio_name": portfolio_name,
  "is_active": is_active,
- "local_time_serie_id": local_time_serie_id,
- "signal_local_time_serie_id": signal_local_time_serie_id,
+ "data_node_update_id": data_node_update_id,
+ "signal_data_node_update_id": signal_data_node_update_id,
  # Using the same ID for local_signal_time_serie_id as specified.
  "calendar_name": calendar_name,
  "target_portfolio_about": target_portfolio_about,
@@ -1512,9 +1512,9 @@ class PortfolioMixin:
  def get_historical_weights(self,
  start_date_timestamp:float,end_date_timestamp:float,
  timeout=None)->Dict[str, float]:
- if self.local_time_serie is None:
+ if self.data_node_update is None:
  print("this portfolio does not have a weights table")
- self.local_time_serie
+ self.data_node_update
 
 
  class Portfolio(PortfolioMixin, BaseObjectOrm, BasePydanticModel):
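
Note: the portfolio hunks mirror the rename on the caller-facing side: create_from_time_series keyword arguments and payload keys move from local_time_serie_id / signal_local_time_serie_id to data_node_update_id / signal_data_node_update_id, and the portfolio fields become data_node_update / signal_data_node_update. A hypothetical call with the new names (the import path, ids, calendar, and about payload are illustrative):

    from mainsequence.client import Portfolio  # import path assumed

    portfolio = Portfolio.create_from_time_series(
        portfolio_name="Example Portfolio",
        data_node_update_id=123,            # 2.x: local_time_serie_id
        signal_data_node_update_id=456,     # 2.x: signal_local_time_serie_id
        is_active=False,
        calendar_name="24/7",
        target_portfolio_about={"description": "illustrative PortfolioAbout payload"},
    )
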