mainsequence-2.0.4rc0-py3-none-any.whl → mainsequence-3.0.1-py3-none-any.whl

This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (29)
  1. mainsequence/cli/cli.py +4 -7
  2. mainsequence/cli/ssh_utils.py +17 -2
  3. mainsequence/client/__init__.py +3 -3
  4. mainsequence/client/base.py +3 -3
  5. mainsequence/client/data_sources_interfaces/timescale.py +20 -19
  6. mainsequence/client/models_helpers.py +2 -2
  7. mainsequence/client/models_tdag.py +96 -86
  8. mainsequence/client/models_vam.py +9 -9
  9. mainsequence/instruments/__init__.py +1 -1
  10. mainsequence/instruments/data_interface/__init__.py +1 -1
  11. mainsequence/instruments/data_interface/data_interface.py +3 -4
  12. mainsequence/instruments/pricing_models/indices.py +29 -14
  13. mainsequence/instruments/settings.py +2 -162
  14. mainsequence/tdag/config.py +2 -2
  15. mainsequence/tdag/data_nodes/build_operations.py +3 -3
  16. mainsequence/tdag/data_nodes/data_nodes.py +23 -23
  17. mainsequence/tdag/data_nodes/persist_managers.py +121 -121
  18. mainsequence/tdag/data_nodes/run_operations.py +25 -25
  19. mainsequence/virtualfundbuilder/contrib/apps/portfolio_report_app.py +1 -1
  20. mainsequence/virtualfundbuilder/contrib/prices/data_nodes.py +2 -2
  21. mainsequence/virtualfundbuilder/data_nodes.py +1 -1
  22. mainsequence/virtualfundbuilder/portfolio_interface.py +7 -7
  23. mainsequence/virtualfundbuilder/utils.py +2 -2
  24. {mainsequence-2.0.4rc0.dist-info → mainsequence-3.0.1.dist-info}/METADATA +1 -1
  25. {mainsequence-2.0.4rc0.dist-info → mainsequence-3.0.1.dist-info}/RECORD +29 -29
  26. {mainsequence-2.0.4rc0.dist-info → mainsequence-3.0.1.dist-info}/WHEEL +0 -0
  27. {mainsequence-2.0.4rc0.dist-info → mainsequence-3.0.1.dist-info}/entry_points.txt +0 -0
  28. {mainsequence-2.0.4rc0.dist-info → mainsequence-3.0.1.dist-info}/licenses/LICENSE +0 -0
  29. {mainsequence-2.0.4rc0.dist-info → mainsequence-3.0.1.dist-info}/top_level.txt +0 -0
mainsequence/cli/cli.py CHANGED
@@ -231,7 +231,6 @@ def project_open(project_id: int):
  @project.command("delete-local")
  def project_delete_local(
      project_id: int,
-     permanent: bool = typer.Option(False, "--permanent", help="Also remove the folder (dangerous)")
  ):
      """Unlink the mapped folder, optionally delete it."""
      mapped = cfg.remove_link(project_id)
@@ -240,12 +239,10 @@ def project_delete_local(
          return
      p = pathlib.Path(mapped)
      if p.exists():
-         if permanent:
-             import shutil
-             shutil.rmtree(mapped, ignore_errors=True)
-             typer.secho(f"Deleted: {mapped}", fg=typer.colors.YELLOW)
-         else:
-             typer.secho(f"Unlinked mapping (kept folder): {mapped}", fg=typer.colors.GREEN)
+         import shutil
+         shutil.rmtree(mapped, ignore_errors=True)
+         typer.secho(f"Deleted: {mapped}", fg=typer.colors.YELLOW)
+
      else:
          typer.echo("Mapping removed; folder already absent.")

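Net effect of these two hunks: the `--permanent` option is gone, and `delete-local` now always removes the mapped folder from disk. A minimal sketch of the resulting command, assuming the surrounding `cfg`, `project` group, and imports from the file (the early-return guard is inferred from the `return` context line, not shown in the diff):

import pathlib
import shutil
import typer

@project.command("delete-local")
def project_delete_local(project_id: int):
    """Unlink the mapped folder and delete it from disk."""
    mapped = cfg.remove_link(project_id)
    if not mapped:  # guard inferred from the bare `return` context line above
        return
    p = pathlib.Path(mapped)
    if p.exists():
        shutil.rmtree(mapped, ignore_errors=True)
        typer.secho(f"Deleted: {mapped}", fg=typer.colors.YELLOW)
    else:
        typer.echo("Mapping removed; folder already absent.")
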
mainsequence/cli/ssh_utils.py CHANGED
@@ -78,18 +78,33 @@ def quote_pwsh(s: str) -> str:
      return '"' + s.replace('"','``"') + '"'

  def open_signed_terminal(repo_dir: str, key_path: pathlib.Path, repo_name: str) -> None:
+     # Windows
      if sys.platform == "win32":
          ps = "; ".join([
              "$ErrorActionPreference='Stop'",
-             "Try { Set-Service -Name ssh-agent -StartupType Automatic; Start-Service ssh-agent } Catch {}",
+             # Check whether the ssh-agent service is running; if not, start an elevated PowerShell to configure it
+             "$svc = Get-Service ssh-agent",
+             "if ($svc.Status -ne 'Running') {",
+             " Write-Host 'SSH agent service is not running. Starting admin PowerShell to configure it...' -ForegroundColor Yellow",
+             " $adminScript = 'Set-Service ssh-agent -StartupType Automatic; Start-Service ssh-agent; Write-Host \"SSH agent configured successfully!\" -ForegroundColor Green; Start-Sleep -Seconds 2'",
+             " Start-Process powershell -ArgumentList '-NoProfile','-Command',$adminScript -Verb RunAs -Wait",
+             " Write-Host 'Service configured. Continuing...' -ForegroundColor Green",
+             "}",
+             # Ensure the key exists and add it to the agent
              f"if (!(Test-Path -Path {quote_pwsh(str(key_path))})) {{ ssh-keygen -t ed25519 -C 'mainsequence@main-sequence.io' -f {quote_pwsh(str(key_path))} -N '' }}",
              f"ssh-add {quote_pwsh(str(key_path))}",
              "ssh-add -l",
+             # Set GIT_SSH_COMMAND to pin this key (set-up-locally also adds the key to the ssh-agent, but the environment variable is set as well to be sure)
+             f"$env:GIT_SSH_COMMAND = 'ssh -i {quote_pwsh(str(key_path))} -o IdentitiesOnly=yes'",
              f"Set-Location {quote_pwsh(repo_dir)}",
              f"Write-Host 'SSH agent ready for {repo_name}. You can now run git.' -ForegroundColor Green"
          ])
-         subprocess.Popen(["powershell.exe","-NoExit","-Command", ps])
+         subprocess.Popen(
+             ["powershell.exe", "-NoExit", "-Command", ps],
+             creationflags=subprocess.CREATE_NEW_CONSOLE
+         )
          return
+     # macOS
      if sys.platform == "darwin":
          bash = " && ".join([
              f"cd {quote_bash(repo_dir)}",
mainsequence/client/__init__.py CHANGED
@@ -1,9 +1,9 @@

  from .utils import AuthLoaders, bios_uuid
  from .models_tdag import (request_to_datetime, LocalTimeSeriesDoesNotExist, DynamicTableDoesNotExist,
-                           SourceTableConfigurationDoesNotExist, LocalTimeSerieUpdateDetails,
-                           JSON_COMPRESSED_PREFIX, Scheduler, SchedulerDoesNotExist, LocalTimeSerie,
-                           DynamicTableMetaData, DynamicTableDataSource,DUCK_DB,
+                           SourceTableConfigurationDoesNotExist, DataNodeUpdateDetails,
+                           JSON_COMPRESSED_PREFIX, Scheduler, SchedulerDoesNotExist, DataNodeUpdate,
+                           DataNodeStorage, DynamicTableDataSource,DUCK_DB,
                            ColumnMetaData,Artifact,TableMetaData ,DataFrequency,SourceTableConfiguration,Constant,
                            Project, UniqueIdentifierRangeMap, LocalTimeSeriesHistoricalUpdate,
                            UpdateStatistics, DataSource, PodDataSource, SessionDataSource)
mainsequence/client/base.py CHANGED
@@ -113,13 +113,13 @@ class BaseObjectOrm:
      "Scheduler": "ts_manager/scheduler",
      "MultiIndexMetadata": "orm/multi_index_metadata",
      "ContinuousAggMultiIndex": "ts_manager/cont_agg_multi_ind",
-     "DynamicTableMetaData": "ts_manager/dynamic_table",
+     "DataNodeStorage": "ts_manager/dynamic_table",
      # "LocalTimeSerieNodesMethods": "ogm/local_time_serie",

      "LocalTimeSerieNodesMethods": "ts_manager/local_time_serie",

-     "LocalTimeSerie": "ts_manager/local_time_serie",
-     "LocalTimeSerieUpdateDetails": "ts_manager/local_time_serie_update_details",
+     "DataNodeUpdate": "ts_manager/local_time_serie",
+     "DataNodeUpdateDetails": "ts_manager/local_time_serie_update_details",
      "LocalTimeSerieHistoricalUpdate": "ts_manager/lts_historical_update",
      "DynamicTableDataSource": "ts_manager/dynamic_table_data_source",
      "DataSource": "pods/data_source",
mainsequence/client/data_sources_interfaces/timescale.py CHANGED
@@ -16,6 +16,7 @@ import json

  from typing import Dict, List, Union,Optional
  import datetime
+
  from mainsequence.logconf import logger
  from ..utils import DATE_FORMAT, make_request, set_types_in_table
  import os
@@ -97,7 +98,7 @@ def filter_by_assets_ranges(table_name, asset_ranges_map, index_names, data_sour
      return df


- def direct_data_from_db(local_metadata: dict, connection_uri: str,
+ def direct_data_from_db(data_node_update: "DataNodeUpdate", connection_uri: str,
                          start_date: Union[datetime.datetime, None] = None,
                          great_or_equal: bool = True, less_or_equal: bool = True,
                          end_date: Union[datetime.datetime, None] = None,
@@ -131,7 +132,7 @@ def direct_data_from_db(local_metadata: dict, connection_uri: str,
          Data from the table as a pandas DataFrame, optionally filtered by date range.
      """
      import_psycopg2()
-     metadata=local_metadata.remote_table
+     data_node_storage=data_node_update.data_node_storage
      def fast_table_dump(connection_config, table_name, ):
          query = f"COPY {table_name} TO STDOUT WITH CSV HEADER"
@@ -149,7 +150,7 @@ def direct_data_from_db(local_metadata: dict, connection_uri: str,

      # Build the WHERE clause dynamically
      where_clauses = []
-     time_index_name = metadata.sourcetableconfiguration.time_index_name
+     time_index_name = data_node_storage.sourcetableconfiguration.time_index_name
      if start_date:
          operator = ">=" if great_or_equal else ">"
          where_clauses.append(f"{time_index_name} {operator} '{start_date}'")
@@ -165,7 +166,7 @@ def direct_data_from_db(local_metadata: dict, connection_uri: str,
      where_clause = f"WHERE {' AND '.join(where_clauses)}" if where_clauses else ""

      # Construct the query
-     query = f"SELECT {select_clause} FROM {metadata.table_name} {where_clause}"
+     query = f"SELECT {select_clause} FROM {data_node_storage.table_name} {where_clause}"
      # if where_clause=="":
      #     data=fast_table_dump(connection_config, metadata['table_name'])
      #     data[metadata["sourcetableconfiguration"]['time_index_name']]=pd.to_datetime(data[metadata["sourcetableconfiguration"]['time_index_name']])
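For concreteness, a hypothetical rendering of the statement these lines assemble; every name below is a placeholder, the real values come from `sourcetableconfiguration` and the storage object:

# Placeholder inputs mirroring the query-building lines above
time_index_name = "time_index"
select_clause = "time_index, unique_identifier, close"
table_name = "example_table"
where_clauses = [f"{time_index_name} >= '2024-01-01 00:00:00+00:00'"]

where_clause = f"WHERE {' AND '.join(where_clauses)}" if where_clauses else ""
query = f"SELECT {select_clause} FROM {table_name} {where_clause}"
# -> SELECT time_index, unique_identifier, close FROM example_table WHERE time_index >= '2024-01-01 00:00:00+00:00'
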
@@ -179,12 +180,12 @@ def direct_data_from_db(local_metadata: dict, connection_uri: str,
      # Convert to DataFrame
      data = pd.DataFrame(data=data, columns=column_names)

-     data = data.set_index(metadata.sourcetableconfiguration.index_names)
+     data = data.set_index(data_node_storage.sourcetableconfiguration.index_names)

      return data


- def direct_table_update(metadata:"DynamicTableMetaData", serialized_data_frame: pd.DataFrame, overwrite: bool,
+ def direct_table_update(data_node_storage:"DataNodeStorage", serialized_data_frame: pd.DataFrame, overwrite: bool,
                          grouped_dates,
                          table_is_empty: bool,
                          time_series_orm_db_connection: Union[str, None] = None,
@@ -206,9 +207,9 @@ def direct_table_update(metadata:"DynamicTableMetaData", serialized_data_frame:
      import_psycopg2()
      columns = serialized_data_frame.columns.tolist()

-     index_names=metadata.sourcetableconfiguration.index_names
-     table_name=metadata.table_name
-     time_index_name=metadata.sourcetableconfiguration.time_index_name
+     index_names=data_node_storage.sourcetableconfiguration.index_names
+     table_name=data_node_storage.table_name
+     time_index_name=data_node_storage.sourcetableconfiguration.time_index_name
      def drop_indexes(table_name, table_index_names):
          # Use a separate connection for index management
          with psycopg2.connect(time_series_orm_db_connection) as conn:
@@ -226,8 +227,8 @@ def direct_table_update(metadata:"DynamicTableMetaData", serialized_data_frame:


      # do not drop indices this is only done on inception
-     if metadata._drop_indices==True:
-         table_index_names=metadata.sourcetableconfiguration.get_time_scale_extra_table_indices()
+     if data_node_storage._drop_indices==True:
+         table_index_names=data_node_storage.sourcetableconfiguration.get_time_scale_extra_table_indices()
          drop_indexes(table_name, table_index_names)

      if overwrite and not table_is_empty:
@@ -358,9 +359,9 @@ def direct_table_update(metadata:"DynamicTableMetaData", serialized_data_frame:
          print(f"An error occurred during single insert: {e}")
          raise
      # do not rebuild indices this is only done on inception
-     if metadata._rebuild_indices:
+     if data_node_storage._rebuild_indices:
          logger.info("Rebuilding indices...")
-         extra_indices = metadata.sourcetableconfiguration.get_time_scale_extra_table_indices()
+         extra_indices = data_node_storage.sourcetableconfiguration.get_time_scale_extra_table_indices()

          with psycopg2.connect(time_series_orm_db_connection) as conn:
              with conn.cursor() as cur:
@@ -388,7 +389,7 @@ def direct_table_update(metadata:"DynamicTableMetaData", serialized_data_frame:

  def process_and_update_table(
      serialized_data_frame,
-     local_metadata: "LocalTimeSerie",
+     data_node_update: "DataNodeUpdate",
      grouped_dates: List,
      data_source: object,
      index_names: List[str],
@@ -403,7 +404,7 @@ def process_and_update_table(

      Args:
          serialized_data_frame (pd.DataFrame): The DataFrame to process and update.
-         metadata (DynamicTableMetaData): Metadata about the table, including table configuration.
+         data_node_storage (DataNodeStorage): data_node_storage about the table, including table configuration.
          grouped_dates (list): List of grouped dates to assist with the update.
          data_source (object): A data source object with a `get_connection_uri` method.
          index_names (list): List of index column names.
@@ -416,7 +417,7 @@ def process_and_update_table(
      """
      import_psycopg2()
      JSON_COMPRESSED_PREFIX=JSON_COMPRESSED_PREFIX or []
-     metadata=local_metadata.remote_table
+     data_node_storage=data_node_update.data_node_storage
      if "unique_identifier" in serialized_data_frame.columns:
          serialized_data_frame['unique_identifier'] = serialized_data_frame['unique_identifier'].astype(str)

@@ -437,7 +438,7 @@ def process_and_update_table(
      # Handle overwrite and decompress chunks if required
      recompress = False
      if overwrite:
-         url = f"{base_url}/{metadata.id}/decompress_chunks/"
+         url = f"{base_url}/{data_node_storage.id}/decompress_chunks/"
          from ..models_vam import BaseObject
          s = BaseObject.build_session()

@@ -461,14 +462,14 @@ def process_and_update_table(
          recompress = True

      # Check if the table is empty
-     table_is_empty = metadata.sourcetableconfiguration.last_time_index_value is None
+     table_is_empty = data_node_storage.sourcetableconfiguration.last_time_index_value is None

      # Update the table
      direct_table_update(
          serialized_data_frame=serialized_data_frame,
          grouped_dates=grouped_dates,
          time_series_orm_db_connection=data_source.get_connection_uri(),
-         metadata=metadata,
+         data_node_storage=data_node_storage,
          overwrite=overwrite,
          table_is_empty=table_is_empty,
      )
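Across this file the old `local_metadata` / `remote_table` pair becomes `data_node_update` / `data_node_storage`: callers pass the update object, and the functions read the table layout from its storage attribute. A hypothetical call shape under the 3.x names, with fields assumed from their usage in these hunks:

# data_node_update: a DataNodeUpdate whose .data_node_storage carries the table layout
df = direct_data_from_db(
    data_node_update=data_node_update,  # was local_metadata in 2.x
    connection_uri=data_source.get_connection_uri(),
    start_date=datetime.datetime(2024, 1, 1, tzinfo=datetime.timezone.utc),
)
# Internally the function resolves:
#   data_node_update.data_node_storage.table_name
#   data_node_update.data_node_storage.sourcetableconfiguration.index_names
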
mainsequence/client/models_helpers.py CHANGED
@@ -1,7 +1,7 @@
  from .models_vam import *
  from .base import MARKETS_CONSTANTS
- from .models_tdag import DynamicTableMetaData, LocalTimeSerie
- from .models_tdag import LocalTimeSerie, POD_PROJECT
+ from .models_tdag import DataNodeStorage, DataNodeUpdate
+ from .models_tdag import DataNodeUpdate, POD_PROJECT
  import datetime

  from pydantic import BaseModel, Field, PositiveInt