mainsequence-2.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mainsequence/__init__.py +0 -0
- mainsequence/__main__.py +9 -0
- mainsequence/cli/__init__.py +1 -0
- mainsequence/cli/api.py +157 -0
- mainsequence/cli/cli.py +442 -0
- mainsequence/cli/config.py +78 -0
- mainsequence/cli/ssh_utils.py +126 -0
- mainsequence/client/__init__.py +17 -0
- mainsequence/client/base.py +431 -0
- mainsequence/client/data_sources_interfaces/__init__.py +0 -0
- mainsequence/client/data_sources_interfaces/duckdb.py +1468 -0
- mainsequence/client/data_sources_interfaces/timescale.py +479 -0
- mainsequence/client/models_helpers.py +113 -0
- mainsequence/client/models_report_studio.py +412 -0
- mainsequence/client/models_tdag.py +2276 -0
- mainsequence/client/models_vam.py +1983 -0
- mainsequence/client/utils.py +387 -0
- mainsequence/dashboards/__init__.py +0 -0
- mainsequence/dashboards/streamlit/__init__.py +0 -0
- mainsequence/dashboards/streamlit/assets/config.toml +12 -0
- mainsequence/dashboards/streamlit/assets/favicon.png +0 -0
- mainsequence/dashboards/streamlit/assets/logo.png +0 -0
- mainsequence/dashboards/streamlit/core/__init__.py +0 -0
- mainsequence/dashboards/streamlit/core/theme.py +212 -0
- mainsequence/dashboards/streamlit/pages/__init__.py +0 -0
- mainsequence/dashboards/streamlit/scaffold.py +220 -0
- mainsequence/instrumentation/__init__.py +7 -0
- mainsequence/instrumentation/utils.py +101 -0
- mainsequence/instruments/__init__.py +1 -0
- mainsequence/instruments/data_interface/__init__.py +10 -0
- mainsequence/instruments/data_interface/data_interface.py +361 -0
- mainsequence/instruments/instruments/__init__.py +3 -0
- mainsequence/instruments/instruments/base_instrument.py +85 -0
- mainsequence/instruments/instruments/bond.py +447 -0
- mainsequence/instruments/instruments/european_option.py +74 -0
- mainsequence/instruments/instruments/interest_rate_swap.py +217 -0
- mainsequence/instruments/instruments/json_codec.py +585 -0
- mainsequence/instruments/instruments/knockout_fx_option.py +146 -0
- mainsequence/instruments/instruments/position.py +475 -0
- mainsequence/instruments/instruments/ql_fields.py +239 -0
- mainsequence/instruments/instruments/vanilla_fx_option.py +107 -0
- mainsequence/instruments/pricing_models/__init__.py +0 -0
- mainsequence/instruments/pricing_models/black_scholes.py +49 -0
- mainsequence/instruments/pricing_models/bond_pricer.py +182 -0
- mainsequence/instruments/pricing_models/fx_option_pricer.py +90 -0
- mainsequence/instruments/pricing_models/indices.py +350 -0
- mainsequence/instruments/pricing_models/knockout_fx_pricer.py +209 -0
- mainsequence/instruments/pricing_models/swap_pricer.py +502 -0
- mainsequence/instruments/settings.py +175 -0
- mainsequence/instruments/utils.py +29 -0
- mainsequence/logconf.py +284 -0
- mainsequence/reportbuilder/__init__.py +0 -0
- mainsequence/reportbuilder/__main__.py +0 -0
- mainsequence/reportbuilder/examples/ms_template_report.py +706 -0
- mainsequence/reportbuilder/model.py +713 -0
- mainsequence/reportbuilder/slide_templates.py +532 -0
- mainsequence/tdag/__init__.py +8 -0
- mainsequence/tdag/__main__.py +0 -0
- mainsequence/tdag/config.py +129 -0
- mainsequence/tdag/data_nodes/__init__.py +12 -0
- mainsequence/tdag/data_nodes/build_operations.py +751 -0
- mainsequence/tdag/data_nodes/data_nodes.py +1292 -0
- mainsequence/tdag/data_nodes/persist_managers.py +812 -0
- mainsequence/tdag/data_nodes/run_operations.py +543 -0
- mainsequence/tdag/data_nodes/utils.py +24 -0
- mainsequence/tdag/future_registry.py +25 -0
- mainsequence/tdag/utils.py +40 -0
- mainsequence/virtualfundbuilder/__init__.py +45 -0
- mainsequence/virtualfundbuilder/__main__.py +235 -0
- mainsequence/virtualfundbuilder/agent_interface.py +77 -0
- mainsequence/virtualfundbuilder/config_handling.py +86 -0
- mainsequence/virtualfundbuilder/contrib/__init__.py +0 -0
- mainsequence/virtualfundbuilder/contrib/apps/__init__.py +8 -0
- mainsequence/virtualfundbuilder/contrib/apps/etf_replicator_app.py +164 -0
- mainsequence/virtualfundbuilder/contrib/apps/generate_report.py +292 -0
- mainsequence/virtualfundbuilder/contrib/apps/load_external_portfolio.py +107 -0
- mainsequence/virtualfundbuilder/contrib/apps/news_app.py +437 -0
- mainsequence/virtualfundbuilder/contrib/apps/portfolio_report_app.py +91 -0
- mainsequence/virtualfundbuilder/contrib/apps/portfolio_table.py +95 -0
- mainsequence/virtualfundbuilder/contrib/apps/run_named_portfolio.py +45 -0
- mainsequence/virtualfundbuilder/contrib/apps/run_portfolio.py +40 -0
- mainsequence/virtualfundbuilder/contrib/apps/templates/base.html +147 -0
- mainsequence/virtualfundbuilder/contrib/apps/templates/report.html +77 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/__init__.py +5 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/external_weights.py +61 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/intraday_trend.py +149 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/market_cap.py +310 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/mock_signal.py +78 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/portfolio_replicator.py +269 -0
- mainsequence/virtualfundbuilder/contrib/prices/__init__.py +1 -0
- mainsequence/virtualfundbuilder/contrib/prices/data_nodes.py +810 -0
- mainsequence/virtualfundbuilder/contrib/prices/utils.py +11 -0
- mainsequence/virtualfundbuilder/contrib/rebalance_strategies/__init__.py +1 -0
- mainsequence/virtualfundbuilder/contrib/rebalance_strategies/rebalance_strategies.py +313 -0
- mainsequence/virtualfundbuilder/data_nodes.py +637 -0
- mainsequence/virtualfundbuilder/enums.py +23 -0
- mainsequence/virtualfundbuilder/models.py +282 -0
- mainsequence/virtualfundbuilder/notebook_handling.py +42 -0
- mainsequence/virtualfundbuilder/portfolio_interface.py +272 -0
- mainsequence/virtualfundbuilder/resource_factory/__init__.py +0 -0
- mainsequence/virtualfundbuilder/resource_factory/app_factory.py +170 -0
- mainsequence/virtualfundbuilder/resource_factory/base_factory.py +238 -0
- mainsequence/virtualfundbuilder/resource_factory/rebalance_factory.py +101 -0
- mainsequence/virtualfundbuilder/resource_factory/signal_factory.py +183 -0
- mainsequence/virtualfundbuilder/utils.py +381 -0
- mainsequence-2.0.0.dist-info/METADATA +105 -0
- mainsequence-2.0.0.dist-info/RECORD +110 -0
- mainsequence-2.0.0.dist-info/WHEEL +5 -0
- mainsequence-2.0.0.dist-info/licenses/LICENSE +40 -0
- mainsequence-2.0.0.dist-info/top_level.txt +1 -0
mainsequence/cli/ssh_utils.py
@@ -0,0 +1,126 @@
from __future__ import annotations
import json
import os, subprocess, shutil, sys, pathlib, platform, re

def which(cmd: str) -> str | None:
    p = shutil.which(cmd)
    return p

def run(cmd, *args, env=None, cwd=None) -> tuple[int, str, str]:
    proc = subprocess.Popen([cmd, *args], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, env=env, cwd=cwd)
    out, err = proc.communicate()
    return proc.returncode, out, err

def ensure_key_for_repo(repo_url: str) -> tuple[pathlib.Path, pathlib.Path, str]:
    home = pathlib.Path.home()
    key_dir = home / ".ssh"
    key_dir.mkdir(parents=True, exist_ok=True)
    # derive safe name
    last = re.sub(r"[?#].*$", "", repo_url).split("/")[-1]
    if last.lower().endswith(".git"): last = last[:-4]
    safe = re.sub(r"[^A-Za-z0-9._-]+", "-", last)
    key = key_dir / safe
    pub = key.with_suffix(key.suffix + ".pub")
    if not key.exists():
        run("ssh-keygen", "-t", "ed25519", "-C", "mainsequence@main-sequence.io", "-f", str(key), "-N", "")
    public_key = pub.read_text(encoding="utf-8")
    return key, pub, public_key

def start_agent_and_add_key(key_path: pathlib.Path) -> dict:
    env = os.environ.copy()
    # try existing agent
    rc, _, _ = run("ssh-add", "-l")
    if rc != 0:
        # start agent
        rc, out, _ = run("ssh-agent", "-s")
        if rc == 0:
            m1 = re.search(r"SSH_AUTH_SOCK=([^;]+)", out)
            m2 = re.search(r"SSH_AGENT_PID=([^;]+)", out)
            if m1: env["SSH_AUTH_SOCK"] = m1.group(1)
            if m2: env["SSH_AGENT_PID"] = m2.group(1)
    # add key with updated env
    run("ssh-add", str(key_path), env=env)
    return env

def open_folder(path: str) -> None:
    if sys.platform == "win32":
        os.startfile(path)  # type: ignore[attr-defined]
    elif sys.platform == "darwin":
        subprocess.Popen(["open", path])
    else:
        if which("xdg-open"):
            subprocess.Popen(["xdg-open", path])
        else:
            # best effort
            subprocess.Popen(["sh", "-c", f'echo "{path}"'])

def pick_linux_terminal() -> tuple[str, list[str]] | None:
    candidates = [
        ("x-terminal-emulator", ["-e", "bash", "-lc"]),
        ("gnome-terminal", ["--", "bash", "-lc"]),
        ("konsole", ["-e", "bash", "-lc"]),
        ("xfce4-terminal", ["-e", "bash", "-lc"]),
        ("tilix", ["-e", "bash", "-lc"]),
        ("mate-terminal", ["-e", "bash", "-lc"]),
        ("alacritty", ["-e", "bash", "-lc"]),
        ("kitty", ["-e", "bash", "-lc"]),
        ("xterm", ["-e", "bash", "-lc"]),
    ]
    for cmd, args in candidates:
        p = which(cmd)
        if p: return p, args
    return None

def quote_bash(s: str) -> str:
    return '"' + s.replace("\\", "\\\\").replace('"', '\\"').replace("$", "\\$").replace("`", "\\`") + '"'

def quote_pwsh(s: str) -> str:
    return '"' + s.replace('"', '``"') + '"'

def open_signed_terminal(repo_dir: str, key_path: pathlib.Path, repo_name: str) -> None:
    if sys.platform == "win32":
        ps = "; ".join([
            "$ErrorActionPreference='Stop'",
            "Try { Set-Service -Name ssh-agent -StartupType Automatic; Start-Service ssh-agent } Catch {}",
            f"if (!(Test-Path -Path {quote_pwsh(str(key_path))})) {{ ssh-keygen -t ed25519 -C 'mainsequence@main-sequence.io' -f {quote_pwsh(str(key_path))} -N '' }}",
            f"ssh-add {quote_pwsh(str(key_path))}",
            "ssh-add -l",
            f"Set-Location {quote_pwsh(repo_dir)}",
            f"Write-Host 'SSH agent ready for {repo_name}. You can now run git.' -ForegroundColor Green"
        ])
        subprocess.Popen(["powershell.exe", "-NoExit", "-Command", ps])
        return
    if sys.platform == "darwin":
        bash = " && ".join([
            f"cd {quote_bash(repo_dir)}",
            f"[ -f {quote_bash(str(key_path))} ] || ssh-keygen -t ed25519 -C \"mainsequence@main-sequence.io\" -f {quote_bash(str(key_path))} -N ''",
            'eval "$(ssh-agent -s)"',
            f"ssh-add {quote_bash(str(key_path))}",
            "ssh-add -l",
            f"echo 'SSH agent ready for {repo_name}. You can now run git.'",
            'exec "$SHELL" -l'
        ])

        # Let json.dumps handle the quoting for AppleScript string literal
        osa = [
            "osascript",
            "-e", 'tell application "Terminal" to activate',
            "-e", f'tell application "Terminal" to do script {json.dumps(bash)}'
        ]
        subprocess.Popen(osa)
        return
    # Linux
    term = pick_linux_terminal()
    if not term:
        raise RuntimeError("No terminal emulator found (x-terminal-emulator, gnome-terminal, …)")
    cmd, args = term
    bash = " && ".join([
        f"cd {quote_bash(repo_dir)}",
        f"[ -f {quote_bash(str(key_path))} ] || ssh-keygen -t ed25519 -C \"mainsequence@main-sequence.io\" -f {quote_bash(str(key_path))} -N ''",
        'eval "$(ssh-agent -s)"',
        f"ssh-add {quote_bash(str(key_path))}",
        "ssh-add -l",
        f"echo 'SSH agent ready for {repo_name}. You can now run git.'",
        'exec "$SHELL" -l'
    ])
    subprocess.Popen([cmd, *args, bash])
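Taken together, these helpers implement a "provision a deploy key and hand the user a signed terminal" flow: ensure_key_for_repo derives a key name from the repository URL and generates an ed25519 key if missing, start_agent_and_add_key loads it into an ssh-agent, and open_signed_terminal opens a platform-appropriate shell with the agent primed. A minimal usage sketch follows; the repository URL and checkout path are hypothetical, and the package's actual wiring in mainsequence/cli/cli.py may differ:

    from mainsequence.cli import ssh_utils

    repo_url = "git@github.com:example-org/example-repo.git"  # hypothetical repository URL
    key, pub, public_key = ssh_utils.ensure_key_for_repo(repo_url)
    print("Register this public key with the remote:", public_key)

    env = ssh_utils.start_agent_and_add_key(key)  # env now carries SSH_AUTH_SOCK / SSH_AGENT_PID
    ssh_utils.open_signed_terminal("/path/to/checkout", key, "example-repo")  # hypothetical path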
mainsequence/client/__init__.py
@@ -0,0 +1,17 @@
from .utils import AuthLoaders, bios_uuid
from .models_tdag import (request_to_datetime, LocalTimeSeriesDoesNotExist, DynamicTableDoesNotExist,
                          SourceTableConfigurationDoesNotExist, LocalTimeSerieUpdateDetails,
                          JSON_COMPRESSED_PREFIX, Scheduler, SchedulerDoesNotExist, LocalTimeSerie,
                          DynamicTableMetaData, DynamicTableDataSource, DUCK_DB,
                          ColumnMetaData, Artifact, TableMetaData, DataFrequency, SourceTableConfiguration,
                          Project, UniqueIdentifierRangeMap, LocalTimeSeriesHistoricalUpdate,
                          UpdateStatistics, DataSource, PodDataSource, SessionDataSource)

from .utils import TDAG_CONSTANTS, MARKETS_CONSTANTS
from mainsequence.logconf import logger

from .models_helpers import *
from .models_vam import *
from .models_report_studio import *
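Because these names are hoisted into the package namespace, downstream code can import them from mainsequence.client directly rather than from the individual model modules. A small, purely illustrative sketch of the resulting import surface, using only names the __init__ above imports explicitly:

    from mainsequence.client import Scheduler, LocalTimeSerie, Project, UpdateStatistics, logger

    logger.info(f"client exports loaded: {Scheduler.__name__}, {LocalTimeSerie.__name__}, {Project.__name__}")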
mainsequence/client/base.py
@@ -0,0 +1,431 @@
import copy
import inspect
import json
import os
from datetime import datetime
from typing import Union

import requests
from pydantic import BaseModel, ConfigDict

from tqdm import tqdm
from .utils import MARKETS_CONSTANTS, request_to_datetime, DATE_FORMAT, AuthLoaders, make_request, DoesNotExist

TDAG_ENDPOINT = os.environ.get('TDAG_ENDPOINT')
API_ENDPOINT = f"{TDAG_ENDPOINT}/orm/api"

loaders = AuthLoaders()

def build_session(loaders):
    from requests.adapters import HTTPAdapter, Retry
    s = requests.Session()
    s.headers.update(loaders.auth_headers)
    retries = Retry(total=2, backoff_factor=2)
    s.mount('http://', HTTPAdapter(max_retries=retries))
    s.headers["Accept-Encoding"] = "gzip"
    return s

session = build_session(loaders=loaders)

class HtmlSaveException(Exception):
    def __init__(self, message):
        super().__init__(message)
        self.message = message
        self.file_path = None

        if 'html' in message.lower():
            self.file_path = self.save_as_html_file()

    def save_as_html_file(self):
        # Get the name of the method that raised the exception
        caller_method = inspect.stack()[2].function

        # Get the current timestamp
        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')

        # Create the directory to save HTML files if it doesn't exist
        folder_path = 'html_exceptions'
        os.makedirs(folder_path, exist_ok=True)

        # Create the filename
        filename = f"{caller_method}_{timestamp}.html"
        file_path = os.path.join(folder_path, filename)

        # Save the message as an HTML file
        with open(file_path, 'w') as file:
            file.write(self.message)

        return file_path

    def __str__(self):
        if self.file_path:
            return f"HTML content saved to {self.file_path}"
        else:
            return self.message

class BasePydanticModel(BaseModel):
    model_config = ConfigDict(extra='forbid')  # Forbid extra fields in v2
    orm_class: str = None  # This will be set to the class that inherits

    @classmethod
    def __init_subclass__(cls, **kwargs):
        super().__init_subclass__(**kwargs)
        # Set orm_class to the class itself
        cls.orm_class = cls.__name__

class BaseObjectOrm:
    END_POINTS = {
        "User": "user",

        # VAM
        "Portfolio": 'assets/target_portfolio',
        "PortfolioGroup": "assets/portfolio_group",
        "Asset": "assets/asset",
        "IndexAsset": "assets/index_asset",
        "AssetFutureUSDM": "assets/asset_future_usdm",
        "AssetCurrencyPair": "assets/asset_currency_pair",
        "VirtualFund": "assets/virtualfund",
        "OrderManager": "assets/order_manager",
        "ExecutionVenue": "assets/execution_venue",
        "Order": "assets/order",
        "MarketOrder": "assets/market_order",
        "LimitOrder": "assets/limit_order",
        "OrderEvent": "assets/order_event",
        "Account": "assets/account",
        "Trade": "assets/trade",
        "VirtualFundHistoricalHoldings": "assets/historical_holdings",
        "AccountHistoricalHoldings": "assets/account_historical_holdings",
        "AccountLatestHoldings": "assets/account_historical_holdings",
        "AccountRiskFactors": "assets/account_risk_factors",
        "AccountPortfolioScheduledRebalance": "assets/account_portfolio_scheduled_rebalance",
        "AccountPortfolioHistoricalPositions": "assets/account_portfolio_historical_positions",
        "ExecutionPrediction": "assets/execution_predictions",
        "ExecutionPositions": "assets/execution_positions",
        "AccountCoolDown": "assets/account_cooldown",
        "HistoricalWeights": "assets/portfolio_weights",
        "PortfolioIndexAsset": "assets/portfolio_index_asset",
        "HistoricalBarsSource": "data_sources/historical-bars-source",
        "MarketsTimeSeriesDetails": "data_sources/markets-time-series-details",
        "AssetCategory": "assets/asset-category",
        "AssetTranslationTable": "assets/asset-translation-tables",

        # TDAG
        "Scheduler": "ts_manager/scheduler",
        "MultiIndexMetadata": "orm/multi_index_metadata",
        "ContinuousAggMultiIndex": "ts_manager/cont_agg_multi_ind",
        "DynamicTableMetaData": "ts_manager/dynamic_table",
        # "LocalTimeSerieNodesMethods": "ogm/local_time_serie",

        "LocalTimeSerieNodesMethods": "ts_manager/local_time_serie",

        "LocalTimeSerie": "ts_manager/local_time_serie",
        "LocalTimeSerieUpdateDetails": "ts_manager/local_time_serie_update_details",
        "LocalTimeSerieHistoricalUpdate": "ts_manager/lts_historical_update",
        "DynamicTableDataSource": "ts_manager/dynamic_table_data_source",
        "DataSource": "pods/data_source",
        "Project": "pods/projects",
        "SourceTableConfiguration": "ts_manager/source_table_config",
        "DynamicResource": "tdag-gpt/dynamic_resource",
        "Artifact": "pods/artifact",
        "Job": "pods/job",

        # ReportBuilder
        "Presentation": "reports/presentations",
        "Folder": "reports/folder",
        "Slide": "reports/slides",
        "Theme": "reports/themes",
    }
    ROOT_URL = API_ENDPOINT
    LOADERS = loaders

    @staticmethod
    def request_to_datetime(string_date: str):
        return request_to_datetime(string_date=string_date)

    @staticmethod
    def date_to_string(target_date: datetime):
        return target_date.strftime(DATE_FORMAT)

    @classmethod
    def class_name(cls):
        if hasattr(cls, "CLASS_NAME"):
            return cls.CLASS_NAME
        return cls.__name__

    @classmethod
    def build_session(cls):
        s = session
        return s

    @property
    def s(self):
        s = self.build_session()
        return s

    def ___hash__(self):
        if hasattr(self, "unique_identifier"):
            return self.unique_identifier
        return self.id

    def __repr__(self):
        object_id = self.id if hasattr(self, "id") else None
        return f"{self.class_name()}: {object_id}"

    @classmethod
    def get_object_url(cls, custom_endpoint_name=None):
        endpoint_name = custom_endpoint_name or cls.class_name()
        return f"{cls.ROOT_URL}/{cls.END_POINTS[endpoint_name]}"

    @staticmethod
    def _parse_parameters_filter(parameters):
        for key, value in parameters.items():
            if "__in" in key:
                assert isinstance(value, list)
                value = [str(v) for v in value]
                parameters[key] = ",".join(value)
        return parameters

    @classmethod
    def filter(cls, timeout=None, **kwargs):
        """
        Fetches *all pages* from a DRF-paginated endpoint.
        Accumulates results from each page until 'next' is None.

        Returns a list of `cls` objects (not just one page).

        DRF's typical paginated response looks like:
            {
                "count": <int>,
                "next": <str or null>,
                "previous": <str or null>,
                "results": [ ...items... ]
            }
        """
        base_url = cls.get_object_url()  # e.g. "https://api.example.com/assets"
        params = cls._parse_parameters_filter(kwargs)

        # We'll handle pagination by following the 'next' links from DRF.
        accumulated = []
        next_url = f"{base_url}/"  # Start with the main endpoint (list)

        while next_url:
            # For each page, do a GET request
            r = make_request(
                s=cls.build_session(),
                loaders=cls.LOADERS,
                r_type="GET",
                url=next_url,  # next_url changes each iteration
                payload={"params": params},
                time_out=timeout
            )

            if r.status_code != 200:
                # Handle errors or break out
                if r.status_code == 401:
                    raise Exception("Unauthorized. Please add credentials to environment.")
                elif r.status_code == 500:
                    raise Exception("Server Error.")
                elif r.status_code == 404:
                    raise DoesNotExist("Not Found.")
                else:
                    raise Exception(f"{r.status_code} - {r.text}")

            data = r.json()
            # data should be a dict with "count", "next", "previous", and "results".

            # DRF returns the next page URL in `data["next"]`
            next_url = data["next"]  # either a URL string or None

            # data["results"] should be a list of objects
            for item in data["results"]:
                # Insert "orm_class" if you still need that
                item["orm_class"] = cls.__name__
                try:
                    accumulated.append(cls(**item) if issubclass(cls, BasePydanticModel) else item)
                except Exception as e:
                    print(item)
                    print(cls)
                    print(cls(**item))
                    import traceback
                    traceback.print_exc()
                    raise e

            # We set `params = None` (or empty) after the first loop to avoid appending repeatedly,
            # but only if DRF's `next` doesn't contain the query parameters.
            # Usually, DRF includes them, so you don't need to do anything special here.
            params = None

        return accumulated

    @classmethod
    def get(cls, pk=None, timeout=None, **filters):
        """
        Retrieves exactly one object by primary key: GET /base_url/<pk>/
        Raises `DoesNotExist` if 404 or the response is empty.
        Raises Exception if multiple or unexpected data is returned.
        """
        if pk is not None:
            base_url = cls.get_object_url()
            detail_url = f"{base_url}/{pk}/"

            r = make_request(
                s=cls.build_session(),
                loaders=cls.LOADERS,
                r_type="GET",
                url=detail_url,
                payload={"params": filters},  # needed to pass special serializer
                time_out=timeout
            )

            if r.status_code == 404:
                raise DoesNotExist(f"No object found for pk={pk}")
            elif r.status_code == 401:
                raise Exception("Unauthorized. Please add credentials to environment.")
            elif r.status_code == 500:
                raise Exception("Server Error")
            elif r.status_code != 200:
                raise Exception(f"Unexpected status code: {r.status_code}")

            data = r.json()
            data["orm_class"] = cls.__name__
            return cls(**data)

        # Otherwise, do the filter approach
        candidates = cls.filter(timeout=timeout, **filters)
        if not candidates:
            raise DoesNotExist(f"No {cls.class_name()} found matching {filters}")

        if len(candidates) > 1:
            raise Exception(
                f"Multiple {cls.class_name()} objects found for filters {filters}. "
                f"Expected exactly one."
            )

        return candidates[0]

    @classmethod
    def get_or_none(cls, *arg, **kwargs):
        try:
            return cls.get(*arg, **kwargs)
        except DoesNotExist:
            return None

    @staticmethod
    def serialize_for_json(kwargs):
        new_data = {}
        for key, value in kwargs.items():
            new_value = copy.deepcopy(value)
            if isinstance(value, datetime):
                new_value = str(value)
            new_data[key] = new_value
        return new_data


    @classmethod
    def create(cls, timeout=None, files=None, *args, **kwargs):
        base_url = cls.get_object_url()
        data = cls.serialize_for_json(kwargs)
        payload = {
            "json": data
        }
        if files:
            payload["files"] = files
        r = make_request(
            s=cls.build_session(),
            loaders=cls.LOADERS,
            r_type="POST",
            url=f"{base_url}/",
            payload=payload,
            time_out=timeout
        )
        if r.status_code not in [201]:
            raise Exception(r.text)
        return cls(**r.json())

    @classmethod
    def update_or_create(cls, timeout=None, *args, **kwargs):
        url = f"{cls.get_object_url()}/update_or_create/"
        data = cls.serialize_for_json(kwargs)
        payload = {"json": data}

        r = make_request(
            s=cls.build_session(),
            loaders=cls.LOADERS,
            r_type="POST",
            url=url,
            payload=payload,
            time_out=timeout
        )
        if r.status_code not in [201, 200]:
            raise Exception(r.text)
        return cls(**r.json())

    @classmethod
    def destroy_by_id(cls, instance_id, *args, **kwargs):
        base_url = cls.get_object_url()
        data = cls.serialize_for_json(kwargs)
        payload = {"json": data}
        r = make_request(
            s=cls.build_session(),
            loaders=cls.LOADERS,
            r_type="DELETE",
            url=f"{base_url}/{instance_id}/",
            payload=payload
        )
        if r.status_code != 204:
            raise Exception(r.text)

    @classmethod
    def patch_by_id(cls, instance_id, *args, _into=None, **kwargs):
        base_url = cls.get_object_url()
        url = f"{base_url}/{instance_id}/"
        data = cls.serialize_for_json(kwargs)
        payload = {"json": data}

        r = make_request(
            s=cls.build_session(),
            loaders=cls.LOADERS,
            r_type="PATCH",
            url=url,
            payload=payload,
        )
        if r.status_code != 200:
            raise Exception(r.text)

        body = r.json()

        def recursive_update(obj, update_dict):
            for k, v in update_dict.items():
                # Get the existing nested object, defaulting to None if it doesn't exist
                nested_obj = getattr(obj, k, None)

                # Only recurse if the update value is a dict AND the existing
                # attribute is an instance of a Pydantic model.
                if isinstance(v, dict) and isinstance(nested_obj, BaseModel):
                    recursive_update(nested_obj, v)
                else:
                    # Otherwise, just set the value directly.
                    try:
                        setattr(obj, k, v)
                    except Exception as e:
                        print(e)

            return obj

        # If an instance was provided, update it in place
        if _into is not None:
            recursive_update(_into, body)
            return _into

        # Otherwise return a new instance
        return cls(**body)

    def patch(self, *args, **kwargs):
        return type(self).patch_by_id(self.id, _into=self, **kwargs)

    def delete(self, *args, **kwargs):
        return self.__class__.destroy_by_id(self.id)

    def get_app_label(self):
        return self.END_POINTS[self.orm_class].split("/")[0]
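BaseObjectOrm is the shared REST layer: a subclass's name (or CLASS_NAME) is looked up in END_POINTS to build its URL, filter() walks every page of a DRF-paginated list response, and get()/get_or_none() resolve a single object. A minimal sketch of how a concrete model might sit on top of it; the field names and the filter keyword below are hypothetical, and the real Asset model shipped in models_vam.py is considerably richer:

    from typing import Optional
    from mainsequence.client.base import BaseObjectOrm, BasePydanticModel

    class Asset(BasePydanticModel, BaseObjectOrm):
        # class_name() -> "Asset" -> END_POINTS["Asset"] -> f"{API_ENDPOINT}/assets/asset"
        id: Optional[int] = None
        unique_identifier: Optional[str] = None  # hypothetical field, for illustration only

    assets = Asset.filter(unique_identifier__in=["BTCUSDT", "ETHUSDT"])  # follows DRF "next" links
    maybe_asset = Asset.get_or_none(unique_identifier="BTCUSDT")         # None instead of DoesNotExist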