mainsequence 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mainsequence/__init__.py +0 -0
- mainsequence/__main__.py +9 -0
- mainsequence/cli/__init__.py +1 -0
- mainsequence/cli/api.py +157 -0
- mainsequence/cli/cli.py +442 -0
- mainsequence/cli/config.py +78 -0
- mainsequence/cli/ssh_utils.py +126 -0
- mainsequence/client/__init__.py +17 -0
- mainsequence/client/base.py +431 -0
- mainsequence/client/data_sources_interfaces/__init__.py +0 -0
- mainsequence/client/data_sources_interfaces/duckdb.py +1468 -0
- mainsequence/client/data_sources_interfaces/timescale.py +479 -0
- mainsequence/client/models_helpers.py +113 -0
- mainsequence/client/models_report_studio.py +412 -0
- mainsequence/client/models_tdag.py +2276 -0
- mainsequence/client/models_vam.py +1983 -0
- mainsequence/client/utils.py +387 -0
- mainsequence/dashboards/__init__.py +0 -0
- mainsequence/dashboards/streamlit/__init__.py +0 -0
- mainsequence/dashboards/streamlit/assets/config.toml +12 -0
- mainsequence/dashboards/streamlit/assets/favicon.png +0 -0
- mainsequence/dashboards/streamlit/assets/logo.png +0 -0
- mainsequence/dashboards/streamlit/core/__init__.py +0 -0
- mainsequence/dashboards/streamlit/core/theme.py +212 -0
- mainsequence/dashboards/streamlit/pages/__init__.py +0 -0
- mainsequence/dashboards/streamlit/scaffold.py +220 -0
- mainsequence/instrumentation/__init__.py +7 -0
- mainsequence/instrumentation/utils.py +101 -0
- mainsequence/instruments/__init__.py +1 -0
- mainsequence/instruments/data_interface/__init__.py +10 -0
- mainsequence/instruments/data_interface/data_interface.py +361 -0
- mainsequence/instruments/instruments/__init__.py +3 -0
- mainsequence/instruments/instruments/base_instrument.py +85 -0
- mainsequence/instruments/instruments/bond.py +447 -0
- mainsequence/instruments/instruments/european_option.py +74 -0
- mainsequence/instruments/instruments/interest_rate_swap.py +217 -0
- mainsequence/instruments/instruments/json_codec.py +585 -0
- mainsequence/instruments/instruments/knockout_fx_option.py +146 -0
- mainsequence/instruments/instruments/position.py +475 -0
- mainsequence/instruments/instruments/ql_fields.py +239 -0
- mainsequence/instruments/instruments/vanilla_fx_option.py +107 -0
- mainsequence/instruments/pricing_models/__init__.py +0 -0
- mainsequence/instruments/pricing_models/black_scholes.py +49 -0
- mainsequence/instruments/pricing_models/bond_pricer.py +182 -0
- mainsequence/instruments/pricing_models/fx_option_pricer.py +90 -0
- mainsequence/instruments/pricing_models/indices.py +350 -0
- mainsequence/instruments/pricing_models/knockout_fx_pricer.py +209 -0
- mainsequence/instruments/pricing_models/swap_pricer.py +502 -0
- mainsequence/instruments/settings.py +175 -0
- mainsequence/instruments/utils.py +29 -0
- mainsequence/logconf.py +284 -0
- mainsequence/reportbuilder/__init__.py +0 -0
- mainsequence/reportbuilder/__main__.py +0 -0
- mainsequence/reportbuilder/examples/ms_template_report.py +706 -0
- mainsequence/reportbuilder/model.py +713 -0
- mainsequence/reportbuilder/slide_templates.py +532 -0
- mainsequence/tdag/__init__.py +8 -0
- mainsequence/tdag/__main__.py +0 -0
- mainsequence/tdag/config.py +129 -0
- mainsequence/tdag/data_nodes/__init__.py +12 -0
- mainsequence/tdag/data_nodes/build_operations.py +751 -0
- mainsequence/tdag/data_nodes/data_nodes.py +1292 -0
- mainsequence/tdag/data_nodes/persist_managers.py +812 -0
- mainsequence/tdag/data_nodes/run_operations.py +543 -0
- mainsequence/tdag/data_nodes/utils.py +24 -0
- mainsequence/tdag/future_registry.py +25 -0
- mainsequence/tdag/utils.py +40 -0
- mainsequence/virtualfundbuilder/__init__.py +45 -0
- mainsequence/virtualfundbuilder/__main__.py +235 -0
- mainsequence/virtualfundbuilder/agent_interface.py +77 -0
- mainsequence/virtualfundbuilder/config_handling.py +86 -0
- mainsequence/virtualfundbuilder/contrib/__init__.py +0 -0
- mainsequence/virtualfundbuilder/contrib/apps/__init__.py +8 -0
- mainsequence/virtualfundbuilder/contrib/apps/etf_replicator_app.py +164 -0
- mainsequence/virtualfundbuilder/contrib/apps/generate_report.py +292 -0
- mainsequence/virtualfundbuilder/contrib/apps/load_external_portfolio.py +107 -0
- mainsequence/virtualfundbuilder/contrib/apps/news_app.py +437 -0
- mainsequence/virtualfundbuilder/contrib/apps/portfolio_report_app.py +91 -0
- mainsequence/virtualfundbuilder/contrib/apps/portfolio_table.py +95 -0
- mainsequence/virtualfundbuilder/contrib/apps/run_named_portfolio.py +45 -0
- mainsequence/virtualfundbuilder/contrib/apps/run_portfolio.py +40 -0
- mainsequence/virtualfundbuilder/contrib/apps/templates/base.html +147 -0
- mainsequence/virtualfundbuilder/contrib/apps/templates/report.html +77 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/__init__.py +5 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/external_weights.py +61 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/intraday_trend.py +149 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/market_cap.py +310 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/mock_signal.py +78 -0
- mainsequence/virtualfundbuilder/contrib/data_nodes/portfolio_replicator.py +269 -0
- mainsequence/virtualfundbuilder/contrib/prices/__init__.py +1 -0
- mainsequence/virtualfundbuilder/contrib/prices/data_nodes.py +810 -0
- mainsequence/virtualfundbuilder/contrib/prices/utils.py +11 -0
- mainsequence/virtualfundbuilder/contrib/rebalance_strategies/__init__.py +1 -0
- mainsequence/virtualfundbuilder/contrib/rebalance_strategies/rebalance_strategies.py +313 -0
- mainsequence/virtualfundbuilder/data_nodes.py +637 -0
- mainsequence/virtualfundbuilder/enums.py +23 -0
- mainsequence/virtualfundbuilder/models.py +282 -0
- mainsequence/virtualfundbuilder/notebook_handling.py +42 -0
- mainsequence/virtualfundbuilder/portfolio_interface.py +272 -0
- mainsequence/virtualfundbuilder/resource_factory/__init__.py +0 -0
- mainsequence/virtualfundbuilder/resource_factory/app_factory.py +170 -0
- mainsequence/virtualfundbuilder/resource_factory/base_factory.py +238 -0
- mainsequence/virtualfundbuilder/resource_factory/rebalance_factory.py +101 -0
- mainsequence/virtualfundbuilder/resource_factory/signal_factory.py +183 -0
- mainsequence/virtualfundbuilder/utils.py +381 -0
- mainsequence-2.0.0.dist-info/METADATA +105 -0
- mainsequence-2.0.0.dist-info/RECORD +110 -0
- mainsequence-2.0.0.dist-info/WHEEL +5 -0
- mainsequence-2.0.0.dist-info/licenses/LICENSE +40 -0
- mainsequence-2.0.0.dist-info/top_level.txt +1 -0
mainsequence/__init__.py
ADDED
File without changes
|
mainsequence/__main__.py
ADDED
@@ -0,0 +1 @@
|
|
1
|
+
from .cli import app
|
mainsequence/cli/api.py
ADDED
@@ -0,0 +1,157 @@
|
|
1
|
+
# mainsequence/cli/api.py
|
2
|
+
from __future__ import annotations
|
3
|
+
import os, re, subprocess, json, platform, pathlib, shlex, sys
|
4
|
+
import requests
|
5
|
+
from .config import backend_url, get_tokens, save_tokens, set_env_access
|
6
|
+
|
7
|
+
AUTH_PATHS = {
|
8
|
+
"obtain": "/auth/jwt-token/token/",
|
9
|
+
"refresh": "/auth/jwt-token/token/refresh/",
|
10
|
+
"ping": "/auth/rest-auth/user/",
|
11
|
+
}
|
12
|
+
|
13
|
+
S = requests.Session()
|
14
|
+
S.headers.update({"Content-Type": "application/json"})
|
15
|
+
|
16
|
+
class ApiError(RuntimeError): ...
|
17
|
+
class NotLoggedIn(ApiError): ...
|
18
|
+
|
19
|
+
def _full(path: str) -> str:
    """Join *path* onto the configured backend URL, normalizing to a single leading slash."""
    return backend_url() + "/" + path.lstrip("/")
|
22
|
+
|
23
|
+
def _normalize_api_path(p: str) -> str:
|
24
|
+
p = "/" + (p or "").lstrip("/")
|
25
|
+
if not re.match(r"^/(api|auth|pods|orm|user)(/|$)", p):
|
26
|
+
raise ApiError("Only /api/*, /auth/*, /pods/*, /orm/*, /user/* allowed")
|
27
|
+
return p
|
28
|
+
|
29
|
+
def _access_token() -> str | None:
|
30
|
+
t = os.environ.get("MAIN_SEQUENCE_USER_TOKEN")
|
31
|
+
if t:
|
32
|
+
return t
|
33
|
+
tok = get_tokens()
|
34
|
+
return tok.get("access")
|
35
|
+
|
36
|
+
def _refresh_token() -> str | None:
    """Stored refresh token, or None when no token file exists."""
    return get_tokens().get("refresh")
|
39
|
+
|
40
|
+
def login(email: str, password: str) -> dict:
    """Authenticate against the backend and persist the returned token pair.

    Posts credentials to the obtain endpoint, stores access/refresh tokens on
    disk, exports the access token into this process's environment, and returns
    a small summary dict. Raises ApiError on HTTP failure or missing tokens.
    """
    payload = {"email": email, "password": password}  # server expects 'email'
    r = S.post(_full(AUTH_PATHS["obtain"]), data=json.dumps(payload))
    try:
        data = r.json()
    except Exception:
        data = {}
    if not r.ok:
        msg = data.get("detail") or data.get("message") or r.text
        raise ApiError(f"{msg}")
    # Different backend versions name the tokens differently; accept all variants.
    access = data.get("access") or data.get("token") or data.get("jwt") or data.get("access_token")
    refresh = data.get("refresh") or data.get("refresh_token")
    if not access or not refresh:
        raise ApiError("Server did not return expected tokens.")
    save_tokens(email, access, refresh)
    set_env_access(access)
    return {"username": email, "backend": backend_url()}
|
58
|
+
|
59
|
+
def refresh_access() -> str:
    """Exchange the stored refresh token for a new access token and persist it.

    Raises NotLoggedIn when no refresh token is stored, when the refresh call
    fails, or when the server omits the new access token.
    """
    refresh = _refresh_token()
    if not refresh:
        raise NotLoggedIn("Not logged in. Run `mainsequence login <email>`.")
    r = S.post(_full(AUTH_PATHS["refresh"]), data=json.dumps({"refresh": refresh}))
    is_json = r.headers.get("content-type", "").startswith("application/json")
    data = r.json() if is_json else {}
    if not r.ok:
        raise NotLoggedIn(data.get("detail") or "Token refresh failed.")
    access = data.get("access")
    if not access:
        raise NotLoggedIn("Refresh succeeded but no access token returned.")
    # Re-save under the same username with the rotated access token.
    stored = get_tokens()
    save_tokens(stored.get("username") or "", access, refresh)
    set_env_access(access)
    return access
|
74
|
+
|
75
|
+
def authed(method: str, api_path: str, body: dict | None = None) -> requests.Response:
    """Send an authenticated request, refreshing the access token on 401 at most once.

    GET/HEAD requests carry no body; all others send *body* (or {}) as JSON.
    Raises NotLoggedIn when a refreshed token is still rejected.
    """
    api_path = _normalize_api_path(api_path)

    def _send(token: str) -> requests.Response:
        verb = method.upper()
        payload = None if verb in {"GET", "HEAD"} else json.dumps(body or {})
        return S.request(verb, _full(api_path),
                         headers={"Authorization": f"Bearer {token}"},
                         data=payload)

    token = _access_token()
    if not token:
        # No cached/env access token yet: mint one from the refresh token.
        token = refresh_access()
    resp = _send(token)
    if resp.status_code == 401:
        # Access token expired mid-flight; refresh once and retry.
        resp = _send(refresh_access())
        if resp.status_code == 401:
            raise NotLoggedIn("Not logged in.")
    return resp
|
93
|
+
|
94
|
+
# ---------- Helper APIs ----------
|
95
|
+
|
96
|
+
def safe_slug(s: str) -> str:
    """Lower-case *s*, collapse disallowed runs into '-', trim dashes, cap at 64 chars.

    Falls back to "project" for empty/degenerate input.
    """
    slug = re.sub(r"[^a-z0-9-_]+", "-", (s or "project").lower()).strip("-")
    return slug[:64] or "project"
|
99
|
+
|
100
|
+
def repo_name_from_git_url(url: str | None) -> str | None:
|
101
|
+
if not url: return None
|
102
|
+
s = re.sub(r"[?#].*$", "", url.strip())
|
103
|
+
last = s.split("/")[-1] if "/" in s else s
|
104
|
+
if last.lower().endswith(".git"): last = last[:-4]
|
105
|
+
return re.sub(r"[^A-Za-z0-9._-]+", "-", last)
|
106
|
+
|
107
|
+
def deep_find_repo_url(extra) -> str | None:
|
108
|
+
if not isinstance(extra, dict): return None
|
109
|
+
cand = ["ssh_url","git_ssh_url","repo_ssh_url","git_url","repo_url","repository","url"]
|
110
|
+
for k in cand:
|
111
|
+
v = extra.get(k)
|
112
|
+
if isinstance(v, str) and (v.startswith("git@") or re.search(r"\.git($|\?)", v)):
|
113
|
+
return v
|
114
|
+
if isinstance(v, dict):
|
115
|
+
for vv in v.values():
|
116
|
+
if isinstance(vv, str) and (vv.startswith("git@") or re.search(r"\.git($|\?)", vv)):
|
117
|
+
return vv
|
118
|
+
for v in extra.values():
|
119
|
+
if isinstance(v, dict):
|
120
|
+
found = deep_find_repo_url(v)
|
121
|
+
if found: return found
|
122
|
+
return None
|
123
|
+
|
124
|
+
def get_current_user_profile() -> dict:
    """Resolve the signed-in user's username and organization name.

    Pings the auth endpoint to learn the user id (several response shapes are
    tolerated), then fetches the full user record. Returns {} when the id
    cannot be determined.
    """
    ping = authed("GET", AUTH_PATHS["ping"])
    info = ping.json() if ping.ok else {}
    uid = info.get("id") or info.get("pk") or (info.get("user") or {}).get("id") or info.get("user_id")
    if not uid:
        return {}
    detail = authed("GET", f"/user/api/user/{uid}/")
    user = detail.json() if detail.ok else {}
    org = (user.get("organization") or {}).get("name") or user.get("organization_name") or ""
    return {"username": user.get("username") or "", "organization": org}
|
134
|
+
|
135
|
+
def get_projects() -> list[dict]:
    """Fetch the visible projects, tolerating bare-list and paginated response shapes.

    Raises ApiError with the HTTP status code when the request fails.
    """
    r = authed("GET", "/orm/api/pods/projects/")
    if not r.ok:
        raise ApiError(f"Projects fetch failed ({r.status_code}).")
    payload = {}
    if r.headers.get("content-type", "").startswith("application/json"):
        payload = r.json()
    if isinstance(payload, list):
        return payload
    return payload.get("results") or []
|
144
|
+
|
145
|
+
def fetch_project_env_text(project_id: int | str) -> str:
    """Download a project's environment file contents as plain text.

    JSON responses may wrap the text under several keys; returns "" when
    nothing usable is present.
    """
    r = authed("GET", f"/orm/api/pods/projects/{project_id}/get_environment/")
    if r.headers.get("content-type", "").startswith("application/json"):
        raw = r.json()
    else:
        raw = r.text
    if isinstance(raw, dict):
        raw = raw.get("environment") or raw.get("env") or raw.get("content") or raw.get("text") or ""
    return (raw or "")
|
151
|
+
|
152
|
+
def add_deploy_key(project_id: int | str, key_title: str, public_key: str) -> None:
    """Best-effort registration of a deploy key for the project.

    Any failure is swallowed: deploy-key registration must never break the caller.
    """
    payload = {"key_title": key_title, "public_key": public_key}
    try:
        authed("POST", f"/orm/api/pods/projects/{project_id}/add_deploy_key/", payload)
    except Exception:
        pass
|
mainsequence/cli/cli.py
ADDED
@@ -0,0 +1,442 @@
|
|
1
|
+
# mainsequence/cli/cli.py
|
2
|
+
from __future__ import annotations
|
3
|
+
|
4
|
+
import json
|
5
|
+
import os
|
6
|
+
import pathlib
|
7
|
+
import platform
|
8
|
+
import re
|
9
|
+
import shlex
|
10
|
+
import shutil
|
11
|
+
import subprocess
|
12
|
+
import sys
|
13
|
+
from typing import Optional
|
14
|
+
|
15
|
+
import typer
|
16
|
+
|
17
|
+
from . import config as cfg
|
18
|
+
from .api import (
|
19
|
+
ApiError,
|
20
|
+
NotLoggedIn,
|
21
|
+
add_deploy_key,
|
22
|
+
deep_find_repo_url,
|
23
|
+
fetch_project_env_text,
|
24
|
+
get_current_user_profile,
|
25
|
+
get_projects,
|
26
|
+
login as api_login,
|
27
|
+
repo_name_from_git_url,
|
28
|
+
safe_slug,
|
29
|
+
)
|
30
|
+
from .ssh_utils import (
|
31
|
+
ensure_key_for_repo,
|
32
|
+
open_folder,
|
33
|
+
open_signed_terminal,
|
34
|
+
start_agent_and_add_key,
|
35
|
+
)
|
36
|
+
import time
|
37
|
+
|
38
|
+
app = typer.Typer(help="MainSequence CLI (login + project operations)")
|
39
|
+
|
40
|
+
project = typer.Typer(help="Project commands")
|
41
|
+
settings = typer.Typer(help="Settings (base folder, backend, etc.)")
|
42
|
+
|
43
|
+
app.add_typer(project, name="project")
|
44
|
+
app.add_typer(settings, name="settings")
|
45
|
+
|
46
|
+
# ---------- helpers ----------
|
47
|
+
|
48
|
+
def _projects_root(base_dir: str, org_slug: str) -> pathlib.Path:
|
49
|
+
p = pathlib.Path(base_dir).expanduser()
|
50
|
+
return p / org_slug / "projects"
|
51
|
+
|
52
|
+
def _org_slug_from_profile() -> str:
    """Slugified organization name of the signed-in user ('default' when unknown)."""
    org = get_current_user_profile().get("organization") or "default"
    slug = re.sub(r"[^a-z0-9-_]+", "-", org.lower()).strip("-")
    return slug or "default"
|
56
|
+
|
57
|
+
def _determine_repo_url(p: dict) -> str:
|
58
|
+
repo = (p.get("git_ssh_url") or "").strip()
|
59
|
+
if repo.lower() == "none":
|
60
|
+
repo = ""
|
61
|
+
if not repo:
|
62
|
+
extra = (p.get("data_source") or {}).get("related_resource", {}) or {}
|
63
|
+
extra = extra.get("extra_arguments") or (p.get("data_source") or {}).get("extra_arguments") or {}
|
64
|
+
repo = deep_find_repo_url(extra) or ""
|
65
|
+
return repo
|
66
|
+
|
67
|
+
def _copy_clipboard(txt: str) -> bool:
    """Best-effort copy of *txt* to the system clipboard.

    Tries the platform-native tool: pbcopy on macOS, clip.exe on Windows,
    wl-copy or xclip on Linux/other. Returns True when the tool reported
    success, False otherwise; never raises.
    """
    try:
        if sys.platform == "darwin":
            p = subprocess.run(["pbcopy"], input=txt, text=True)
            return p.returncode == 0
        elif sys.platform == "win32":
            # clip.exe ships with Windows and reads the clipboard text from stdin.
            p = subprocess.run(["clip"], input=txt, text=True)
            return p.returncode == 0
        elif shutil.which("wl-copy"):
            p = subprocess.run(["wl-copy"], input=txt, text=True)
            return p.returncode == 0
        elif shutil.which("xclip"):
            p = subprocess.run(["xclip", "-selection", "clipboard"], input=txt, text=True)
            return p.returncode == 0
    except Exception:
        pass
    return False
|
81
|
+
|
82
|
+
def _render_projects_table(items: list[dict], links: dict, base_dir: str, org_slug: str) -> str:
    """Return an aligned table with Local status + path (map or default folder guess)."""
    # Safe dotted-path getter: walks `path` keys through nested dicts,
    # returning `default` on any miss or type error instead of raising.
    def ds(obj, path, default=""):
        try:
            for k in path.split("."):
                obj = obj.get(k, {})
            return obj or default
        except Exception:
            return default

    rows = []
    for p in items:
        pid = str(p.get("id", ""))
        name = p.get("project_name") or "(unnamed)"
        dname = ds(p, "data_source.related_resource.display_name", "")
        # class_type may live nested or flattened depending on backend version.
        klass = ds(p, "data_source.related_resource.class_type",
                   ds(p, "data_source.related_resource_class_type", ""))
        status = ds(p, "data_source.related_resource.status", "")

        # 1) mapping file
        mapped = links.get(pid)
        local_path = mapped if mapped and pathlib.Path(mapped).exists() else None
        # 2) guess default location if mapping is absent
        if not local_path:
            guess = _projects_root(base_dir, org_slug) / safe_slug(name)
            if guess.exists():
                local_path = str(guess)

        local = "Local" if local_path else "—"
        path_col = local_path or "—"
        rows.append((pid, name, dname, klass, status, local, path_col))

    header = ["ID","Project","Data Source","Class","Status","Local","Path"]
    if not rows:
        return "No projects."

    # Column widths sized to the widest cell, header row included.
    colw = [max(len(r[i]) for r in rows + [tuple(header)]) for i in range(len(header))]
    fmt = "  ".join("{:<" + str(colw[i]) + "}" for i in range(len(header)))
    out = [fmt.format(*header), fmt.format(*["-"*len(h) for h in header])]
    for r in rows:
        out.append(fmt.format(*r))
    return "\n".join(out)
|
124
|
+
|
125
|
+
# ---------- top-level commands ----------
|
126
|
+
|
127
|
+
@app.command()
def login(
    email: str = typer.Argument(..., help="Email/username (server expects 'email' field)"),
    password: Optional[str] = typer.Option(None, prompt=True, hide_input=True, help="Password"),
    export: bool = typer.Option(False, "--export", help='Print `export MAIN_SEQUENCE_USER_TOKEN=...` so you can eval it'),
    no_status: bool = typer.Option(False, "--no-status", help="Do not print projects table after login")
):
    """
    Obtain tokens, store them locally, and set MAIN_SEQUENCE_USER_TOKEN for this process.
    On success, print the base folder and the project table (like the Electron app).
    """
    try:
        res = api_login(email, password)
    except ApiError as e:
        typer.secho(f"Login failed: {e}", fg=typer.colors.RED)
        raise typer.Exit(1)

    cfg_obj = cfg.get_config()
    base = cfg_obj["mainsequence_path"]
    typer.secho(f"Signed in as {res['username']} (Backend: {res['backend']})", fg=typer.colors.GREEN)
    typer.echo(f"Projects base folder: {base}")

    # Prefer the freshly stored access token; fall back to the process env.
    tok = cfg.get_tokens().get("access", os.environ.get("MAIN_SEQUENCE_USER_TOKEN", ""))
    if export and tok:
        # Plain print (not typer.echo) keeps the line clean for shell `eval`.
        print(f'export MAIN_SEQUENCE_USER_TOKEN="{tok}"')

    if not no_status:
        try:
            items = get_projects()
            links = cfg.get_links()
            org_slug = _org_slug_from_profile()
            typer.echo("\nProjects:")
            typer.echo(_render_projects_table(items, links, base, org_slug))
        except NotLoggedIn:
            typer.secho("Not logged in.", fg=typer.colors.RED)
|
162
|
+
|
163
|
+
# ---------- settings group ----------
|
164
|
+
|
165
|
+
@settings.callback(invoke_without_command=True)
def settings_cb(ctx: typer.Context):
    """`mainsequence settings` defaults to `show`."""
    if ctx.invoked_subcommand is not None:
        return
    settings_show()
    raise typer.Exit()
|
171
|
+
|
172
|
+
@settings.command("show")
def settings_show():
    """Print the current CLI configuration (backend URL and base folder) as JSON."""
    c = cfg.get_config()
    view = {
        "backend_url": c.get("backend_url"),
        "mainsequence_path": c.get("mainsequence_path"),
    }
    typer.echo(json.dumps(view, indent=2))
|
179
|
+
|
180
|
+
@settings.command("set-base")
def settings_set_base(path: str = typer.Argument(..., help="New projects base folder")):
    """Persist a new base folder for local project checkouts."""
    updated = cfg.set_config({"mainsequence_path": path})
    typer.secho(f"Projects base folder set to: {updated['mainsequence_path']}", fg=typer.colors.GREEN)
|
184
|
+
|
185
|
+
# ---------- project group (require login) ----------
|
186
|
+
|
187
|
+
@project.callback()
def project_guard():
    """Abort any `project` subcommand unless a user is signed in."""
    try:
        prof = get_current_user_profile()
        if not prof or not prof.get("username"):
            raise NotLoggedIn("Not logged in.")
    except ApiError:  # NotLoggedIn subclasses ApiError, so one handler covers both
        typer.secho("Not logged in. Run: mainsequence login <email>", fg=typer.colors.RED)
        raise typer.Exit(1)
|
199
|
+
|
200
|
+
@project.command("list")
def project_list():
    """List projects with Local status and path."""
    conf = cfg.get_config()
    base = conf["mainsequence_path"]
    slug = _org_slug_from_profile()
    table = _render_projects_table(get_projects(), cfg.get_links(), base, slug)
    typer.echo(table)
|
210
|
+
|
211
|
+
@project.command("open")
def project_open(project_id: int):
    """Open the local folder in the OS file manager."""
    path = cfg.get_links().get(str(project_id))
    if not (path and pathlib.Path(path).exists()):
        # No usable mapping: fall back to the default checkout location guess.
        conf = cfg.get_config()
        slug = _org_slug_from_profile()
        match = next((x for x in get_projects() if str(x.get("id")) == str(project_id)), None)
        if match:
            guess = _projects_root(conf["mainsequence_path"], slug) / safe_slug(match.get("project_name") or "")
            if guess.exists():
                path = str(guess)
    if not (path and pathlib.Path(path).exists()):
        typer.secho("No local folder mapped for this project. Run `set-up-locally` first.", fg=typer.colors.RED)
        raise typer.Exit(1)
    open_folder(path)
    typer.echo(f"Opened: {path}")
|
232
|
+
|
233
|
+
@project.command("delete-local")
def project_delete_local(
    project_id: int,
    permanent: bool = typer.Option(False, "--permanent", help="Also remove the folder (dangerous)")
):
    """Unlink the mapped folder, optionally delete it."""
    mapped = cfg.remove_link(project_id)
    if not mapped:
        typer.echo("No mapping found.")
        return
    folder = pathlib.Path(mapped)
    if not folder.exists():
        typer.echo("Mapping removed; folder already absent.")
        return
    if permanent:
        # Irreversible: removes the checkout from disk.
        shutil.rmtree(mapped, ignore_errors=True)
        typer.secho(f"Deleted: {mapped}", fg=typer.colors.YELLOW)
    else:
        typer.secho(f"Unlinked mapping (kept folder): {mapped}", fg=typer.colors.GREEN)
|
253
|
+
|
254
|
+
@project.command("open-signed-terminal")
def project_open_signed_terminal(project_id: int):
    """Open a terminal window in the project directory with ssh-agent started and the repo's key added."""
    links = cfg.get_links()
    dir_ = links.get(str(project_id))

    if not dir_ or not pathlib.Path(dir_).exists():
        # Mapping missing or stale: also try the default checkout location guess.
        cfg_obj = cfg.get_config()
        base = cfg_obj["mainsequence_path"]
        org_slug = _org_slug_from_profile()
        items = get_projects()
        p = next((x for x in items if str(x.get("id")) == str(project_id)), None)
        if p:
            guess = _projects_root(base, org_slug) / safe_slug(p.get("project_name") or "")
            if guess.exists():
                dir_ = str(guess)

    if not dir_ or not pathlib.Path(dir_).exists():
        typer.secho("No local folder mapped for this project. Run `set-up-locally` first.", fg=typer.colors.RED)
        raise typer.Exit(1)

    # Derive the SSH key name from the repo's origin URL (fall back to the folder name).
    proc = subprocess.run(["git", "-C", dir_, "remote", "get-url", "origin"], text=True, capture_output=True)
    # Fix: guard against empty stdout — `"".splitlines()[-1]` raised IndexError
    # when git exited 0 with no output.
    out_lines = (proc.stdout or "").strip().splitlines()
    origin = out_lines[-1] if proc.returncode == 0 and out_lines else ""
    name = repo_name_from_git_url(origin) or pathlib.Path(dir_).name
    key_path = pathlib.Path.home() / ".ssh" / name
    open_signed_terminal(dir_, key_path, name)
|
281
|
+
|
282
|
+
@project.command("set-up-locally")
def project_set_up_locally(
    project_id: int,
    base_dir: Optional[str] = typer.Option(None, "--base-dir", help="Override base dir (default from settings)")
):
    """Clone the project's repository into the org's projects folder, register a
    deploy key, write the .env file, and record the local mapping."""
    cfg_obj = cfg.get_config()
    base = base_dir or cfg_obj["mainsequence_path"]

    org_slug = _org_slug_from_profile()

    items = get_projects()
    p = next((x for x in items if int(x.get("id", -1)) == project_id), None)
    if not p:
        typer.secho("Project not found/visible.", fg=typer.colors.RED)
        raise typer.Exit(1)

    repo = _determine_repo_url(p)
    if not repo:
        typer.secho("No repository URL found for this project.", fg=typer.colors.RED)
        raise typer.Exit(1)

    name = safe_slug(p.get("project_name") or f"project-{project_id}")
    projects_root = _projects_root(base, org_slug)
    target_dir = projects_root / name
    projects_root.mkdir(parents=True, exist_ok=True)

    # Ensure a per-repo SSH keypair exists; also copy the public key for the user.
    key_path, pub_path, pub = ensure_key_for_repo(repo)
    copied = _copy_clipboard(pub)

    # Best-effort: register the public key as a deploy key named after this host.
    try:
        host = platform.node()
        add_deploy_key(project_id, host, pub)
    except Exception:
        pass

    agent_env = start_agent_and_add_key(key_path)

    if target_dir.exists():
        typer.secho(f"Target already exists: {target_dir}", fg=typer.colors.RED)
        raise typer.Exit(2)

    # Force git to use exactly this repo's key (no agent key guessing).
    env = os.environ.copy() | agent_env
    env["GIT_SSH_COMMAND"] = f'ssh -i "{str(key_path)}" -o IdentitiesOnly=yes'
    rc = subprocess.call(["git", "clone", repo, str(target_dir)], env=env, cwd=str(projects_root))
    if rc != 0:
        # Remove any half-created clone so a retry starts fresh.
        try:
            if target_dir.exists():
                import shutil
                shutil.rmtree(target_dir, ignore_errors=True)
        except Exception:
            pass
        typer.secho("git clone failed", fg=typer.colors.RED)
        raise typer.Exit(3)

    # Fetch the backend-managed environment and pin VFB_PROJECT_PATH to the new checkout.
    env_text = ""
    try:
        env_text = fetch_project_env_text(project_id)
    except Exception:
        env_text = ""
    env_text = (env_text or "").replace("\r", "")
    if any(line.startswith("VFB_PROJECT_PATH=") for line in env_text.splitlines()):
        # Replace the existing assignment in place, preserving all other lines.
        lines = [f"VFB_PROJECT_PATH={str(target_dir)}" if line.startswith("VFB_PROJECT_PATH=") else line
                 for line in env_text.splitlines()]
        env_text = "\n".join(lines)
    else:
        if env_text and not env_text.endswith("\n"): env_text += "\n"
        env_text += f"VFB_PROJECT_PATH={str(target_dir)}\n"
    (target_dir / ".env").write_text(env_text, encoding="utf-8")

    # Record the project-id -> folder mapping used by `open`/`list`.
    cfg.set_link(project_id, str(target_dir))

    typer.secho(f"Local folder: {target_dir}", fg=typer.colors.GREEN)
    typer.echo(f"Repo URL: {repo}")
    if copied:
        typer.echo("Public key copied to clipboard.")
|
357
|
+
|
358
|
+
@app.command("build_and_run")
def build_and_run(dockerfile: Optional[str] = typer.Argument(
    None,
    help="Path to Dockerfile to build & run. If omitted, only lock & export requirements."
)):
    """
    - uv lock
    - uv export --format requirements --no-dev --hashes > requirements.txt
    - If DOCKERFILE argument is given: docker build -f DOCKERFILE . && docker run IMAGE
    """

    # ----- sanity checks for uv + project files -----
    if shutil.which("uv") is None:
        typer.secho("uv is not installed. Install it with: pip install uv", fg=typer.colors.RED)
        raise typer.Exit(1)

    if not pathlib.Path("pyproject.toml").exists():
        typer.secho(f"pyproject.toml not found in {pathlib.Path.cwd()}", fg=typer.colors.RED)
        raise typer.Exit(1)

    # ----- 1) solve and lock -----
    typer.secho("Running: uv lock", fg=typer.colors.BLUE)
    p = subprocess.run(["uv", "lock"])
    if p.returncode != 0:
        typer.secho("uv lock failed.", fg=typer.colors.RED)
        raise typer.Exit(p.returncode)

    # ----- 2) export pinned, hashed requirements -----
    typer.secho("Exporting hashed requirements to requirements.txt", fg=typer.colors.BLUE)
    p = subprocess.run(
        ["uv", "export", "--format", "requirements", "--no-dev", "--hashes"],
        capture_output=True, text=True
    )
    if p.returncode != 0:
        typer.secho("uv export failed:", fg=typer.colors.RED)
        if p.stderr:
            typer.echo(p.stderr.strip())
        raise typer.Exit(p.returncode)

    # stdout of `uv export` is the requirements file content.
    pathlib.Path("requirements.txt").write_text(p.stdout, encoding="utf-8")
    typer.secho("requirements.txt written.", fg=typer.colors.GREEN)

    # ----- 3) optional Docker build + run -----
    if dockerfile is None:
        typer.secho("No Dockerfile provided; skipping Docker build/run.", fg=typer.colors.BLUE)
        return

    df_path = pathlib.Path(dockerfile)
    if not df_path.exists():
        typer.secho(f"Dockerfile not found: {dockerfile}", fg=typer.colors.RED)
        raise typer.Exit(1)

    if shutil.which("docker") is None:
        typer.secho("Docker CLI is not installed or not on PATH.", fg=typer.colors.RED)
        raise typer.Exit(1)

    # Image name: directory-name + '-img' (overridable via env IMAGE_NAME)
    cwd_name = pathlib.Path.cwd().name
    safe_name = re.sub(r"[^a-z0-9_.-]+", "-", cwd_name.lower())
    image_name = os.environ.get("IMAGE_NAME", f"{safe_name}-img")

    # Tag: short git sha if available, else timestamp (overridable via env TAG)
    tag = os.environ.get("TAG")
    if not tag:
        try:
            tag = subprocess.check_output(["git", "rev-parse", "--short", "HEAD"], text=True).strip()
        except Exception:
            tag = time.strftime("%Y%m%d%H%M%S")

    image_ref = f"{image_name}:{tag}"

    typer.secho(f"Building Docker image: {image_ref}", fg=typer.colors.BLUE)
    build = subprocess.run(["docker", "build", "-f", str(df_path), "-t", image_ref, "."])
    if build.returncode != 0:
        typer.secho("docker build failed.", fg=typer.colors.RED)
        raise typer.Exit(build.returncode)

    typer.secho(f"Running container: {image_ref}", fg=typer.colors.BLUE)
    try:
        # interactive by default; relies on your ENTRYPOINT
        subprocess.check_call(["docker", "run", "--rm", "-it", image_ref])
    except subprocess.CalledProcessError as e:
        typer.secho(f"docker run failed (exit {e.returncode}).", fg=typer.colors.RED)
        raise typer.Exit(e.returncode)
|
442
|
+
|
@@ -0,0 +1,78 @@
|
|
1
|
+
from __future__ import annotations
|
2
|
+
import json, os, sys, pathlib, platform, time
|
3
|
+
|
4
|
+
APP_NAME = "MainSequenceCLI"
|
5
|
+
|
6
|
+
def _config_dir() -> pathlib.Path:
    """Per-user configuration directory for this CLI, following platform conventions.

    Windows: %APPDATA%/<APP_NAME>; macOS: ~/Library/Application Support/<APP_NAME>;
    elsewhere: ~/.config/mainsequence.
    """
    home = pathlib.Path.home()
    if sys.platform == "win32":
        return pathlib.Path(os.environ.get("APPDATA", home)) / APP_NAME
    if sys.platform == "darwin":
        return home / "Library" / "Application Support" / APP_NAME
    return home / ".config" / "mainsequence"
|
15
|
+
|
16
|
+
CFG_DIR = _config_dir()
|
17
|
+
CFG_DIR.mkdir(parents=True, exist_ok=True)
|
18
|
+
|
19
|
+
CONFIG_JSON = CFG_DIR / "config.json"
|
20
|
+
TOKENS_JSON = CFG_DIR / "token.json" # {username, access, refresh, ts}
|
21
|
+
LINKS_JSON = CFG_DIR / "project-links.json" # {"<id>": "/abs/path"}
|
22
|
+
|
23
|
+
DEFAULTS = {
|
24
|
+
"backend_url": os.environ.get("MAIN_SEQUENCE_BACKEND_URL", "https://main-sequence.app/"),
|
25
|
+
"mainsequence_path": str(pathlib.Path.home() / "mainsequence"),
|
26
|
+
"version": 1,
|
27
|
+
}
|
28
|
+
|
29
|
+
def read_json(path: pathlib.Path, default):
    """Parse *path* as UTF-8 JSON, returning *default* on any read/parse failure."""
    try:
        text = path.read_text(encoding="utf-8")
        return json.loads(text)
    except Exception:
        return default
|
34
|
+
|
35
|
+
def write_json(path: pathlib.Path, obj) -> None:
    """Serialize *obj* as pretty-printed UTF-8 JSON at *path*, creating parent dirs as needed."""
    path.parent.mkdir(parents=True, exist_ok=True)
    payload = json.dumps(obj, indent=2)
    path.write_text(payload, encoding="utf-8")
|
38
|
+
|
39
|
+
def get_config() -> dict:
    """Merged configuration (DEFAULTS overlaid with config.json).

    Side effect: ensures the configured projects base folder exists on disk.
    """
    cfg = DEFAULTS | read_json(CONFIG_JSON, {})
    pathlib.Path(cfg["mainsequence_path"]).mkdir(parents=True, exist_ok=True)
    return cfg
|
44
|
+
|
45
|
+
def set_config(updates: dict) -> dict:
    """Apply *updates* to the stored configuration, stamp it, persist, and return it."""
    cfg = get_config() | (updates or {})
    # ISO-8601 UTC timestamp recording the last modification.
    cfg["updated_at"] = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
    write_json(CONFIG_JSON, cfg)
    return cfg
|
50
|
+
|
51
|
+
def get_links() -> dict:
    """Mapping of project id (str) -> absolute local checkout path ({} if absent)."""
    return read_json(LINKS_JSON, {})
|
53
|
+
|
54
|
+
def set_link(project_id: int | str, path: str) -> None:
    """Record (or replace) the local checkout path for *project_id* and persist."""
    links = get_links()
    links[str(project_id)] = path
    write_json(LINKS_JSON, links)
|
58
|
+
|
59
|
+
def remove_link(project_id: int | str) -> str | None:
    """Drop the mapping for *project_id*, returning the previous path (None if unmapped)."""
    links = get_links()
    previous = links.pop(str(project_id), None)
    write_json(LINKS_JSON, links)  # file is rewritten even when nothing was removed
    return previous
|
64
|
+
|
65
|
+
def get_tokens() -> dict:
    """Stored token record {username, access, refresh, ts}, or {} when absent."""
    return read_json(TOKENS_JSON, {})
|
67
|
+
|
68
|
+
def save_tokens(username: str, access: str, refresh: str) -> None:
    """Persist the token pair together with the username and a unix timestamp."""
    record = {"username": username, "access": access, "refresh": refresh, "ts": int(time.time())}
    write_json(TOKENS_JSON, record)
|
70
|
+
|
71
|
+
def set_env_access(access: str) -> None:
|
72
|
+
# For the current process (and children). Parent shell can't be set from here.
|
73
|
+
os.environ["MAIN_SEQUENCE_USER_TOKEN"] = access
|
74
|
+
|
75
|
+
def backend_url() -> str:
    """Configured backend base URL with any trailing slash removed."""
    cfg = get_config()
    raw = cfg.get("backend_url") or DEFAULTS["backend_url"]
    return raw.rstrip("/")
|