mtrx-cli 0.1.23 → 0.1.24
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "mtrx-cli",
|
|
3
|
-
"version": "0.1.23",
|
|
3
|
+
"version": "0.1.24",
|
|
4
4
|
"description": "MATRX CLI for routing Codex, Claude, and Cursor through Matrx",
|
|
5
5
|
"homepage": "https://mtrx.so",
|
|
6
6
|
"repository": {
|
|
@@ -33,6 +33,8 @@
|
|
|
33
33
|
"src/matrx/cli/cursor_proxy.py",
|
|
34
34
|
"src/matrx/cli/cursor_reroute.py",
|
|
35
35
|
"src/matrx/cli/cursor_service.py",
|
|
36
|
+
"src/matrx/cli/bootstrap.py",
|
|
37
|
+
"src/matrx/cli/gemini_env_bootstrap.cjs",
|
|
36
38
|
"src/matrx/cli/launcher.py",
|
|
37
39
|
"src/matrx/cli/main.py",
|
|
38
40
|
"src/matrx/cli/project_cmds.py",
|
package/src/matrx/__init__.py
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
__version__ = "0.1.23"
|
|
1
|
+
__version__ = "0.1.24"
|
|
@@ -0,0 +1,119 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Bootstrap command — warms the system registry for an existing project.
|
|
3
|
+
Called by `mtrx init`.
|
|
4
|
+
"""
|
|
5
|
+
from __future__ import annotations
|
|
6
|
+
|
|
7
|
+
import json
|
|
8
|
+
import subprocess
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
|
|
11
|
+
# Default system registry seeded into .matrx/systems.json by `mtrx init`
# (see _seed_systems_json). Each entry declares a system id/name and the
# file patterns that _rank_systems_by_activity matches against recent git
# activity. Users can hand-edit the generated file afterward.
_DEFAULT_SYSTEMS_TEMPLATE = [
    {
        "id": "memory",
        "name": "Memory System",
        "description": "Memory flywheel: extract, store, retrieve, inject",
        "file_patterns": ["core/memory.py", "services/memory.py", "core/extractor.py", "core/profile_builder.py"]
    },
    {
        "id": "proxy",
        "name": "Proxy Service",
        "description": "Intercepts all LLM calls, applies compression + injection",
        "file_patterns": ["services/proxy.py", "core/compressor.py", "core/summarizer.py"]
    },
    {
        "id": "auth",
        "name": "Auth & Multi-Org",
        "description": "Clerk JWT validation, org/project scoping, API keys",
        "file_patterns": ["middleware/auth.py", "api/auth.py", "api/orgs.py", "models/org_member.py"]
    },
    {
        "id": "analytics",
        "name": "Analytics",
        "description": "Usage snapshots, memory hit rate, token tracking",
        "file_patterns": ["services/analytics.py", "api/analytics.py"]
    },
]
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def run_init(project_root: str = ".") -> None:
    """Entry point for `mtrx init`.

    Warms the system registry for an existing project:
      1. Loads or seeds `.matrx/systems.json`.
      2. Scans the last 30 days of git history for recently touched files.
      3. Ranks the registered systems by activity and reports them.

    Args:
        project_root: Directory to analyze; defaults to the current dir.
    """
    root = Path(project_root).resolve()
    print(f"Matrx init: analyzing {root}")

    # Step 1: Load or create .matrx/systems.json
    matrx_dir = root / ".matrx"
    matrx_dir.mkdir(exist_ok=True)
    systems_path = matrx_dir / "systems.json"

    if not systems_path.exists():
        _seed_systems_json(root, systems_path)
        print(f" Created {systems_path}")
    else:
        print(f" Found existing {systems_path}")

    # Step 2: Analyze git log for hot systems
    hot_files = _get_hot_files(root, days=30)
    print(f" Found {len(hot_files)} recently modified files")

    # Step 3: Map to systems. systems.json is user-editable (see
    # _seed_systems_json), so a bad hand-edit must not crash init —
    # fall back to an empty registry instead.
    try:
        data = json.loads(systems_path.read_text())
    except (OSError, json.JSONDecodeError):
        data = {}
    systems = data.get("systems", []) if isinstance(data, dict) else []
    hot_systems = _rank_systems_by_activity(hot_files, systems)

    if hot_systems:
        print("\n Active systems detected:")
        for sys_id, count, depth in hot_systems:
            meta = next((s for s in systems if s["id"] == sys_id), None)
            if meta:
                print(f" {meta['name']:<25} {count:>3} touches → {depth} card")
    else:
        print("\n No recently active systems detected (no git history or no matches).")

    # Plain strings: the originals were f-strings with no placeholders (F541).
    print("\n Matrx will generate cards for detected systems on first use.")
    print(" Run your agent — cards will be ready within the first few calls.\n")
    print(" Done.")
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def _get_hot_files(root: Path, days: int = 30) -> dict[str, int]:
|
|
78
|
+
"""Parse git log, return file → touch count mapping."""
|
|
79
|
+
try:
|
|
80
|
+
result = subprocess.run(
|
|
81
|
+
["git", "log", f"--since={days} days ago", "--name-only", "--pretty=format:"],
|
|
82
|
+
cwd=root, capture_output=True, text=True, timeout=10,
|
|
83
|
+
)
|
|
84
|
+
counts: dict[str, int] = {}
|
|
85
|
+
for line in result.stdout.splitlines():
|
|
86
|
+
line = line.strip()
|
|
87
|
+
if line and not line.startswith("commit"):
|
|
88
|
+
counts[line] = counts.get(line, 0) + 1
|
|
89
|
+
return counts
|
|
90
|
+
except Exception:
|
|
91
|
+
return {}
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
def _rank_systems_by_activity(
|
|
95
|
+
hot_files: dict[str, int], systems: list[dict]
|
|
96
|
+
) -> list[tuple[str, int, str]]:
|
|
97
|
+
"""Return [(system_id, touch_count, depth)] sorted by activity."""
|
|
98
|
+
ranked = []
|
|
99
|
+
for s in systems:
|
|
100
|
+
patterns = s.get("file_patterns", [])
|
|
101
|
+
total = sum(
|
|
102
|
+
count for f, count in hot_files.items()
|
|
103
|
+
if any(pat in f or f.endswith(pat) for pat in patterns)
|
|
104
|
+
)
|
|
105
|
+
if total > 0:
|
|
106
|
+
ranked.append((s["id"], total, "standard"))
|
|
107
|
+
|
|
108
|
+
ranked.sort(key=lambda x: x[1], reverse=True)
|
|
109
|
+
# Re-assign depths by rank
|
|
110
|
+
result = []
|
|
111
|
+
for i, (sid, count, _) in enumerate(ranked):
|
|
112
|
+
depth = "full" if i < 3 else ("standard" if i < 8 else "distilled")
|
|
113
|
+
result.append((sid, count, depth))
|
|
114
|
+
return result
|
|
115
|
+
|
|
116
|
+
|
|
117
|
+
def _seed_systems_json(root: Path, out_path: Path) -> None:
    """Write default systems.json. Users can hand-edit afterward.

    Args:
        root: Project root (currently unused; kept for interface stability).
        out_path: Destination path for the seeded registry file.
    """
    # Explicit encoding so the seed file is UTF-8 regardless of the
    # platform's locale default.
    out_path.write_text(
        json.dumps({"systems": _DEFAULT_SYSTEMS_TEMPLATE}, indent=2),
        encoding="utf-8",
    )
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
function decodeBase64Env(name) {
|
|
4
|
+
const value = process.env[name];
|
|
5
|
+
if (!value) {
|
|
6
|
+
return undefined;
|
|
7
|
+
}
|
|
8
|
+
try {
|
|
9
|
+
return Buffer.from(value, "base64").toString("utf8");
|
|
10
|
+
} catch {
|
|
11
|
+
return undefined;
|
|
12
|
+
}
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
// Pass-through map: sandbox-safe MTRX_* base64 variables → the real
// environment variables the Gemini CLI reads.
const envMappings = [
  ["MTRX_GEMINI_CUSTOM_HEADERS_B64", "GEMINI_CLI_CUSTOM_HEADERS"],
  ["MTRX_CODE_ASSIST_ENDPOINT_B64", "CODE_ASSIST_ENDPOINT"],
  ["MTRX_GEMINI_API_ENDPOINT_B64", "GEMINI_API_ENDPOINT"],
];

// Materialize each target variable, never clobbering a value that is
// already present in the environment.
envMappings.forEach(([from, to]) => {
  const plain = decodeBase64Env(from);
  if (plain && !process.env[to]) {
    process.env[to] = plain;
  }
});

// The auth mechanism is passed through as plain text (no base64 step).
const mechanism = process.env.MTRX_GEMINI_API_KEY_AUTH_MECHANISM;
if (mechanism && !process.env.GEMINI_API_KEY_AUTH_MECHANISM) {
  process.env.GEMINI_API_KEY_AUTH_MECHANISM = mechanism;
}
|
|
@@ -95,6 +95,46 @@ def _runtime_agent_basename(tool: str) -> tuple[str, str, list[str], str]:
|
|
|
95
95
|
return normalized, f"{tool.capitalize()} CLI", ["cli", tool], tool
|
|
96
96
|
|
|
97
97
|
|
|
98
|
+
def _append_sandbox_env(env: dict[str, str], key: str, value: str | None) -> None:
|
|
99
|
+
if not value:
|
|
100
|
+
return
|
|
101
|
+
entry = f"{key}={value}"
|
|
102
|
+
existing = [item.strip() for item in (env.get("SANDBOX_ENV") or "").split(",") if item.strip()]
|
|
103
|
+
filtered = [item for item in existing if not item.startswith(f"{key}=")]
|
|
104
|
+
filtered.append(entry)
|
|
105
|
+
env["SANDBOX_ENV"] = ",".join(filtered)
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
def _remove_sandbox_env_keys(env: dict[str, str], keys: tuple[str, ...]) -> None:
|
|
109
|
+
existing = [item.strip() for item in (env.get("SANDBOX_ENV") or "").split(",") if item.strip()]
|
|
110
|
+
filtered = [
|
|
111
|
+
item for item in existing
|
|
112
|
+
if not any(item.startswith(f"{key}=") for key in keys)
|
|
113
|
+
]
|
|
114
|
+
if filtered:
|
|
115
|
+
env["SANDBOX_ENV"] = ",".join(filtered)
|
|
116
|
+
else:
|
|
117
|
+
env.pop("SANDBOX_ENV", None)
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
def _ensure_node_require(env: dict[str, str], script_path: str) -> None:
|
|
121
|
+
require_flag = f"--require={script_path}"
|
|
122
|
+
existing = (env.get("NODE_OPTIONS") or "").strip()
|
|
123
|
+
if require_flag in existing.split():
|
|
124
|
+
return
|
|
125
|
+
env["NODE_OPTIONS"] = f"{existing} {require_flag}".strip()
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
def _remove_node_require(env: dict[str, str], script_path: str) -> None:
|
|
129
|
+
require_flag = f"--require={script_path}"
|
|
130
|
+
existing = (env.get("NODE_OPTIONS") or "").split()
|
|
131
|
+
filtered = [part for part in existing if part != require_flag]
|
|
132
|
+
if filtered:
|
|
133
|
+
env["NODE_OPTIONS"] = " ".join(filtered)
|
|
134
|
+
else:
|
|
135
|
+
env.pop("NODE_OPTIONS", None)
|
|
136
|
+
|
|
137
|
+
|
|
98
138
|
def configured_route(state: dict, tool: str) -> str | None:
|
|
99
139
|
route = state.get("defaults", {}).get(tool)
|
|
100
140
|
if route in VALID_ROUTES:
|
|
@@ -467,6 +507,20 @@ def _capture_git_context(cwd: str | None = None) -> tuple[str, str]:
|
|
|
467
507
|
return branch, commit
|
|
468
508
|
|
|
469
509
|
|
|
510
|
+
def _capture_git_remote_url(cwd: str | None = None) -> str:
|
|
511
|
+
root = cwd or os.getcwd()
|
|
512
|
+
try:
|
|
513
|
+
r = subprocess.run(
|
|
514
|
+
["git", "-C", root, "remote", "get-url", "origin"],
|
|
515
|
+
capture_output=True, text=True, timeout=2, check=False,
|
|
516
|
+
)
|
|
517
|
+
if r.returncode == 0:
|
|
518
|
+
return r.stdout.strip()
|
|
519
|
+
except (OSError, subprocess.SubprocessError):
|
|
520
|
+
pass
|
|
521
|
+
return ""
|
|
522
|
+
|
|
523
|
+
|
|
470
524
|
def _resolve_matrx_context_overrides(
|
|
471
525
|
state: dict,
|
|
472
526
|
env: dict[str, str],
|
|
@@ -544,10 +598,13 @@ def _build_codex_env(
|
|
|
544
598
|
if project_id:
|
|
545
599
|
header_parts.append(f'"X-Matrx-Project-Id" = "{project_id}"')
|
|
546
600
|
_git_branch, _git_commit = _capture_git_context(_workspace_cwd(env))
|
|
601
|
+
_git_repo_url = _capture_git_remote_url(_workspace_cwd(env))
|
|
547
602
|
if _git_branch:
|
|
548
603
|
header_parts.append(f'"X-Matrx-Branch" = "{_git_branch}"')
|
|
549
604
|
if _git_commit:
|
|
550
605
|
header_parts.append(f'"X-Matrx-Commit" = "{_git_commit}"')
|
|
606
|
+
if _git_repo_url:
|
|
607
|
+
header_parts.append(f'"X-Matrx-Repo-Url" = "{_git_repo_url}"')
|
|
551
608
|
if env_b64:
|
|
552
609
|
header_parts.append(f'"X-Matrx-Env" = "{env_b64}"')
|
|
553
610
|
headers_str = ", ".join(header_parts)
|
|
@@ -589,6 +646,7 @@ def _build_gemini_env(
|
|
|
589
646
|
proxy_base = ensure_v1_url(matrx.get("base_url"))
|
|
590
647
|
mx_key, matrx_auth_source = _resolve_matrx_route_key(state, env)
|
|
591
648
|
direct_key = (env.get("GEMINI_API_KEY") or env.get("GOOGLE_API_KEY") or "").strip()
|
|
649
|
+
bootstrap_script = str(Path(__file__).with_name("gemini_env_bootstrap.cjs").resolve())
|
|
592
650
|
|
|
593
651
|
if route == "matrx":
|
|
594
652
|
if not mx_key:
|
|
@@ -610,6 +668,7 @@ def _build_gemini_env(
|
|
|
610
668
|
if runtime_agent_id:
|
|
611
669
|
ctx_params.append(f"mtrx_agent={runtime_agent_id}")
|
|
612
670
|
git_branch, git_commit = _capture_git_context(_workspace_cwd(env))
|
|
671
|
+
git_repo_url = _capture_git_remote_url(_workspace_cwd(env))
|
|
613
672
|
if git_branch:
|
|
614
673
|
ctx_params.append(f"mtrx_branch={git_branch}")
|
|
615
674
|
if git_commit:
|
|
@@ -632,6 +691,8 @@ def _build_gemini_env(
|
|
|
632
691
|
custom_headers.append(f"x-matrx-branch: {git_branch}")
|
|
633
692
|
if git_commit:
|
|
634
693
|
custom_headers.append(f"x-matrx-commit: {git_commit}")
|
|
694
|
+
if git_repo_url:
|
|
695
|
+
custom_headers.append(f"x-matrx-repo-url: {git_repo_url}")
|
|
635
696
|
if env_b64:
|
|
636
697
|
custom_headers.append(f"x-matrx-env: {env_b64}")
|
|
637
698
|
|
|
@@ -641,6 +702,27 @@ def _build_gemini_env(
|
|
|
641
702
|
env["CODE_ASSIST_ENDPOINT"] = proxy_base
|
|
642
703
|
env["GEMINI_CLI_CUSTOM_HEADERS"] = ", ".join(custom_headers)
|
|
643
704
|
env["GEMINI_API_KEY_AUTH_MECHANISM"] = "bearer"
|
|
705
|
+
_ensure_node_require(env, bootstrap_script)
|
|
706
|
+
_append_sandbox_env(
|
|
707
|
+
env,
|
|
708
|
+
"MTRX_GEMINI_CUSTOM_HEADERS_B64",
|
|
709
|
+
base64.b64encode(env["GEMINI_CLI_CUSTOM_HEADERS"].encode("utf-8")).decode("ascii"),
|
|
710
|
+
)
|
|
711
|
+
_append_sandbox_env(
|
|
712
|
+
env,
|
|
713
|
+
"MTRX_CODE_ASSIST_ENDPOINT_B64",
|
|
714
|
+
base64.b64encode(env["CODE_ASSIST_ENDPOINT"].encode("utf-8")).decode("ascii"),
|
|
715
|
+
)
|
|
716
|
+
_append_sandbox_env(
|
|
717
|
+
env,
|
|
718
|
+
"MTRX_GEMINI_API_ENDPOINT_B64",
|
|
719
|
+
base64.b64encode(env["GEMINI_API_ENDPOINT"].encode("utf-8")).decode("ascii"),
|
|
720
|
+
)
|
|
721
|
+
_append_sandbox_env(
|
|
722
|
+
env,
|
|
723
|
+
"MTRX_GEMINI_API_KEY_AUTH_MECHANISM",
|
|
724
|
+
env["GEMINI_API_KEY_AUTH_MECHANISM"],
|
|
725
|
+
)
|
|
644
726
|
|
|
645
727
|
return env, matrx_auth_source
|
|
646
728
|
|
|
@@ -655,6 +737,16 @@ def _build_gemini_env(
|
|
|
655
737
|
value = (env.get(key) or "").strip()
|
|
656
738
|
if "matrx" in value.lower() or "mtrx.so" in value.lower():
|
|
657
739
|
env.pop(key, None)
|
|
740
|
+
_remove_node_require(env, bootstrap_script)
|
|
741
|
+
_remove_sandbox_env_keys(
|
|
742
|
+
env,
|
|
743
|
+
(
|
|
744
|
+
"MTRX_GEMINI_CUSTOM_HEADERS_B64",
|
|
745
|
+
"MTRX_CODE_ASSIST_ENDPOINT_B64",
|
|
746
|
+
"MTRX_GEMINI_API_ENDPOINT_B64",
|
|
747
|
+
"MTRX_GEMINI_API_KEY_AUTH_MECHANISM",
|
|
748
|
+
),
|
|
749
|
+
)
|
|
658
750
|
|
|
659
751
|
custom_headers = (env.get("GEMINI_CLI_CUSTOM_HEADERS") or "").strip().lower()
|
|
660
752
|
if "x-matrx-" in custom_headers:
|
|
@@ -718,10 +810,13 @@ def _build_claude_env(
|
|
|
718
810
|
if project_id:
|
|
719
811
|
custom_headers += f"\nx-matrx-project-id: {project_id}"
|
|
720
812
|
_git_branch, _git_commit = _capture_git_context(_workspace_cwd(env))
|
|
813
|
+
_git_repo_url = _capture_git_remote_url(_workspace_cwd(env))
|
|
721
814
|
if _git_branch:
|
|
722
815
|
custom_headers += f"\nx-matrx-branch: {_git_branch}"
|
|
723
816
|
if _git_commit:
|
|
724
817
|
custom_headers += f"\nx-matrx-commit: {_git_commit}"
|
|
818
|
+
if _git_repo_url:
|
|
819
|
+
custom_headers += f"\nx-matrx-repo-url: {_git_repo_url}"
|
|
725
820
|
if env_b64:
|
|
726
821
|
custom_headers += f"\nx-matrx-env: {env_b64}"
|
|
727
822
|
env["ANTHROPIC_CUSTOM_HEADERS"] = custom_headers
|
|
@@ -973,6 +1068,12 @@ def _validate_gemini_launch_plan(plan: LaunchPlan, state: dict) -> None:
|
|
|
973
1068
|
if "x-matrx-agent-id:" not in custom_headers:
|
|
974
1069
|
raise ValueError("Gemini Matrx route is missing GEMINI_CLI_CUSTOM_HEADERS with X-Matrx-Agent-Id")
|
|
975
1070
|
|
|
1071
|
+
sandbox_env = (plan.env.get("SANDBOX_ENV") or "").strip()
|
|
1072
|
+
if "MTRX_GEMINI_CUSTOM_HEADERS_B64=" not in sandbox_env:
|
|
1073
|
+
raise ValueError("Gemini Matrx route is missing sandbox bootstrap for GEMINI_CLI_CUSTOM_HEADERS")
|
|
1074
|
+
if "MTRX_CODE_ASSIST_ENDPOINT_B64=" not in sandbox_env:
|
|
1075
|
+
raise ValueError("Gemini Matrx route is missing sandbox bootstrap for CODE_ASSIST_ENDPOINT")
|
|
1076
|
+
|
|
976
1077
|
|
|
977
1078
|
def _validate_codex_launch_plan(plan: LaunchPlan, state: dict) -> None:
|
|
978
1079
|
if plan.route != "matrx":
|
package/src/matrx/cli/main.py
CHANGED
|
@@ -144,7 +144,7 @@ def _build_parser() -> argparse.ArgumentParser:
|
|
|
144
144
|
cursor.add_argument(
|
|
145
145
|
"--launch",
|
|
146
146
|
action="store_true",
|
|
147
|
-
help="Launch Cursor
|
|
147
|
+
help="Launch Cursor after applying the current Matrx settings",
|
|
148
148
|
)
|
|
149
149
|
|
|
150
150
|
return parser
|
|
@@ -932,6 +932,7 @@ def _cmd_launch(tool: str, route: str | None, remainder: list[str]) -> int:
|
|
|
932
932
|
|
|
933
933
|
def _cmd_cursor(args) -> int:
|
|
934
934
|
from matrx.cli.cursor_hooks import install_mtrx_hooks, is_mtrx_hooks_installed
|
|
935
|
+
from matrx.cli.cursor_service import is_proxy_running
|
|
935
936
|
from matrx.cli.cursor_launcher import find_cursor_executable
|
|
936
937
|
|
|
937
938
|
route = args.route
|
|
@@ -942,10 +943,13 @@ def _cmd_cursor(args) -> int:
|
|
|
942
943
|
hooks_installed = is_mtrx_hooks_installed()
|
|
943
944
|
base_url = ensure_v1_url(state.get("auth", {}).get("matrx", {}).get("base_url"))
|
|
944
945
|
prev_path = config_dir() / "cursor-previous-settings.json"
|
|
946
|
+
legacy_proxy_prev_path = config_dir() / "cursor-proxy-previous-settings.json"
|
|
945
947
|
configured = prev_path.exists()
|
|
948
|
+
legacy_proxy_active = is_proxy_running() or legacy_proxy_prev_path.exists()
|
|
946
949
|
print("MTRX Cursor integration:")
|
|
947
950
|
print(f" mode: {'Base URL override (all models)' if configured else 'not configured'}")
|
|
948
951
|
print(f" hooks: {'active (sessionEnd, stop → telemetry)' if hooks_installed else 'not installed'}")
|
|
952
|
+
print(f" legacy MITM proxy: {'active' if legacy_proxy_active else 'not active'}")
|
|
949
953
|
if configured:
|
|
950
954
|
print(f" matrx: {base_url}")
|
|
951
955
|
return 0
|
|
@@ -1001,6 +1005,11 @@ def _cmd_cursor(args) -> int:
|
|
|
1001
1005
|
"Use `mtrx use cursor direct` to opt out.",
|
|
1002
1006
|
)
|
|
1003
1007
|
|
|
1008
|
+
# Ensure legacy MITM routing is fully torn down before enabling the
|
|
1009
|
+
# current Cursor base-URL override flow. Leaving both active causes
|
|
1010
|
+
# Cursor traffic to keep flowing through the old telemetry proxy.
|
|
1011
|
+
_restore_cursor_if_needed()
|
|
1012
|
+
|
|
1004
1013
|
# Configure Cursor's Override Base URL — sends chat to MTRX (any model: Claude, GPT-5, Gemini, etc.)
|
|
1005
1014
|
prev_path = config_dir() / "cursor-previous-settings.json"
|
|
1006
1015
|
previous = configure_cursor_for_proxy(matrx_proxy_url, mx_key)
|