mtrx-cli 0.1.15 → 0.1.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/matrx/__init__.py +1 -1
- package/src/matrx/cli/cursor_config.py +2 -2
- package/src/matrx/cli/cursor_hooks.py +31 -4
- package/src/matrx/cli/cursor_proxy.py +207 -10
- package/src/matrx/cli/launcher.py +104 -0
- package/src/matrx/cli/main.py +61 -24
- package/src/matrx/cli/state.py +14 -0
package/package.json
CHANGED
package/src/matrx/__init__.py
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
__version__ = "0.1.15"
|
|
1
|
+
__version__ = "0.1.16"
|
|
@@ -388,7 +388,7 @@ def restore_cursor_settings(
|
|
|
388
388
|
return ok
|
|
389
389
|
|
|
390
390
|
|
|
391
|
-
def print_manual_setup_instructions(proxy_url: str) -> None:
|
|
391
|
+
def print_manual_setup_instructions(proxy_url: str, *, api_key_hint: str = "your Matrx key (mx_...)") -> None:
|
|
392
392
|
"""Print step-by-step instructions for the user to configure Cursor manually."""
|
|
393
393
|
print()
|
|
394
394
|
print(" Could not auto-configure Cursor settings.")
|
|
@@ -397,7 +397,7 @@ def print_manual_setup_instructions(proxy_url: str) -> None:
|
|
|
397
397
|
print(" 1. Open Cursor Settings (Cmd+, or Ctrl+,)")
|
|
398
398
|
print(" 2. Go to Models")
|
|
399
399
|
print(" 3. In the OpenAI API Keys section:")
|
|
400
|
-
print(f" - API Key:
|
|
400
|
+
print(f" - API Key: {api_key_hint}")
|
|
401
401
|
print(f" - Override Base URL: {proxy_url}")
|
|
402
402
|
print(" - Toggle ON 'Override OpenAI Base URL'")
|
|
403
403
|
print()
|
|
@@ -62,25 +62,43 @@ from __future__ import annotations
|
|
|
62
62
|
import json
|
|
63
63
|
import os
|
|
64
64
|
import sys
|
|
65
|
+
from datetime import datetime, timezone
|
|
65
66
|
from pathlib import Path
|
|
66
67
|
|
|
67
68
|
def _config_path() -> Path:
|
|
68
69
|
config_dir = Path(os.environ.get("MTRX_CONFIG_DIR", Path.home() / ".config" / "mtrx"))
|
|
69
70
|
return config_dir / "cursor-hooks-config.json"
|
|
70
71
|
|
|
72
|
+
def _log_path() -> Path:
|
|
73
|
+
return _config_path().parent / "logs" / "cursor-hooks.log"
|
|
74
|
+
|
|
75
|
+
def _log(msg: str) -> None:
|
|
76
|
+
try:
|
|
77
|
+
log_file = _log_path()
|
|
78
|
+
log_file.parent.mkdir(parents=True, exist_ok=True)
|
|
79
|
+
ts = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
|
|
80
|
+
with log_file.open("a") as f:
|
|
81
|
+
f.write(f"{ts} {msg}\n")
|
|
82
|
+
except OSError:
|
|
83
|
+
pass
|
|
84
|
+
|
|
71
85
|
def main() -> None:
|
|
72
86
|
try:
|
|
73
87
|
payload = json.load(sys.stdin)
|
|
74
88
|
except (json.JSONDecodeError, EOFError):
|
|
75
89
|
print("{}")
|
|
76
90
|
return
|
|
91
|
+
event = payload.get("hook_event_name", "?")
|
|
92
|
+
conv = (payload.get("conversation_id") or payload.get("session_id") or "?")[:16]
|
|
77
93
|
cfg_path = _config_path()
|
|
78
94
|
if not cfg_path.exists():
|
|
95
|
+
_log(f"{event} conv={conv} skip=no_config")
|
|
79
96
|
print("{}")
|
|
80
97
|
return
|
|
81
98
|
try:
|
|
82
99
|
cfg = json.loads(cfg_path.read_text(encoding="utf-8"))
|
|
83
100
|
except (json.JSONDecodeError, OSError):
|
|
101
|
+
_log(f"{event} conv={conv} skip=config_error")
|
|
84
102
|
print("{}")
|
|
85
103
|
return
|
|
86
104
|
url = (cfg.get("matrx_base_url") or "").rstrip("/") + "/v1/telemetry/cursor/hooks"
|
|
@@ -97,9 +115,10 @@ def main() -> None:
|
|
|
97
115
|
method="POST",
|
|
98
116
|
)
|
|
99
117
|
with urllib.request.urlopen(req, timeout=10) as resp:
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
118
|
+
status = resp.status
|
|
119
|
+
_log(f"{event} conv={conv} ok status={status}")
|
|
120
|
+
except Exception as e:
|
|
121
|
+
_log(f"{event} conv={conv} err={type(e).__name__}")
|
|
103
122
|
print("{}")
|
|
104
123
|
|
|
105
124
|
if __name__ == "__main__":
|
|
@@ -118,7 +137,15 @@ def install_mtrx_hooks(matrx_key: str, matrx_base_url: str) -> bool:
|
|
|
118
137
|
return False
|
|
119
138
|
|
|
120
139
|
# Write hook script
|
|
121
|
-
|
|
140
|
+
try:
|
|
141
|
+
_HOOKS_DIR.mkdir(parents=True, exist_ok=True)
|
|
142
|
+
except PermissionError as exc:
|
|
143
|
+
logger.debug("cursor_hooks: permission denied: %s", exc)
|
|
144
|
+
raise ValueError(
|
|
145
|
+
"Cannot create ~/.cursor/hooks — directory may be owned by root. "
|
|
146
|
+
"Run: sudo chown -R $(whoami) ~/.cursor"
|
|
147
|
+
) from exc
|
|
148
|
+
|
|
122
149
|
script_path = _HOOKS_DIR / "mtrx-telemetry.py"
|
|
123
150
|
try:
|
|
124
151
|
script_path.write_text(_hook_script_content(), encoding="utf-8")
|
|
@@ -31,6 +31,7 @@ from typing import Any
|
|
|
31
31
|
import httpx
|
|
32
32
|
|
|
33
33
|
from matrx.cli.cursor_ca import CertCache, load_ca
|
|
34
|
+
from matrx.cli.cursor_reroute import is_ai_path, try_reroute_to_matrx
|
|
34
35
|
|
|
35
36
|
logger = logging.getLogger(__name__)
|
|
36
37
|
|
|
@@ -259,7 +260,11 @@ class MITMProxy:
|
|
|
259
260
|
up_writer: asyncio.StreamWriter,
|
|
260
261
|
hostname: str,
|
|
261
262
|
) -> None:
|
|
262
|
-
"""Forward HTTP/1.1 request-response pairs, logging each to telemetry.
|
|
263
|
+
"""Forward HTTP/1.1 request-response pairs, logging each to telemetry.
|
|
264
|
+
|
|
265
|
+
For AI paths (RunSSE, StreamCpp, etc.), attempts to reroute through MTRX
|
|
266
|
+
for live injection. If reroute succeeds, responds from MTRX; else forwards.
|
|
267
|
+
"""
|
|
263
268
|
while True:
|
|
264
269
|
req_line = await client_reader.readline()
|
|
265
270
|
if not req_line:
|
|
@@ -272,15 +277,62 @@ class MITMProxy:
|
|
|
272
277
|
method = parts[0] if parts else "?"
|
|
273
278
|
path = parts[1] if len(parts) > 1 else "/"
|
|
274
279
|
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
|
|
279
|
-
|
|
280
|
-
|
|
281
|
-
|
|
282
|
-
|
|
283
|
-
|
|
280
|
+
req_body_size = 0
|
|
281
|
+
# For AI paths: buffer request and try rerouting through MTRX (live injection)
|
|
282
|
+
if method == "POST" and is_ai_path(path):
|
|
283
|
+
req_headers, req_cl, req_chunked = await self._read_headers_only(
|
|
284
|
+
client_reader
|
|
285
|
+
)
|
|
286
|
+
req_body = await self._read_body_to_bytes(
|
|
287
|
+
client_reader, req_cl, req_chunked
|
|
288
|
+
)
|
|
289
|
+
req_body_size = len(req_body)
|
|
290
|
+
result = await try_reroute_to_matrx(
|
|
291
|
+
path=path,
|
|
292
|
+
method=method,
|
|
293
|
+
req_headers=req_headers,
|
|
294
|
+
req_body=req_body,
|
|
295
|
+
matrx_base_url=self.matrx_base_url,
|
|
296
|
+
matrx_key=self.matrx_key,
|
|
297
|
+
session_id=str(uuid.uuid4()),
|
|
298
|
+
)
|
|
299
|
+
if result is not None:
|
|
300
|
+
success, resp_headers, resp_body, is_streaming = result
|
|
301
|
+
if success and resp_body is not None:
|
|
302
|
+
self._request_count += 1
|
|
303
|
+
self._write_http_response(
|
|
304
|
+
client_writer, 200, resp_headers, resp_body
|
|
305
|
+
)
|
|
306
|
+
asyncio.create_task(
|
|
307
|
+
self._ship_telemetry(
|
|
308
|
+
hostname=hostname,
|
|
309
|
+
method=method,
|
|
310
|
+
path=path,
|
|
311
|
+
status_code=200,
|
|
312
|
+
req_body_size=len(req_body),
|
|
313
|
+
resp_body_size=len(resp_body),
|
|
314
|
+
elapsed_ms=0,
|
|
315
|
+
content_type=resp_headers.get("content-type", ""),
|
|
316
|
+
is_streaming=is_streaming,
|
|
317
|
+
)
|
|
318
|
+
)
|
|
319
|
+
continue
|
|
320
|
+
# Reroute returned but failed — fall through to forward
|
|
321
|
+
# Reroute not implemented or failed — forward to upstream
|
|
322
|
+
up_writer.write(req_line)
|
|
323
|
+
await self._write_headers(up_writer, req_headers)
|
|
324
|
+
up_writer.write(req_body)
|
|
325
|
+
await up_writer.drain()
|
|
326
|
+
else:
|
|
327
|
+
up_writer.write(req_line)
|
|
328
|
+
req_headers, req_cl, req_chunked = await self._forward_headers(
|
|
329
|
+
client_reader, up_writer
|
|
330
|
+
)
|
|
331
|
+
req_body_size = await self._forward_body(
|
|
332
|
+
client_reader, up_writer, req_cl, req_chunked
|
|
333
|
+
)
|
|
334
|
+
if req_body_size == 0 and req_cl > 0:
|
|
335
|
+
req_body_size = req_cl
|
|
284
336
|
|
|
285
337
|
started = time.monotonic()
|
|
286
338
|
|
|
@@ -336,6 +388,76 @@ class MITMProxy:
|
|
|
336
388
|
if "close" in conn_h:
|
|
337
389
|
break
|
|
338
390
|
|
|
391
|
+
async def _read_headers_only(
|
|
392
|
+
self, reader: asyncio.StreamReader
|
|
393
|
+
) -> tuple[dict[str, str], int, bool]:
|
|
394
|
+
"""Read headers without writing. Returns (headers_dict, content_length, is_chunked)."""
|
|
395
|
+
headers: dict[str, str] = {}
|
|
396
|
+
content_length = -1
|
|
397
|
+
chunked = False
|
|
398
|
+
while True:
|
|
399
|
+
line = await reader.readline()
|
|
400
|
+
decoded = line.decode("utf-8", errors="replace").strip()
|
|
401
|
+
if not decoded:
|
|
402
|
+
break
|
|
403
|
+
if ":" in decoded:
|
|
404
|
+
k, _, v = decoded.partition(":")
|
|
405
|
+
k_lower = k.strip().lower()
|
|
406
|
+
v_stripped = v.strip()
|
|
407
|
+
headers[k_lower] = v_stripped
|
|
408
|
+
if k_lower == "content-length":
|
|
409
|
+
content_length = int(v_stripped)
|
|
410
|
+
elif k_lower == "transfer-encoding" and "chunked" in v_stripped.lower():
|
|
411
|
+
chunked = True
|
|
412
|
+
return headers, content_length, chunked
|
|
413
|
+
|
|
414
|
+
async def _read_body_to_bytes(
|
|
415
|
+
self,
|
|
416
|
+
reader: asyncio.StreamReader,
|
|
417
|
+
content_length: int,
|
|
418
|
+
chunked: bool,
|
|
419
|
+
) -> bytes:
|
|
420
|
+
"""Read body into bytes (no writer)."""
|
|
421
|
+
if content_length > 0:
|
|
422
|
+
return await reader.read(content_length)
|
|
423
|
+
if chunked:
|
|
424
|
+
parts: list[bytes] = []
|
|
425
|
+
while True:
|
|
426
|
+
size_line = await reader.readline()
|
|
427
|
+
size_str = size_line.decode("utf-8", errors="replace").strip()
|
|
428
|
+
try:
|
|
429
|
+
chunk_size = int(size_str.split(";")[0], 16)
|
|
430
|
+
except ValueError:
|
|
431
|
+
break
|
|
432
|
+
if chunk_size == 0:
|
|
433
|
+
await reader.readline() # trailer
|
|
434
|
+
break
|
|
435
|
+
parts.append(await reader.read(chunk_size))
|
|
436
|
+
await reader.readline() # crlf
|
|
437
|
+
return b"".join(parts)
|
|
438
|
+
return b""
|
|
439
|
+
|
|
440
|
+
def _write_headers(
|
|
441
|
+
self, writer: asyncio.StreamWriter, headers: dict[str, str]
|
|
442
|
+
) -> None:
|
|
443
|
+
"""Write headers as HTTP lines (caller must drain)."""
|
|
444
|
+
for k, v in headers.items():
|
|
445
|
+
writer.write(f"{k}: {v}\r\n".encode())
|
|
446
|
+
writer.write(b"\r\n")
|
|
447
|
+
|
|
448
|
+
def _write_http_response(
|
|
449
|
+
self,
|
|
450
|
+
writer: asyncio.StreamWriter,
|
|
451
|
+
status: int,
|
|
452
|
+
resp_headers: dict[str, str],
|
|
453
|
+
resp_body: bytes,
|
|
454
|
+
) -> None:
|
|
455
|
+
"""Write a complete HTTP response."""
|
|
456
|
+
writer.write(f"HTTP/1.1 {status} OK\r\n".encode())
|
|
457
|
+
self._write_headers(writer, resp_headers)
|
|
458
|
+
writer.write(resp_body)
|
|
459
|
+
# Caller should drain
|
|
460
|
+
|
|
339
461
|
async def _forward_headers(
|
|
340
462
|
self,
|
|
341
463
|
reader: asyncio.StreamReader,
|
|
@@ -434,6 +556,81 @@ class MITMProxy:
|
|
|
434
556
|
await writer.drain()
|
|
435
557
|
return total
|
|
436
558
|
|
|
559
|
+
async def _read_headers_only(
|
|
560
|
+
self, reader: asyncio.StreamReader
|
|
561
|
+
) -> tuple[dict[str, str], int, bool]:
|
|
562
|
+
"""Read headers from reader without writing. Returns (headers, content_length, chunked)."""
|
|
563
|
+
headers: dict[str, str] = {}
|
|
564
|
+
content_length = -1
|
|
565
|
+
chunked = False
|
|
566
|
+
while True:
|
|
567
|
+
line = await reader.readline()
|
|
568
|
+
decoded = line.decode("utf-8", errors="replace").strip()
|
|
569
|
+
if not decoded:
|
|
570
|
+
break
|
|
571
|
+
if ":" in decoded:
|
|
572
|
+
k, _, v = decoded.partition(":")
|
|
573
|
+
k_lower = k.strip().lower()
|
|
574
|
+
v_stripped = v.strip()
|
|
575
|
+
headers[k_lower] = v_stripped
|
|
576
|
+
if k_lower == "content-length":
|
|
577
|
+
content_length = int(v_stripped)
|
|
578
|
+
elif k_lower == "transfer-encoding" and "chunked" in v_stripped.lower():
|
|
579
|
+
chunked = True
|
|
580
|
+
return headers, content_length, chunked
|
|
581
|
+
|
|
582
|
+
async def _read_body_to_bytes(
|
|
583
|
+
self,
|
|
584
|
+
reader: asyncio.StreamReader,
|
|
585
|
+
content_length: int,
|
|
586
|
+
chunked: bool,
|
|
587
|
+
) -> bytes:
|
|
588
|
+
"""Read body into bytes."""
|
|
589
|
+
if content_length > 0:
|
|
590
|
+
return await reader.readexactly(content_length)
|
|
591
|
+
if chunked:
|
|
592
|
+
parts: list[bytes] = []
|
|
593
|
+
while True:
|
|
594
|
+
size_line = await reader.readline()
|
|
595
|
+
size_str = size_line.decode("utf-8", errors="replace").strip()
|
|
596
|
+
try:
|
|
597
|
+
chunk_size = int(size_str.split(";")[0], 16)
|
|
598
|
+
except ValueError:
|
|
599
|
+
break
|
|
600
|
+
if chunk_size == 0:
|
|
601
|
+
await reader.readline() # trailer
|
|
602
|
+
break
|
|
603
|
+
parts.append(await reader.readexactly(chunk_size))
|
|
604
|
+
await reader.readline() # crlf
|
|
605
|
+
return b"".join(parts)
|
|
606
|
+
return b""
|
|
607
|
+
|
|
608
|
+
def _write_headers(
|
|
609
|
+
self, writer: asyncio.StreamWriter, headers: dict[str, str]
|
|
610
|
+
) -> None:
|
|
611
|
+
"""Write HTTP headers to writer."""
|
|
612
|
+
for k, v in headers.items():
|
|
613
|
+
# Capitalize header key (e.g. content-type -> Content-Type)
|
|
614
|
+
name = "-".join(p.capitalize() for p in k.split("-"))
|
|
615
|
+
writer.write(f"{name}: {v}\r\n".encode())
|
|
616
|
+
writer.write(b"\r\n")
|
|
617
|
+
|
|
618
|
+
def _write_http_response(
|
|
619
|
+
self,
|
|
620
|
+
writer: asyncio.StreamWriter,
|
|
621
|
+
status: int,
|
|
622
|
+
headers: dict[str, str],
|
|
623
|
+
body: bytes,
|
|
624
|
+
) -> None:
|
|
625
|
+
"""Write a complete HTTP response."""
|
|
626
|
+
writer.write(f"HTTP/1.1 {status} OK\r\n".encode())
|
|
627
|
+
if "content-length" not in {k.lower() for k in headers} and body:
|
|
628
|
+
headers = dict(headers)
|
|
629
|
+
headers["Content-Length"] = str(len(body))
|
|
630
|
+
self._write_headers(writer, headers)
|
|
631
|
+
writer.write(body)
|
|
632
|
+
# Note: drain is caller's responsibility
|
|
633
|
+
|
|
437
634
|
# -----------------------------------------------------------------
|
|
438
635
|
# Raw bidirectional pipe (for opaque tunnels)
|
|
439
636
|
# -----------------------------------------------------------------
|
|
@@ -85,6 +85,8 @@ def _runtime_agent_basename(tool: str) -> tuple[str, str, list[str], str]:
|
|
|
85
85
|
return "codex-cli", "Codex CLI", ["cli", "codex"], "codex"
|
|
86
86
|
if tool == "claude":
|
|
87
87
|
return "claude-cli", "Claude CLI", ["claude", "cli"], "claude_code"
|
|
88
|
+
if tool == "gemini":
|
|
89
|
+
return "gemini-cli", "Gemini CLI", ["gemini", "cli"], "gemini_code"
|
|
88
90
|
normalized = f"{tool}-cli"
|
|
89
91
|
return normalized, f"{tool.capitalize()} CLI", ["cli", tool], tool
|
|
90
92
|
|
|
@@ -117,6 +119,8 @@ def find_executable(tool: str) -> str | None:
|
|
|
117
119
|
candidates.extend(["claude.exe", "claude.cmd"])
|
|
118
120
|
if tool == "codex":
|
|
119
121
|
candidates.extend(["codex.exe", "codex.cmd"])
|
|
122
|
+
if tool == "gemini":
|
|
123
|
+
candidates.extend(["gemini.exe", "gemini.cmd"])
|
|
120
124
|
for candidate in candidates:
|
|
121
125
|
found = shutil.which(candidate)
|
|
122
126
|
if found:
|
|
@@ -163,6 +167,13 @@ def build_launch_plan(
|
|
|
163
167
|
env,
|
|
164
168
|
orchestration=orchestration,
|
|
165
169
|
)
|
|
170
|
+
elif tool == "gemini":
|
|
171
|
+
env, auth_source = _build_gemini_env(
|
|
172
|
+
state,
|
|
173
|
+
route,
|
|
174
|
+
env,
|
|
175
|
+
orchestration=orchestration,
|
|
176
|
+
)
|
|
166
177
|
else:
|
|
167
178
|
raise ValueError(f"Unsupported tool: {tool}")
|
|
168
179
|
|
|
@@ -229,6 +240,8 @@ def validate_launch_plan(plan: LaunchPlan, state: dict) -> None:
|
|
|
229
240
|
_validate_claude_launch_plan(plan, state)
|
|
230
241
|
if plan.tool == "codex":
|
|
231
242
|
_validate_codex_launch_plan(plan, state)
|
|
243
|
+
if plan.tool == "gemini":
|
|
244
|
+
_validate_gemini_launch_plan(plan, state)
|
|
232
245
|
|
|
233
246
|
|
|
234
247
|
def claude_credentials_path() -> Path:
|
|
@@ -519,6 +532,59 @@ def _build_codex_env(
|
|
|
519
532
|
return env, "existing_codex_auth", passthrough_args
|
|
520
533
|
|
|
521
534
|
|
|
535
|
+
def _build_gemini_env(
|
|
536
|
+
state: dict,
|
|
537
|
+
route: str,
|
|
538
|
+
env: dict[str, str],
|
|
539
|
+
*,
|
|
540
|
+
orchestration: dict | None = None,
|
|
541
|
+
) -> tuple[dict[str, str], str]:
|
|
542
|
+
matrx = state["auth"]["matrx"]
|
|
543
|
+
# Assuming we might store Gemini-specific keys in future, or use OpenAI key fallback
|
|
544
|
+
# For now, we don't have a specific 'gemini' auth section in state.py, but we can assume
|
|
545
|
+
# if direct route, we use env var.
|
|
546
|
+
proxy_base = ensure_v1_url(matrx.get("base_url"))
|
|
547
|
+
mx_key, matrx_auth_source = _resolve_matrx_route_key(state, env)
|
|
548
|
+
|
|
549
|
+
# Check for direct key in env or potentially saved elsewhere
|
|
550
|
+
direct_key = (env.get("GOOGLE_API_KEY") or "").strip()
|
|
551
|
+
|
|
552
|
+
if route == "matrx":
|
|
553
|
+
if not mx_key:
|
|
554
|
+
raise ValueError("No Matrx key available. Run: mtrx login matrx --key mx_... or set MTRX_KEY")
|
|
555
|
+
|
|
556
|
+
# Clear existing Gemini config to force proxy usage
|
|
557
|
+
env.pop("MTRX_KEY", None)
|
|
558
|
+
|
|
559
|
+
# Set Proxy Config
|
|
560
|
+
env["GOOGLE_GEMINI_BASE_URL"] = proxy_base
|
|
561
|
+
env["GEMINI_API_ENDPOINT"] = proxy_base
|
|
562
|
+
env["GOOGLE_API_KEY"] = mx_key
|
|
563
|
+
|
|
564
|
+
# Matrx-specific headers (if supported by the tool, or for our own tracking)
|
|
565
|
+
# Note: Standard Gemini CLI might not support custom headers via env vars easily.
|
|
566
|
+
# We rely on the Base URL routing to Matrx proxy which handles the logic.
|
|
567
|
+
|
|
568
|
+
return env, matrx_auth_source
|
|
569
|
+
|
|
570
|
+
# Direct route: clear any matrx-managed env vars
|
|
571
|
+
env.pop("MTRX_KEY", None)
|
|
572
|
+
|
|
573
|
+
# Clear proxy overrides
|
|
574
|
+
_clear_if_matches(env, "GOOGLE_GEMINI_BASE_URL", proxy_base)
|
|
575
|
+
_clear_if_matches(env, "GEMINI_API_ENDPOINT", proxy_base)
|
|
576
|
+
|
|
577
|
+
# Clear key if it was the Matrx key
|
|
578
|
+
current_key = (env.get("GOOGLE_API_KEY") or "").strip()
|
|
579
|
+
if current_key == mx_key or current_key.startswith("mx_"):
|
|
580
|
+
env.pop("GOOGLE_API_KEY", None)
|
|
581
|
+
|
|
582
|
+
if env.get("GOOGLE_API_KEY"):
|
|
583
|
+
return env, "existing_google_env"
|
|
584
|
+
|
|
585
|
+
return env, "missing_auth"
|
|
586
|
+
|
|
587
|
+
|
|
522
588
|
def _build_claude_env(
|
|
523
589
|
state: dict,
|
|
524
590
|
route: str,
|
|
@@ -765,6 +831,31 @@ def _validate_claude_launch_plan(plan: LaunchPlan, state: dict) -> None:
|
|
|
765
831
|
raise ValueError("Claude Matrx route should not set ANTHROPIC_API_KEY")
|
|
766
832
|
|
|
767
833
|
|
|
834
|
+
def _validate_gemini_launch_plan(plan: LaunchPlan, state: dict) -> None:
|
|
835
|
+
if plan.route != "matrx":
|
|
836
|
+
return
|
|
837
|
+
|
|
838
|
+
expected_base_url = ensure_v1_url(state.get("auth", {}).get("matrx", {}).get("base_url"))
|
|
839
|
+
|
|
840
|
+
base_url = (plan.env.get("GOOGLE_GEMINI_BASE_URL") or "").strip()
|
|
841
|
+
if not base_url:
|
|
842
|
+
# Try the other one
|
|
843
|
+
base_url = (plan.env.get("GEMINI_API_ENDPOINT") or "").strip()
|
|
844
|
+
|
|
845
|
+
if not base_url:
|
|
846
|
+
raise ValueError("Gemini Matrx route is missing GOOGLE_GEMINI_BASE_URL or GEMINI_API_ENDPOINT")
|
|
847
|
+
|
|
848
|
+
if base_url != expected_base_url:
|
|
849
|
+
raise ValueError(
|
|
850
|
+
"Gemini Matrx route must use the Matrx /v1 base URL. "
|
|
851
|
+
f"Got: {base_url}"
|
|
852
|
+
)
|
|
853
|
+
|
|
854
|
+
mx_key = (plan.env.get("GOOGLE_API_KEY") or "").strip()
|
|
855
|
+
if not mx_key.startswith("mx_"):
|
|
856
|
+
raise ValueError("Gemini Matrx route is missing a valid GOOGLE_API_KEY (should be mx_...)")
|
|
857
|
+
|
|
858
|
+
|
|
768
859
|
def _validate_codex_launch_plan(plan: LaunchPlan, state: dict) -> None:
|
|
769
860
|
if plan.route != "matrx":
|
|
770
861
|
return
|
|
@@ -825,6 +916,16 @@ def describe_launch_plan(plan: LaunchPlan, state: dict) -> list[str]:
|
|
|
825
916
|
" persistent_route: disabled",
|
|
826
917
|
]
|
|
827
918
|
|
|
919
|
+
if plan.tool == "gemini":
|
|
920
|
+
base_url = ensure_v1_url(state.get("auth", {}).get("matrx", {}).get("base_url"))
|
|
921
|
+
return [
|
|
922
|
+
"Launching gemini via Matrx",
|
|
923
|
+
f" base_url: {base_url}",
|
|
924
|
+
f" auth_source: {plan.auth_source}",
|
|
925
|
+
" runtime_route: env injection",
|
|
926
|
+
" persistent_route: disabled",
|
|
927
|
+
]
|
|
928
|
+
|
|
828
929
|
return []
|
|
829
930
|
|
|
830
931
|
|
|
@@ -881,6 +982,9 @@ def _sync_tool_route_config(state: dict, *, tool: str, route: str) -> bool:
|
|
|
881
982
|
return _cleanup_claude_managed_config(state)
|
|
882
983
|
if tool == "codex":
|
|
883
984
|
return _sync_codex_route_config(state, route=route)
|
|
985
|
+
if tool == "gemini":
|
|
986
|
+
# Gemini currently relies on env vars, no config file sync implemented yet.
|
|
987
|
+
return False
|
|
884
988
|
return False
|
|
885
989
|
|
|
886
990
|
|
package/src/matrx/cli/main.py
CHANGED
|
@@ -74,7 +74,7 @@ def main(argv: list[str] | None = None) -> int:
|
|
|
74
74
|
return _cmd_doctor()
|
|
75
75
|
if args.command == "personal":
|
|
76
76
|
return _cmd_personal(args)
|
|
77
|
-
if args.command in {"codex", "claude"}:
|
|
77
|
+
if args.command in {"codex", "claude", "gemini"}:
|
|
78
78
|
return _cmd_launch(args.command, args.route, remainder)
|
|
79
79
|
if args.command == "cursor":
|
|
80
80
|
return _cmd_cursor(args)
|
|
@@ -97,7 +97,7 @@ def _build_parser() -> argparse.ArgumentParser:
|
|
|
97
97
|
login.add_argument("--import", dest="do_import", action="store_true")
|
|
98
98
|
|
|
99
99
|
use = subparsers.add_parser("use")
|
|
100
|
-
use.add_argument("tool", choices=["codex", "claude", "cursor"])
|
|
100
|
+
use.add_argument("tool", choices=["codex", "claude", "cursor", "gemini"])
|
|
101
101
|
use.add_argument("route", choices=["direct", "matrx"])
|
|
102
102
|
|
|
103
103
|
subparsers.add_parser("help")
|
|
@@ -116,6 +116,9 @@ def _build_parser() -> argparse.ArgumentParser:
|
|
|
116
116
|
claude = subparsers.add_parser("claude")
|
|
117
117
|
claude.add_argument("--route", choices=["direct", "matrx"])
|
|
118
118
|
|
|
119
|
+
gemini = subparsers.add_parser("gemini")
|
|
120
|
+
gemini.add_argument("--route", choices=["direct", "matrx"])
|
|
121
|
+
|
|
119
122
|
cursor = subparsers.add_parser("cursor")
|
|
120
123
|
cursor.add_argument("--route", choices=["direct", "matrx"])
|
|
121
124
|
cursor.add_argument("--status", action="store_true", help="Check proxy status")
|
|
@@ -553,6 +556,7 @@ def _cmd_status() -> int:
|
|
|
553
556
|
print("Defaults:")
|
|
554
557
|
print(f" codex: {_default_route_label(configured_route(state, 'codex'))}")
|
|
555
558
|
print(f" claude: {_default_route_label(configured_route(state, 'claude'))}")
|
|
559
|
+
print(f" gemini: {_default_route_label(configured_route(state, 'gemini'))}")
|
|
556
560
|
print(f" cursor: {_default_route_label(configured_route(state, 'cursor'))}")
|
|
557
561
|
print("Auth:")
|
|
558
562
|
print(
|
|
@@ -567,7 +571,7 @@ def _cmd_status() -> int:
|
|
|
567
571
|
local = "present" if claude_oauth_available() else "missing"
|
|
568
572
|
print(f" claude-code oauth: {imported}, local credentials: {local}")
|
|
569
573
|
print("Tool config:")
|
|
570
|
-
for tool in ("codex", "claude"):
|
|
574
|
+
for tool in ("codex", "claude", "gemini"):
|
|
571
575
|
config_status = get_tool_config_status(state, tool)
|
|
572
576
|
route = configured_route(state, tool)
|
|
573
577
|
if tool == "codex" and config_status["verified"] and not config_status["configured"]:
|
|
@@ -589,6 +593,7 @@ def _cmd_status() -> int:
|
|
|
589
593
|
print("Executables:")
|
|
590
594
|
print(f" codex: {find_executable('codex') or 'not found'}")
|
|
591
595
|
print(f" claude: {find_executable('claude') or 'not found'}")
|
|
596
|
+
print(f" gemini: {find_executable('gemini') or 'not found'}")
|
|
592
597
|
profiles = _legacy_shell_proxy_profiles()
|
|
593
598
|
active_env = _active_claude_proxy_env()
|
|
594
599
|
if profiles or active_env:
|
|
@@ -703,7 +708,7 @@ def _cmd_doctor() -> int:
|
|
|
703
708
|
workspace_binding = get_workspace_binding(state, cwd=os.environ.get("PWD") or os.getcwd()) or {}
|
|
704
709
|
workspace_matrx_key = (workspace_binding.get("matrx_key") or "").strip()
|
|
705
710
|
|
|
706
|
-
for tool in ("codex", "claude"):
|
|
711
|
+
for tool in ("codex", "claude", "gemini"):
|
|
707
712
|
found = find_executable(tool)
|
|
708
713
|
if found:
|
|
709
714
|
print(f"[ok] {tool} executable: {found}")
|
|
@@ -768,7 +773,7 @@ def _cmd_doctor() -> int:
|
|
|
768
773
|
"[warn] Cleanup: `unfunction claude 2>/dev/null || true` and `unset ANTHROPIC_BASE_URL ANTHROPIC_CUSTOM_HEADERS ANTHROPIC_API_KEY ANTHROPIC_AUTH_TOKEN MATRX_ACTIVE_ROUTE MATRX_BASE_URL MATRX_API_KEY MATRX_CLAUDE_MODE MATRX_FALLBACK_ENABLED MATRX_PROXY_TIMEOUT_SEC ANTHROPIC_DIRECT_BASE_URL`",
|
|
769
774
|
)
|
|
770
775
|
|
|
771
|
-
for tool in ("codex", "claude", "cursor"):
|
|
776
|
+
for tool in ("codex", "claude", "gemini", "cursor"):
|
|
772
777
|
route = configured_route(state, tool)
|
|
773
778
|
if route == "matrx" and not _has_matrx_login(state, env=os.environ):
|
|
774
779
|
print(f"[fail] Default {tool} route is matrx but no Matrx key is saved")
|
|
@@ -842,22 +847,28 @@ def _cmd_launch(tool: str, route: str | None, remainder: list[str]) -> int:
|
|
|
842
847
|
|
|
843
848
|
def _cmd_cursor(args) -> int:
|
|
844
849
|
from matrx.cli.cursor_hooks import install_mtrx_hooks, is_mtrx_hooks_installed
|
|
850
|
+
from matrx.cli.cursor_launcher import find_cursor_executable
|
|
845
851
|
|
|
846
852
|
route = args.route
|
|
847
853
|
|
|
848
|
-
# --status: report hooks status
|
|
854
|
+
# --status: report Base URL override + hooks status
|
|
849
855
|
if args.status:
|
|
850
|
-
|
|
851
|
-
|
|
852
|
-
|
|
853
|
-
|
|
854
|
-
|
|
856
|
+
state = load_state()
|
|
857
|
+
hooks_installed = is_mtrx_hooks_installed()
|
|
858
|
+
base_url = ensure_v1_url(state.get("auth", {}).get("matrx", {}).get("base_url"))
|
|
859
|
+
prev_path = config_dir() / "cursor-previous-settings.json"
|
|
860
|
+
configured = prev_path.exists()
|
|
861
|
+
print("MTRX Cursor integration:")
|
|
862
|
+
print(f" mode: {'Base URL override (all models)' if configured else 'not configured'}")
|
|
863
|
+
print(f" hooks: {'active (sessionEnd, stop → telemetry)' if hooks_installed else 'not installed'}")
|
|
864
|
+
if configured:
|
|
865
|
+
print(f" matrx: {base_url}")
|
|
855
866
|
return 0
|
|
856
867
|
|
|
857
868
|
# --stop: tear down
|
|
858
869
|
if args.stop:
|
|
859
870
|
_restore_cursor_if_needed()
|
|
860
|
-
print("Cursor route set to direct — MTRX
|
|
871
|
+
print("Cursor route set to direct — MTRX disabled.")
|
|
861
872
|
return 0
|
|
862
873
|
|
|
863
874
|
state = load_state()
|
|
@@ -866,12 +877,12 @@ def _cmd_cursor(args) -> int:
|
|
|
866
877
|
|
|
867
878
|
if effective_route == "direct":
|
|
868
879
|
_restore_cursor_if_needed()
|
|
869
|
-
print("Cursor route set to direct — MTRX
|
|
880
|
+
print("Cursor route set to direct — MTRX proxy disabled.")
|
|
870
881
|
if cursor_is_running():
|
|
871
882
|
print(" Restart Cursor for settings to take effect.")
|
|
872
883
|
return 0
|
|
873
884
|
|
|
874
|
-
# --- matrx route:
|
|
885
|
+
# --- matrx route: Base URL override (works with any Cursor model: Claude, GPT, Gemini, etc.) ---
|
|
875
886
|
|
|
876
887
|
try:
|
|
877
888
|
state, login_changed = _complete_matrx_login(state)
|
|
@@ -895,6 +906,7 @@ def _cmd_cursor(args) -> int:
|
|
|
895
906
|
matrx_base_url = ensure_root_url(
|
|
896
907
|
state.get("auth", {}).get("matrx", {}).get("base_url")
|
|
897
908
|
)
|
|
909
|
+
matrx_proxy_url = ensure_v1_url(matrx_base_url)
|
|
898
910
|
|
|
899
911
|
if initialized or login_changed or promoted:
|
|
900
912
|
save_state(state)
|
|
@@ -904,17 +916,42 @@ def _cmd_cursor(args) -> int:
|
|
|
904
916
|
"Use `mtrx use cursor direct` to opt out.",
|
|
905
917
|
)
|
|
906
918
|
|
|
907
|
-
|
|
908
|
-
|
|
909
|
-
|
|
910
|
-
|
|
911
|
-
|
|
912
|
-
print(" Works with Cursor from Dock, Spotlight, or CLI — no special launch needed.")
|
|
913
|
-
print(" Check status: mtrx cursor --status")
|
|
914
|
-
print(" To disable: mtrx use cursor direct")
|
|
919
|
+
# Configure Cursor's Override Base URL — sends chat to MTRX (any model: Claude, GPT-5, Gemini, etc.)
|
|
920
|
+
prev_path = config_dir() / "cursor-previous-settings.json"
|
|
921
|
+
previous = configure_cursor_for_proxy(matrx_proxy_url, mx_key)
|
|
922
|
+
if previous is not None:
|
|
923
|
+
prev_path.write_text(__import__("json").dumps(previous), encoding="utf-8")
|
|
915
924
|
else:
|
|
916
|
-
print(
|
|
917
|
-
|
|
925
|
+
print(
|
|
926
|
+
"[warn] Could not write Cursor state.vscdb. Try manual setup:",
|
|
927
|
+
file=sys.stderr,
|
|
928
|
+
)
|
|
929
|
+
print_manual_setup_instructions(matrx_proxy_url, api_key_hint="your Matrx key (mx_...)")
|
|
930
|
+
|
|
931
|
+
# Hooks for session telemetry
|
|
932
|
+
install_mtrx_hooks(mx_key, matrx_base_url)
|
|
933
|
+
|
|
934
|
+
# Optional: launch Cursor
|
|
935
|
+
if getattr(args, "launch", False):
|
|
936
|
+
exe = find_cursor_executable()
|
|
937
|
+
if exe:
|
|
938
|
+
import subprocess
|
|
939
|
+
try:
|
|
940
|
+
subprocess.Popen([exe], start_new_session=True)
|
|
941
|
+
print("Launched Cursor.")
|
|
942
|
+
except Exception:
|
|
943
|
+
print("[warn] Could not launch Cursor.", file=sys.stderr)
|
|
944
|
+
else:
|
|
945
|
+
print("[warn] Could not find Cursor executable.", file=sys.stderr)
|
|
946
|
+
|
|
947
|
+
print()
|
|
948
|
+
print("Cursor configured for MTRX — chat routes through Matrx (all models).")
|
|
949
|
+
print(f" base URL: {matrx_proxy_url}")
|
|
950
|
+
print()
|
|
951
|
+
print(" Works with any Cursor Pro model: Claude, GPT-5, Gemini, and more.")
|
|
952
|
+
print(" Restart Cursor for settings to take effect.")
|
|
953
|
+
print(" Check status: mtrx cursor --status")
|
|
954
|
+
print(" To disable: mtrx use cursor direct")
|
|
918
955
|
return 0
|
|
919
956
|
|
|
920
957
|
|
package/src/matrx/cli/state.py
CHANGED
|
@@ -46,6 +46,7 @@ DEFAULT_STATE: dict = {
|
|
|
46
46
|
"defaults": {
|
|
47
47
|
"codex": None,
|
|
48
48
|
"claude": None,
|
|
49
|
+
"gemini": None,
|
|
49
50
|
"cursor": None,
|
|
50
51
|
},
|
|
51
52
|
"workspaces": {
|
|
@@ -93,6 +94,19 @@ DEFAULT_STATE: dict = {
|
|
|
93
94
|
"previous_matrx_block": None,
|
|
94
95
|
"previous_values": {},
|
|
95
96
|
},
|
|
97
|
+
"gemini": {
|
|
98
|
+
"configured": False,
|
|
99
|
+
"verified": False,
|
|
100
|
+
"config_path": None,
|
|
101
|
+
"backup_path": None,
|
|
102
|
+
"original_backup_path": None,
|
|
103
|
+
"config_fingerprint": None,
|
|
104
|
+
"matrx_key_fingerprint": None,
|
|
105
|
+
"last_verified_at": None,
|
|
106
|
+
"previous_model_provider": None,
|
|
107
|
+
"previous_matrx_block": None,
|
|
108
|
+
"previous_values": {},
|
|
109
|
+
},
|
|
96
110
|
},
|
|
97
111
|
},
|
|
98
112
|
}
|