mtrx-cli 0.1.16 → 0.1.17
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "mtrx-cli",
|
|
3
|
-
"version": "0.1.
|
|
3
|
+
"version": "0.1.17",
|
|
4
4
|
"description": "MATRX CLI for routing Codex, Claude, and Cursor through Matrx",
|
|
5
5
|
"homepage": "https://mtrx.so",
|
|
6
6
|
"repository": {
|
|
@@ -31,6 +31,7 @@
|
|
|
31
31
|
"src/matrx/cli/cursor_daemon.py",
|
|
32
32
|
"src/matrx/cli/cursor_launcher.py",
|
|
33
33
|
"src/matrx/cli/cursor_proxy.py",
|
|
34
|
+
"src/matrx/cli/cursor_reroute.py",
|
|
34
35
|
"src/matrx/cli/cursor_service.py",
|
|
35
36
|
"src/matrx/cli/launcher.py",
|
|
36
37
|
"src/matrx/cli/main.py",
|
|
@@ -31,7 +31,16 @@ from typing import Any
|
|
|
31
31
|
import httpx
|
|
32
32
|
|
|
33
33
|
from matrx.cli.cursor_ca import CertCache, load_ca
|
|
34
|
-
|
|
34
|
+
|
|
35
|
+
# Live-injection reroute hooks. The real implementations live in
# matrx.cli.cursor_reroute; some distributions omit that module (e.g. the
# npm package file list), so define always-decline stubs first and let a
# successful import replace them.
def is_ai_path(path: str) -> bool:
    """Stub: never classify a path as an AI endpoint."""
    return False


async def try_reroute_to_matrx(*, path: str, method: str, **kwargs: Any) -> None:
    """Stub: never reroute; callers fall back to normal forwarding."""
    return None


try:
    from matrx.cli.cursor_reroute import is_ai_path, try_reroute_to_matrx
except ImportError:
    pass  # cursor_reroute unavailable — keep the stubs defined above
|
|
35
44
|
|
|
36
45
|
logger = logging.getLogger(__name__)
|
|
37
46
|
|
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Cursor → MTRX rerouting for live injection.
|
|
3
|
+
|
|
4
|
+
When the MITM proxy intercepts Cursor's AI traffic (RunSSE, StreamCpp, etc.),
|
|
5
|
+
this module reroutes it through MTRX instead of forwarding to Cursor's servers.
|
|
6
|
+
That enables the full suite: memory injection, compression, loop guard, etc.
|
|
7
|
+
|
|
8
|
+
Cursor uses Connect/gRPC protocol with binary protobuf. We parse the request,
|
|
9
|
+
convert to OpenAI/Anthropic format, call MTRX, and convert the response back.
|
|
10
|
+
|
|
11
|
+
Refs: cursor-tap (https://github.com/burpheart/cursor-tap), everestmz/cursor-rpc
|
|
12
|
+
"""
|
|
13
|
+
|
|
14
|
+
from __future__ import annotations
|
|
15
|
+
|
|
16
|
+
import json
|
|
17
|
+
import logging
|
|
18
|
+
import re
|
|
19
|
+
from typing import Any
|
|
20
|
+
|
|
21
|
+
logger = logging.getLogger(__name__)
|
|
22
|
+
|
|
23
|
+
# Cursor AI RPC paths (Connect protocol). RunSSE = main chat, StreamCpp = code completion.
_AI_PATH_PATTERNS = (
    r"RunSSE",
    r"StreamCpp",
    r"BidiAppend",
    r"AgentService",
    r"AiService",
)

# Compiled once at import time: one case-insensitive alternation instead of
# re-scanning the path once per pattern on every request.
_AI_PATH_RE = re.compile("|".join(_AI_PATH_PATTERNS), re.IGNORECASE)


def is_ai_path(path: str) -> bool:
    """Return True if this path is an AI/LLM endpoint we should reroute to MTRX.

    Matches any known Cursor RPC method substring (case-insensitive) anywhere
    in *path*. Empty or None paths are never AI endpoints.
    """
    if not path:
        return False
    return _AI_PATH_RE.search(path) is not None
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def _cursor_model_to_openai(cursor_model: str) -> str:
|
|
41
|
+
"""Map Cursor model names to OpenAI-style names MTRX expects."""
|
|
42
|
+
# Cursor uses names like "claude-sonnet-4" or "gpt-4o" - usually compatible
|
|
43
|
+
normalized = (cursor_model or "").strip().lower()
|
|
44
|
+
if not normalized:
|
|
45
|
+
return "gpt-4o" # fallback
|
|
46
|
+
return cursor_model
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
async def try_reroute_to_matrx(
    *,
    path: str,
    method: str,
    req_headers: dict[str, str],
    req_body: bytes,
    matrx_base_url: str,
    matrx_key: str,
    session_id: str | None = None,
    group_id: str | None = None,
    project_id: str | None = None,
) -> tuple[bool, dict[str, str], bytes | None, bool] | None:
    """
    Attempt to reroute a Cursor AI request through MTRX.

    Returns:
        (success, response_headers, response_body, is_streaming) when the
        request was handled here, or None to fall back to normal forwarding.
    """
    # Only POSTed AI RPCs are candidates; everything else forwards untouched.
    if method == "POST" and is_ai_path(path):
        # TODO: Cursor speaks Connect/gRPC with binary protobuf frames. The
        # proto conversion (parse req_body → messages+model, call MTRX,
        # re-encode the reply as Cursor's gRPC format) is not implemented yet,
        # so for now we log the candidate and decline.
        logger.debug("cursor_reroute: path=%s would reroute (protobuf conversion not yet implemented)", path)
    return None
|