mtrx-cli 0.1.28 → 0.1.29
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +16 -0
- package/package.json +4 -2
- package/src/matrx/__init__.py +1 -1
- package/src/matrx/cli/agent_cmds.py +390 -0
- package/src/matrx/cli/bootstrap.py +60 -4
- package/src/matrx/cli/indexer.py +513 -0
- package/src/matrx/cli/main.py +32 -18
package/README.md
CHANGED
|
@@ -30,3 +30,19 @@ npx mtrx help
|
|
|
30
30
|
- Python 3.10+ available as `python3`, `python`, or `py -3`
|
|
31
31
|
|
|
32
32
|
This npm package is a thin wrapper around the Python CLI bundled in this repo.
|
|
33
|
+
|
|
34
|
+
## Python dependencies
|
|
35
|
+
|
|
36
|
+
The tarball only ships the CLI source under `src/` and sets `PYTHONPATH` so Python can import it. **Third-party libraries** (for example `httpx`, `fastapi`, etc.) are **not** bundled—you still need the **`matrx`** Python package installed into the same interpreter npm invokes:
|
|
37
|
+
|
|
38
|
+
```bash
|
|
39
|
+
pip install matrx
|
|
40
|
+
```
|
|
41
|
+
|
|
42
|
+
Or from a checkout of this repo:
|
|
43
|
+
|
|
44
|
+
```bash
|
|
45
|
+
pip install -e .
|
|
46
|
+
```
|
|
47
|
+
|
|
48
|
+
Use `mtrx doctor` after install if commands fail with missing-module errors.
|
package/package.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "mtrx-cli",
|
|
3
|
-
"version": "0.1.
|
|
4
|
-
"description": "MATRX CLI for routing Codex, Claude, and Cursor through Matrx",
|
|
3
|
+
"version": "0.1.29",
|
|
4
|
+
"description": "MATRX CLI for routing Codex, Claude, Gemini, and Cursor through Matrx",
|
|
5
5
|
"homepage": "https://mtrx.so",
|
|
6
6
|
"repository": {
|
|
7
7
|
"type": "git",
|
|
@@ -35,6 +35,8 @@
|
|
|
35
35
|
"src/matrx/cli/cursor_service.py",
|
|
36
36
|
"src/matrx/cli/bootstrap.py",
|
|
37
37
|
"src/matrx/cli/gemini_env_bootstrap.cjs",
|
|
38
|
+
"src/matrx/cli/indexer.py",
|
|
39
|
+
"src/matrx/cli/agent_cmds.py",
|
|
38
40
|
"src/matrx/cli/launcher.py",
|
|
39
41
|
"src/matrx/cli/main.py",
|
|
40
42
|
"src/matrx/cli/project_cmds.py",
|
package/src/matrx/__init__.py
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
__version__ = "0.1.
|
|
1
|
+
__version__ = "0.1.29"
|
|
@@ -0,0 +1,390 @@
|
|
|
1
|
+
"""
|
|
2
|
+
CLI commands for real-time agent coordination.
|
|
3
|
+
|
|
4
|
+
mtrx agents list [--group <id>] — show agents active in the group right now
|
|
5
|
+
mtrx agents push <target> <prompt> — push a task into another agent's inbox
|
|
6
|
+
mtrx agents watch [--group <id>] — stream group events to the terminal (SSE)
|
|
7
|
+
mtrx agents status [--group <id>] — show this workspace's group/agent identity
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
from __future__ import annotations
|
|
11
|
+
|
|
12
|
+
import json
|
|
13
|
+
import os
|
|
14
|
+
import sys
|
|
15
|
+
import time
|
|
16
|
+
from datetime import datetime, timezone
|
|
17
|
+
|
|
18
|
+
import httpx
|
|
19
|
+
|
|
20
|
+
from matrx.cli.state import (
|
|
21
|
+
ensure_root_url,
|
|
22
|
+
get_workspace_binding,
|
|
23
|
+
load_state,
|
|
24
|
+
normalize_matrx_key,
|
|
25
|
+
resolve_workspace_root,
|
|
26
|
+
)
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
# ---------------------------------------------------------------------------
|
|
30
|
+
# Shared helpers (mirror pattern from project_cmds.py)
|
|
31
|
+
# ---------------------------------------------------------------------------
|
|
32
|
+
|
|
33
|
+
def _api(
|
|
34
|
+
state: dict,
|
|
35
|
+
*,
|
|
36
|
+
method: str,
|
|
37
|
+
path: str,
|
|
38
|
+
key: str,
|
|
39
|
+
json_body: dict | None = None,
|
|
40
|
+
params: dict | None = None,
|
|
41
|
+
timeout: float = 15.0,
|
|
42
|
+
) -> dict:
|
|
43
|
+
base_url = ensure_root_url(state.get("auth", {}).get("matrx", {}).get("base_url"))
|
|
44
|
+
url = f"{base_url.rstrip('/')}/v1{path}"
|
|
45
|
+
headers: dict[str, str] = {"X-Matrx-Key": key}
|
|
46
|
+
if json_body is not None:
|
|
47
|
+
headers["Content-Type"] = "application/json"
|
|
48
|
+
try:
|
|
49
|
+
with httpx.Client(timeout=timeout) as client:
|
|
50
|
+
response = client.request(
|
|
51
|
+
method, url, headers=headers, json=json_body, params=params
|
|
52
|
+
)
|
|
53
|
+
except httpx.HTTPError as exc:
|
|
54
|
+
raise ValueError(f"Matrx API request failed: {exc}") from exc
|
|
55
|
+
if response.status_code >= 400:
|
|
56
|
+
detail = response.text.strip() or response.reason_phrase
|
|
57
|
+
raise ValueError(
|
|
58
|
+
f"Matrx API error ({response.status_code}) for {path}: {detail}"
|
|
59
|
+
)
|
|
60
|
+
if response.status_code == 204 or not response.content:
|
|
61
|
+
return {}
|
|
62
|
+
return response.json()
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def _resolve_key(state: dict) -> str:
|
|
66
|
+
env_key = normalize_matrx_key(os.environ.get("MTRX_KEY"))
|
|
67
|
+
if env_key:
|
|
68
|
+
return env_key
|
|
69
|
+
binding = get_workspace_binding(state, cwd=os.getcwd()) or {}
|
|
70
|
+
binding_key = normalize_matrx_key(binding.get("matrx_key"))
|
|
71
|
+
if binding_key:
|
|
72
|
+
return binding_key
|
|
73
|
+
return normalize_matrx_key(
|
|
74
|
+
state.get("auth", {}).get("matrx", {}).get("key")
|
|
75
|
+
)
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def _require_key(state: dict) -> str:
|
|
79
|
+
key = _resolve_key(state)
|
|
80
|
+
if not key:
|
|
81
|
+
raise ValueError("No Matrx key found. Run: mtrx login matrx --key mx_...")
|
|
82
|
+
return key
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
def _resolve_group_id(state: dict, args_group_id: str | None = None) -> str | None:
|
|
86
|
+
"""Return the group_id to use: explicit flag > env var > workspace binding."""
|
|
87
|
+
if args_group_id:
|
|
88
|
+
return args_group_id.strip()
|
|
89
|
+
env_group = (os.environ.get("MTRX_GROUP_ID") or "").strip()
|
|
90
|
+
if env_group:
|
|
91
|
+
return env_group
|
|
92
|
+
binding = get_workspace_binding(state, cwd=os.getcwd()) or {}
|
|
93
|
+
return (binding.get("group_id") or "").strip() or None
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
def _resolve_agent_id(args_agent_id: str | None = None) -> str | None:
|
|
97
|
+
"""Return the agent_id: explicit flag > env var."""
|
|
98
|
+
if args_agent_id:
|
|
99
|
+
return args_agent_id.strip()
|
|
100
|
+
return (os.environ.get("MTRX_AGENT_ID") or "").strip() or None
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
def _fmt_time(ts: float | None) -> str:
|
|
104
|
+
if ts is None:
|
|
105
|
+
return "—"
|
|
106
|
+
try:
|
|
107
|
+
dt = datetime.fromtimestamp(float(ts), tz=timezone.utc)
|
|
108
|
+
return dt.strftime("%H:%M:%S UTC")
|
|
109
|
+
except (ValueError, OSError):
|
|
110
|
+
return str(ts)
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
# ---------------------------------------------------------------------------
|
|
114
|
+
# mtrx agents list
|
|
115
|
+
# ---------------------------------------------------------------------------
|
|
116
|
+
|
|
117
|
+
def cmd_list(args) -> int:
|
|
118
|
+
"""Show agents currently active in the group."""
|
|
119
|
+
state = load_state()
|
|
120
|
+
try:
|
|
121
|
+
key = _require_key(state)
|
|
122
|
+
except ValueError as exc:
|
|
123
|
+
print(str(exc), file=sys.stderr)
|
|
124
|
+
return 1
|
|
125
|
+
|
|
126
|
+
group_id = _resolve_group_id(state, getattr(args, "group", None))
|
|
127
|
+
|
|
128
|
+
if not group_id:
|
|
129
|
+
# Try to resolve via server
|
|
130
|
+
try:
|
|
131
|
+
ctx = _api(state, method="GET", path="/auth/context", key=key)
|
|
132
|
+
group_id = (ctx.get("default_group_id") or "").strip() or None
|
|
133
|
+
except ValueError:
|
|
134
|
+
pass
|
|
135
|
+
|
|
136
|
+
if not group_id:
|
|
137
|
+
print(
|
|
138
|
+
"No group_id set. Use --group <id>, set MTRX_GROUP_ID, or run: mtrx project switch",
|
|
139
|
+
file=sys.stderr,
|
|
140
|
+
)
|
|
141
|
+
return 1
|
|
142
|
+
|
|
143
|
+
include_history = getattr(args, "history", False)
|
|
144
|
+
try:
|
|
145
|
+
params: dict = {}
|
|
146
|
+
if include_history:
|
|
147
|
+
params["include_history"] = "true"
|
|
148
|
+
params["limit"] = str(getattr(args, "limit", 10))
|
|
149
|
+
data = _api(
|
|
150
|
+
state,
|
|
151
|
+
method="GET",
|
|
152
|
+
path=f"/groups/{group_id}/activity",
|
|
153
|
+
key=key,
|
|
154
|
+
params=params if params else None,
|
|
155
|
+
)
|
|
156
|
+
except ValueError as exc:
|
|
157
|
+
print(str(exc), file=sys.stderr)
|
|
158
|
+
return 1
|
|
159
|
+
|
|
160
|
+
agents = data.get("active_agents", [])
|
|
161
|
+
if not agents:
|
|
162
|
+
print(f"No active agents in group {group_id}")
|
|
163
|
+
else:
|
|
164
|
+
print(f"Active agents in group {group_id}:\n")
|
|
165
|
+
print(f" {'AGENT ID':<28} {'MODEL':<20} {'STARTED':<12} TASK")
|
|
166
|
+
print(" " + "─" * 80)
|
|
167
|
+
for a in agents:
|
|
168
|
+
agent_id = str(a.get("agent_id", "?"))
|
|
169
|
+
model = str(a.get("model", "?"))[:18]
|
|
170
|
+
started = _fmt_time(a.get("started_at"))
|
|
171
|
+
task = str(a.get("task_summary", ""))[:60]
|
|
172
|
+
print(f" {agent_id:<28} {model:<20} {started:<12} {task}")
|
|
173
|
+
|
|
174
|
+
if include_history:
|
|
175
|
+
recent = data.get("recent_activities", [])
|
|
176
|
+
if recent:
|
|
177
|
+
print(f"\nRecent activities:")
|
|
178
|
+
for act in recent:
|
|
179
|
+
status = act.get("status", "?")
|
|
180
|
+
aid = str(act.get("agent_id", "?"))
|
|
181
|
+
summary = str(act.get("task_summary", ""))[:60]
|
|
182
|
+
completed = act.get("completed_at", "")
|
|
183
|
+
print(f" [{status}] {aid}: {summary} ({completed})")
|
|
184
|
+
|
|
185
|
+
return 0
|
|
186
|
+
|
|
187
|
+
|
|
188
|
+
# ---------------------------------------------------------------------------
|
|
189
|
+
# mtrx agents push
|
|
190
|
+
# ---------------------------------------------------------------------------
|
|
191
|
+
|
|
192
|
+
def cmd_push(args) -> int:
|
|
193
|
+
"""Push a task prompt into another agent's inbox."""
|
|
194
|
+
state = load_state()
|
|
195
|
+
try:
|
|
196
|
+
key = _require_key(state)
|
|
197
|
+
except ValueError as exc:
|
|
198
|
+
print(str(exc), file=sys.stderr)
|
|
199
|
+
return 1
|
|
200
|
+
|
|
201
|
+
target_agent_id = (getattr(args, "target", "") or "").strip()
|
|
202
|
+
if not target_agent_id:
|
|
203
|
+
print(
|
|
204
|
+
"Usage: mtrx agents push <target-agent-id> <prompt>",
|
|
205
|
+
file=sys.stderr,
|
|
206
|
+
)
|
|
207
|
+
return 1
|
|
208
|
+
|
|
209
|
+
prompt_parts = getattr(args, "prompt_parts", [])
|
|
210
|
+
prompt = " ".join(prompt_parts).strip() if prompt_parts else ""
|
|
211
|
+
if not prompt:
|
|
212
|
+
print("prompt must not be blank", file=sys.stderr)
|
|
213
|
+
return 1
|
|
214
|
+
|
|
215
|
+
group_id = _resolve_group_id(state, getattr(args, "group", None))
|
|
216
|
+
from_agent_id = _resolve_agent_id(getattr(args, "from_agent", None)) or "cli"
|
|
217
|
+
|
|
218
|
+
body: dict = {
|
|
219
|
+
"prompt": prompt,
|
|
220
|
+
"target_agent_id": target_agent_id,
|
|
221
|
+
"from_agent_id": from_agent_id,
|
|
222
|
+
}
|
|
223
|
+
if group_id:
|
|
224
|
+
body["group_id"] = group_id
|
|
225
|
+
|
|
226
|
+
try:
|
|
227
|
+
resp = _api(state, method="POST", path="/inbox/push", key=key, json_body=body)
|
|
228
|
+
except ValueError as exc:
|
|
229
|
+
print(str(exc), file=sys.stderr)
|
|
230
|
+
return 1
|
|
231
|
+
|
|
232
|
+
count = resp.get("count", "?")
|
|
233
|
+
gid = group_id or "(default)"
|
|
234
|
+
print(f"Task pushed to {target_agent_id} in group {gid} (queue depth: {count})")
|
|
235
|
+
return 0
|
|
236
|
+
|
|
237
|
+
|
|
238
|
+
# ---------------------------------------------------------------------------
|
|
239
|
+
# mtrx agents watch
|
|
240
|
+
# ---------------------------------------------------------------------------
|
|
241
|
+
|
|
242
|
+
def cmd_watch(args) -> int:
|
|
243
|
+
"""Stream group events to the terminal over SSE."""
|
|
244
|
+
state = load_state()
|
|
245
|
+
try:
|
|
246
|
+
key = _require_key(state)
|
|
247
|
+
except ValueError as exc:
|
|
248
|
+
print(str(exc), file=sys.stderr)
|
|
249
|
+
return 1
|
|
250
|
+
|
|
251
|
+
group_id = _resolve_group_id(state, getattr(args, "group", None))
|
|
252
|
+
if not group_id:
|
|
253
|
+
print(
|
|
254
|
+
"No group_id set. Use --group <id> or set MTRX_GROUP_ID.",
|
|
255
|
+
file=sys.stderr,
|
|
256
|
+
)
|
|
257
|
+
return 1
|
|
258
|
+
|
|
259
|
+
base_url = ensure_root_url(
|
|
260
|
+
state.get("auth", {}).get("matrx", {}).get("base_url")
|
|
261
|
+
)
|
|
262
|
+
|
|
263
|
+
# Obtain a short-lived SSE token so we don't need to pass the API key in a
|
|
264
|
+
# persistent connection header.
|
|
265
|
+
try:
|
|
266
|
+
token_resp = _api(
|
|
267
|
+
state,
|
|
268
|
+
method="GET",
|
|
269
|
+
path=f"/groups/{group_id}/stream-token",
|
|
270
|
+
key=key,
|
|
271
|
+
)
|
|
272
|
+
except ValueError as exc:
|
|
273
|
+
print(f"Could not get stream token: {exc}", file=sys.stderr)
|
|
274
|
+
return 1
|
|
275
|
+
|
|
276
|
+
token = token_resp.get("token")
|
|
277
|
+
if not token:
|
|
278
|
+
print("Server did not return an SSE token.", file=sys.stderr)
|
|
279
|
+
return 1
|
|
280
|
+
|
|
281
|
+
stream_url = (
|
|
282
|
+
f"{base_url.rstrip('/')}/v1/groups/{group_id}/stream"
|
|
283
|
+
f"?sse_token={token}"
|
|
284
|
+
)
|
|
285
|
+
|
|
286
|
+
print(f"Watching group {group_id} (Ctrl-C to stop)...")
|
|
287
|
+
print()
|
|
288
|
+
|
|
289
|
+
try:
|
|
290
|
+
with httpx.Client(timeout=None) as client:
|
|
291
|
+
with client.stream("GET", stream_url) as resp:
|
|
292
|
+
if resp.status_code != 200:
|
|
293
|
+
print(
|
|
294
|
+
f"Stream error: HTTP {resp.status_code}",
|
|
295
|
+
file=sys.stderr,
|
|
296
|
+
)
|
|
297
|
+
return 1
|
|
298
|
+
for line in resp.iter_lines():
|
|
299
|
+
if not line:
|
|
300
|
+
continue
|
|
301
|
+
if line.startswith(": keepalive"):
|
|
302
|
+
continue
|
|
303
|
+
if line.startswith("data: "):
|
|
304
|
+
raw = line[6:]
|
|
305
|
+
try:
|
|
306
|
+
evt = json.loads(raw)
|
|
307
|
+
_print_event(evt)
|
|
308
|
+
except json.JSONDecodeError:
|
|
309
|
+
print(raw)
|
|
310
|
+
except KeyboardInterrupt:
|
|
311
|
+
print("\nStopped.")
|
|
312
|
+
except httpx.HTTPError as exc:
|
|
313
|
+
print(f"Stream error: {exc}", file=sys.stderr)
|
|
314
|
+
return 1
|
|
315
|
+
|
|
316
|
+
return 0
|
|
317
|
+
|
|
318
|
+
|
|
319
|
+
def _print_event(evt: dict) -> None:
|
|
320
|
+
ts = datetime.now(tz=timezone.utc).strftime("%H:%M:%S")
|
|
321
|
+
etype = evt.get("type", "event")
|
|
322
|
+
model = evt.get("model", "")
|
|
323
|
+
provider = evt.get("provider", "")
|
|
324
|
+
agent = evt.get("agent_id", "?")
|
|
325
|
+
tokens_in = evt.get("input_tokens", 0)
|
|
326
|
+
tokens_out = evt.get("output_tokens", 0)
|
|
327
|
+
status_code = evt.get("status_code", "")
|
|
328
|
+
latency = evt.get("latency_ms")
|
|
329
|
+
latency_str = f"{latency:.0f}ms" if latency else ""
|
|
330
|
+
|
|
331
|
+
# Colour codes (simple; no dependency)
|
|
332
|
+
GREEN, DIM, RESET = "\033[32m", "\033[2m", "\033[0m"
|
|
333
|
+
ok = status_code == 200 or status_code == ""
|
|
334
|
+
colour = GREEN if ok else "\033[31m"
|
|
335
|
+
|
|
336
|
+
print(
|
|
337
|
+
f"{DIM}{ts}{RESET} "
|
|
338
|
+
f"{colour}[{etype}]{RESET} "
|
|
339
|
+
f"agent={agent} "
|
|
340
|
+
f"model={model}/{provider} "
|
|
341
|
+
f"in={tokens_in} out={tokens_out} "
|
|
342
|
+
f"{latency_str}"
|
|
343
|
+
)
|
|
344
|
+
|
|
345
|
+
|
|
346
|
+
# ---------------------------------------------------------------------------
|
|
347
|
+
# mtrx agents status
|
|
348
|
+
# ---------------------------------------------------------------------------
|
|
349
|
+
|
|
350
|
+
def cmd_status(args) -> int:
|
|
351
|
+
"""Show this workspace's group / agent binding."""
|
|
352
|
+
state = load_state()
|
|
353
|
+
workspace_root = resolve_workspace_root(os.getcwd())
|
|
354
|
+
binding = get_workspace_binding(state, cwd=os.getcwd()) or {}
|
|
355
|
+
|
|
356
|
+
group_id = _resolve_group_id(state, getattr(args, "group", None))
|
|
357
|
+
agent_id = _resolve_agent_id(getattr(args, "agent", None))
|
|
358
|
+
|
|
359
|
+
print(f"Workspace: {workspace_root}")
|
|
360
|
+
print(f"Group ID: {group_id or '(not set)'}")
|
|
361
|
+
print(f"Agent ID: {agent_id or '(not set)'}")
|
|
362
|
+
|
|
363
|
+
env_group = (os.environ.get("MTRX_GROUP_ID") or "").strip()
|
|
364
|
+
env_agent = (os.environ.get("MTRX_AGENT_ID") or "").strip()
|
|
365
|
+
if env_group:
|
|
366
|
+
print(f" group from MTRX_GROUP_ID env var")
|
|
367
|
+
elif binding.get("group_id"):
|
|
368
|
+
print(f" group from workspace binding")
|
|
369
|
+
if env_agent:
|
|
370
|
+
print(f" agent from MTRX_AGENT_ID env var")
|
|
371
|
+
|
|
372
|
+
if not group_id:
|
|
373
|
+
print("\nTip: mtrx agents list --group <id> OR export MTRX_GROUP_ID=<id>")
|
|
374
|
+
return 0
|
|
375
|
+
|
|
376
|
+
# Fetch live count
|
|
377
|
+
try:
|
|
378
|
+
key = _require_key(state)
|
|
379
|
+
data = _api(
|
|
380
|
+
state,
|
|
381
|
+
method="GET",
|
|
382
|
+
path=f"/groups/{group_id}/activity",
|
|
383
|
+
key=key,
|
|
384
|
+
)
|
|
385
|
+
count = len(data.get("active_agents", []))
|
|
386
|
+
print(f"\nActive agents in group: {count}")
|
|
387
|
+
except ValueError:
|
|
388
|
+
pass
|
|
389
|
+
|
|
390
|
+
return 0
|
|
@@ -5,9 +5,17 @@ Called by `mtrx init`.
|
|
|
5
5
|
from __future__ import annotations
|
|
6
6
|
|
|
7
7
|
import json
|
|
8
|
+
import os
|
|
8
9
|
import subprocess
|
|
9
10
|
from pathlib import Path
|
|
10
11
|
|
|
12
|
+
from matrx.cli.state import (
|
|
13
|
+
get_workspace_binding,
|
|
14
|
+
load_state,
|
|
15
|
+
normalize_matrx_key,
|
|
16
|
+
resolve_workspace_root,
|
|
17
|
+
)
|
|
18
|
+
|
|
11
19
|
_DEFAULT_SYSTEMS_TEMPLATE = [
|
|
12
20
|
{
|
|
13
21
|
"id": "memory",
|
|
@@ -36,9 +44,9 @@ _DEFAULT_SYSTEMS_TEMPLATE = [
|
|
|
36
44
|
]
|
|
37
45
|
|
|
38
46
|
|
|
39
|
-
def run_init(project_root: str = ".") -> None:
|
|
47
|
+
def run_init(project_root: str = ".", *, refresh: bool = False) -> None:
|
|
40
48
|
"""Entry point for `mtrx init`."""
|
|
41
|
-
root = Path(project_root).resolve()
|
|
49
|
+
root = Path(resolve_workspace_root(project_root)).resolve()
|
|
42
50
|
print(f"Matrx init: analyzing {root}")
|
|
43
51
|
|
|
44
52
|
# Step 1: Load or create .matrx/systems.json
|
|
@@ -69,11 +77,59 @@ def run_init(project_root: str = ".") -> None:
|
|
|
69
77
|
else:
|
|
70
78
|
print("\n No recently active systems detected (no git history or no matches).")
|
|
71
79
|
|
|
72
|
-
print(
|
|
73
|
-
print(
|
|
80
|
+
print("\n Matrx will generate cards for detected systems on first use.")
|
|
81
|
+
print(" Run your agent — cards will be ready within the first few calls.\n")
|
|
82
|
+
|
|
83
|
+
if refresh:
|
|
84
|
+
_refresh_llm_profile(root)
|
|
85
|
+
|
|
74
86
|
print(" Done.")
|
|
75
87
|
|
|
76
88
|
|
|
89
|
+
def _refresh_llm_profile(root: Path) -> None:
|
|
90
|
+
from matrx.cli.indexer import (
|
|
91
|
+
clear_init_cache,
|
|
92
|
+
resolve_project_id_for_launch,
|
|
93
|
+
trigger_index,
|
|
94
|
+
)
|
|
95
|
+
|
|
96
|
+
state = load_state()
|
|
97
|
+
key = _resolved_matrx_key(state, cwd=str(root))
|
|
98
|
+
if not key:
|
|
99
|
+
print(" Skipped codebase re-profile: Matrx login required.")
|
|
100
|
+
return
|
|
101
|
+
|
|
102
|
+
project_id = resolve_project_id_for_launch(state, key, cwd=str(root))
|
|
103
|
+
if not project_id:
|
|
104
|
+
print(
|
|
105
|
+
" Skipped codebase re-profile: no Matrx project resolved for this workspace."
|
|
106
|
+
)
|
|
107
|
+
return
|
|
108
|
+
|
|
109
|
+
clear_init_cache(project_id)
|
|
110
|
+
print(" Profiling your codebase - this takes ~10 seconds...")
|
|
111
|
+
trigger_index(
|
|
112
|
+
state,
|
|
113
|
+
key,
|
|
114
|
+
cwd=str(root),
|
|
115
|
+
blocking=True,
|
|
116
|
+
timeout=30,
|
|
117
|
+
force_llm_profile=True,
|
|
118
|
+
)
|
|
119
|
+
print(" Codebase profile refresh requested.")
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
def _resolved_matrx_key(state: dict, *, cwd: str) -> str:
|
|
123
|
+
env_key = normalize_matrx_key(os.environ.get("MTRX_KEY"))
|
|
124
|
+
if env_key:
|
|
125
|
+
return env_key
|
|
126
|
+
workspace_binding = get_workspace_binding(state, cwd=cwd) or {}
|
|
127
|
+
workspace_key = normalize_matrx_key(workspace_binding.get("matrx_key"))
|
|
128
|
+
if workspace_key:
|
|
129
|
+
return workspace_key
|
|
130
|
+
return normalize_matrx_key(state.get("auth", {}).get("matrx", {}).get("key")) or ""
|
|
131
|
+
|
|
132
|
+
|
|
77
133
|
def _get_hot_files(root: Path, days: int = 30) -> dict[str, int]:
|
|
78
134
|
"""Parse git log, return file → touch count mapping."""
|
|
79
135
|
try:
|
|
@@ -0,0 +1,513 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Background project indexer.
|
|
3
|
+
|
|
4
|
+
When `mtrx claude/codex/gemini/cursor` is launched inside a git repo, this
|
|
5
|
+
module fires a fire-and-forget daemon thread that deterministically scans the
|
|
6
|
+
workspace and ships structural facts (languages, frameworks, manifests,
|
|
7
|
+
top-level directories) to the Matrx backend, which persists them as
|
|
8
|
+
PROJECT_FACT / STRUCTURE memories scoped to the matching project.
|
|
9
|
+
|
|
10
|
+
Project resolution:
|
|
11
|
+
1. Existing workspace binding (~/.config/mtrx/state.json) wins.
|
|
12
|
+
2. Otherwise, match the git remote URL against project.repo_url in the org.
|
|
13
|
+
3. If neither resolves, the index is silently skipped.
|
|
14
|
+
|
|
15
|
+
Re-index throttle: skipped if last run was <24h ago AND the file-list hash
|
|
16
|
+
is unchanged.
|
|
17
|
+
|
|
18
|
+
The thread never prints on success and never raises into the launcher.
|
|
19
|
+
"""
|
|
20
|
+
|
|
21
|
+
from __future__ import annotations
|
|
22
|
+
|
|
23
|
+
import copy
|
|
24
|
+
import hashlib
|
|
25
|
+
import json
|
|
26
|
+
import os
|
|
27
|
+
import re
|
|
28
|
+
import subprocess
|
|
29
|
+
import threading
|
|
30
|
+
from datetime import datetime, timezone
|
|
31
|
+
from pathlib import Path
|
|
32
|
+
|
|
33
|
+
import httpx
|
|
34
|
+
|
|
35
|
+
from matrx.cli.state import (
|
|
36
|
+
ensure_root_url,
|
|
37
|
+
get_workspace_binding,
|
|
38
|
+
load_state,
|
|
39
|
+
resolve_workspace_root,
|
|
40
|
+
save_state,
|
|
41
|
+
)
|
|
42
|
+
|
|
43
|
+
INDEX_MIN_INTERVAL_SEC = 24 * 3600
|
|
44
|
+
INDEX_HTTP_TIMEOUT_SEC = 60
|
|
45
|
+
_MAX_FILE_SIZE = 256 * 1024
|
|
46
|
+
_MAX_FILES_FOR_IMPORT_SCAN = 400
|
|
47
|
+
_SKIP_TOP_DIRS = {"node_modules", "dist", "build", "__pycache__", ".venv", "venv", ".next"}
|
|
48
|
+
|
|
49
|
+
_LANG_BY_EXT: dict[str, str] = {
|
|
50
|
+
".py": "Python",
|
|
51
|
+
".js": "JavaScript",
|
|
52
|
+
".ts": "TypeScript",
|
|
53
|
+
".tsx": "TypeScript",
|
|
54
|
+
".jsx": "JavaScript",
|
|
55
|
+
".go": "Go",
|
|
56
|
+
".rs": "Rust",
|
|
57
|
+
".rb": "Ruby",
|
|
58
|
+
".java": "Java",
|
|
59
|
+
".kt": "Kotlin",
|
|
60
|
+
".swift": "Swift",
|
|
61
|
+
".cpp": "C++",
|
|
62
|
+
".cc": "C++",
|
|
63
|
+
".c": "C",
|
|
64
|
+
".cs": "C#",
|
|
65
|
+
".php": "PHP",
|
|
66
|
+
".sh": "Shell",
|
|
67
|
+
".sql": "SQL",
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
_FRAMEWORK_PATTERNS: list[tuple[re.Pattern[str], str]] = [
|
|
71
|
+
(re.compile(r"\b(?:from|import)\s+fastapi\b"), "FastAPI"),
|
|
72
|
+
(re.compile(r"\b(?:from|import)\s+flask\b"), "Flask"),
|
|
73
|
+
(re.compile(r"\b(?:from|import)\s+django\b"), "Django"),
|
|
74
|
+
(re.compile(r"\bfrom\s+sqlalchemy\b"), "SQLAlchemy"),
|
|
75
|
+
(re.compile(r"\bfrom\s+pydantic\b"), "Pydantic"),
|
|
76
|
+
(re.compile(r"\bimport\s+pandas\b"), "pandas"),
|
|
77
|
+
(re.compile(r"\bimport\s+numpy\b"), "NumPy"),
|
|
78
|
+
(re.compile(r"\bimport\s+torch\b"), "PyTorch"),
|
|
79
|
+
(re.compile(r"\bimport\s+tensorflow\b"), "TensorFlow"),
|
|
80
|
+
(re.compile(r"""from\s+['"]react['"]"""), "React"),
|
|
81
|
+
(re.compile(r"""from\s+['"]vue['"]"""), "Vue"),
|
|
82
|
+
(re.compile(r"""from\s+['"]next(?:/|['"])"""), "Next.js"),
|
|
83
|
+
(re.compile(r"""from\s+['"]express['"]"""), "Express"),
|
|
84
|
+
(re.compile(r"""from\s+['"]fastify['"]"""), "Fastify"),
|
|
85
|
+
(re.compile(r"""from\s+['"]@nestjs/"""), "NestJS"),
|
|
86
|
+
(re.compile(r"""from\s+['"]@angular/"""), "Angular"),
|
|
87
|
+
(re.compile(r"""from\s+['"]svelte['"]"""), "Svelte"),
|
|
88
|
+
(re.compile(r"""from\s+['"]tailwindcss['"]"""), "Tailwind"),
|
|
89
|
+
]
|
|
90
|
+
|
|
91
|
+
_MANIFEST_FILES: tuple[str, ...] = (
|
|
92
|
+
"package.json",
|
|
93
|
+
"pyproject.toml",
|
|
94
|
+
"requirements.txt",
|
|
95
|
+
"Cargo.toml",
|
|
96
|
+
"go.mod",
|
|
97
|
+
"Gemfile",
|
|
98
|
+
"pom.xml",
|
|
99
|
+
"build.gradle",
|
|
100
|
+
"build.gradle.kts",
|
|
101
|
+
"composer.json",
|
|
102
|
+
"tsconfig.json",
|
|
103
|
+
"Dockerfile",
|
|
104
|
+
)
|
|
105
|
+
|
|
106
|
+
_PROFILE_MANIFEST_FILES: tuple[str, ...] = (
|
|
107
|
+
"pyproject.toml",
|
|
108
|
+
"requirements.txt",
|
|
109
|
+
"package.json",
|
|
110
|
+
"go.mod",
|
|
111
|
+
"Cargo.toml",
|
|
112
|
+
)
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
# ---------------------------------------------------------------------------
|
|
116
|
+
# Git helpers
|
|
117
|
+
# ---------------------------------------------------------------------------
|
|
118
|
+
|
|
119
|
+
def _git_tracked_files(root: Path) -> list[str] | None:
|
|
120
|
+
"""Return tracked + untracked-not-ignored files (relative). None if not a git repo."""
|
|
121
|
+
try:
|
|
122
|
+
result = subprocess.run(
|
|
123
|
+
["git", "-C", str(root), "ls-files", "-co", "--exclude-standard"],
|
|
124
|
+
capture_output=True,
|
|
125
|
+
text=True,
|
|
126
|
+
timeout=10,
|
|
127
|
+
check=False,
|
|
128
|
+
)
|
|
129
|
+
except (OSError, subprocess.SubprocessError):
|
|
130
|
+
return None
|
|
131
|
+
if result.returncode != 0:
|
|
132
|
+
return None
|
|
133
|
+
return [ln.strip() for ln in result.stdout.splitlines() if ln.strip()]
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
def _git_remote_url(root: Path) -> str | None:
|
|
137
|
+
try:
|
|
138
|
+
result = subprocess.run(
|
|
139
|
+
["git", "-C", str(root), "remote", "get-url", "origin"],
|
|
140
|
+
capture_output=True,
|
|
141
|
+
text=True,
|
|
142
|
+
timeout=5,
|
|
143
|
+
check=False,
|
|
144
|
+
)
|
|
145
|
+
except (OSError, subprocess.SubprocessError):
|
|
146
|
+
return None
|
|
147
|
+
if result.returncode != 0:
|
|
148
|
+
return None
|
|
149
|
+
return result.stdout.strip() or None
|
|
150
|
+
|
|
151
|
+
|
|
152
|
+
def _normalize_remote_url(url: str) -> str:
|
|
153
|
+
u = url.strip()
|
|
154
|
+
if u.startswith("git@"):
|
|
155
|
+
# git@github.com:org/repo.git -> https://github.com/org/repo
|
|
156
|
+
host_path = u[len("git@") :].replace(":", "/", 1)
|
|
157
|
+
u = f"https://{host_path}"
|
|
158
|
+
# Strip scheme + embedded credentials
|
|
159
|
+
u = re.sub(r"^[a-zA-Z][a-zA-Z0-9+.-]*://(?:[^@/]+@)?", "https://", u)
|
|
160
|
+
u = u.rstrip("/")
|
|
161
|
+
if u.endswith(".git"):
|
|
162
|
+
u = u[:-4]
|
|
163
|
+
return u.lower()
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
# ---------------------------------------------------------------------------
|
|
167
|
+
# Deterministic scan
|
|
168
|
+
# ---------------------------------------------------------------------------
|
|
169
|
+
|
|
170
|
+
def _detect_languages(files: list[str]) -> dict[str, int]:
|
|
171
|
+
counts: dict[str, int] = {}
|
|
172
|
+
for f in files:
|
|
173
|
+
ext = os.path.splitext(f)[1].lower()
|
|
174
|
+
lang = _LANG_BY_EXT.get(ext)
|
|
175
|
+
if lang:
|
|
176
|
+
counts[lang] = counts.get(lang, 0) + 1
|
|
177
|
+
return counts
|
|
178
|
+
|
|
179
|
+
|
|
180
|
+
def _detect_frameworks(root: Path, files: list[str]) -> list[str]:
|
|
181
|
+
frameworks: set[str] = set()
|
|
182
|
+
scan_targets = [
|
|
183
|
+
f
|
|
184
|
+
for f in files
|
|
185
|
+
if os.path.splitext(f)[1].lower() in {".py", ".ts", ".tsx", ".js", ".jsx"}
|
|
186
|
+
][:_MAX_FILES_FOR_IMPORT_SCAN]
|
|
187
|
+
|
|
188
|
+
for rel in scan_targets:
|
|
189
|
+
path = root / rel
|
|
190
|
+
try:
|
|
191
|
+
if path.stat().st_size > _MAX_FILE_SIZE:
|
|
192
|
+
continue
|
|
193
|
+
text = path.read_text(encoding="utf-8", errors="ignore")
|
|
194
|
+
except OSError:
|
|
195
|
+
continue
|
|
196
|
+
for patt, name in _FRAMEWORK_PATTERNS:
|
|
197
|
+
if name not in frameworks and patt.search(text):
|
|
198
|
+
frameworks.add(name)
|
|
199
|
+
return sorted(frameworks)
|
|
200
|
+
|
|
201
|
+
|
|
202
|
+
def _top_level_dirs(files: list[str]) -> list[str]:
|
|
203
|
+
dirs: dict[str, int] = {}
|
|
204
|
+
for f in files:
|
|
205
|
+
if "/" not in f:
|
|
206
|
+
continue
|
|
207
|
+
top = f.split("/", 1)[0]
|
|
208
|
+
if top.startswith(".") or top in _SKIP_TOP_DIRS:
|
|
209
|
+
continue
|
|
210
|
+
dirs[top] = dirs.get(top, 0) + 1
|
|
211
|
+
return [d for d, _ in sorted(dirs.items(), key=lambda kv: (-kv[1], kv[0]))[:10]]
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
def _detect_manifests(root: Path, files: list[str]) -> dict[str, dict]:
|
|
215
|
+
tracked = set(files)
|
|
216
|
+
found: dict[str, dict] = {}
|
|
217
|
+
for name in _MANIFEST_FILES:
|
|
218
|
+
if name not in tracked:
|
|
219
|
+
continue
|
|
220
|
+
entry: dict = {"name": name}
|
|
221
|
+
path = root / name
|
|
222
|
+
try:
|
|
223
|
+
raw = path.read_text(encoding="utf-8", errors="ignore")
|
|
224
|
+
except OSError:
|
|
225
|
+
raw = ""
|
|
226
|
+
entry["size"] = len(raw)
|
|
227
|
+
if name == "package.json" and raw:
|
|
228
|
+
try:
|
|
229
|
+
parsed = json.loads(raw)
|
|
230
|
+
if isinstance(parsed, dict):
|
|
231
|
+
if parsed.get("name"):
|
|
232
|
+
entry["package_name"] = str(parsed["name"])
|
|
233
|
+
dep_count = 0
|
|
234
|
+
for field in ("dependencies", "devDependencies", "peerDependencies"):
|
|
235
|
+
value = parsed.get(field)
|
|
236
|
+
if isinstance(value, dict):
|
|
237
|
+
dep_count += len(value)
|
|
238
|
+
entry["dep_count"] = dep_count
|
|
239
|
+
except (ValueError, TypeError):
|
|
240
|
+
pass
|
|
241
|
+
found[name] = entry
|
|
242
|
+
return found
|
|
243
|
+
|
|
244
|
+
|
|
245
|
+
def _read_truncated(path: Path, *, limit: int) -> str | None:
|
|
246
|
+
try:
|
|
247
|
+
text = path.read_text(encoding="utf-8", errors="ignore")
|
|
248
|
+
except OSError:
|
|
249
|
+
return None
|
|
250
|
+
return text[:limit]
|
|
251
|
+
|
|
252
|
+
|
|
253
|
+
def _read_manifest_texts(root: Path, files: list[str], *, limit: int) -> dict[str, str]:
|
|
254
|
+
tracked = set(files)
|
|
255
|
+
texts: dict[str, str] = {}
|
|
256
|
+
for name in _PROFILE_MANIFEST_FILES:
|
|
257
|
+
if name not in tracked:
|
|
258
|
+
continue
|
|
259
|
+
text = _read_truncated(root / name, limit=limit)
|
|
260
|
+
if text:
|
|
261
|
+
texts[name] = text
|
|
262
|
+
return texts
|
|
263
|
+
|
|
264
|
+
|
|
265
|
+
def build_index_payload(
|
|
266
|
+
root: Path,
|
|
267
|
+
*,
|
|
268
|
+
trigger_llm_profile: bool = False,
|
|
269
|
+
) -> dict | None:
|
|
270
|
+
"""Build the structural payload. Returns None if not a git repo."""
|
|
271
|
+
files = _git_tracked_files(root)
|
|
272
|
+
if files is None:
|
|
273
|
+
return None
|
|
274
|
+
|
|
275
|
+
h = hashlib.sha256()
|
|
276
|
+
for f in sorted(files):
|
|
277
|
+
h.update(f.encode("utf-8"))
|
|
278
|
+
h.update(b"\n")
|
|
279
|
+
|
|
280
|
+
return {
|
|
281
|
+
"workspace_root": str(root),
|
|
282
|
+
"file_count": len(files),
|
|
283
|
+
"file_list_hash": h.hexdigest(),
|
|
284
|
+
"languages": _detect_languages(files),
|
|
285
|
+
"frameworks": _detect_frameworks(root, files),
|
|
286
|
+
"top_directories": _top_level_dirs(files),
|
|
287
|
+
"manifests": _detect_manifests(root, files),
|
|
288
|
+
"readme": _read_truncated(root / "README.md", limit=4000),
|
|
289
|
+
"manifest_texts": _read_manifest_texts(root, files, limit=2000),
|
|
290
|
+
"trigger_llm_profile": trigger_llm_profile,
|
|
291
|
+
}
|
|
292
|
+
|
|
293
|
+
|
|
294
|
+
# ---------------------------------------------------------------------------
|
|
295
|
+
# Project resolution
|
|
296
|
+
# ---------------------------------------------------------------------------
|
|
297
|
+
|
|
298
|
+
def _resolve_project_id(state: dict, key: str, root: Path) -> str | None:
    """Resolve the Matrx project id for the workspace at *root*.

    Precedence:
      1. an explicit workspace binding stored in *state*;
      2. a project whose ``repo_url`` matches the workspace's normalized
         git remote, found via the Matrx API.

    Returns None when no remote exists or any API step fails.
    """
    binding = get_workspace_binding(state, cwd=str(root)) or {}
    pinned = (binding.get("project_id") or "").strip()
    if pinned:
        return pinned

    remote = _git_remote_url(root)
    if not remote:
        return None
    wanted = _normalize_remote_url(remote)

    base_url = ensure_root_url(
        state.get("auth", {}).get("matrx", {}).get("base_url")
    ).rstrip("/")
    headers = {"X-Matrx-Key": key}

    try:
        with httpx.Client(timeout=10) as client:
            ctx = client.get(f"{base_url}/v1/auth/context", headers=headers)
            if ctx.status_code != 200:
                return None
            org_id = (ctx.json().get("org_id") or "").strip()
            if not org_id:
                return None

            listing = client.get(
                f"{base_url}/v1/orgs/{org_id}/projects", headers=headers
            )
            if listing.status_code != 200:
                return None
            body = listing.json()
    except (httpx.HTTPError, ValueError):
        return None

    candidates = body if isinstance(body, list) else body.get("projects", [])
    for candidate in candidates:
        url = (candidate.get("repo_url") or "").strip()
        if url and _normalize_remote_url(url) == wanted:
            return candidate.get("id")
    return None
|
|
338
|
+
|
|
339
|
+
|
|
340
|
+
def resolve_project_id_for_launch(
    state: dict,
    key: str,
    *,
    cwd: str | None = None,
) -> str | None:
    """Resolve the project id for a CLI launch, or None when impossible.

    Short-circuits (no network) when *key* is empty or the resolved
    workspace root contains no ``.git`` entry.
    """
    if not key:
        return None
    workspace = Path(resolve_workspace_root(cwd or os.getcwd()))
    if not (workspace / ".git").exists():
        return None
    return _resolve_project_id(state, key, workspace)
|
|
352
|
+
|
|
353
|
+
|
|
354
|
+
# ---------------------------------------------------------------------------
|
|
355
|
+
# Cache + HTTP
|
|
356
|
+
# ---------------------------------------------------------------------------
|
|
357
|
+
|
|
358
|
+
def _should_skip(state: dict, project_id: str, *, file_list_hash: str) -> bool:
    """Return True when a fresh, matching index run is already cached.

    A skip requires the cached entry for *project_id* to carry the same
    file-list hash AND a timestamp younger than ``INDEX_MIN_INTERVAL_SEC``.
    Missing or malformed cache entries never cause a skip.
    """
    entry = state.get("index_cache", {}).get(project_id)
    if not isinstance(entry, dict):
        return False
    if entry.get("file_list_hash") != file_list_hash:
        return False
    stamp = str(entry.get("last_indexed_at") or "")
    try:
        recorded = datetime.fromisoformat(stamp.replace("Z", "+00:00"))
    except ValueError:
        return False
    elapsed = (datetime.now(timezone.utc) - recorded).total_seconds()
    return elapsed < INDEX_MIN_INTERVAL_SEC
|
|
372
|
+
|
|
373
|
+
|
|
374
|
+
def _record_cache(project_id: str, *, file_list_hash: str, workspace_root: str) -> None:
    """Persist index-run metadata for *project_id* into on-disk state.

    Reloads state first (so a stale in-memory snapshot is not written back),
    then records the hash, workspace root, and a UTC 'Z'-suffixed timestamp.
    """
    state = load_state()
    now = (
        datetime.now(timezone.utc)
        .isoformat(timespec="seconds")
        .replace("+00:00", "Z")
    )
    state.setdefault("index_cache", {})[project_id] = {
        "file_list_hash": file_list_hash,
        "workspace_root": workspace_root,
        "last_indexed_at": now,
    }
    save_state(state)
|
|
385
|
+
|
|
386
|
+
|
|
387
|
+
def has_been_inited(state: dict, project_id: str) -> bool:
    """Return True when *project_id* has an entry in the init cache."""
    init_cache = state.get("init_cache", {})
    return project_id in init_cache
|
|
389
|
+
|
|
390
|
+
|
|
391
|
+
def _record_init(project_id: str) -> None:
    """Mark *project_id* as initialized, stamping the current UTC time.

    State is reloaded from disk before writing so concurrent updates from
    other commands are not clobbered.
    """
    state = load_state()
    stamp = (
        datetime.now(timezone.utc)
        .isoformat(timespec="seconds")
        .replace("+00:00", "Z")
    )
    cache = state.setdefault("init_cache", {})
    cache[project_id] = {"inited_at": stamp}
    save_state(state)
|
|
402
|
+
|
|
403
|
+
|
|
404
|
+
def clear_init_cache(project_id: str) -> None:
    """Remove *project_id* from the persisted init cache, if present.

    State is rewritten only when an entry was actually removed, avoiding a
    needless disk write. Uses ``get`` rather than ``setdefault`` so a
    missing ``init_cache`` key is not inserted into the loaded state, and
    drops the redundant ``in``-check-then-``pop(key, None)`` pattern in
    favor of a single guarded ``del``.
    """
    latest = load_state()
    cache = latest.get("init_cache", {})
    if project_id in cache:
        del cache[project_id]
        save_state(latest)
|
|
410
|
+
|
|
411
|
+
|
|
412
|
+
def _post_index(base_url: str, key: str, project_id: str, payload: dict) -> dict | None:
    """POST the index *payload* to the Matrx API.

    Returns the parsed JSON response dict on success, ``{}`` when the body
    is not JSON (or not a dict), and ``None`` on transport errors or any
    HTTP status >= 400.
    """
    endpoint = f"{base_url.rstrip('/')}/v1/projects/{project_id}/index"
    headers = {"X-Matrx-Key": key, "Content-Type": "application/json"}
    try:
        with httpx.Client(timeout=INDEX_HTTP_TIMEOUT_SEC) as client:
            resp = client.post(endpoint, headers=headers, json=payload)
    except httpx.HTTPError:
        return None
    if resp.status_code >= 400:
        return None
    try:
        body = resp.json()
    except ValueError:
        return {}
    return body if isinstance(body, dict) else {}
|
|
430
|
+
|
|
431
|
+
|
|
432
|
+
# ---------------------------------------------------------------------------
|
|
433
|
+
# Thread entry point
|
|
434
|
+
# ---------------------------------------------------------------------------
|
|
435
|
+
|
|
436
|
+
def _run_index(
    state_snapshot: dict,
    key: str,
    root: Path,
    *,
    force_llm_profile: bool = False,
) -> None:
    """Worker body for an index run; swallows every error (background task).

    Flow: resolve the project, build the payload, honor the skip cache
    (never skipped on a first run), POST to the API, and only after a
    successful POST record init and cache state.
    """
    try:
        project_id = _resolve_project_id(state_snapshot, key, root)
        if not project_id:
            return

        first_run = force_llm_profile or not has_been_inited(state_snapshot, project_id)
        payload = build_index_payload(root, trigger_llm_profile=first_run)
        if not payload:
            return

        if not first_run and _should_skip(
            state_snapshot, project_id, file_list_hash=payload["file_list_hash"]
        ):
            return

        base_url = ensure_root_url(
            state_snapshot.get("auth", {}).get("matrx", {}).get("base_url")
        )
        if _post_index(base_url, key, project_id, payload) is None:
            return

        if first_run:
            _record_init(project_id)

        _record_cache(
            project_id,
            file_list_hash=payload["file_list_hash"],
            workspace_root=str(root),
        )
    except Exception:
        # Background task — never crash the launcher.
        return
|
|
476
|
+
|
|
477
|
+
|
|
478
|
+
def trigger_index(
    state: dict,
    key: str,
    *,
    cwd: str | None = None,
    blocking: bool = False,
    timeout: float | None = None,
    force_llm_profile: bool = False,
) -> None:
    """Start an index run. Silent on success and failure.

    No-op when *key* is empty or the resolved workspace root is not a git
    repo. The run happens on a daemon thread over a deep-copied state
    snapshot; with ``blocking=True`` the caller waits up to *timeout*
    seconds for it to finish.
    """
    if not key:
        return
    workspace = Path(resolve_workspace_root(cwd or os.getcwd()))
    if not (workspace / ".git").exists():
        return
    worker = threading.Thread(
        target=_run_index,
        args=(copy.deepcopy(state), key, workspace),
        kwargs={"force_llm_profile": force_llm_profile},
        name="mtrx-indexer",
        daemon=True,
    )
    worker.start()
    if blocking:
        worker.join(timeout=timeout)
|
|
504
|
+
|
|
505
|
+
|
|
506
|
+
def trigger_background_index(
    state: dict,
    key: str,
    *,
    cwd: str | None = None,
) -> None:
    """Fire-and-forget index on a daemon thread. Silent on success and failure."""
    # Non-blocking by default; delegates all guards to trigger_index.
    trigger_index(state, key, cwd=cwd)
|
package/src/matrx/cli/main.py
CHANGED
|
@@ -80,7 +80,7 @@ def main(argv: list[str] | None = None) -> int:
|
|
|
80
80
|
return _cmd_cursor(args)
|
|
81
81
|
if args.command == "init":
|
|
82
82
|
from matrx.cli.bootstrap import run_init
|
|
83
|
-
run_init()
|
|
83
|
+
run_init(refresh=getattr(args, "refresh", False))
|
|
84
84
|
return 0
|
|
85
85
|
if args.command == "project":
|
|
86
86
|
return _cmd_project(args)
|
|
@@ -112,7 +112,15 @@ def _build_parser() -> argparse.ArgumentParser:
|
|
|
112
112
|
subparsers.add_parser("version")
|
|
113
113
|
subparsers.add_parser("status")
|
|
114
114
|
subparsers.add_parser("doctor")
|
|
115
|
-
subparsers.add_parser(
|
|
115
|
+
init_parser = subparsers.add_parser(
|
|
116
|
+
"init",
|
|
117
|
+
help="Initialize Matrx for an existing project (seeds system registry)",
|
|
118
|
+
)
|
|
119
|
+
init_parser.add_argument(
|
|
120
|
+
"--refresh",
|
|
121
|
+
action="store_true",
|
|
122
|
+
help="Force re-profile the codebase",
|
|
123
|
+
)
|
|
116
124
|
|
|
117
125
|
personal = subparsers.add_parser("personal")
|
|
118
126
|
personal_subparsers = personal.add_subparsers(dest="personal_command")
|
|
@@ -1112,20 +1120,31 @@ def _cmd_launch(tool: str, route: str | None, remainder: list[str]) -> int:
|
|
|
1112
1120
|
return 1
|
|
1113
1121
|
|
|
1114
1122
|
if effective_route == "matrx":
|
|
1115
|
-
|
|
1116
|
-
from matrx.cli.indexer import trigger_background_index
|
|
1117
|
-
|
|
1118
|
-
trigger_background_index(
|
|
1119
|
-
state,
|
|
1120
|
-
_resolved_matrx_key(state, os.environ),
|
|
1121
|
-
cwd=os.getcwd(),
|
|
1122
|
-
)
|
|
1123
|
-
except Exception:
|
|
1124
|
-
pass
|
|
1123
|
+
_trigger_index_for_launch(state, _resolved_matrx_key(state, os.environ))
|
|
1125
1124
|
|
|
1126
1125
|
return launch(plan)
|
|
1127
1126
|
|
|
1128
1127
|
|
|
1128
|
+
def _trigger_index_for_launch(state: dict, key: str) -> None:
    """Kick off project indexing around a tool launch; never raises.

    First-time projects are indexed synchronously (with a short user-facing
    message and a 30s cap) so the profile exists before the tool starts;
    already-inited projects get a silent background refresh.
    """
    try:
        from matrx.cli.indexer import (
            has_been_inited,
            resolve_project_id_for_launch,
            trigger_background_index,
            trigger_index,
        )

        cwd = os.getcwd()
        project_id = resolve_project_id_for_launch(state, key, cwd=cwd)
        if project_id and not has_been_inited(state, project_id):
            print("Profiling your codebase - this takes ~10 seconds...")
            trigger_index(state, key, cwd=cwd, blocking=True, timeout=30)
            print("Done. matrx is ready.\n")
        else:
            trigger_background_index(state, key, cwd=cwd)
    except Exception:
        pass
|
|
1146
|
+
|
|
1147
|
+
|
|
1129
1148
|
def _cmd_cursor(args) -> int:
|
|
1130
1149
|
import json as _json
|
|
1131
1150
|
import subprocess
|
|
@@ -1318,12 +1337,7 @@ def _cmd_cursor(args) -> int:
|
|
|
1318
1337
|
else:
|
|
1319
1338
|
print("[warn] Could not find Cursor executable. Open Cursor manually.", file=sys.stderr)
|
|
1320
1339
|
|
|
1321
|
-
|
|
1322
|
-
from matrx.cli.indexer import trigger_background_index
|
|
1323
|
-
|
|
1324
|
-
trigger_background_index(state, mx_key, cwd=os.getcwd())
|
|
1325
|
-
except Exception:
|
|
1326
|
-
pass
|
|
1340
|
+
_trigger_index_for_launch(state, mx_key)
|
|
1327
1341
|
|
|
1328
1342
|
print()
|
|
1329
1343
|
print("Cursor configured for MTRX — all traffic routed through MITM proxy.")
|