tanuki-telemetry 1.3.7 → 1.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/skills/monitor.md +87 -0
- package/src/dashboard.ts +27 -8
- package/src/db.ts +12 -11
package/package.json
CHANGED
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
---
|
|
2
|
+
description: |
|
|
3
|
+
Autonomous workspace monitoring. Checks inbox + workspace screens on a recurring interval and takes action when sessions complete — dispatches queued work, restarts stalled sessions, reports status.
|
|
4
|
+
allowed-tools: Bash, Read, Glob, Grep, CronCreate, CronDelete, AskUserQuestion, mcp__telemetry__*
|
|
5
|
+
---
|
|
6
|
+
|
|
7
|
+
# /monitor — Autonomous Workspace Monitoring
|
|
8
|
+
|
|
9
|
+
You are a monitoring daemon for the coordinator. You check workspace status periodically and take action when needed.
|
|
10
|
+
|
|
11
|
+
## Arguments
|
|
12
|
+
|
|
13
|
+
- No args → monitor all active workspaces every 5 minutes
|
|
14
|
+
- `<interval>` → custom interval (e.g., `2m`, `10m`)
|
|
15
|
+
- `stop` → cancel all monitoring crons
|
|
16
|
+
|
|
17
|
+
## On Invoke
|
|
18
|
+
|
|
19
|
+
### 1. Discover active workspaces
|
|
20
|
+
```bash
|
|
21
|
+
cmux list-workspaces
|
|
22
|
+
```
|
|
23
|
+
For each non-coordinator workspace, get the Claude surface:
|
|
24
|
+
```bash
|
|
25
|
+
cmux list-pane-surfaces --workspace "workspace:N"
|
|
26
|
+
```
|
|
27
|
+
|
|
28
|
+
### 2. Set up the monitoring cron
|
|
29
|
+
```
|
|
30
|
+
CronCreate({
|
|
31
|
+
cron: "*/5 * * * *", // or custom interval
|
|
32
|
+
prompt: "MONITOR CHECK: Read coordinator inbox and check all workspace screens",
|
|
33
|
+
recurring: true
|
|
34
|
+
})
|
|
35
|
+
```
|
|
36
|
+
|
|
37
|
+
### 3. On each cron fire
|
|
38
|
+
|
|
39
|
+
#### Check inbox
|
|
40
|
+
```bash
|
|
41
|
+
cat ~/.claude/coordinator-inbox.jsonl 2>/dev/null | tail -10
|
|
42
|
+
```
|
|
43
|
+
|
|
44
|
+
#### For each active workspace, check screen
|
|
45
|
+
```bash
|
|
46
|
+
cmux read-screen --workspace "workspace:N" --surface "surface:X" --lines 5
|
|
47
|
+
```
|
|
48
|
+
|
|
49
|
+
#### Determine status
|
|
50
|
+
| Signal | Status | Action |
|
|
51
|
+
|--------|--------|--------|
|
|
52
|
+
| `esc to interrupt` | Working | No action needed |
|
|
53
|
+
| `❯` prompt only (idle) | Finished or stuck | Check inbox for completion event |
|
|
54
|
+
| `session_end` in inbox | Completed | Dispatch next queued task if any |
|
|
55
|
+
| Same screen for 3+ checks | Possibly stuck | Nudge: "Are you still working? If stuck, /clear and retry." |
|
|
56
|
+
| Error visible on screen | Failed | Log error, notify coordinator |
|
|
57
|
+
|
|
58
|
+
#### If a workspace completed
|
|
59
|
+
1. Read the inbox for details
|
|
60
|
+
2. Check git log for new commits
|
|
61
|
+
3. If there's a queued task for that workspace, dispatch it
|
|
62
|
+
4. Clear processed inbox messages
|
|
63
|
+
5. Log status to telemetry
|
|
64
|
+
|
|
65
|
+
#### If a workspace seems stuck
|
|
66
|
+
1. Check if it's waiting on something (Inngest job, API call, build)
|
|
67
|
+
2. If idle for 3+ checks with no progress, send a nudge
|
|
68
|
+
3. If nudge doesn't help after 2 more checks, restart the session
|
|
69
|
+
|
|
70
|
+
### 4. Status report
|
|
71
|
+
Every 6 checks (30 minutes at the default 5-minute interval), output a summary:
|
|
72
|
+
```
|
|
73
|
+
MONITOR STATUS:
|
|
74
|
+
- ws:8 (CDD Marathon): Working, 3 commits since last report
|
|
75
|
+
- ws:11 (Import Fix): Completed, dispatched next task
|
|
76
|
+
- Inbox: 2 messages processed
|
|
77
|
+
```
|
|
78
|
+
|
|
79
|
+
## Stopping
|
|
80
|
+
To stop monitoring:
|
|
81
|
+
```
|
|
82
|
+
CronDelete <job-id>
|
|
83
|
+
```
|
|
84
|
+
Or invoke `/monitor stop`, which deletes all monitoring crons.
|
|
85
|
+
|
|
86
|
+
## Key principle
|
|
87
|
+
**Don't just observe — act.** If a workspace finishes and there's queued work, dispatch it immediately. If a workspace is stuck, nudge it. The coordinator shouldn't have to manually check — that's your job.
|
package/src/dashboard.ts
CHANGED
|
@@ -19,6 +19,8 @@ import type { Session, Event, Iteration, Screenshot, Artifact, Insight, PlanStep
|
|
|
19
19
|
import { listCoordinatorSessions, getCoordinatorState, getCoordinatorHistory } from "./coordinator.js";
|
|
20
20
|
import { fileURLToPath } from "url";
|
|
21
21
|
|
|
22
|
+
const DATA_DIR = process.env.DATA_DIR || "/data";
|
|
23
|
+
|
|
22
24
|
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
|
23
25
|
|
|
24
26
|
const app = express();
|
|
@@ -39,7 +41,7 @@ const SqliteStore = BetterSqlite3SessionStore(session);
|
|
|
39
41
|
// (the store hardcodes table name "sessions" which conflicts with our telemetry sessions table)
|
|
40
42
|
let sessionStoreInstance: InstanceType<typeof SqliteStore> | undefined;
|
|
41
43
|
if (AUTH_ENABLED) {
|
|
42
|
-
const dbPath = process.env.DB_PATH || "
|
|
44
|
+
const dbPath = process.env.DB_PATH || path.join(DATA_DIR, "telemetry.db");
|
|
43
45
|
const sessionDbPath = dbPath.replace(/\.db$/, "-sessions.db");
|
|
44
46
|
const sessionDb = new Database(sessionDbPath);
|
|
45
47
|
sessionDb.pragma("journal_mode = WAL");
|
|
@@ -97,13 +99,26 @@ app.get("/health", (_req, res) => {
|
|
|
97
99
|
res.json({ ok: true, version: TANUKI_VERSION });
|
|
98
100
|
});
|
|
99
101
|
|
|
102
|
+
/** True when `a` is strictly newer than `b` using major.minor.patch comparison. */
|
|
103
|
+
function isNewerVersion(a: string, b: string): boolean {
|
|
104
|
+
const pa = a.split(".").map(Number);
|
|
105
|
+
const pb = b.split(".").map(Number);
|
|
106
|
+
for (let i = 0; i < Math.max(pa.length, pb.length); i++) {
|
|
107
|
+
const va = pa[i] ?? 0;
|
|
108
|
+
const vb = pb[i] ?? 0;
|
|
109
|
+
if (va > vb) return true;
|
|
110
|
+
if (va < vb) return false;
|
|
111
|
+
}
|
|
112
|
+
return false;
|
|
113
|
+
}
|
|
114
|
+
|
|
100
115
|
// Version endpoint
|
|
101
116
|
app.get("/api/version", async (_req, res) => {
|
|
102
117
|
const latest = await getLatestNpmVersion();
|
|
103
118
|
res.json({
|
|
104
119
|
current: TANUKI_VERSION,
|
|
105
120
|
latest: latest ?? TANUKI_VERSION,
|
|
106
|
-
updateAvailable: latest ? latest
|
|
121
|
+
updateAvailable: latest ? isNewerVersion(latest, TANUKI_VERSION) : false,
|
|
107
122
|
});
|
|
108
123
|
});
|
|
109
124
|
|
|
@@ -122,7 +137,7 @@ const upload = multer({
|
|
|
122
137
|
const d = getDb();
|
|
123
138
|
const session = d.prepare("SELECT worktree_name FROM sessions WHERE id = ?").get(sessionId) as { worktree_name: string } | undefined;
|
|
124
139
|
const dirName = session?.worktree_name || sessionId;
|
|
125
|
-
const dir = path.join(
|
|
140
|
+
const dir = path.join(DATA_DIR, dirName, "screenshots");
|
|
126
141
|
fs.mkdirSync(dir, { recursive: true });
|
|
127
142
|
cb(null, dir);
|
|
128
143
|
},
|
|
@@ -488,7 +503,10 @@ app.get("/api/artifacts/by-id/:id", (req, res) => {
|
|
|
488
503
|
const candidates = [
|
|
489
504
|
artifact.stored_path,
|
|
490
505
|
artifact.file_path,
|
|
491
|
-
artifact.file_path?.replace(/^.*?outputs\//, "/
|
|
506
|
+
artifact.file_path?.replace(/^.*?outputs\//, DATA_DIR + "/"),
|
|
507
|
+
artifact.file_path?.replace(/^.*?\.tanuki\/data\//, DATA_DIR + "/"),
|
|
508
|
+
artifact.file_path?.replace(/^.*?outputs\//, "/outputs/"),
|
|
509
|
+
artifact.file_path?.replace(/^.*?outputs/, "/outputs"),
|
|
492
510
|
].filter(Boolean) as string[];
|
|
493
511
|
|
|
494
512
|
for (const candidate of candidates) {
|
|
@@ -501,7 +519,7 @@ app.get("/api/artifacts/by-id/:id", (req, res) => {
|
|
|
501
519
|
}
|
|
502
520
|
}
|
|
503
521
|
|
|
504
|
-
res.status(404).json({ error: "Artifact file not found on disk" });
|
|
522
|
+
res.status(404).json({ error: "Artifact file not found on disk", candidates });
|
|
505
523
|
});
|
|
506
524
|
|
|
507
525
|
// Serve screenshot by database ID — self-contained, doesn't need volume path mapping
|
|
@@ -526,7 +544,8 @@ app.get("/api/screenshots/by-id/:id", (req, res) => {
|
|
|
526
544
|
const candidates = [
|
|
527
545
|
screenshot.stored_path,
|
|
528
546
|
screenshot.file_path,
|
|
529
|
-
screenshot.file_path?.replace(/^.*?outputs\//, "/
|
|
547
|
+
screenshot.file_path?.replace(/^.*?outputs\//, DATA_DIR + "/"),
|
|
548
|
+
screenshot.file_path?.replace(/^.*?\.tanuki\/data\//, DATA_DIR + "/"),
|
|
530
549
|
].filter(Boolean) as string[];
|
|
531
550
|
|
|
532
551
|
for (const candidate of candidates) {
|
|
@@ -602,11 +621,11 @@ app.get("/api/screenshots/*", (req, res) => {
|
|
|
602
621
|
if (requestedPath.startsWith("/")) {
|
|
603
622
|
filePath = requestedPath;
|
|
604
623
|
} else {
|
|
605
|
-
filePath = path.join(
|
|
624
|
+
filePath = path.join(DATA_DIR, requestedPath);
|
|
606
625
|
}
|
|
607
626
|
|
|
608
627
|
const resolved = path.resolve(filePath);
|
|
609
|
-
if (!resolved.startsWith(
|
|
628
|
+
if (!resolved.startsWith(DATA_DIR)) {
|
|
610
629
|
res.status(403).json({ error: "Access denied" });
|
|
611
630
|
return;
|
|
612
631
|
}
|
package/src/db.ts
CHANGED
|
@@ -14,7 +14,8 @@ import type {
|
|
|
14
14
|
WalkthroughScreenshot,
|
|
15
15
|
} from "./types.js";
|
|
16
16
|
|
|
17
|
-
const
|
|
17
|
+
const DATA_DIR = process.env.DATA_DIR || "/data";
|
|
18
|
+
const DB_PATH = process.env.DB_PATH || path.join(DATA_DIR, "telemetry.db");
|
|
18
19
|
|
|
19
20
|
let db: Database.Database;
|
|
20
21
|
|
|
@@ -256,15 +257,15 @@ function initTables(): void {
|
|
|
256
257
|
`);
|
|
257
258
|
|
|
258
259
|
// Ensure storage directories exist
|
|
259
|
-
const screenshotsDir = "
|
|
260
|
+
const screenshotsDir = path.join(DATA_DIR, "screenshots");
|
|
260
261
|
if (!fs.existsSync(screenshotsDir)) {
|
|
261
262
|
fs.mkdirSync(screenshotsDir, { recursive: true });
|
|
262
263
|
}
|
|
263
|
-
const artifactsDir = "
|
|
264
|
+
const artifactsDir = path.join(DATA_DIR, "artifacts");
|
|
264
265
|
if (!fs.existsSync(artifactsDir)) {
|
|
265
266
|
fs.mkdirSync(artifactsDir, { recursive: true });
|
|
266
267
|
}
|
|
267
|
-
const walkthroughScreenshotsDir = "
|
|
268
|
+
const walkthroughScreenshotsDir = path.join(DATA_DIR, "walkthrough-screenshots");
|
|
268
269
|
if (!fs.existsSync(walkthroughScreenshotsDir)) {
|
|
269
270
|
fs.mkdirSync(walkthroughScreenshotsDir, { recursive: true });
|
|
270
271
|
}
|
|
@@ -404,15 +405,15 @@ export function insertScreenshot(
|
|
|
404
405
|
const ext = path.extname(file_path) || ".png";
|
|
405
406
|
const storedName = `${session_id}_${screenshotId}${ext}`;
|
|
406
407
|
const thumbName = `${session_id}_${screenshotId}_thumb${ext}`;
|
|
407
|
-
const storedPath = path.join(
|
|
408
|
-
const thumbPath = path.join(
|
|
408
|
+
const storedPath = path.join(DATA_DIR, "screenshots", storedName);
|
|
409
|
+
const thumbPath = path.join(DATA_DIR, "screenshots", thumbName);
|
|
409
410
|
|
|
410
411
|
try {
|
|
411
412
|
// The file_path might be a host path — try multiple locations
|
|
412
413
|
const candidates = [
|
|
413
414
|
file_path,
|
|
414
|
-
// Map host path into
|
|
415
|
-
file_path.replace(/^.*?outputs\//, "/
|
|
415
|
+
// Map host path into data dir: /Users/.../outputs/foo → <DATA_DIR>/foo
|
|
416
|
+
file_path.replace(/^.*?outputs\//, DATA_DIR + "/"),
|
|
416
417
|
];
|
|
417
418
|
|
|
418
419
|
for (const candidate of candidates) {
|
|
@@ -491,12 +492,12 @@ export function insertArtifact(
|
|
|
491
492
|
|
|
492
493
|
// Try to copy the file into /data/artifacts/ for self-contained serving
|
|
493
494
|
const storedName = `${session_id}_${artifactId}${ext}`;
|
|
494
|
-
const storedPath = path.join(
|
|
495
|
+
const storedPath = path.join(DATA_DIR, "artifacts", storedName);
|
|
495
496
|
|
|
496
497
|
try {
|
|
497
498
|
const candidates = [
|
|
498
499
|
file_path,
|
|
499
|
-
file_path.replace(/^.*?outputs\//, "/
|
|
500
|
+
file_path.replace(/^.*?outputs\//, DATA_DIR + "/"),
|
|
500
501
|
];
|
|
501
502
|
|
|
502
503
|
for (const candidate of candidates) {
|
|
@@ -959,7 +960,7 @@ export function insertWalkthroughScreenshot(
|
|
|
959
960
|
|
|
960
961
|
const safeName = name.replace(/[^a-zA-Z0-9_-]/g, "_");
|
|
961
962
|
const filename = `wt_${walkthrough_id}_${Date.now()}_${safeName}.png`;
|
|
962
|
-
const storedPath = path.join(
|
|
963
|
+
const storedPath = path.join(DATA_DIR, "walkthrough-screenshots", filename);
|
|
963
964
|
|
|
964
965
|
if (file_path_input && fs.existsSync(file_path_input)) {
|
|
965
966
|
// Copy from file path
|