vibeusage 0.6.2 → 0.6.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "vibeusage",
3
- "version": "0.6.2",
3
+ "version": "0.6.4",
4
4
  "description": "Codex CLI token usage tracker (macOS-first, notify-driven).",
5
5
  "license": "MIT",
6
6
  "repository": {
@@ -49,7 +49,9 @@
49
49
  "validate:ui-hardcode": "node scripts/ops/validate-ui-hardcode.cjs"
50
50
  },
51
51
  "dependencies": {
52
- "@insforge/sdk": "1.2.2"
52
+ "@insforge/sdk": "1.2.2",
53
+ "proper-lockfile": "^4.1.2",
54
+ "yaml": "^2.8.3"
53
55
  },
54
56
  "devDependencies": {
55
57
  "@sourcegraph/scip-typescript": "^0.3.6",
@@ -60,5 +62,8 @@
60
62
  ],
61
63
  "engines": {
62
64
  "node": "20.x"
63
- }
65
+ },
66
+ "bundleDependencies": [
67
+ "@insforge/sdk"
68
+ ]
64
69
  }
@@ -146,8 +146,11 @@ function runAuditTokens({ opts, config }) {
146
146
  if (result.exceedsThreshold) {
147
147
  process.stderr.write(
148
148
  `\nFAIL drift ${result.maxDriftPct.toFixed(2)}% exceeds threshold ${result.thresholdPct}%.\n` +
149
- `If vibeusage >= 0.5.0, scrub the Claude/OpenCode cursor block in\n` +
150
- `~/.vibeusage/tracker/cursors.json and rerun \`vibeusage sync --drain\`.\n`,
149
+ `Rebuild this source from its local session files: ` +
150
+ `\`vibeusage sync --rebuild ${result.source}\`\n` +
151
+ `(That clears the source's file/bucket/group cursors atomically and ` +
152
+ `re-parses every session — fixing drift caused by interrupted uploads ` +
153
+ `or partial cursor edits.)\n`,
151
154
  );
152
155
  process.exitCode = 1;
153
156
  }
@@ -176,9 +179,12 @@ function runAuditTokensAll({ opts, config }) {
176
179
  anyHardError = true;
177
180
  }
178
181
  if (result.ok && result.exceedsThreshold) anyExceeds = true;
179
- // no-local-sessions is informational, not a hard error; other non-ok states
180
- // (cannot-resolve-user-id, insforge-db-query-failed, etc.) count as errors.
181
- if (!result.ok && result.error !== "no-local-sessions") anyHardError = true;
182
+ // no-local-sessions and audit-not-applicable are informational, not hard
183
+ // errors. The latter means the source has no independent ground-truth to
184
+ // compare against the DB (e.g. hermes plugin-ledger is the same data that
185
+ // already feeds the DB).
186
+ const informationalErrors = new Set(["no-local-sessions", "audit-not-applicable"]);
187
+ if (!result.ok && !informationalErrors.has(result.error)) anyHardError = true;
182
188
  perSource.push(result);
183
189
  }
184
190
 
@@ -204,8 +210,14 @@ function runAuditTokensAll({ opts, config }) {
204
210
  `${r.source.padEnd(12)} ${statusText.padEnd(22)} ${drift.padStart(10)} ${String(r.filesScanned).padStart(6)} ${String(r.usageLines).padStart(6)}\n`,
205
211
  );
206
212
  } else {
207
- const statusText =
208
- r.error === "no-local-sessions" ? "no local sessions" : `ERR ${r.error}`;
213
+ let statusText;
214
+ if (r.error === "no-local-sessions") {
215
+ statusText = "no local sessions";
216
+ } else if (r.error === "audit-not-applicable") {
217
+ statusText = "N/A";
218
+ } else {
219
+ statusText = `ERR ${r.error}`;
220
+ }
209
221
  process.stdout.write(
210
222
  `${r.source.padEnd(12)} ${statusText.padEnd(22)} ${"—".padStart(10)} ${"—".padStart(6)} ${"—".padStart(6)}\n`,
211
223
  );
@@ -4,6 +4,7 @@ const fs = require("node:fs/promises");
4
4
  const cp = require("node:child_process");
5
5
 
6
6
  const { ensureDir, readJson, writeJson, openLock } = require("../lib/fs");
7
+ const { scrubSourceCursors, listSupportedSources } = require("../lib/cursor-scrub");
7
8
  const {
8
9
  listRolloutFiles,
9
10
  listClaudeProjectFiles,
@@ -65,6 +66,30 @@ async function cmdSync(argv) {
65
66
 
66
67
  const config = await readJson(configPath);
67
68
  const cursors = (await readJson(cursorsPath)) || { version: 1, files: {}, updatedAt: null };
69
+
70
+ if (opts.rebuild) {
71
+ const scrub = scrubSourceCursors({
72
+ cursors,
73
+ sourceId: opts.rebuild,
74
+ home,
75
+ env: process.env,
76
+ });
77
+ // Persist the cleared cursors before parsing begins. If we crash mid-
78
+ // rebuild, the next sync resumes from a clean state for this source
79
+ // rather than re-accumulating onto cached totals (the original bug).
80
+ await writeJson(cursorsPath, cursors);
81
+ process.stdout.write(
82
+ `Rebuilding source=${scrub.sourceId}: cleared ${scrub.filesRemoved} file ` +
83
+ `cursors, ${scrub.bucketsRemoved} hourly buckets, ` +
84
+ `${scrub.projectBucketsRemoved} project buckets, ` +
85
+ `${scrub.groupsRemoved} groupQueued entries` +
86
+ (scrub.extraCursorsCleared.length
87
+ ? `, ${scrub.extraCursorsCleared.join("+")}`
88
+ : "") +
89
+ `.\n`,
90
+ );
91
+ }
92
+
68
93
  const uploadThrottle = normalizeUploadState(await readJson(uploadThrottlePath));
69
94
  let uploadThrottleState = uploadThrottle;
70
95
 
@@ -445,6 +470,7 @@ function parseArgs(argv) {
445
470
  fromRetry: false,
446
471
  fromOpenclaw: false,
447
472
  drain: false,
473
+ rebuild: null,
448
474
  };
449
475
  for (let i = 0; i < argv.length; i++) {
450
476
  const a = argv[i];
@@ -453,7 +479,35 @@ function parseArgs(argv) {
453
479
  else if (a === "--from-retry") out.fromRetry = true;
454
480
  else if (a === "--from-openclaw") out.fromOpenclaw = true;
455
481
  else if (a === "--drain") out.drain = true;
456
- else throw new Error(`Unknown option: ${a}`);
482
+ else if (a === "--rebuild") {
483
+ const v = argv[++i];
484
+ if (!v || v.startsWith("--")) {
485
+ throw new Error("--rebuild requires a source id (e.g. --rebuild claude)");
486
+ }
487
+ out.rebuild = v;
488
+ } else if (a.startsWith("--rebuild=")) {
489
+ const v = a.slice("--rebuild=".length);
490
+ if (!v) throw new Error("--rebuild= requires a source id");
491
+ out.rebuild = v;
492
+ } else throw new Error(`Unknown option: ${a}`);
493
+ }
494
+ if (out.rebuild) {
495
+ const supported = listSupportedSources();
496
+ if (!supported.includes(out.rebuild)) {
497
+ throw new Error(
498
+ `--rebuild: unknown source '${out.rebuild}'. Supported: ${supported.join(", ")}`,
499
+ );
500
+ }
501
+ // A rebuild always wants a full upload pass, otherwise the freshly-rebuilt
502
+ // buckets would sit in queue.jsonl behind the default 10-batch cap.
503
+ out.drain = true;
504
+ // OpenClaw is the only source whose ledger parsing is gated behind an
505
+ // explicit flag (see sync flow's `opts.fromOpenclaw ? parseOpenclaw... : noop`).
506
+ // A `--rebuild=openclaw` that doesn't also turn that flag on would scrub
507
+ // the OpenClaw cursors and persist the cleared state without ever re-
508
+ // aggregating from the ledger — leaving totals stale until a later
509
+ // plugin-triggered sync. Force the flag so rebuild actually rebuilds.
510
+ if (out.rebuild === "openclaw") out.fromOpenclaw = true;
457
511
  }
458
512
  return out;
459
513
  }
@@ -480,11 +534,11 @@ async function parseHermesUsageLedger({ trackerDir, cursors, queuePath }) {
480
534
  typeof event.model === "string" && event.model.trim() ? event.model.trim() : "unknown";
481
535
  const source = "hermes";
482
536
  const delta = {
483
- input_tokens: Math.max(0, Number(event.input_tokens || 0)),
484
- cached_input_tokens: Math.max(
537
+ input_tokens: Math.max(
485
538
  0,
486
- Number(event.cache_read_tokens || 0) + Number(event.cache_write_tokens || 0),
539
+ Number(event.input_tokens || 0) + Number(event.cache_write_tokens || 0),
487
540
  ),
541
+ cached_input_tokens: Math.max(0, Number(event.cache_read_tokens || 0)),
488
542
  output_tokens: Math.max(0, Number(event.output_tokens || 0)),
489
543
  reasoning_output_tokens: Math.max(0, Number(event.reasoning_tokens || 0)),
490
544
  total_tokens: Math.max(0, Number(event.total_tokens || 0)),
@@ -500,6 +554,19 @@ async function parseHermesUsageLedger({ trackerDir, cursors, queuePath }) {
500
554
  continue;
501
555
  }
502
556
 
557
+ // Fallback: if all fine-grained channels are 0 but total_tokens > 0,
558
+ // route total into output so the event isn't silently dropped.
559
+ // This handles upstream plugins that only report total_tokens.
560
+ if (
561
+ delta.input_tokens === 0 &&
562
+ delta.cached_input_tokens === 0 &&
563
+ delta.output_tokens === 0 &&
564
+ delta.reasoning_output_tokens === 0 &&
565
+ delta.total_tokens > 0
566
+ ) {
567
+ delta.output_tokens = delta.total_tokens;
568
+ }
569
+
503
570
  const bucket = getHourlyBucket(hourlyState, source, model, bucketStart);
504
571
  addTotals(bucket.totals, delta);
505
572
  touchedBuckets.add(bucketKey(source, model, bucketStart));
package/src/lib/cursor-scrub.js ADDED
@@ -0,0 +1,164 @@
1
+ "use strict";
2
+
3
+ const path = require("node:path");
4
+
5
+ // scrubSourceCursors clears every cursor field that, if left in place, would
6
+ // cause a re-parse of a source's session files to *accumulate* into
7
+ // previously-uploaded buckets instead of *rebuilding* them from scratch.
8
+ //
9
+ // This is the helper that fixes the bug behind the recent "DB tokens doubled"
10
+ // incident: clearing only `cursors.files` causes the parser to re-read the
11
+ // jsonl files and add their token totals on top of whatever was still cached
12
+ // in `cursors.hourly.buckets`. The result is buckets at roughly 2x the
13
+ // ground truth.
14
+ //
15
+ // Four cursor surfaces must be cleared in lockstep for a source rebuild to
16
+ // be correct:
17
+ // 1. cursors.files entries whose path lives under that source's session
18
+ // root (so the parser re-reads each file from offset 0).
19
+ // 2. cursors.hourly.buckets keyed `<source>|<model>|<hour>` (so per-bucket
20
+ // totals restart at zero before re-aggregation).
21
+ // 3. cursors.hourly.groupQueued keys for that source (so the next sync
22
+ // re-enqueues each touched bucket to queue.jsonl for upload).
23
+ // 4. cursors.projectHourly.buckets keyed `<project>|<source>|<hour>`
24
+ // (project-scoped totals are aggregated independently from the global
25
+ // hourly state and would otherwise stay doubled in the dashboard's
26
+ // per-project views).
27
+ //
28
+ // `cursors.projectHourly.projects` holds project metadata (git remotes,
29
+ // display names) and is intentionally preserved — it carries no token
30
+ // totals, just identity.
31
+ //
32
+ // Sources that don't use `cursors.files` (sqlite-backed opencode, ledger-
33
+ // backed hermes/openclaw) carry their progress in dedicated cursor fields;
34
+ // those are reset directly.
35
+
36
+ const SOURCES = {
37
+ claude: {
38
+ sessionRoot: ({ home }) => path.join(home, ".claude", "projects"),
39
+ },
40
+ codex: {
41
+ sessionRoot: ({ home, env }) =>
42
+ path.join(env.CODEX_HOME || path.join(home, ".codex"), "sessions"),
43
+ },
44
+ "every-code": {
45
+ sessionRoot: ({ home, env }) =>
46
+ path.join(env.CODE_HOME || path.join(home, ".code"), "sessions"),
47
+ },
48
+ gemini: {
49
+ sessionRoot: ({ home, env }) =>
50
+ path.join(env.GEMINI_HOME || path.join(home, ".gemini"), "tmp"),
51
+ },
52
+ kimi: {
53
+ sessionRoot: ({ home, env }) =>
54
+ path.join(env.KIMI_HOME || path.join(home, ".kimi"), "sessions"),
55
+ },
56
+ opencode: {
57
+ extraCursorKeys: ["opencode", "opencodeSqlite"],
58
+ },
59
+ hermes: {
60
+ extraCursorKeys: ["hermesLedger"],
61
+ },
62
+ openclaw: {
63
+ extraCursorKeys: ["openclawLedger"],
64
+ },
65
+ };
66
+
67
+ function listSupportedSources() {
68
+ return Object.keys(SOURCES);
69
+ }
70
+
71
+ function scrubSourceCursors({ cursors, sourceId, home, env = process.env }) {
72
+ if (!cursors || typeof cursors !== "object") {
73
+ throw new Error("scrubSourceCursors: cursors must be an object");
74
+ }
75
+ const config = SOURCES[sourceId];
76
+ if (!config) {
77
+ throw new Error(
78
+ `scrubSourceCursors: unknown sourceId '${sourceId}'. Supported: ${listSupportedSources().join(", ")}`,
79
+ );
80
+ }
81
+
82
+ const result = {
83
+ sourceId,
84
+ filesRemoved: 0,
85
+ bucketsRemoved: 0,
86
+ groupsRemoved: 0,
87
+ projectBucketsRemoved: 0,
88
+ extraCursorsCleared: [],
89
+ };
90
+
91
+ // 1) cursors.files — strip every entry whose path lives under this source's
92
+ // session root, so the parser re-reads them from byte 0.
93
+ if (config.sessionRoot && cursors.files && typeof cursors.files === "object") {
94
+ const prefix = config.sessionRoot({ home, env });
95
+ for (const key of Object.keys(cursors.files)) {
96
+ if (typeof key !== "string") continue;
97
+ if (key.startsWith(prefix)) {
98
+ delete cursors.files[key];
99
+ result.filesRemoved += 1;
100
+ }
101
+ }
102
+ }
103
+
104
+ // 2) cursors.hourly.buckets — strip every bucket keyed for this source so
105
+ // its totals restart at zero before re-aggregation.
106
+ if (cursors.hourly && typeof cursors.hourly === "object") {
107
+ const bucketPrefix = `${sourceId}|`;
108
+ if (cursors.hourly.buckets && typeof cursors.hourly.buckets === "object") {
109
+ for (const key of Object.keys(cursors.hourly.buckets)) {
110
+ if (typeof key === "string" && key.startsWith(bucketPrefix)) {
111
+ delete cursors.hourly.buckets[key];
112
+ result.bucketsRemoved += 1;
113
+ }
114
+ }
115
+ }
116
+ // 3) cursors.hourly.groupQueued — strip per-source enqueue records so
117
+ // the next sync re-enqueues each touched bucket for upload.
118
+ if (cursors.hourly.groupQueued && typeof cursors.hourly.groupQueued === "object") {
119
+ for (const key of Object.keys(cursors.hourly.groupQueued)) {
120
+ if (typeof key === "string" && key.startsWith(bucketPrefix)) {
121
+ delete cursors.hourly.groupQueued[key];
122
+ result.groupsRemoved += 1;
123
+ }
124
+ }
125
+ }
126
+ }
127
+
128
+ // 4) cursors.projectHourly.buckets — keyed `<project_key>|<source>|<hour>`.
129
+ // Strip the buckets where the middle segment matches this source, leaving
130
+ // every other source's project-scoped totals (and the projects metadata
131
+ // map) untouched.
132
+ if (
133
+ cursors.projectHourly &&
134
+ typeof cursors.projectHourly === "object" &&
135
+ cursors.projectHourly.buckets &&
136
+ typeof cursors.projectHourly.buckets === "object"
137
+ ) {
138
+ for (const key of Object.keys(cursors.projectHourly.buckets)) {
139
+ if (typeof key !== "string") continue;
140
+ const parts = key.split("|");
141
+ if (parts.length >= 2 && parts[1] === sourceId) {
142
+ delete cursors.projectHourly.buckets[key];
143
+ result.projectBucketsRemoved += 1;
144
+ }
145
+ }
146
+ }
147
+
148
+ // 5) Source-specific top-level cursor fields (opencode sqlite progress,
149
+ // hermes/openclaw ledger offsets). Resetting these is what makes a
150
+ // rebuild work for non-file sources.
151
+ for (const key of config.extraCursorKeys || []) {
152
+ if (key in cursors) {
153
+ delete cursors[key];
154
+ result.extraCursorsCleared.push(key);
155
+ }
156
+ }
157
+
158
+ return result;
159
+ }
160
+
161
+ module.exports = {
162
+ scrubSourceCursors,
163
+ listSupportedSources,
164
+ };
package/src/lib/fs.js CHANGED
@@ -1,6 +1,12 @@
1
1
  const fs = require("node:fs/promises");
2
2
  const path = require("node:path");
3
3
 
4
+ // proper-lockfile is required lazily inside openLock(): some callers copy
5
+ // src/lib/fs.js into sandboxes that have no node_modules (e.g. the openclaw
6
+ // session plugin test, which materializes src/ under a tmp dir to test the
7
+ // ledger). Those callers only use ensureDir/readJson/etc and would crash on
8
+ // a top-level require.
9
+
4
10
  async function ensureDir(p) {
5
11
  await fs.mkdir(p, { recursive: true });
6
12
  }
@@ -47,23 +53,132 @@ async function chmod600IfPossible(filePath) {
47
53
  } catch (_e) {}
48
54
  }
49
55
 
50
- async function openLock(lockPath, { quietIfLocked }) {
56
+ // proper-lockfile gives us atomic mkdir-based mutual exclusion plus a heart-
57
+ // beat mechanism that auto-recovers from orphan locks without TOCTOU races:
58
+ //
59
+ // - The holder process refreshes the lock-directory's mtime every `update`
60
+ // ms. As long as that interval keeps running, the lock is "fresh".
61
+ // - Any acquirer that finds the existing lock with mtime older than `stale`
62
+ // ms takes it over via a compare-and-swap that is safe under concurrent
63
+ // attempts (the library's own contract).
64
+ // - If the holder dies (crash, SIGKILL, reboot) the heartbeat stops; the
65
+ // next acquirer sees the stale mtime and recovers automatically.
66
+ //
67
+ // We deliberately set `stale` larger than the default to give a working sync
68
+ // some headroom against transient event-loop pauses (large JSON.parse, GC).
69
+ // We pass `realpath: false` because the lock target may not exist as a file
70
+ // — proper-lockfile creates the lock-directory at `lockPath` directly.
71
+ const LOCK_STALE_MS = 60_000;
72
+ const LOCK_UPDATE_MS = 10_000;
73
+
74
+ async function openLock(lockPath, { quietIfLocked } = {}) {
75
+ // Lazy require: see top-of-file note about sandboxed callers.
76
+ const lockfile = require("proper-lockfile");
77
+
78
+ // Migration path: pre-proper-lockfile versions of vibeusage created the
79
+ // lock as a regular *file* (fs.open with "wx"). proper-lockfile creates
80
+ // it as a *directory* (mkdir). If we hand a stale legacy file off to
81
+ // proper-lockfile, its mkdir will EEXIST and its internal rmdir-fallback
82
+ // will then ENOTDIR, throwing instead of returning ELOCKED. Detect and
83
+ // resolve that mismatch up front.
84
+ const migration = await migrateLegacyLockFile(lockPath);
85
+ if (migration === "yield-to-legacy-holder") {
86
+ if (!quietIfLocked) process.stdout.write("Another sync is already running.\n");
87
+ return null;
88
+ }
89
+
90
+ let release;
51
91
  try {
52
- const handle = await fs.open(lockPath, "wx");
53
- return {
54
- async release() {
55
- await handle.close().catch(() => {});
56
- },
57
- };
92
+ release = await lockfile.lock(lockPath, {
93
+ lockfilePath: lockPath,
94
+ realpath: false,
95
+ stale: LOCK_STALE_MS,
96
+ update: LOCK_UPDATE_MS,
97
+ retries: 0,
98
+ });
58
99
  } catch (e) {
59
- if (e && e.code === "EEXIST") {
60
- if (!quietIfLocked) {
61
- process.stdout.write("Another sync is already running.\n");
62
- }
100
+ if (e && e.code === "ELOCKED") {
101
+ if (!quietIfLocked) process.stdout.write("Another sync is already running.\n");
63
102
  return null;
64
103
  }
65
104
  throw e;
66
105
  }
106
+ return {
107
+ async release() {
108
+ try {
109
+ await release();
110
+ } catch (_e) {
111
+ // Best-effort cleanup. proper-lockfile throws if the lock was already
112
+ // compromised (e.g. taken over by another process while we were
113
+ // running) — there is nothing useful to do at that point.
114
+ }
115
+ },
116
+ };
117
+ }
118
+
119
+ // Detect a leftover lock file from the previous wx-based scheme. Three cases:
120
+ // - "orphan" — proven dead by PID liveness; safe to unlink and migrate.
121
+ // - "alive" — recorded PID is still running; yield with the standard
122
+ // "another sync running" UX.
123
+ // - "indeterminate" — empty / corrupt / unreadable file. The original
124
+ // production openLock wrote a *zero-byte* file (it never
125
+ // called writeFile after fs.open(path, "wx")), so this
126
+ // is the **expected** legacy format. We cannot prove
127
+ // its holder is dead and we MUST NOT auto-delete: a
128
+ // still-running legacy sync would lose its lock and a
129
+ // new-format sync would start in parallel. Yield and
130
+ // print an actionable manual-cleanup notice.
131
+ async function migrateLegacyLockFile(lockPath) {
132
+ let stat;
133
+ try {
134
+ stat = await fs.lstat(lockPath);
135
+ } catch (e) {
136
+ if (e && e.code === "ENOENT") return "no-legacy";
137
+ throw e;
138
+ }
139
+ if (stat.isDirectory()) return "no-legacy"; // already in proper-lockfile format
140
+
141
+ const verdict = await classifyLegacyFileLock(lockPath);
142
+ if (verdict === "orphan") {
143
+ await fs.unlink(lockPath).catch(() => {});
144
+ return "migrated";
145
+ }
146
+ if (verdict === "indeterminate") {
147
+ process.stderr.write(
148
+ `vibeusage: legacy sync.lock at ${lockPath} carries no PID payload, ` +
149
+ `so we cannot prove its owner is dead. Auto-deletion is unsafe — a ` +
150
+ `still-running legacy sync would lose its lock. If no legacy ` +
151
+ `vibeusage sync is actually running, remove it manually: rm ${JSON.stringify(
152
+ lockPath,
153
+ )}\n`,
154
+ );
155
+ }
156
+ return "yield-to-legacy-holder";
157
+ }
158
+
159
+ async function classifyLegacyFileLock(lockPath) {
160
+ let raw;
161
+ try {
162
+ raw = await fs.readFile(lockPath, "utf8");
163
+ } catch (_e) {
164
+ return "indeterminate";
165
+ }
166
+ if (!raw) return "indeterminate";
167
+ let parsed;
168
+ try {
169
+ parsed = JSON.parse(raw);
170
+ } catch (_e) {
171
+ return "indeterminate";
172
+ }
173
+ const pid = parsed?.pid;
174
+ if (!Number.isFinite(pid)) return "indeterminate";
175
+ try {
176
+ process.kill(pid, 0);
177
+ return "alive";
178
+ } catch (e) {
179
+ if (e && e.code === "ESRCH") return "orphan";
180
+ return "alive"; // EPERM = pid exists but belongs to another user
181
+ }
67
182
  }
68
183
 
69
184
  module.exports = {
package/src/lib/hermes-config.js CHANGED
@@ -4,6 +4,11 @@ const fs = require("node:fs/promises");
4
4
  const fssync = require("node:fs");
5
5
 
6
6
  const { ensureDir, writeFileAtomic } = require("./fs");
7
+ const {
8
+ addEnabledPlugin,
9
+ removeEnabledPlugin,
10
+ probeEnabledPlugin,
11
+ } = require("./hermes-plugins-config");
7
12
 
8
13
  const HERMES_PLUGIN_ID = "vibeusage";
9
14
  const HERMES_PLUGIN_MARKER = "VIBEUSAGE_HERMES_PLUGIN";
@@ -78,11 +83,33 @@ async function probeHermesPlugin({ home = os.homedir(), env = process.env, track
78
83
  };
79
84
  }
80
85
 
81
- const configured = yamlState.value === expectedYaml && initState.value === expectedInit;
86
+ const filesMatch = yamlState.value === expectedYaml && initState.value === expectedInit;
87
+
88
+ // Hermes user plugins are opt-in: the hooks only fire if vibeusage is in
89
+ // plugins.enabled. Files-on-disk are necessary but NOT sufficient — without
90
+ // the allow-list entry the ledger stays empty and we silently report 0
91
+ // tokens forever (see hermes_cli/plugins.py:_get_enabled_plugins).
92
+ const enabledState = await probeEnabledPlugin({ home, env, name: HERMES_PLUGIN_ID });
93
+ const isEnabled = enabledState.state === "enabled";
94
+
95
+ if (!filesMatch || !isEnabled) {
96
+ return {
97
+ configured: false,
98
+ status: "drifted",
99
+ detail: "Run vibeusage init to reconcile plugin",
100
+ filesMatch,
101
+ enabled: isEnabled,
102
+ enabledState: enabledState.state,
103
+ ...paths,
104
+ };
105
+ }
106
+
82
107
  return {
83
- configured,
84
- status: configured ? "ready" : "drifted",
85
- detail: configured ? "Plugin installed" : "Run vibeusage init to reconcile plugin",
108
+ configured: true,
109
+ status: "ready",
110
+ detail: "Plugin installed",
111
+ filesMatch: true,
112
+ enabled: true,
86
113
  ...paths,
87
114
  };
88
115
  }
@@ -93,30 +120,76 @@ async function installHermesPlugin({ home = os.homedir(), env = process.env, tra
93
120
  const nextInit = buildHermesPluginInit({ ledgerPath: paths.ledgerPath });
94
121
  const currentYaml = await fs.readFile(paths.pluginYamlPath, "utf8").catch(() => null);
95
122
  const currentInit = await fs.readFile(paths.pluginInitPath, "utf8").catch(() => null);
96
- const changed = currentYaml !== nextYaml || currentInit !== nextInit;
123
+ const filesChanged = currentYaml !== nextYaml || currentInit !== nextInit;
97
124
 
98
125
  await ensureDir(paths.pluginDir);
99
126
  await writeFileAtomic(paths.pluginYamlPath, nextYaml);
100
127
  await writeFileAtomic(paths.pluginInitPath, nextInit);
101
- return { configured: true, changed, ...paths };
128
+
129
+ // Also opt the plugin into Hermes' allow-list. Hermes loads user plugins
130
+ // only when their name appears in plugins.enabled (post v20→v21 migration).
131
+ // Without this step the hooks never fire and the ledger stays empty.
132
+ let enabledChanged = false;
133
+ let enableSkippedReason = null;
134
+ try {
135
+ const enableResult = await addEnabledPlugin({ home, env, name: HERMES_PLUGIN_ID });
136
+ enabledChanged = Boolean(enableResult.changed);
137
+ } catch (err) {
138
+ // Refuse to clobber malformed user config; surface the reason but don't
139
+ // throw — the file install half still has value, and the user can rerun
140
+ // init after fixing config.yaml.
141
+ enableSkippedReason = err && err.code ? err.code : "enable-failed";
142
+ }
143
+
144
+ return {
145
+ configured: enableSkippedReason === null,
146
+ changed: filesChanged || enabledChanged,
147
+ filesChanged,
148
+ enabledChanged,
149
+ enableSkippedReason,
150
+ ...paths,
151
+ };
102
152
  }
103
153
 
104
154
  async function removeHermesPlugin({ home = os.homedir(), env = process.env, trackerDir } = {}) {
105
155
  const paths = resolveHermesPluginPaths({ home, env, trackerDir });
106
156
  const hadPluginDir = await pathExists(paths.pluginDir);
157
+
158
+ // Always try to clean up the allow-list entry, even if the plugin dir is
159
+ // already gone — config.yaml could otherwise be left referencing a plugin
160
+ // that no longer exists.
161
+ let enabledChanged = false;
162
+ try {
163
+ const enableResult = await removeEnabledPlugin({ home, env, name: HERMES_PLUGIN_ID });
164
+ enabledChanged = Boolean(enableResult.changed);
165
+ } catch (_err) {
166
+ // Best-effort; mirrors the behaviour above. Don't block file removal on
167
+ // a malformed config.yaml.
168
+ }
169
+
107
170
  if (!hadPluginDir) {
108
- return { removed: false, skippedReason: "plugin-missing", ...paths };
171
+ return {
172
+ removed: enabledChanged,
173
+ skippedReason: enabledChanged ? null : "plugin-missing",
174
+ enabledChanged,
175
+ ...paths,
176
+ };
109
177
  }
110
178
 
111
179
  const yamlText = await fs.readFile(paths.pluginYamlPath, "utf8").catch(() => null);
112
180
  const initText = await fs.readFile(paths.pluginInitPath, "utf8").catch(() => null);
113
181
  const markerPresent = hasHermesPluginMarker(yamlText) && hasHermesPluginMarker(initText);
114
182
  if (!markerPresent) {
115
- return { removed: false, skippedReason: "unexpected-content", ...paths };
183
+ return {
184
+ removed: false,
185
+ skippedReason: "unexpected-content",
186
+ enabledChanged,
187
+ ...paths,
188
+ };
116
189
  }
117
190
 
118
191
  await fs.rm(paths.pluginDir, { recursive: true, force: true }).catch(() => {});
119
- return { removed: true, ...paths };
192
+ return { removed: true, enabledChanged, ...paths };
120
193
  }
121
194
 
122
195
  function buildHermesPluginYaml() {
package/src/lib/hermes-plugins-config.js ADDED
@@ -0,0 +1,266 @@
1
+ "use strict";
2
+
3
+ /**
4
+ * Manage the `plugins.enabled` allow-list inside Hermes' config.yaml.
5
+ *
6
+ * Hermes (>= the v20→v21 migration) loads user plugins on an opt-in basis:
7
+ * a plugin in `~/.hermes/plugins/<name>/` only fires its hooks if its name
8
+ * appears in `plugins.enabled` of `~/.hermes/config.yaml` (see
9
+ * `hermes_cli/plugins.py:_get_enabled_plugins` and the discovery loop at
10
+ * `hermes_cli/plugins.py:641`).
11
+ *
12
+ * `installHermesPlugin` therefore must do TWO things to actually wire up the
13
+ * vibeusage hook:
14
+ * 1. Drop the plugin files into `~/.hermes/plugins/vibeusage/` (handled by
15
+ * `hermes-config.js`).
16
+ * 2. Make sure `vibeusage` is listed under `plugins.enabled` here.
17
+ *
18
+ * Implementation notes:
19
+ * - We use the `yaml` package's `parseDocument` to preserve user comments,
20
+ * anchors, and key order. Plain `yaml.parse` + `yaml.stringify` would lose
21
+ * all of that.
22
+ * - All operations are idempotent. An already-enabled plugin stays as-is and
23
+ * the function reports `changed: false`.
24
+ * - The file is written atomically via writeFileAtomic to avoid leaving the
25
+ * user with a half-written config.yaml on crash.
26
+ */
27
+
28
+ const os = require("node:os");
29
+ const path = require("node:path");
30
+ const fs = require("node:fs/promises");
31
+
32
+ const YAML = require("yaml");
33
+ const { writeFileAtomic } = require("./fs");
34
+
35
+ const HERMES_CONFIG_FILENAME = "config.yaml";
36
+
37
+ function resolveHermesConfigPath({ home = os.homedir(), env = process.env } = {}) {
38
+ const explicit = typeof env.HERMES_HOME === "string" ? env.HERMES_HOME.trim() : "";
39
+ const hermesHome = explicit ? path.resolve(explicit) : path.join(home, ".hermes");
40
+ return path.join(hermesHome, HERMES_CONFIG_FILENAME);
41
+ }
42
+
43
+ /**
44
+ * Probe whether a plugin name is currently enabled in `plugins.enabled`.
45
+ *
46
+ * Returns one of:
47
+ * - { state: "enabled" } — name appears in plugins.enabled
48
+ * - { state: "missing-key" } — config exists but plugins.enabled key is absent
49
+ * - { state: "missing-name" } — plugins.enabled exists but does not contain the name
50
+ * - { state: "config-missing" } — config.yaml does not exist
51
+ * - { state: "config-unreadable", error } — read or parse error
52
+ */
53
+ async function probeEnabledPlugin({ home, env, name } = {}) {
54
+ if (!name || typeof name !== "string") {
55
+ throw new Error("name is required");
56
+ }
57
+ const configPath = resolveHermesConfigPath({ home, env });
58
+
59
+ let raw;
60
+ try {
61
+ raw = await fs.readFile(configPath, "utf8");
62
+ } catch (err) {
63
+ if (err && (err.code === "ENOENT" || err.code === "ENOTDIR")) {
64
+ return { state: "config-missing", configPath };
65
+ }
66
+ return {
67
+ state: "config-unreadable",
68
+ error: err && err.message ? err.message : String(err),
69
+ configPath,
70
+ };
71
+ }
72
+
73
+ let doc;
74
+ try {
75
+ doc = YAML.parseDocument(raw);
76
+ } catch (err) {
77
+ return {
78
+ state: "config-unreadable",
79
+ error: err && err.message ? err.message : String(err),
80
+ configPath,
81
+ };
82
+ }
83
+ if (doc.errors && doc.errors.length > 0) {
84
+ return {
85
+ state: "config-unreadable",
86
+ error: doc.errors[0].message || "config.yaml has YAML errors",
87
+ configPath,
88
+ };
89
+ }
90
+
91
+ const enabled = doc.getIn(["plugins", "enabled"], true);
92
+ if (enabled === undefined) {
93
+ return { state: "missing-key", configPath };
94
+ }
95
+ const list = enabled && typeof enabled.toJSON === "function" ? enabled.toJSON() : enabled;
96
+ if (!Array.isArray(list)) {
97
+ return { state: "missing-key", configPath };
98
+ }
99
+ if (list.includes(name)) {
100
+ return { state: "enabled", configPath };
101
+ }
102
+ return { state: "missing-name", configPath };
103
+ }
104
+
105
/**
 * Idempotently add `name` to plugins.enabled. Creates the `plugins:` map and
 * `enabled` sequence if either is missing — or present as an empty (null-valued)
 * key, e.g. a bare `plugins:` line. Preserves comments and surrounding keys via
 * parseDocument.
 *
 * @param {object} [opts]
 * @param {string} [opts.home] - Home directory used to resolve config.yaml.
 * @param {object} [opts.env] - Environment map consulted by the path resolver.
 * @param {string} opts.name - Plugin name to enable (required).
 * @returns {Promise<{changed: boolean, configPath: string, configCreated: boolean, alreadyEnabled: boolean}>}
 * @throws {Error} code HERMES_CONFIG_INVALID when config.yaml fails to parse.
 * @throws {Error} code HERMES_PLUGINS_NOT_MAP / HERMES_PLUGINS_ENABLED_NOT_SEQ
 *   when an existing key holds non-container user content we refuse to clobber.
 */
async function addEnabledPlugin({ home, env, name } = {}) {
  if (!name || typeof name !== "string") {
    throw new Error("name is required");
  }
  const configPath = resolveHermesConfigPath({ home, env });

  let raw = "";
  let configCreated = false;
  try {
    raw = await fs.readFile(configPath, "utf8");
  } catch (err) {
    // Missing file (or a non-directory path component) just means we create it.
    if (err && (err.code === "ENOENT" || err.code === "ENOTDIR")) {
      configCreated = true;
    } else {
      throw err;
    }
  }

  const doc = raw ? YAML.parseDocument(raw) : new YAML.Document({});
  if (doc.errors && doc.errors.length > 0) {
    const err = new Error(`config.yaml has YAML errors: ${doc.errors[0].message}`);
    err.code = "HERMES_CONFIG_INVALID";
    throw err;
  }

  // With keepScalar=true, an empty key (`plugins:` with no value) comes back as
  // a Scalar node holding null — not as undefined/null itself. Treat that the
  // same as a missing key so an empty placeholder doesn't trigger the
  // "not a mapping" refusal below.
  const isNullScalar = (node) =>
    node instanceof YAML.Scalar && (node.value === null || node.value === undefined);

  // Ensure plugins is a Map.
  let pluginsNode = doc.get("plugins", true);
  if (pluginsNode === undefined || pluginsNode === null || isNullScalar(pluginsNode)) {
    doc.set("plugins", new YAML.YAMLMap());
    pluginsNode = doc.get("plugins", true);
  } else if (!(pluginsNode instanceof YAML.YAMLMap)) {
    // Existing non-map value (string, list, scalar). Refuse so we never silently
    // clobber user content. Caller should escalate to the user.
    const err = new Error(
      "plugins key in config.yaml is not a mapping; refusing to modify",
    );
    err.code = "HERMES_PLUGINS_NOT_MAP";
    throw err;
  }

  // Ensure plugins.enabled is a Seq (same empty-key tolerance as above).
  let enabledNode = pluginsNode.get("enabled", true);
  if (enabledNode === undefined || enabledNode === null || isNullScalar(enabledNode)) {
    pluginsNode.set("enabled", new YAML.YAMLSeq());
    enabledNode = pluginsNode.get("enabled", true);
  } else if (!(enabledNode instanceof YAML.YAMLSeq)) {
    const err = new Error(
      "plugins.enabled in config.yaml is not a sequence; refusing to modify",
    );
    err.code = "HERMES_PLUGINS_ENABLED_NOT_SEQ";
    throw err;
  }

  // Idempotent add: never write a duplicate entry.
  const current = enabledNode.toJSON() || [];
  if (Array.isArray(current) && current.includes(name)) {
    return {
      changed: false,
      configPath,
      configCreated: false,
      alreadyEnabled: true,
    };
  }
  enabledNode.add(name);

  const out = String(doc);
  await writeFileAtomic(configPath, out);

  return {
    changed: true,
    configPath,
    configCreated,
    alreadyEnabled: false,
  };
}
187
+
188
/**
 * Idempotently remove `name` from plugins.enabled. Cleans up empty container
 * keys (`plugins.enabled` if it becomes [], and `plugins` if it becomes {}).
 *
 * @param {object} [opts]
 * @param {string} [opts.home] - Home directory used to resolve config.yaml.
 * @param {object} [opts.env] - Environment map consulted by the path resolver.
 * @param {string} opts.name - Plugin name to remove (required).
 * @returns {Promise<{changed: boolean, configPath: string, wasEnabled: boolean}>}
 * @throws {Error} code HERMES_CONFIG_INVALID when config.yaml fails to parse.
 */
async function removeEnabledPlugin({ home, env, name } = {}) {
  if (!name || typeof name !== "string") {
    throw new Error("name is required");
  }
  const configPath = resolveHermesConfigPath({ home, env });
  const unchanged = { changed: false, configPath, wasEnabled: false };

  let raw;
  try {
    raw = await fs.readFile(configPath, "utf8");
  } catch (err) {
    // No config file at all means there is nothing to remove.
    if (err && (err.code === "ENOENT" || err.code === "ENOTDIR")) {
      return unchanged;
    }
    throw err;
  }

  const doc = YAML.parseDocument(raw);
  if (doc.errors && doc.errors.length > 0) {
    const err = new Error(`config.yaml has YAML errors: ${doc.errors[0].message}`);
    err.code = "HERMES_CONFIG_INVALID";
    throw err;
  }

  // Absent or unexpectedly-shaped containers: nothing to do.
  const pluginsNode = doc.get("plugins", true);
  if (!(pluginsNode instanceof YAML.YAMLMap)) return unchanged;
  const enabledNode = pluginsNode.get("enabled", true);
  if (!(enabledNode instanceof YAML.YAMLSeq)) return unchanged;

  // Match on the JSON value rather than node identity: YAMLSeq.delete by name
  // doesn't reliably match string values across all node shapes, so comparing
  // each item's unwrapped value is the safe path.
  const idx = enabledNode.items.findIndex((item) => {
    const value = item && typeof item === "object" && "value" in item ? item.value : item;
    return value === name;
  });
  if (idx < 0) return unchanged;

  enabledNode.delete(idx);

  // Drop now-empty containers so we don't leave noise in user config.
  if (enabledNode.items.length === 0) pluginsNode.delete("enabled");
  if (pluginsNode.items.length === 0) doc.delete("plugins");

  await writeFileAtomic(configPath, String(doc));

  return { changed: true, configPath, wasEnabled: true };
}
259
+
260
// Public surface: the config filename constant, the config-path resolver, and
// the probe/add/remove operations over plugins.enabled in config.yaml.
module.exports = {
  HERMES_CONFIG_FILENAME,
  resolveHermesConfigPath,
  probeEnabledPlugin,
  addEnabledPlugin,
  removeEnabledPlugin,
};
@@ -69,6 +69,19 @@ function runSourceAudit({
69
69
  throw new Error(`strategy.${key} is required`);
70
70
  }
71
71
  }
72
+
73
+ if (strategy.supportsAudit === false) {
74
+ return {
75
+ ok: false,
76
+ error: "audit-not-applicable",
77
+ source: strategy.id,
78
+ message:
79
+ `source=${strategy.id} does not support independent ground-truth audit ` +
80
+ "(data comes from the same plugin-ledger pipeline that feeds the DB)",
81
+ rows: [],
82
+ maxDriftPct: 0,
83
+ };
84
+ }
72
85
  if (!Number.isFinite(days) || days <= 0) {
73
86
  throw new Error(`days must be a positive number, got ${days}`);
74
87
  }
@@ -24,6 +24,7 @@ const path = require("node:path");
24
24
  module.exports = {
25
25
  id: "hermes",
26
26
  displayName: "Hermes Plugin",
27
+ supportsAudit: false,
27
28
  sessionRoot({ home, env }) {
28
29
  const base = (env && env.VIBEUSAGE_HOME) || path.join(home, ".vibeusage");
29
30
  return path.join(base, "tracker");
@@ -54,12 +54,12 @@ def post_api_request(session_id="", platform="", model="", provider="", api_mode
54
54
  record.update({
55
55
  "api_mode": str(api_mode or ""),
56
56
  "api_call_count": _safe_int(api_call_count),
57
- "input_tokens": _safe_int(usage.get("input_tokens")),
58
- "output_tokens": _safe_int(usage.get("output_tokens")),
59
- "cache_read_tokens": _safe_int(usage.get("cache_read_tokens")),
60
- "cache_write_tokens": _safe_int(usage.get("cache_write_tokens")),
61
- "reasoning_tokens": _safe_int(usage.get("reasoning_tokens")),
62
- "total_tokens": _safe_int(usage.get("total_tokens")),
57
+ "input_tokens": _safe_int(usage.get("input_tokens") or usage.get("input")),
58
+ "output_tokens": _safe_int(usage.get("output_tokens") or usage.get("output")),
59
+ "cache_read_tokens": _safe_int(usage.get("cache_read_tokens") or usage.get("cache_read")),
60
+ "cache_write_tokens": _safe_int(usage.get("cache_write_tokens") or usage.get("cache_write")),
61
+ "reasoning_tokens": _safe_int(usage.get("reasoning_tokens") or usage.get("reasoning")),
62
+ "total_tokens": _safe_int(usage.get("total_tokens") or usage.get("total")),
63
63
  "finish_reason": str(finish_reason or ""),
64
64
  })
65
65
  return _append_record(record)