vibeusage 0.3.5 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -86,7 +86,7 @@ This is useful when you want to copy an install command from the dashboard or le
86
86
  | **Every Code** | Auto-detected | `notify` hook | `~/.code/sessions/**/rollout-*.jsonl` |
87
87
  | **Claude Code** | Auto-detected | `Stop` + `SessionEnd` hooks | local hook output |
88
88
  | **Gemini CLI** | Auto-detected | `SessionEnd` hook | `~/.gemini/tmp/**/chats/session-*.json` |
89
- | **OpenCode** | Auto-detected | plugin + local parsing | `~/.local/share/opencode/opencode.db` (legacy message files are fallback only) |
89
+ | **OpenCode** | Auto-detected | plugin + local parsing | `~/.local/share/opencode/opencode.db` (SQLite is the only supported local accounting source) |
90
90
  | **Hermes** | Auto-detected when installed | plugin + local parsing | `~/.vibeusage/tracker/hermes.usage.jsonl` |
91
91
  | **OpenClaw** | Auto-detected when installed | session plugin | local sanitized usage ledger |
92
92
 
@@ -273,7 +273,7 @@ npx vibeusage status
273
273
  npx vibeusage doctor
274
274
  ```
275
275
 
276
- If OpenCode support is incomplete, the most common issue is missing `sqlite3` on `PATH`, or a local SQLite query failure.
276
+ VibeUsage reads OpenCode usage only from `opencode.db`, so the most common issues are missing `sqlite3` on `PATH`, a missing database file, or a local SQLite query failure.
277
277
 
278
278
  ### My OpenClaw usage is not showing up. What should I check?
279
279
 
package/README.zh-CN.md CHANGED
@@ -86,7 +86,7 @@ npx --yes vibeusage init --link-code <code>
86
86
  | **Every Code** | 自动检测 | `notify` hook | `~/.code/sessions/**/rollout-*.jsonl` |
87
87
  | **Claude Code** | 自动检测 | `Stop` + `SessionEnd` hooks | 本地 hook 输出 |
88
88
  | **Gemini CLI** | 自动检测 | `SessionEnd` hook | `~/.gemini/tmp/**/chats/session-*.json` |
89
- | **OpenCode** | 自动检测 | plugin + 本地解析 | `~/.local/share/opencode/opencode.db`(旧 message 文件仅作 fallback) |
89
+ | **OpenCode** | 自动检测 | plugin + 本地解析 | `~/.local/share/opencode/opencode.db`(SQLite 是唯一受支持的本地 accounting source) |
90
90
  | **OpenClaw** | 安装后自动检测 | session plugin | 本地 sanitized usage ledger |
91
91
 
92
92
  ### OpenClaw 说明
package/package.json CHANGED
@@ -1,8 +1,16 @@
1
1
  {
2
2
  "name": "vibeusage",
3
- "version": "0.3.5",
3
+ "version": "0.5.0",
4
4
  "description": "Codex CLI token usage tracker (macOS-first, notify-driven).",
5
5
  "license": "MIT",
6
+ "repository": {
7
+ "type": "git",
8
+ "url": "git+https://github.com/victorGPT/vibeusage.git"
9
+ },
10
+ "bugs": {
11
+ "url": "https://github.com/victorGPT/vibeusage/issues"
12
+ },
13
+ "homepage": "https://github.com/victorGPT/vibeusage#readme",
6
14
  "bin": {
7
15
  "tracker": "bin/tracker.js",
8
16
  "vibeusage": "bin/tracker.js",
@@ -190,7 +190,7 @@ function renderWelcome() {
190
190
  DIVIDER,
191
191
  "",
192
192
  "This tool will:",
193
- " - Analyze your local AI CLI configurations (Codex, Every Code, Claude, Gemini, Opencode, Hermes, OpenClaw)",
193
+ " - Analyze your local AI CLI configurations (Codex, Every Code, Claude Code, Gemini, Kimi, Hermes, OpenCode, OpenClaw)",
194
194
  " - Set up lightweight hooks to track your flow state",
195
195
  " - Link your device to your VibeScore account",
196
196
  "",
@@ -486,7 +486,7 @@ try {
486
486
  const originalPath =
487
487
  source === 'every-code'
488
488
  ? codeOriginalPath
489
- : source === 'claude' || source === 'opencode' || source === 'gemini'
489
+ : source === 'claude' || source === 'opencode' || source === 'gemini' || source === 'kimi'
490
490
  ? null
491
491
  : codexOriginalPath;
492
492
  if (originalPath) {
@@ -8,10 +8,11 @@ const {
8
8
  listRolloutFiles,
9
9
  listClaudeProjectFiles,
10
10
  listGeminiSessionFiles,
11
- listOpencodeMessageFiles,
11
+ listKimiSessionFiles,
12
12
  parseRolloutIncremental,
13
13
  parseClaudeIncremental,
14
14
  parseGeminiIncremental,
15
+ parseKimiIncremental,
15
16
  parseOpencodeIncremental,
16
17
  normalizeHourlyState,
17
18
  getHourlyBucket,
@@ -72,9 +73,10 @@ async function cmdSync(argv) {
72
73
  const claudeProjectsDir = path.join(home, ".claude", "projects");
73
74
  const geminiHome = process.env.GEMINI_HOME || path.join(home, ".gemini");
74
75
  const geminiTmpDir = path.join(geminiHome, "tmp");
76
+ const kimiHome = process.env.KIMI_HOME || path.join(home, ".kimi");
77
+ const kimiSessionsDir = path.join(kimiHome, "sessions");
75
78
  const xdgDataHome = process.env.XDG_DATA_HOME || path.join(home, ".local", "share");
76
79
  const opencodeHome = process.env.OPENCODE_HOME || path.join(xdgDataHome, "opencode");
77
- const opencodeStorageDir = path.join(opencodeHome, "storage");
78
80
  const opencodeDbPath = path.join(opencodeHome, "opencode.db");
79
81
 
80
82
  const sources = [
@@ -181,28 +183,38 @@ async function cmdSync(argv) {
181
183
  });
182
184
  }
183
185
 
184
- const opencodeFiles = await listOpencodeMessageFiles(opencodeStorageDir);
185
- let opencodeResult = { filesProcessed: 0, eventsAggregated: 0, bucketsQueued: 0 };
186
- if (progress?.enabled && opencodeFiles.length > 0) {
187
- progress.start(
188
- `Parsing Opencode ${renderBar(0)} 0/${formatNumber(opencodeFiles.length)} files | buckets 0`,
189
- );
186
+ const kimiFiles = await listKimiSessionFiles(kimiSessionsDir);
187
+ let kimiResult = { filesProcessed: 0, eventsAggregated: 0, bucketsQueued: 0 };
188
+ if (kimiFiles.length > 0) {
189
+ if (progress?.enabled) {
190
+ progress.start(
191
+ `Parsing Kimi ${renderBar(0)} 0/${formatNumber(kimiFiles.length)} files | buckets 0`,
192
+ );
193
+ }
194
+ kimiResult = await parseKimiIncremental({
195
+ sessionFiles: kimiFiles,
196
+ cursors,
197
+ queuePath,
198
+ projectQueuePath,
199
+ onProgress: (p) => {
200
+ if (!progress?.enabled) return;
201
+ const pct = p.total > 0 ? p.index / p.total : 1;
202
+ progress.update(
203
+ `Parsing Kimi ${renderBar(pct)} ${formatNumber(p.index)}/${formatNumber(p.total)} files | buckets ${formatNumber(
204
+ p.bucketsQueued,
205
+ )}`,
206
+ );
207
+ },
208
+ source: "kimi",
209
+ });
190
210
  }
211
+
212
+ let opencodeResult = { filesProcessed: 0, eventsAggregated: 0, bucketsQueued: 0 };
191
213
  opencodeResult = await parseOpencodeIncremental({
192
- messageFiles: opencodeFiles,
193
214
  opencodeDbPath,
194
215
  cursors,
195
216
  queuePath,
196
217
  projectQueuePath,
197
- onProgress: (p) => {
198
- if (!progress?.enabled) return;
199
- const pct = p.total > 0 ? p.index / p.total : 1;
200
- progress.update(
201
- `Parsing Opencode ${renderBar(pct)} ${formatNumber(p.index)}/${formatNumber(
202
- p.total,
203
- )} files | buckets ${formatNumber(p.bucketsQueued)}`,
204
- );
205
- },
206
218
  source: "opencode",
207
219
  });
208
220
 
@@ -380,6 +392,7 @@ async function cmdSync(argv) {
380
392
  openclawResult.filesProcessed +
381
393
  claudeResult.filesProcessed +
382
394
  geminiResult.filesProcessed +
395
+ kimiResult.filesProcessed +
383
396
  opencodeResult.filesProcessed;
384
397
  const totalBuckets =
385
398
  parseResult.bucketsQueued +
@@ -387,6 +400,7 @@ async function cmdSync(argv) {
387
400
  openclawResult.bucketsQueued +
388
401
  claudeResult.bucketsQueued +
389
402
  geminiResult.bucketsQueued +
403
+ kimiResult.bucketsQueued +
390
404
  opencodeResult.bucketsQueued;
391
405
  process.stdout.write(
392
406
  [
@@ -8,7 +8,7 @@ const { isDir, isFile } = require("./utils");
8
8
 
9
9
  module.exports = {
10
10
  name: "claude",
11
- summaryLabel: "Claude",
11
+ summaryLabel: "Claude Code",
12
12
  statusLabel: "Claude plugin",
13
13
  async probe(ctx) {
14
14
  const hasConfigDir = await isDir(ctx.claude.configDir);
@@ -7,6 +7,11 @@ const {
7
7
  resolveGeminiSettingsPath,
8
8
  buildGeminiHookCommand,
9
9
  } = require("../gemini-config");
10
+ const {
11
+ resolveKimiConfigDir,
12
+ resolveKimiConfigPath,
13
+ buildKimiHookCommand,
14
+ } = require("../kimi-config");
10
15
  const { resolveOpencodeConfigDir } = require("../opencode-config");
11
16
  const { resolveOpenclawSessionPluginPaths } = require("../openclaw-session-plugin");
12
17
  const { resolveHermesPluginPaths } = require("../hermes-config");
@@ -25,6 +30,7 @@ async function createIntegrationContext({
25
30
  const codeHome = env.CODE_HOME || path.join(home, ".code");
26
31
  const claudeDir = path.join(home, ".claude");
27
32
  const geminiConfigDir = resolveGeminiConfigDir({ home, env });
33
+ const kimiConfigDir = resolveKimiConfigDir({ home, env });
28
34
  const opencodeConfigDir = resolveOpencodeConfigDir({ home, env });
29
35
 
30
36
  return {
@@ -55,6 +61,12 @@ async function createIntegrationContext({
55
61
  settingsPath: resolveGeminiSettingsPath({ configDir: geminiConfigDir }),
56
62
  hookCommand: buildGeminiHookCommand(resolvedNotifyPath),
57
63
  },
64
+ kimi: {
65
+ configDir: kimiConfigDir,
66
+ configPath: resolveKimiConfigPath({ configDir: kimiConfigDir }),
67
+ hookCommand: buildKimiHookCommand(resolvedNotifyPath),
68
+ sessionsDir: path.join(kimiConfigDir, "sessions"),
69
+ },
58
70
  opencode: {
59
71
  configDir: opencodeConfigDir,
60
72
  },
@@ -3,6 +3,7 @@ const codex = require("./codex");
3
3
  const everyCode = require("./every-code");
4
4
  const claude = require("./claude");
5
5
  const gemini = require("./gemini");
6
+ const kimi = require("./kimi");
6
7
  const opencode = require("./opencode");
7
8
  const hermes = require("./hermes");
8
9
  const openclawSession = require("./openclaw-session");
@@ -12,6 +13,7 @@ const INTEGRATIONS = [
12
13
  everyCode,
13
14
  claude,
14
15
  gemini,
16
+ kimi,
15
17
  opencode,
16
18
  hermes,
17
19
  openclawSession,
@@ -0,0 +1,105 @@
1
+ const {
2
+ isKimiHookConfigured,
3
+ upsertKimiHook,
4
+ removeKimiHook,
5
+ probeKimiHook,
6
+ } = require("../kimi-config");
7
+ const { isDir, isFile } = require("./utils");
8
+
9
+ module.exports = {
10
+ name: "kimi",
11
+ summaryLabel: "Kimi",
12
+ statusLabel: "Kimi hooks",
13
+ async probe(ctx) {
14
+ const hasConfigDir = await isDir(ctx.kimi.configDir);
15
+ if (!hasConfigDir) {
16
+ return baseProbe(this, { status: "not_installed", detail: "Config not found" });
17
+ }
18
+ const hasConfigFile = await isFile(ctx.kimi.configPath);
19
+ if (!hasConfigFile) {
20
+ return baseProbe(this, {
21
+ status: "not_installed",
22
+ detail: "config.toml not found",
23
+ });
24
+ }
25
+ const state = await probeKimiHook({
26
+ configPath: ctx.kimi.configPath,
27
+ hookCommand: ctx.kimi.hookCommand,
28
+ });
29
+ if (state.configured) {
30
+ return baseProbe(this, {
31
+ status: "ready",
32
+ detail: "Hooks installed",
33
+ configured: true,
34
+ });
35
+ }
36
+ return baseProbe(this, {
37
+ status: state.drifted ? "drifted" : "not_installed",
38
+ detail: state.drifted
39
+ ? "Run vibeusage init to reconcile hooks"
40
+ : "Run vibeusage init to install hooks",
41
+ });
42
+ },
43
+ async install(ctx) {
44
+ if (!(await isDir(ctx.kimi.configDir))) {
45
+ return action(this, "skipped", false, "Config not found");
46
+ }
47
+ if (!(await isFile(ctx.kimi.configPath))) {
48
+ return action(this, "skipped", false, "config.toml not found");
49
+ }
50
+ const result = await upsertKimiHook({
51
+ configPath: ctx.kimi.configPath,
52
+ hookCommand: ctx.kimi.hookCommand,
53
+ });
54
+ return action(
55
+ this,
56
+ result.changed ? "installed" : "set",
57
+ Boolean(result.changed),
58
+ result.changed ? "Hooks installed" : "Hooks already installed",
59
+ );
60
+ },
61
+ async uninstall(ctx) {
62
+ if (!(await isDir(ctx.kimi.configDir))) {
63
+ return action(this, "skipped", false, "config dir not found");
64
+ }
65
+ const result = await removeKimiHook({ configPath: ctx.kimi.configPath });
66
+ if (result.removed) {
67
+ return action(this, "removed", true, ctx.kimi.configPath);
68
+ }
69
+ if (result.skippedReason === "hook-missing") {
70
+ return action(this, "unchanged", false, "no change", {
71
+ skippedReason: result.skippedReason,
72
+ });
73
+ }
74
+ return action(this, "skipped", false, "config.toml not found");
75
+ },
76
+ renderStatusValue(probe) {
77
+ if (probe.status === "ready") return "set";
78
+ if (probe.status === "not_installed") return "unset";
79
+ return probe.status;
80
+ },
81
+ };
82
+
83
+ function baseProbe(descriptor, values) {
84
+ return {
85
+ name: descriptor.name,
86
+ summaryLabel: descriptor.summaryLabel,
87
+ statusLabel: descriptor.statusLabel,
88
+ configured: false,
89
+ ...values,
90
+ };
91
+ }
92
+
93
+ function action(descriptor, status, changed, detail, extras = {}) {
94
+ return {
95
+ name: descriptor.name,
96
+ label: descriptor.summaryLabel,
97
+ status,
98
+ changed,
99
+ detail,
100
+ ...extras,
101
+ };
102
+ }
103
+
104
+ // Expose for tests / diagnostics that want the probe-only check.
105
+ module.exports.isKimiHookConfigured = isKimiHookConfigured;
@@ -3,8 +3,8 @@ const { isDir } = require("./utils");
3
3
 
4
4
  module.exports = {
5
5
  name: "opencode",
6
- summaryLabel: "Opencode Plugin",
7
- statusLabel: "Opencode plugin",
6
+ summaryLabel: "OpenCode Plugin",
7
+ statusLabel: "OpenCode plugin",
8
8
  async probe(ctx) {
9
9
  const hasConfigDir = await isDir(ctx.opencode.configDir);
10
10
  if (!hasConfigDir) {
@@ -0,0 +1,221 @@
1
+ const os = require("node:os");
2
+ const path = require("node:path");
3
+ const fs = require("node:fs/promises");
4
+
5
+ const { ensureDir } = require("./fs");
6
+
7
+ const DEFAULT_EVENTS = ["SessionEnd", "Stop"];
8
+ const DEFAULT_TIMEOUT = 30;
9
+ const MANAGED_START = "# --- vibeusage Kimi hooks START (managed, do not edit) ---";
10
+ const MANAGED_END = "# --- vibeusage Kimi hooks END ---";
11
+
12
+ function resolveKimiConfigDir({ home = os.homedir(), env = process.env } = {}) {
13
+ const explicit = typeof env.KIMI_HOME === "string" ? env.KIMI_HOME.trim() : "";
14
+ if (explicit) return path.resolve(explicit);
15
+ return path.join(home, ".kimi");
16
+ }
17
+
18
+ function resolveKimiConfigPath({ configDir }) {
19
+ return path.join(configDir, "config.toml");
20
+ }
21
+
22
+ function buildKimiHookCommand(notifyPath) {
23
+ const cmd = typeof notifyPath === "string" ? notifyPath : "";
24
+ return `/usr/bin/env node ${quoteArg(cmd)} --source=kimi`;
25
+ }
26
+
27
+ async function upsertKimiHook({
28
+ configPath,
29
+ hookCommand,
30
+ events = DEFAULT_EVENTS,
31
+ timeout = DEFAULT_TIMEOUT,
32
+ }) {
33
+ const existing = await readFileOrEmpty(configPath);
34
+ const normalizedEvents = normalizeEvents(events);
35
+ const nextBlock = buildManagedBlock({
36
+ hookCommand,
37
+ events: normalizedEvents,
38
+ timeout: normalizeTimeout(timeout),
39
+ });
40
+ const { content, changed } = replaceManagedBlock(existing, nextBlock);
41
+ if (!changed) return { changed: false, backupPath: null };
42
+ const backupPath = await writeWithBackup({ configPath, content });
43
+ return { changed: true, backupPath };
44
+ }
45
+
46
+ async function removeKimiHook({ configPath }) {
47
+ const existing = await readFileOrEmpty(configPath);
48
+ if (!existing) return { removed: false, skippedReason: "config-missing", backupPath: null };
49
+ const { content, changed } = stripManagedBlock(existing);
50
+ if (!changed) return { removed: false, skippedReason: "hook-missing", backupPath: null };
51
+ const backupPath = await writeWithBackup({ configPath, content });
52
+ return { removed: true, skippedReason: null, backupPath };
53
+ }
54
+
55
+ async function isKimiHookConfigured({
56
+ configPath,
57
+ hookCommand,
58
+ events = DEFAULT_EVENTS,
59
+ timeout = DEFAULT_TIMEOUT,
60
+ }) {
61
+ const probe = await probeKimiHook({ configPath, hookCommand, events, timeout });
62
+ return probe.configured;
63
+ }
64
+
65
+ async function probeKimiHook({
66
+ configPath,
67
+ hookCommand,
68
+ events = DEFAULT_EVENTS,
69
+ timeout = DEFAULT_TIMEOUT,
70
+ }) {
71
+ const existing = await readFileOrEmpty(configPath);
72
+ if (!existing) {
73
+ return { configured: false, anyPresent: false, drifted: false };
74
+ }
75
+ const block = extractManagedBlock(existing);
76
+ if (!block) {
77
+ return { configured: false, anyPresent: false, drifted: false };
78
+ }
79
+ const expected = buildManagedBlock({
80
+ hookCommand,
81
+ events: normalizeEvents(events),
82
+ timeout: normalizeTimeout(timeout),
83
+ });
84
+ return {
85
+ configured: block === expected,
86
+ anyPresent: true,
87
+ drifted: block !== expected,
88
+ };
89
+ }
90
+
91
+ function buildManagedBlock({ hookCommand, events, timeout }) {
92
+ const lines = [MANAGED_START];
93
+ for (const event of events) {
94
+ lines.push(
95
+ "[[hooks]]",
96
+ `event = ${tomlString(event)}`,
97
+ `command = ${tomlString(hookCommand)}`,
98
+ `timeout = ${timeout}`,
99
+ );
100
+ lines.push("");
101
+ }
102
+ lines.push(MANAGED_END);
103
+ return lines.join("\n");
104
+ }
105
+
106
+ function replaceManagedBlock(existing, nextBlock) {
107
+ const startIdx = existing.indexOf(MANAGED_START);
108
+ const endIdx = existing.indexOf(MANAGED_END);
109
+
110
+ if (startIdx !== -1 && endIdx !== -1 && endIdx > startIdx) {
111
+ const blockEnd = endIdx + MANAGED_END.length;
112
+ const current = existing.slice(startIdx, blockEnd);
113
+ if (current === nextBlock) return { content: existing, changed: false };
114
+ const before = existing.slice(0, startIdx);
115
+ let after = existing.slice(blockEnd);
116
+ if (after.startsWith("\n")) after = after.slice(1);
117
+ const beforeTrimmed = before.replace(/\n+$/, "");
118
+ const prefix = beforeTrimmed.length > 0 ? `${beforeTrimmed}\n\n` : "";
119
+ const suffix = after.length > 0 ? `\n\n${after.replace(/^\n+/, "")}` : "\n";
120
+ return { content: `${prefix}${nextBlock}${suffix}`, changed: true };
121
+ }
122
+
123
+ const base = existing.replace(/\n+$/, "");
124
+ const prefix = base.length > 0 ? `${base}\n\n` : "";
125
+ return { content: `${prefix}${nextBlock}\n`, changed: true };
126
+ }
127
+
128
+ function stripManagedBlock(existing) {
129
+ const startIdx = existing.indexOf(MANAGED_START);
130
+ const endIdx = existing.indexOf(MANAGED_END);
131
+ if (startIdx === -1 || endIdx === -1 || endIdx < startIdx) {
132
+ return { content: existing, changed: false };
133
+ }
134
+ const blockEnd = endIdx + MANAGED_END.length;
135
+ const before = existing.slice(0, startIdx).replace(/\n+$/, "");
136
+ let after = existing.slice(blockEnd);
137
+ after = after.replace(/^\n+/, "");
138
+ if (before.length === 0 && after.length === 0) {
139
+ return { content: "", changed: true };
140
+ }
141
+ if (before.length === 0) return { content: `${after.endsWith("\n") ? after : `${after}\n`}`, changed: true };
142
+ if (after.length === 0) return { content: `${before}\n`, changed: true };
143
+ return { content: `${before}\n\n${after.endsWith("\n") ? after : `${after}\n`}`, changed: true };
144
+ }
145
+
146
+ function extractManagedBlock(existing) {
147
+ const startIdx = existing.indexOf(MANAGED_START);
148
+ const endIdx = existing.indexOf(MANAGED_END);
149
+ if (startIdx === -1 || endIdx === -1 || endIdx < startIdx) return null;
150
+ const blockEnd = endIdx + MANAGED_END.length;
151
+ return existing.slice(startIdx, blockEnd);
152
+ }
153
+
154
+ function normalizeEvents(raw) {
155
+ const values = Array.isArray(raw) ? raw : [raw];
156
+ const out = [];
157
+ for (const value of values) {
158
+ if (typeof value !== "string") continue;
159
+ const normalized = value.trim();
160
+ if (!normalized || out.includes(normalized)) continue;
161
+ out.push(normalized);
162
+ }
163
+ return out.length > 0 ? out : DEFAULT_EVENTS.slice();
164
+ }
165
+
166
+ function normalizeTimeout(value) {
167
+ const n = Number(value);
168
+ if (!Number.isFinite(n) || n <= 0) return DEFAULT_TIMEOUT;
169
+ return Math.floor(n);
170
+ }
171
+
172
+ function tomlString(value) {
173
+ const v = typeof value === "string" ? value : String(value ?? "");
174
+ return `"${v.replace(/\\/g, "\\\\").replace(/"/g, '\\"')}"`;
175
+ }
176
+
177
+ function quoteArg(value) {
178
+ const v = typeof value === "string" ? value : "";
179
+ if (!v) return '""';
180
+ if (/^[A-Za-z0-9_\-./:@]+$/.test(v)) return v;
181
+ return `"${v.replace(/"/g, '\\"')}"`;
182
+ }
183
+
184
+ async function readFileOrEmpty(filePath) {
185
+ try {
186
+ return await fs.readFile(filePath, "utf8");
187
+ } catch (err) {
188
+ if (err && err.code === "ENOENT") return "";
189
+ throw err;
190
+ }
191
+ }
192
+
193
+ async function writeWithBackup({ configPath, content }) {
194
+ await ensureDir(path.dirname(configPath));
195
+ let backupPath = null;
196
+ try {
197
+ const st = await fs.stat(configPath);
198
+ if (st && st.isFile()) {
199
+ backupPath = `${configPath}.bak.${new Date().toISOString().replace(/[:.]/g, "-")}`;
200
+ await fs.copyFile(configPath, backupPath);
201
+ }
202
+ } catch (_e) {
203
+ // no existing file
204
+ }
205
+ await fs.writeFile(configPath, content, "utf8");
206
+ return backupPath;
207
+ }
208
+
209
+ module.exports = {
210
+ DEFAULT_EVENTS,
211
+ DEFAULT_TIMEOUT,
212
+ MANAGED_START,
213
+ MANAGED_END,
214
+ resolveKimiConfigDir,
215
+ resolveKimiConfigPath,
216
+ buildKimiHookCommand,
217
+ upsertKimiHook,
218
+ removeKimiHook,
219
+ isKimiHookConfigured,
220
+ probeKimiHook,
221
+ };
@@ -2,7 +2,7 @@ const fs = require("node:fs/promises");
2
2
  const os = require("node:os");
3
3
  const path = require("node:path");
4
4
 
5
- const { listOpencodeMessageFiles, parseOpencodeIncremental } = require("./rollout");
5
+ const { parseOpencodeIncremental } = require("./rollout");
6
6
 
7
7
  const BUCKET_SEPARATOR = "|";
8
8
  const DAY_RE = /^\d{4}-\d{2}-\d{2}$/;
@@ -31,7 +31,6 @@ function addTotals(target, delta) {
31
31
  }
32
32
 
33
33
  async function buildLocalHourlyTotals({ storageDir, source = "opencode" }) {
34
- const messageFiles = await listOpencodeMessageFiles(storageDir);
35
34
  const opencodeDbPath = path.resolve(storageDir, "..", "opencode.db");
36
35
  const queuePath = path.join(
37
36
  os.tmpdir(),
@@ -39,7 +38,7 @@ async function buildLocalHourlyTotals({ storageDir, source = "opencode" }) {
39
38
  );
40
39
  const cursors = { version: 1, files: {}, hourly: null, opencode: null, opencodeSqlite: null };
41
40
 
42
- await parseOpencodeIncremental({ messageFiles, opencodeDbPath, cursors, queuePath, source });
41
+ await parseOpencodeIncremental({ opencodeDbPath, cursors, queuePath, source });
43
42
  await fs.rm(queuePath, { force: true }).catch(() => {});
44
43
 
45
44
  const byHour = new Map();
@@ -63,6 +63,25 @@ async function listGeminiSessionFiles(tmpDir) {
63
63
  return out;
64
64
  }
65
65
 
66
+ async function listKimiSessionFiles(sessionsDir) {
67
+ const out = [];
68
+ const projects = await safeReadDir(sessionsDir);
69
+ for (const project of projects) {
70
+ if (!project.isDirectory()) continue;
71
+ const projectDir = path.join(sessionsDir, project.name);
72
+ const sessions = await safeReadDir(projectDir);
73
+ for (const session of sessions) {
74
+ if (!session.isDirectory()) continue;
75
+ const wirePath = path.join(projectDir, session.name, "wire.jsonl");
76
+ const st = await fs.stat(wirePath).catch(() => null);
77
+ if (!st || !st.isFile()) continue;
78
+ out.push(wirePath);
79
+ }
80
+ }
81
+ out.sort((a, b) => a.localeCompare(b));
82
+ return out;
83
+ }
84
+
66
85
  async function listOpencodeMessageFiles(storageDir) {
67
86
  const out = [];
68
87
  const messageDir = path.join(storageDir, "message");
@@ -227,6 +246,8 @@ async function parseClaudeIncremental({
227
246
  const prev = cursors.files[key] || null;
228
247
  const inode = st.ino || 0;
229
248
  const startOffset = prev && prev.inode === inode ? prev.offset || 0 : 0;
249
+ const priorSeenIds =
250
+ prev && prev.inode === inode && Array.isArray(prev.seenIds) ? prev.seenIds : [];
230
251
 
231
252
  const projectContext = projectEnabled
232
253
  ? await resolveProjectContextForFile({
@@ -250,11 +271,13 @@ async function parseClaudeIncremental({
250
271
  projectTouchedBuckets,
251
272
  projectRef,
252
273
  projectKey,
274
+ priorSeenIds,
253
275
  });
254
276
 
255
277
  cursors.files[key] = {
256
278
  inode,
257
279
  offset: result.endOffset,
280
+ seenIds: result.seenIds,
258
281
  updatedAt: new Date().toISOString(),
259
282
  };
260
283
 
@@ -397,23 +420,21 @@ async function parseGeminiIncremental({
397
420
  return { filesProcessed, eventsAggregated, bucketsQueued, projectBucketsQueued };
398
421
  }
399
422
 
400
- async function parseOpencodeIncremental({
401
- messageFiles,
402
- opencodeDbPath,
423
+ async function parseKimiIncremental({
424
+ sessionFiles,
403
425
  cursors,
404
426
  queuePath,
405
427
  projectQueuePath,
406
428
  onProgress,
407
429
  source,
408
430
  publicRepoResolver,
409
- readSqliteRows,
410
431
  }) {
411
432
  await ensureDir(path.dirname(queuePath));
412
433
  let filesProcessed = 0;
413
434
  let eventsAggregated = 0;
414
435
 
415
436
  const cb = typeof onProgress === "function" ? onProgress : null;
416
- const files = Array.isArray(messageFiles) ? messageFiles : [];
437
+ const files = Array.isArray(sessionFiles) ? sessionFiles : [];
417
438
  const totalFiles = files.length;
418
439
  const hourlyState = normalizeHourlyState(cursors?.hourly);
419
440
  const projectEnabled = typeof projectQueuePath === "string" && projectQueuePath.length > 0;
@@ -421,14 +442,8 @@ async function parseOpencodeIncremental({
421
442
  const projectTouchedBuckets = projectEnabled ? new Set() : null;
422
443
  const projectMetaCache = projectEnabled ? new Map() : null;
423
444
  const publicRepoCache = projectEnabled ? new Map() : null;
424
- const opencodeState = normalizeOpencodeState(cursors?.opencode);
425
- const opencodeSqliteState = normalizeOpencodeSqliteState(cursors?.opencodeSqlite);
426
- const messageIndex = opencodeState.messages;
427
445
  const touchedBuckets = new Set();
428
- const defaultSource = normalizeSourceInput(source) || "opencode";
429
- let sqliteStatus = opencodeSqliteState.lastStatus || "never_checked";
430
- let sqliteCheckedAt = opencodeSqliteState.lastCheckedAt || null;
431
- let sqliteErrorCode = opencodeSqliteState.lastErrorCode || null;
446
+ const defaultSource = normalizeSourceInput(source) || "kimi";
432
447
 
433
448
  if (!cursors.files || typeof cursors.files !== "object") {
434
449
  cursors.files = {};
@@ -448,30 +463,8 @@ async function parseOpencodeIncremental({
448
463
  const key = filePath;
449
464
  const prev = cursors.files[key] || null;
450
465
  const inode = st.ino || 0;
451
- const size = Number.isFinite(st.size) ? st.size : 0;
452
- const mtimeMs = Number.isFinite(st.mtimeMs) ? st.mtimeMs : 0;
453
- const unchanged =
454
- prev && prev.inode === inode && prev.size === size && prev.mtimeMs === mtimeMs;
455
- if (unchanged) {
456
- filesProcessed += 1;
457
- if (cb) {
458
- cb({
459
- index: idx + 1,
460
- total: totalFiles,
461
- filePath,
462
- filesProcessed,
463
- eventsAggregated,
464
- bucketsQueued: touchedBuckets.size,
465
- });
466
- }
467
- continue;
468
- }
466
+ const startOffset = prev && prev.inode === inode ? prev.offset || 0 : 0;
469
467
 
470
- const fallbackTotals = prev && typeof prev.lastTotals === "object" ? prev.lastTotals : null;
471
- const fallbackMessageKey =
472
- prev && typeof prev.messageKey === "string" && prev.messageKey.trim()
473
- ? prev.messageKey.trim()
474
- : null;
475
468
  const projectContext = projectEnabled
476
469
  ? await resolveProjectContextForFile({
477
470
  filePath,
@@ -484,11 +477,9 @@ async function parseOpencodeIncremental({
484
477
  const projectRef = projectContext?.projectRef || null;
485
478
  const projectKey = projectContext?.projectKey || null;
486
479
 
487
- const result = await parseOpencodeMessageFile({
480
+ const result = await parseKimiFile({
488
481
  filePath,
489
- messageIndex,
490
- fallbackTotals,
491
- fallbackMessageKey,
482
+ startOffset,
492
483
  hourlyState,
493
484
  touchedBuckets,
494
485
  source: fileSource,
@@ -500,23 +491,13 @@ async function parseOpencodeIncremental({
500
491
 
501
492
  cursors.files[key] = {
502
493
  inode,
503
- size,
504
- mtimeMs,
505
- lastTotals: result.lastTotals,
506
- messageKey: result.messageKey || null,
494
+ offset: result.endOffset,
507
495
  updatedAt: new Date().toISOString(),
508
496
  };
509
497
 
510
498
  filesProcessed += 1;
511
499
  eventsAggregated += result.eventsAggregated;
512
500
 
513
- if (result.messageKey && result.shouldUpdate) {
514
- messageIndex[result.messageKey] = {
515
- lastTotals: result.lastTotals,
516
- updatedAt: new Date().toISOString(),
517
- };
518
- }
519
-
520
501
  if (cb) {
521
502
  cb({
522
503
  index: idx + 1,
@@ -529,6 +510,54 @@ async function parseOpencodeIncremental({
529
510
  }
530
511
  }
531
512
 
513
+ const bucketsQueued = await enqueueTouchedBuckets({ queuePath, hourlyState, touchedBuckets });
514
+ const projectBucketsQueued = projectEnabled
515
+ ? await enqueueTouchedProjectBuckets({ projectQueuePath, projectState, projectTouchedBuckets })
516
+ : 0;
517
+ hourlyState.updatedAt = new Date().toISOString();
518
+ cursors.hourly = hourlyState;
519
+ if (projectState) {
520
+ projectState.updatedAt = new Date().toISOString();
521
+ cursors.projectHourly = projectState;
522
+ }
523
+
524
+ return { filesProcessed, eventsAggregated, bucketsQueued, projectBucketsQueued };
525
+ }
526
+
527
+ async function parseOpencodeIncremental({
528
+ opencodeDbPath,
529
+ cursors,
530
+ queuePath,
531
+ projectQueuePath,
532
+ onProgress,
533
+ source = "opencode",
534
+ publicRepoResolver = null,
535
+ readSqliteRows,
536
+ }) {
537
+ await ensureDir(path.dirname(queuePath));
538
+ let filesProcessed = 0;
539
+ let eventsAggregated = 0;
540
+
541
+ const cb = typeof onProgress === "function" ? onProgress : null;
542
+ const hourlyState = normalizeHourlyState(cursors?.hourly);
543
+ const projectEnabled = typeof projectQueuePath === "string" && projectQueuePath.length > 0;
544
+ const projectState = projectEnabled ? normalizeProjectState(cursors?.projectHourly) : null;
545
+ const projectTouchedBuckets = projectEnabled ? new Set() : null;
546
+ const projectMetaCache = projectEnabled ? new Map() : null;
547
+ const publicRepoCache = projectEnabled ? new Map() : null;
548
+ const opencodeState = normalizeOpencodeState(cursors?.opencode);
549
+ const opencodeSqliteState = normalizeOpencodeSqliteState(cursors?.opencodeSqlite);
550
+ const messageIndex = opencodeState.messages;
551
+ const touchedBuckets = new Set();
552
+ const defaultSource = normalizeSourceInput(source) || "opencode";
553
+ let sqliteStatus = opencodeSqliteState.lastStatus || "never_checked";
554
+ let sqliteCheckedAt = opencodeSqliteState.lastCheckedAt || null;
555
+ let sqliteErrorCode = opencodeSqliteState.lastErrorCode || null;
556
+
557
+ if (!cursors.files || typeof cursors.files !== "object") {
558
+ cursors.files = {};
559
+ }
560
+
532
561
  if (typeof opencodeDbPath === "string" && opencodeDbPath.length > 0) {
533
562
  const readRows =
534
563
  typeof readSqliteRows === "function" ? readSqliteRows : readOpencodeSqliteRows;
@@ -753,6 +782,8 @@ async function parseRolloutFile({
753
782
  return { endOffset, lastTotal: totals, lastModel: model, eventsAggregated };
754
783
  }
755
784
 
785
+ const CLAUDE_SEEN_IDS_LIMIT = 500;
786
+
756
787
  async function parseClaudeFile({
757
788
  filePath,
758
789
  startOffset,
@@ -763,12 +794,18 @@ async function parseClaudeFile({
763
794
  projectTouchedBuckets,
764
795
  projectRef,
765
796
  projectKey,
797
+ priorSeenIds,
766
798
  }) {
799
+ const seenOrder = Array.isArray(priorSeenIds) ? priorSeenIds.slice() : [];
800
+ const seenSet = new Set(seenOrder);
801
+
767
802
  const st = await fs.stat(filePath).catch(() => null);
768
- if (!st || !st.isFile()) return { endOffset: startOffset, eventsAggregated: 0 };
803
+ if (!st || !st.isFile()) {
804
+ return { endOffset: startOffset, eventsAggregated: 0, seenIds: seenOrder };
805
+ }
769
806
 
770
807
  const endOffset = st.size;
771
- if (startOffset >= endOffset) return { endOffset, eventsAggregated: 0 };
808
+ if (startOffset >= endOffset) return { endOffset, eventsAggregated: 0, seenIds: seenOrder };
772
809
 
773
810
  const stream = fssync.createReadStream(filePath, { encoding: "utf8", start: startOffset });
774
811
  const rl = readline.createInterface({ input: stream, crlfDelay: Infinity });
@@ -786,6 +823,12 @@ async function parseClaudeFile({
786
823
  const usage = obj?.message?.usage || obj?.usage;
787
824
  if (!usage || typeof usage !== "object") continue;
788
825
 
826
+ // Claude Code writes the same assistant message multiple times in the session log
827
+ // (same `message.id` / `requestId`, different outer `uuid`). Aggregate once per
828
+ // upstream Anthropic response to avoid multi-counting token usage.
829
+ const dedupeId = obj?.message?.id || obj?.requestId || null;
830
+ if (dedupeId && seenSet.has(dedupeId)) continue;
831
+
789
832
  const model = normalizeModelInput(obj?.message?.model || obj?.model) || DEFAULT_MODEL;
790
833
  const tokenTimestamp = typeof obj?.timestamp === "string" ? obj.timestamp : null;
791
834
  if (!tokenTimestamp) continue;
@@ -796,6 +839,78 @@ async function parseClaudeFile({
796
839
  const bucketStart = toUtcHalfHourStart(tokenTimestamp);
797
840
  if (!bucketStart) continue;
798
841
 
842
+ const bucket = getHourlyBucket(hourlyState, source, model, bucketStart);
843
+ addTotals(bucket.totals, delta);
844
+ touchedBuckets.add(bucketKey(source, model, bucketStart));
845
+ if (projectKey && projectState && projectTouchedBuckets) {
846
+ const projectBucket = getProjectBucket(
847
+ projectState,
848
+ projectKey,
849
+ source,
850
+ bucketStart,
851
+ projectRef,
852
+ );
853
+ addTotals(projectBucket.totals, delta);
854
+ projectTouchedBuckets.add(projectBucketKey(projectKey, source, bucketStart));
855
+ }
856
+ if (dedupeId) {
857
+ seenSet.add(dedupeId);
858
+ seenOrder.push(dedupeId);
859
+ }
860
+ eventsAggregated += 1;
861
+ }
862
+
863
+ rl.close();
864
+ stream.close?.();
865
+ const trimmedSeenIds =
866
+ seenOrder.length > CLAUDE_SEEN_IDS_LIMIT
867
+ ? seenOrder.slice(seenOrder.length - CLAUDE_SEEN_IDS_LIMIT)
868
+ : seenOrder;
869
+ return { endOffset, eventsAggregated, seenIds: trimmedSeenIds };
870
+ }
871
+
872
+ async function parseKimiFile({
873
+ filePath,
874
+ startOffset,
875
+ hourlyState,
876
+ touchedBuckets,
877
+ source,
878
+ projectState,
879
+ projectTouchedBuckets,
880
+ projectRef,
881
+ projectKey,
882
+ }) {
883
+ const st = await fs.stat(filePath).catch(() => null);
884
+ if (!st || !st.isFile()) return { endOffset: startOffset, eventsAggregated: 0 };
885
+
886
+ const endOffset = st.size;
887
+ if (startOffset >= endOffset) return { endOffset, eventsAggregated: 0 };
888
+
889
+ const stream = fssync.createReadStream(filePath, { encoding: "utf8", start: startOffset });
890
+ const rl = readline.createInterface({ input: stream, crlfDelay: Infinity });
891
+
892
+ let eventsAggregated = 0;
893
+ for await (const line of rl) {
894
+ if (!line || !line.includes("StatusUpdate")) continue;
895
+ let obj;
896
+ try {
897
+ obj = JSON.parse(line);
898
+ } catch (_e) {
899
+ continue;
900
+ }
901
+ if (!obj || !obj.message || obj.message.type !== "StatusUpdate") continue;
902
+
903
+ const payload = obj.message.payload;
904
+ const delta = normalizeKimiUsage(payload?.token_usage);
905
+ if (!delta || isAllZeroUsage(delta)) continue;
906
+
907
+ const tsIso = kimiTimestampToIso(obj.timestamp);
908
+ if (!tsIso) continue;
909
+
910
+ const bucketStart = toUtcHalfHourStart(tsIso);
911
+ if (!bucketStart) continue;
912
+
913
+ const model = normalizeModelInput(payload?.model) || DEFAULT_MODEL;
799
914
  const bucket = getHourlyBucket(hourlyState, source, model, bucketStart);
800
915
  addTotals(bucket.totals, delta);
801
916
  touchedBuckets.add(bucketKey(source, model, bucketStart));
@@ -2058,6 +2173,32 @@ function normalizeGeminiTokens(tokens) {
2058
2173
  };
2059
2174
  }
2060
2175
 
2176
// Convert a Kimi `token_usage` payload into the canonical usage record shape
// used by the hourly aggregator. Cache-creation tokens are folded into
// `input_tokens` (mirroring the other normalizers); cache reads are reported
// separately via `cached_input_tokens` and included in `total_tokens`.
// Returns null for a missing or non-object payload.
function normalizeKimiUsage(usage) {
  if (typeof usage !== "object" || usage === null) return null;

  const freshInput = toNonNegativeInt(usage.input_other);
  const cacheWrite = toNonNegativeInt(usage.input_cache_creation);
  const cacheRead = toNonNegativeInt(usage.input_cache_read);
  const outputTokens = toNonNegativeInt(usage.output);
  const inputTokens = freshInput + cacheWrite;

  return {
    input_tokens: inputTokens,
    cached_input_tokens: cacheRead,
    output_tokens: outputTokens,
    // Kimi's payload carries no reasoning-token channel; always zero.
    reasoning_output_tokens: 0,
    total_tokens: inputTokens + cacheRead + outputTokens,
  };
}
2192
+
2193
/**
 * Convert a Kimi session-log timestamp (seconds or milliseconds since the
 * Unix epoch) into an ISO-8601 UTC string.
 *
 * @param {number|string} value - Raw timestamp from the session log.
 * @returns {string|null} ISO string, or null when the value is missing,
 *   non-numeric, non-positive, or outside the range `Date` can represent.
 */
function kimiTimestampToIso(value) {
  const n = Number(value);
  if (!Number.isFinite(n) || n <= 0) return null;
  // Heuristic: 1e12 ms is ~2001-09-09, so second-precision timestamps from
  // any realistic session fall below it while ms-precision ones sit above.
  const ms = n < 1e12 ? Math.floor(n * 1000) : Math.floor(n);
  const date = new Date(ms);
  // Guard against out-of-range epochs (beyond ±8.64e15 ms): `toISOString()`
  // throws a RangeError on an invalid Date, which would let a single corrupt
  // log line abort the entire parse pass instead of being skipped.
  if (Number.isNaN(date.getTime())) return null;
  return date.toISOString();
}
2201
+
2061
2202
  function normalizeOpencodeTokens(tokens) {
2062
2203
  if (!tokens || typeof tokens !== "object") return null;
2063
2204
  const input = toNonNegativeInt(tokens.input);
@@ -2066,7 +2207,10 @@ function normalizeOpencodeTokens(tokens) {
2066
2207
  const cached = toNonNegativeInt(tokens.cache?.read);
2067
2208
  const cacheWrite = toNonNegativeInt(tokens.cache?.write);
2068
2209
  const inputTokens = input + cacheWrite;
2069
- const total = inputTokens + output + reasoning;
2210
+ // Include cache-read tokens in the total so OpenCode sessions do not
2211
+ // under-count the way Claude did before the parallel fix; cache-read is
2212
+ // real spend the user pays for on every turn.
2213
+ const total = inputTokens + cached + output + reasoning;
2070
2214
 
2071
2215
  return {
2072
2216
  input_tokens: inputTokens,
@@ -2189,12 +2333,19 @@ function normalizeUsage(u) {
2189
2333
  function normalizeClaudeUsage(u) {
2190
2334
  const inputTokens =
2191
2335
  toNonNegativeInt(u?.input_tokens) + toNonNegativeInt(u?.cache_creation_input_tokens);
2336
+ const cachedInputTokens = toNonNegativeInt(u?.cache_read_input_tokens);
2192
2337
  const outputTokens = toNonNegativeInt(u?.output_tokens);
2193
2338
  const hasTotal = u && Object.prototype.hasOwnProperty.call(u, "total_tokens");
2194
- const totalTokens = hasTotal ? toNonNegativeInt(u?.total_tokens) : inputTokens + outputTokens;
2339
+ // Claude's Messages API does not emit `total_tokens`. When absent, compose it
2340
+ // from all four channels (input / cache_creation / cache_read / output). The
2341
+ // old formula omitted cache_read, which is ~99% of token spend on long
2342
+ // Claude Opus sessions and was the main driver of user-visible under-counts.
2343
+ const totalTokens = hasTotal
2344
+ ? toNonNegativeInt(u?.total_tokens)
2345
+ : inputTokens + cachedInputTokens + outputTokens;
2195
2346
  return {
2196
2347
  input_tokens: inputTokens,
2197
- cached_input_tokens: toNonNegativeInt(u?.cache_read_input_tokens),
2348
+ cached_input_tokens: cachedInputTokens,
2198
2349
  output_tokens: outputTokens,
2199
2350
  reasoning_output_tokens: 0,
2200
2351
  total_tokens: totalTokens,
@@ -2293,10 +2444,12 @@ module.exports = {
2293
2444
  listRolloutFiles,
2294
2445
  listClaudeProjectFiles,
2295
2446
  listGeminiSessionFiles,
2447
+ listKimiSessionFiles,
2296
2448
  listOpencodeMessageFiles,
2297
2449
  parseRolloutIncremental,
2298
2450
  parseClaudeIncremental,
2299
2451
  parseGeminiIncremental,
2452
+ parseKimiIncremental,
2300
2453
  parseOpencodeIncremental,
2301
2454
  normalizeHourlyState,
2302
2455
  getHourlyBucket,