vibeusage 0.5.0 → 0.6.0

This diff shows the changes between publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in their respective public registries.
@@ -0,0 +1,203 @@
1
+ "use strict";
2
+
3
+ const fs = require("node:fs");
4
+ const path = require("node:path");
5
+
6
+ /**
7
+ * Shared strategy factory for Codex-family rollout audits.
8
+ *
9
+ * Codex and Every-Code write identical rollout .jsonl streams into
10
+ * <home>/<subdir>/sessions/<YYYY>/<MM>/<DD>/rollout-*.jsonl. The directory
11
+ * layout is the only difference (CODEX_HOME / CODE_HOME). Token accounting
12
+ * semantics differ from Claude's in two important ways:
13
+ *
14
+ * 1. Token events are `payload.type === "token_count"` rows that carry
15
+ * `info.total_token_usage` (cumulative) and `info.last_token_usage`
16
+ * (delta for the latest API call). Events with `info: null` are
17
+ * rate-limit-only pings and must be ignored.
18
+ *
19
+ * 2. `input_tokens` already includes cached input, and `output_tokens`
20
+ * already includes `reasoning_output_tokens`. Naively summing all five
21
+ * channels double-counts. The authoritative per-turn total is simply
22
+ * `total_tokens` on the upstream payload, which is what normalizeUsage
23
+ * passes through to the DB unchanged.
24
+ *
25
+ * Approach:
26
+ * - walkSessions prunes by YYYY/MM/DD directories before hitting the jsonl
27
+ * files so the auditor does not scan all ~240K Codex rollouts just to
28
+ * look at the last 14 days.
29
+ * - iterateRecords is stateful per file: it tracks the last seen
30
+ * total_token_usage and yields a synthetic delta object whenever the
31
+ * total changes (uses last_token_usage when available, otherwise the
32
+ * total_prev diff). Duplicate token_count rows with identical totals are
33
+ * skipped; that mirrors parseRolloutFile's pickDelta logic.
34
+ * - extractUsage routes the authoritative `total_tokens` number into the
35
+ * `output` channel and zeroes the rest. The framework sums the five
36
+ * channels to compute row.truth, so putting the whole total into one
37
+ * channel is a deliberate trick that keeps day totals correct without
38
+ * exposing Codex's overlapping channel semantics through the generic
39
+ * contract.
40
+ */
41
+
42
/**
 * Build a rollout-audit strategy object for one Codex-family CLI.
 *
 * @param {object} opts
 * @param {string} opts.id - Strategy id exposed to the audit framework.
 * @param {string} opts.displayName - Human-readable tool name.
 * @param {string} opts.envKey - Env var that overrides the home directory.
 * @param {string} opts.defaultSubdir - Fallback subdir under the user's home.
 * @returns {object} Strategy with sessionRoot / walkSessions / iterateRecords / extractUsage.
 */
function makeRolloutStrategy({ id, displayName, envKey, defaultSubdir }) {
  return {
    id,
    displayName,
    // Env override wins; otherwise <home>/<defaultSubdir>/sessions.
    sessionRoot({ home, env }) {
      const base = (env && env[envKey]) || path.join(home, defaultSubdir);
      return path.join(base, "sessions");
    },
    // Collect rollout-*.jsonl paths, pruning by the YYYY/MM/DD directory
    // layout before touching any file contents.
    walkSessions({ root, windowStartIso }) {
      if (!fs.existsSync(root)) return [];
      // Rollout events written on day N can carry timestamps from day N-1
      // (sessions straddle midnight). Keep directories starting one day
      // before the window so we do not drop boundary events.
      const bufferDay = shiftIsoDay(windowStartIso, -1);
      const out = [];
      for (const year of safeReadDirSync(root)) {
        if (!year.isDirectory() || !/^\d{4}$/.test(year.name)) continue;
        const yearDir = path.join(root, year.name);
        for (const month of safeReadDirSync(yearDir)) {
          if (!month.isDirectory() || !/^\d{2}$/.test(month.name)) continue;
          const monthDir = path.join(yearDir, month.name);
          for (const day of safeReadDirSync(monthDir)) {
            if (!day.isDirectory() || !/^\d{2}$/.test(day.name)) continue;
            if (bufferDay) {
              // Zero-padded ISO dates compare correctly as strings.
              const dayIso = `${year.name}-${month.name}-${day.name}`;
              if (dayIso < bufferDay) continue;
            }
            const dayDir = path.join(monthDir, day.name);
            for (const f of safeReadDirSync(dayDir)) {
              if (!f.isFile()) continue;
              if (!f.name.startsWith("rollout-") || !f.name.endsWith(".jsonl")) continue;
              out.push(path.join(dayDir, f.name));
            }
          }
        }
      }
      return out;
    },
    // Stateful per-file scan: turn cumulative token_count rows into
    // synthetic per-turn delta records (see module docstring).
    *iterateRecords(filePath) {
      let text;
      try {
        text = fs.readFileSync(filePath, "utf8");
      } catch (_err) {
        // Unreadable file: yield nothing rather than aborting the audit.
        return;
      }
      if (!text) return;
      let prevTotal = null;
      for (const line of text.split("\n")) {
        // Cheap substring filter before paying for JSON.parse.
        if (!line || !line.includes("token_count")) continue;
        let obj;
        try {
          obj = JSON.parse(line);
        } catch (_err) {
          continue;
        }
        const payload = obj?.payload;
        if (!payload || payload.type !== "token_count") continue;
        const info = payload.info;
        // info === null marks a rate-limit-only ping; skip it.
        if (!info) continue;
        const total = info.total_token_usage || null;
        const last = info.last_token_usage || null;
        if (!total && !last) continue;
        // Duplicate token_count: same totals, skip.
        if (prevTotal && total && sameUsage(prevTotal, total)) continue;
        // Delta preference order: upstream last_token_usage when non-empty,
        // else diff against the previous cumulative total, else the first
        // cumulative total itself.
        let delta;
        if (last && Number(last.total_tokens) > 0) {
          delta = last;
        } else if (prevTotal && total) {
          delta = diffUsage(total, prevTotal);
        } else if (total) {
          delta = total;
        } else {
          delta = null;
        }
        if (total) prevTotal = total;
        if (!delta || !Number(delta.total_tokens)) continue;
        yield {
          line: JSON.stringify({ timestamp: obj.timestamp, delta }),
          context: { filePath },
        };
      }
    },
    // Parse one synthetic delta line produced by iterateRecords above.
    extractUsage(line) {
      if (!line) return null;
      let obj;
      try {
        obj = JSON.parse(line);
      } catch (_err) {
        return null;
      }
      const ts = typeof obj.timestamp === "string" ? obj.timestamp : null;
      const d = obj.delta;
      if (!ts || !d) return null;
      const totalTokens = Number(d.total_tokens);
      if (!Number.isFinite(totalTokens) || totalTokens <= 0) return null;
      return {
        timestamp: ts,
        dedupeId: null, // per-file dedup already done in iterateRecords
        channels: {
          input: 0,
          cache_creation: 0,
          cache_read: 0,
          // Route the authoritative Codex upstream total into a single
          // channel so the framework's sum-of-channels lands on it. See
          // module docstring for why we do not split the channels.
          output: totalTokens,
          reasoning: 0,
        },
      };
    },
  };
}
154
+
155
// Best-effort directory listing: missing or unreadable paths yield an
// empty array instead of throwing.
function safeReadDirSync(p) {
  let entries;
  try {
    entries = fs.readdirSync(p, { withFileTypes: true });
  } catch (_err) {
    entries = [];
  }
  return entries;
}
162
+
163
// Two usage snapshots are "the same" when every token channel matches
// numerically; a missing channel counts as 0. Either side being falsy is
// never a match.
function sameUsage(a, b) {
  if (!a || !b) return false;
  const channels = [
    "input_tokens",
    "cached_input_tokens",
    "output_tokens",
    "reasoning_output_tokens",
    "total_tokens",
  ];
  return channels.every((key) => Number(a[key] || 0) === Number(b[key] || 0));
}
176
+
177
// Per-channel non-negative difference between two cumulative usage
// snapshots. A shrinking total_tokens means the session was reset
// upstream; the new cumulative value is then itself the delta.
function diffUsage(curr, prev) {
  if (!curr || !prev) return curr || null;
  if (Number(curr.total_tokens || 0) < Number(prev.total_tokens || 0)) {
    return curr; // session reset
  }
  const channels = [
    "input_tokens",
    "cached_input_tokens",
    "output_tokens",
    "reasoning_output_tokens",
    "total_tokens",
  ];
  return Object.fromEntries(
    channels.map((key) => [
      key,
      Math.max(0, Number(curr[key] || 0) - Number(prev[key] || 0)),
    ])
  );
}
194
+
195
// Shift a YYYY-MM-DD (or longer ISO) string by deltaDays, in UTC.
// Returns the shifted YYYY-MM-DD, or null for empty / unparsable input.
function shiftIsoDay(iso, deltaDays) {
  if (typeof iso !== "string" || iso === "") return null;
  const day = new Date(`${iso.slice(0, 10)}T00:00:00Z`);
  if (Number.isNaN(day.getTime())) return null;
  day.setUTCDate(day.getUTCDate() + deltaDays);
  return day.toISOString().slice(0, 10);
}
202
+
203
// Sole export: the shared strategy factory consumed by the per-tool
// Codex-family strategy modules.
module.exports = { makeRolloutStrategy };
@@ -0,0 +1,52 @@
1
+ "use strict";
2
+
3
+ const fs = require("node:fs");
4
+ const path = require("node:path");
5
+
6
+ module.exports = {
7
+ id: "claude",
8
+ displayName: "Claude Code",
9
+ sessionRoot({ home }) {
10
+ return path.join(home, ".claude", "projects");
11
+ },
12
+ walkSessions({ root }) {
13
+ if (!fs.existsSync(root)) return [];
14
+ const out = [];
15
+ for (const entry of fs.readdirSync(root, { withFileTypes: true })) {
16
+ if (!entry.isDirectory()) continue;
17
+ const dir = path.join(root, entry.name);
18
+ for (const f of fs.readdirSync(dir, { withFileTypes: true })) {
19
+ if (!f.isFile()) continue;
20
+ if (!f.name.endsWith(".jsonl")) continue;
21
+ out.push(path.join(dir, f.name));
22
+ }
23
+ }
24
+ return out;
25
+ },
26
+ extractUsage(line) {
27
+ if (!line || !line.includes('"usage"')) return null;
28
+ let obj;
29
+ try {
30
+ obj = JSON.parse(line);
31
+ } catch (_err) {
32
+ return null;
33
+ }
34
+ const msg = obj?.message || {};
35
+ const usage = msg.usage || obj.usage;
36
+ if (!usage || typeof usage !== "object") return null;
37
+ const timestamp = typeof obj.timestamp === "string" ? obj.timestamp : null;
38
+ if (!timestamp) return null;
39
+
40
+ return {
41
+ timestamp,
42
+ dedupeId: msg.id || obj.requestId || null,
43
+ channels: {
44
+ input: usage.input_tokens,
45
+ cache_creation: usage.cache_creation_input_tokens,
46
+ cache_read: usage.cache_read_input_tokens,
47
+ output: usage.output_tokens,
48
+ reasoning: 0,
49
+ },
50
+ };
51
+ },
52
+ };
@@ -0,0 +1,10 @@
1
+ "use strict";
2
+
3
+ const { makeRolloutStrategy } = require("./_rollout-base");
4
+
5
// Codex CLI: rollouts live under $CODEX_HOME (default <home>/.codex).
const CODEX_CONFIG = {
  id: "codex",
  displayName: "Codex CLI",
  envKey: "CODEX_HOME",
  defaultSubdir: ".codex",
};

module.exports = makeRolloutStrategy(CODEX_CONFIG);
@@ -0,0 +1,10 @@
1
+ "use strict";
2
+
3
+ const { makeRolloutStrategy } = require("./_rollout-base");
4
+
5
// Every Code: rollouts live under $CODE_HOME (default <home>/.code).
const EVERY_CODE_CONFIG = {
  id: "every-code",
  displayName: "Every Code",
  envKey: "CODE_HOME",
  defaultSubdir: ".code",
};

module.exports = makeRolloutStrategy(EVERY_CODE_CONFIG);
@@ -0,0 +1,154 @@
1
+ "use strict";
2
+
3
+ const fs = require("node:fs");
4
+ const path = require("node:path");
5
+
6
+ /**
7
+ * Gemini audit strategy.
8
+ *
9
+ * Gemini writes one JSON per session under
10
+ * ~/.gemini/tmp/<hash>/chats/session-YYYY-MM-DDTHH-MM-<id>.json
11
+ * The file has `{ messages: [ { role, timestamp, model, tokens } ] }` where
12
+ * `tokens` is the cumulative usage up to that message (not a per-turn delta).
13
+ *
14
+ * Channel semantics differ from Claude but match Codex in one important way:
15
+ * input + cached + output + tool + thoughts != total
16
+ * because `tokens.total` is the authoritative upstream count that
17
+ * src/lib/rollout.js normalizeGeminiTokens passes through as-is to the DB.
18
+ * Naively summing the five sub-channels double-counts. As with the Codex
19
+ * strategy, we route `delta.total` into the output channel and zero the rest
20
+ * so the framework's sum-of-channels row.truth equals the DB total_tokens
21
+ * without exposing Gemini's internal breakdown through the generic contract.
22
+ *
23
+ * Dedupe:
24
+ * - Per-file index diff mirrors parseGeminiFile's `lastTotals` state.
25
+ * - When `tokens.total` drops (session reset / resume), we treat the current
26
+ * cumulative as the delta just like the parser does.
27
+ */
28
+
29
module.exports = {
  id: "gemini",
  displayName: "Gemini CLI",
  // Env override wins; otherwise <home>/.gemini/tmp.
  sessionRoot({ home, env }) {
    const base = (env && env.GEMINI_HOME) || path.join(home, ".gemini");
    return path.join(base, "tmp");
  },
  // Collect session-*.json files from <root>/<hash>/chats/.
  walkSessions({ root }) {
    if (!fs.existsSync(root)) return [];
    const out = [];
    for (const hash of safeReadDirSync(root)) {
      if (!hash.isDirectory()) continue;
      const chatsDir = path.join(root, hash.name, "chats");
      for (const f of safeReadDirSync(chatsDir)) {
        if (!f.isFile()) continue;
        if (!f.name.startsWith("session-") || !f.name.endsWith(".json")) continue;
        out.push(path.join(chatsDir, f.name));
      }
    }
    return out;
  },
  // Stateful per-file scan: each message's `tokens` is cumulative, so
  // diff against the previous snapshot to yield per-message deltas
  // (see module docstring for the reset / resume semantics).
  *iterateRecords(filePath) {
    let raw;
    try {
      raw = fs.readFileSync(filePath, "utf8");
    } catch (_err) {
      // Unreadable file: yield nothing rather than aborting the audit.
      return;
    }
    if (!raw.trim()) return;
    let session;
    try {
      session = JSON.parse(raw);
    } catch (_err) {
      return;
    }
    const messages = Array.isArray(session?.messages) ? session.messages : [];
    let prevTotals = null;
    for (const msg of messages) {
      if (!msg || typeof msg !== "object") continue;
      const ts = typeof msg.timestamp === "string" ? msg.timestamp : null;
      if (!ts) continue;
      const tokens = msg.tokens;
      if (!tokens || typeof tokens !== "object") continue;

      const curr = normalizeTokens(tokens);
      const delta = diffTotals(curr, prevTotals);
      prevTotals = curr;
      // Zero-total deltas carry no usage; skip them.
      if (!delta || !delta.total) continue;

      yield {
        line: JSON.stringify({ timestamp: ts, delta }),
        context: { filePath },
      };
    }
  },
  // Parse one synthetic delta line produced by iterateRecords above.
  extractUsage(line) {
    if (!line) return null;
    let obj;
    try {
      obj = JSON.parse(line);
    } catch (_err) {
      return null;
    }
    const ts = typeof obj.timestamp === "string" ? obj.timestamp : null;
    const d = obj.delta;
    if (!ts || !d || !Number(d.total)) return null;
    return {
      timestamp: ts,
      dedupeId: null, // per-file index diff already dedupes
      channels: {
        input: 0,
        cache_creation: 0,
        cache_read: 0,
        // Route the authoritative upstream total into a single channel; see
        // module docstring for why we do not split it.
        output: Number(d.total),
        reasoning: 0,
      },
    };
  },
};
110
+
111
// Clamp every Gemini token channel to a non-negative integer (missing,
// negative, or non-finite values become 0).
function normalizeTokens(tokens) {
  const clamp = (v) => {
    const n = Number(v);
    return Number.isFinite(n) && n >= 0 ? Math.floor(n) : 0;
  };
  return {
    input: clamp(tokens.input),
    cached: clamp(tokens.cached),
    output: clamp(tokens.output),
    tool: clamp(tokens.tool),
    thoughts: clamp(tokens.thoughts),
    total: clamp(tokens.total),
  };
}
121
+
122
// Convert cumulative Gemini token snapshots into a per-message delta.
// With no prior snapshot, or when the upstream total went backwards
// (session reset / resume), the current cumulative value is the delta.
function diffTotals(curr, prev) {
  if (!curr) return null;
  if (!prev || curr.total < prev.total) return curr;
  const delta = {};
  for (const key of ["input", "cached", "output", "tool", "thoughts", "total"]) {
    delta[key] = Math.max(0, curr[key] - prev[key]);
  }
  return delta;
}
141
+
142
// Coerce to a non-negative integer; non-finite or negative input -> 0.
function nonneg(v) {
  const n = Number(v);
  return Number.isFinite(n) && n >= 0 ? Math.floor(n) : 0;
}
147
+
148
// Tolerant readdir: any fs error (missing path, permissions) maps to [].
function safeReadDirSync(p) {
  let listing = [];
  try {
    listing = fs.readdirSync(p, { withFileTypes: true });
  } catch (_err) {
    // fall through with the empty listing
  }
  return listing;
}
@@ -0,0 +1,69 @@
1
+ "use strict";
2
+
3
+ const fs = require("node:fs");
4
+ const path = require("node:path");
5
+
6
+ /**
7
+ * Hermes audit strategy.
8
+ *
9
+ * Hermes does not write raw session logs; it emits one pre-aggregated event
10
+ * per turn into the vibeusage tracker directory:
11
+ * ~/.vibeusage/tracker/hermes.usage.jsonl
12
+ * Each line is a `{type: "usage", emitted_at, model, input_tokens,
13
+ * output_tokens, cache_read_tokens, cache_write_tokens, reasoning_tokens,
14
+ * total_tokens}` record. src/commands/sync.js parseHermesUsageLedger already
15
+ * copies `total_tokens` straight into the bucket, so this audit routes the
16
+ * upstream total into the output channel — same pattern we use for Codex and
17
+ * Gemini.
18
+ *
19
+ * sessionRoot: the tracker directory (NOT `~/.hermes/...` — Hermes usage data
20
+ * lives under ~/.vibeusage/tracker because Hermes is a plugin that hands
21
+ * vibeusage ledger rows directly).
22
+ */
23
+
24
+ module.exports = {
25
+ id: "hermes",
26
+ displayName: "Hermes Plugin",
27
+ sessionRoot({ home, env }) {
28
+ const base = (env && env.VIBEUSAGE_HOME) || path.join(home, ".vibeusage");
29
+ return path.join(base, "tracker");
30
+ },
31
+ walkSessions({ root }) {
32
+ const ledger = path.join(root, "hermes.usage.jsonl");
33
+ if (!fs.existsSync(ledger)) return [];
34
+ return [ledger];
35
+ },
36
+ extractUsage(line) {
37
+ if (!line) return null;
38
+ let event;
39
+ try {
40
+ event = JSON.parse(line);
41
+ } catch (_err) {
42
+ return null;
43
+ }
44
+ if (!event || event.type !== "usage") return null;
45
+ const timestamp = typeof event.emitted_at === "string" ? event.emitted_at : null;
46
+ if (!timestamp) return null;
47
+ const total = nonneg(event.total_tokens);
48
+ if (total === 0) return null;
49
+ return {
50
+ timestamp,
51
+ // Hermes ledger records do not carry a stable per-event id;
52
+ // the ledger is append-only and duplicates are prevented at write time.
53
+ dedupeId: null,
54
+ channels: {
55
+ input: 0,
56
+ cache_creation: 0,
57
+ cache_read: 0,
58
+ output: total, // route authoritative upstream total here
59
+ reasoning: 0,
60
+ },
61
+ };
62
+ },
63
+ };
64
+
65
// Clamp an arbitrary value to a non-negative integer count.
function nonneg(v) {
  const n = Number(v);
  if (!Number.isFinite(n)) return 0;
  return n < 0 ? 0 : Math.floor(n);
}
@@ -0,0 +1,105 @@
1
+ "use strict";
2
+
3
+ const fs = require("node:fs");
4
+ const path = require("node:path");
5
+
6
+ /**
7
+ * Kimi audit strategy.
8
+ *
9
+ * Kimi CLI writes one wire log per session:
10
+ * ~/.kimi/sessions/<project>/<sessionId>/wire.jsonl
11
+ * Each StatusUpdate line carries the delta for one Anthropic-compatible
12
+ * message:
13
+ * { timestamp: <unix_seconds float>,
14
+ * message: { type: "StatusUpdate",
15
+ * payload: { message_id, token_usage: {
16
+ * input_other, input_cache_creation,
17
+ * input_cache_read, output } } } }
18
+ *
19
+ * Channel mapping lines up with src/lib/rollout.js normalizeKimiUsage so the
20
+ * framework's sum-of-channels row.truth equals the DB total_tokens:
21
+ * input = input_other + input_cache_creation
22
+ * cache_read = input_cache_read
23
+ * output = output
24
+ * (cache_creation, reasoning) = 0 (already folded into input / n/a)
25
+ * total = input + cache_read + output
26
+ *
27
+ * Dedupe key: payload.message_id (chatcmpl-…). Kimi does not currently
28
+ * duplicate rows the way Claude Code does, but keying on message_id is
29
+ * free insurance and matches the AGENTS.md intake checklist.
30
+ */
31
+
32
+ module.exports = {
33
+ id: "kimi",
34
+ displayName: "Kimi CLI",
35
+ sessionRoot({ home, env }) {
36
+ const base = (env && env.KIMI_HOME) || path.join(home, ".kimi");
37
+ return path.join(base, "sessions");
38
+ },
39
+ walkSessions({ root }) {
40
+ if (!fs.existsSync(root)) return [];
41
+ const out = [];
42
+ for (const proj of safeReadDirSync(root)) {
43
+ if (!proj.isDirectory()) continue;
44
+ const projDir = path.join(root, proj.name);
45
+ for (const session of safeReadDirSync(projDir)) {
46
+ if (!session.isDirectory()) continue;
47
+ const wire = path.join(projDir, session.name, "wire.jsonl");
48
+ if (!fs.existsSync(wire)) continue;
49
+ out.push(wire);
50
+ }
51
+ }
52
+ return out;
53
+ },
54
+ extractUsage(line) {
55
+ if (!line || !line.includes("StatusUpdate")) return null;
56
+ let obj;
57
+ try {
58
+ obj = JSON.parse(line);
59
+ } catch (_err) {
60
+ return null;
61
+ }
62
+ if (obj?.message?.type !== "StatusUpdate") return null;
63
+ const payload = obj.message.payload;
64
+ const tokens = payload?.token_usage;
65
+ if (!tokens || typeof tokens !== "object") return null;
66
+ const timestamp = unixSecondsToIso(obj.timestamp);
67
+ if (!timestamp) return null;
68
+ return {
69
+ timestamp,
70
+ dedupeId: typeof payload.message_id === "string" && payload.message_id
71
+ ? payload.message_id
72
+ : null,
73
+ channels: {
74
+ input: nonneg(tokens.input_other) + nonneg(tokens.input_cache_creation),
75
+ cache_creation: 0, // already folded into input per normalizeKimiUsage
76
+ cache_read: nonneg(tokens.input_cache_read),
77
+ output: nonneg(tokens.output),
78
+ reasoning: 0,
79
+ },
80
+ };
81
+ },
82
+ };
83
+
84
// Convert a unix-seconds timestamp (possibly fractional) to an ISO-8601
// string. Values >= 1e12 are assumed to already be milliseconds. Returns
// null for non-positive or unparsable input.
function unixSecondsToIso(value) {
  const num = Number(value);
  if (!Number.isFinite(num) || num <= 0) return null;
  const millis = Math.floor(num < 1e12 ? num * 1000 : num);
  const date = new Date(millis);
  return Number.isNaN(date.getTime()) ? null : date.toISOString();
}
92
+
93
// Sanitize a token count: any non-finite or negative value becomes 0,
// everything else is floored to an integer.
function nonneg(v) {
  const n = Number(v);
  if (!Number.isFinite(n) || n < 0) {
    return 0;
  }
  return Math.floor(n);
}
98
+
99
// readdir that never throws: errors (missing dir, permissions) yield [].
function safeReadDirSync(p) {
  let result;
  try {
    result = fs.readdirSync(p, { withFileTypes: true });
  } catch (_err) {
    result = [];
  }
  return result;
}
@@ -0,0 +1,64 @@
1
+ "use strict";
2
+
3
+ const fs = require("node:fs");
4
+ const path = require("node:path");
5
+
6
+ /**
7
+ * OpenClaw audit strategy.
8
+ *
9
+ * Like Hermes, OpenClaw hands vibeusage pre-aggregated ledger rows instead of
10
+ * raw session logs:
11
+ * ~/.vibeusage/tracker/openclaw-usage-ledger.jsonl
12
+ * Each line is a camelCase event
13
+ * { eventId, emittedAt, source, model, inputTokens, cachedInputTokens,
14
+ * outputTokens, reasoningOutputTokens, totalTokens }
15
+ * src/commands/sync.js parseOpenclawSanitizedLedger copies `totalTokens`
16
+ * straight into the bucket, so this audit routes the upstream total into the
17
+ * output channel. Dedupe is keyed on eventId, which the ledger writer
18
+ * already enforces uniqueness of.
19
+ */
20
+
21
+ module.exports = {
22
+ id: "openclaw",
23
+ displayName: "OpenClaw Plugin",
24
+ sessionRoot({ home, env }) {
25
+ const base = (env && env.VIBEUSAGE_HOME) || path.join(home, ".vibeusage");
26
+ return path.join(base, "tracker");
27
+ },
28
+ walkSessions({ root }) {
29
+ const ledger = path.join(root, "openclaw-usage-ledger.jsonl");
30
+ if (!fs.existsSync(ledger)) return [];
31
+ return [ledger];
32
+ },
33
+ extractUsage(line) {
34
+ if (!line) return null;
35
+ let event;
36
+ try {
37
+ event = JSON.parse(line);
38
+ } catch (_err) {
39
+ return null;
40
+ }
41
+ if (!event || typeof event !== "object") return null;
42
+ const timestamp = typeof event.emittedAt === "string" ? event.emittedAt : null;
43
+ if (!timestamp) return null;
44
+ const total = nonneg(event.totalTokens);
45
+ if (total === 0) return null;
46
+ return {
47
+ timestamp,
48
+ dedupeId: typeof event.eventId === "string" && event.eventId ? event.eventId : null,
49
+ channels: {
50
+ input: 0,
51
+ cache_creation: 0,
52
+ cache_read: 0,
53
+ output: total,
54
+ reasoning: 0,
55
+ },
56
+ };
57
+ },
58
+ };
59
+
60
// Coerce to a whole, non-negative token count (0 for anything invalid).
function nonneg(v) {
  const n = Number(v);
  return Number.isFinite(n) && n >= 0 ? Math.floor(n) : 0;
}