tokentracker-cli 0.5.98 → 0.5.100
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +6 -5
- package/README.zh-CN.md +7 -5
- package/bin/tracker.js +14 -0
- package/dashboard/dist/assets/{main-io8wJG1t.js → main-BYMjcXxR.js} +202 -194
- package/dashboard/dist/index.html +1 -1
- package/dashboard/dist/share.html +1 -1
- package/package.json +3 -2
- package/src/commands/init.js +20 -1
- package/src/commands/serve.js +7 -0
- package/src/commands/status.js +63 -15
- package/src/commands/sync.js +100 -1
- package/src/lib/cursor-config.js +15 -0
- package/src/lib/local-api.js +39 -16
- package/src/lib/pricing/seed-snapshot.json +1 -1
- package/src/lib/proxy-env.js +81 -0
- package/src/lib/rollout.js +299 -20
- package/src/lib/source-metadata.js +46 -0
- package/src/lib/usage-limits.js +312 -40
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
const cp = require("node:child_process");
|
|
2
|
+
|
|
3
|
+
// True when any of the conventional proxy environment variables is set
// (upper- or lower-case HTTPS/HTTP/ALL variants).
function hasProxyEnv(env = process.env) {
  const proxyVars = [
    "HTTPS_PROXY",
    "https_proxy",
    "HTTP_PROXY",
    "http_proxy",
    "ALL_PROXY",
    "all_proxy",
  ];
  return proxyVars.some((name) => Boolean(env[name]));
}
|
|
13
|
+
|
|
14
|
+
// Parse `scutil --proxy` style "Key : Value" output into an HTTPS proxy URL.
// Returns null when the HTTPS proxy is disabled or incompletely configured.
function parseMacProxyOutput(output) {
  const values = {};
  const lines = String(output || "").split(/\r?\n/);
  for (const line of lines) {
    const match = /^\s*([A-Za-z]+)\s*:\s*(.+?)\s*$/.exec(line);
    if (match) {
      values[match[1]] = match[2];
    }
  }
  const enabled = values.HTTPSEnable === "1";
  if (!enabled || !values.HTTPSProxy || !values.HTTPSPort) {
    return null;
  }
  return `http://${values.HTTPSProxy}:${values.HTTPSPort}`;
}
|
|
23
|
+
|
|
24
|
+
// Resolve proxy-related environment variables to apply to the process.
//
// If the caller's environment already carries proxy variables, only ensure
// NODE_USE_ENV_PROXY is set so Node honors them. Otherwise, on macOS only,
// query the system proxy via `scutil --proxy` and synthesize HTTPS_PROXY /
// HTTP_PROXY from it. Returns null when there is nothing to apply.
function resolveSystemProxyEnv({ env = process.env, platform = process.platform, commandRunner = cp.spawnSync } = {}) {
  if (hasProxyEnv(env)) {
    return { NODE_USE_ENV_PROXY: env.NODE_USE_ENV_PROXY || "1" };
  }

  // System proxy discovery is only implemented for macOS.
  if (platform !== "darwin") {
    return null;
  }
  const result = commandRunner("scutil", ["--proxy"], {
    encoding: "utf8",
    timeout: 2000,
  });
  if (result?.error || result?.status !== 0) {
    return null;
  }
  const proxyUrl = parseMacProxyOutput(result.stdout);
  if (!proxyUrl) {
    return null;
  }

  return {
    NODE_USE_ENV_PROXY: "1",
    HTTPS_PROXY: proxyUrl,
    HTTP_PROXY: proxyUrl,
  };
}
|
|
46
|
+
|
|
47
|
+
// Relaunch only for the long-running invocations (no subcommand, or `serve`),
// and never more than once — the relaunched child sets the guard variable.
function shouldRelaunchForProxy(argv, env = process.env) {
  if (env.TOKENTRACKER_PROXY_ENV_APPLIED === "1") {
    return false;
  }
  let command = null;
  if (Array.isArray(argv)) {
    command = argv[0];
  }
  return !command || command === "serve";
}
|
|
52
|
+
|
|
53
|
+
// Re-exec the CLI with system proxy variables injected, when warranted.
//
// Returns the spawnSync result of the child process, or null when no
// relaunch is needed (guard already applied, non-proxy command, no system
// proxy found, or the parent already had the same NODE_USE_ENV_PROXY).
function relaunchWithProxyEnvIfNeeded({
  argv,
  originalArgv,
  env = process.env,
  platform = process.platform,
  commandRunner = cp.spawnSync,
  nodePath = process.execPath,
} = {}) {
  if (!shouldRelaunchForProxy(argv, env)) {
    return null;
  }
  const proxyEnv = resolveSystemProxyEnv({ env, platform, commandRunner });
  if (!proxyEnv || proxyEnv.NODE_USE_ENV_PROXY === env.NODE_USE_ENV_PROXY) {
    return null;
  }

  // Mark the child so it does not attempt to relaunch again.
  const childEnv = { ...env, ...proxyEnv, TOKENTRACKER_PROXY_ENV_APPLIED: "1" };
  return commandRunner(nodePath, originalArgv, {
    stdio: "inherit",
    env: childEnv,
  });
}
|
|
75
|
+
|
|
76
|
+
// Public surface of this module: proxy detection helpers plus the relaunch
// entry point applied before command dispatch.
module.exports = {
  hasProxyEnv,
  parseMacProxyOutput,
  resolveSystemProxyEnv,
  relaunchWithProxyEnvIfNeeded,
};
|
package/src/lib/rollout.js
CHANGED
|
@@ -1244,6 +1244,7 @@ async function enqueueTouchedBuckets({ queuePath, hourlyState, touchedBuckets })
|
|
|
1244
1244
|
output_tokens: zeroTotals.output_tokens,
|
|
1245
1245
|
reasoning_output_tokens: zeroTotals.reasoning_output_tokens,
|
|
1246
1246
|
total_tokens: zeroTotals.total_tokens,
|
|
1247
|
+
billable_total_tokens: zeroTotals.billable_total_tokens,
|
|
1247
1248
|
conversation_count: zeroTotals.conversation_count,
|
|
1248
1249
|
}),
|
|
1249
1250
|
);
|
|
@@ -1265,6 +1266,7 @@ async function enqueueTouchedBuckets({ queuePath, hourlyState, touchedBuckets })
|
|
|
1265
1266
|
output_tokens: zeroTotals.output_tokens,
|
|
1266
1267
|
reasoning_output_tokens: zeroTotals.reasoning_output_tokens,
|
|
1267
1268
|
total_tokens: zeroTotals.total_tokens,
|
|
1269
|
+
billable_total_tokens: zeroTotals.billable_total_tokens,
|
|
1268
1270
|
conversation_count: zeroTotals.conversation_count,
|
|
1269
1271
|
}),
|
|
1270
1272
|
);
|
|
@@ -1292,6 +1294,7 @@ async function enqueueTouchedBuckets({ queuePath, hourlyState, touchedBuckets })
|
|
|
1292
1294
|
output_tokens: totals.output_tokens,
|
|
1293
1295
|
reasoning_output_tokens: totals.reasoning_output_tokens,
|
|
1294
1296
|
total_tokens: totals.total_tokens,
|
|
1297
|
+
billable_total_tokens: totals.billable_total_tokens ?? totals.total_tokens,
|
|
1295
1298
|
conversation_count: totals.conversation_count,
|
|
1296
1299
|
}),
|
|
1297
1300
|
);
|
|
@@ -1318,6 +1321,7 @@ async function enqueueTouchedBuckets({ queuePath, hourlyState, touchedBuckets })
|
|
|
1318
1321
|
output_tokens: zeroTotals.output_tokens,
|
|
1319
1322
|
reasoning_output_tokens: zeroTotals.reasoning_output_tokens,
|
|
1320
1323
|
total_tokens: zeroTotals.total_tokens,
|
|
1324
|
+
billable_total_tokens: zeroTotals.billable_total_tokens,
|
|
1321
1325
|
conversation_count: zeroTotals.conversation_count,
|
|
1322
1326
|
}),
|
|
1323
1327
|
);
|
|
@@ -1340,6 +1344,7 @@ async function enqueueTouchedBuckets({ queuePath, hourlyState, touchedBuckets })
|
|
|
1340
1344
|
output_tokens: zeroTotals.output_tokens,
|
|
1341
1345
|
reasoning_output_tokens: zeroTotals.reasoning_output_tokens,
|
|
1342
1346
|
total_tokens: zeroTotals.total_tokens,
|
|
1347
|
+
billable_total_tokens: zeroTotals.billable_total_tokens,
|
|
1343
1348
|
conversation_count: zeroTotals.conversation_count,
|
|
1344
1349
|
}),
|
|
1345
1350
|
);
|
|
@@ -1361,6 +1366,7 @@ async function enqueueTouchedBuckets({ queuePath, hourlyState, touchedBuckets })
|
|
|
1361
1366
|
output_tokens: unknownBucket.totals.output_tokens,
|
|
1362
1367
|
reasoning_output_tokens: unknownBucket.totals.reasoning_output_tokens,
|
|
1363
1368
|
total_tokens: unknownBucket.totals.total_tokens,
|
|
1369
|
+
billable_total_tokens: unknownBucket.totals.billable_total_tokens ?? unknownBucket.totals.total_tokens,
|
|
1364
1370
|
conversation_count: unknownBucket.totals.conversation_count,
|
|
1365
1371
|
}),
|
|
1366
1372
|
);
|
|
@@ -1407,6 +1413,7 @@ async function enqueueTouchedBuckets({ queuePath, hourlyState, touchedBuckets })
|
|
|
1407
1413
|
output_tokens: group.totals.output_tokens,
|
|
1408
1414
|
reasoning_output_tokens: group.totals.reasoning_output_tokens,
|
|
1409
1415
|
total_tokens: group.totals.total_tokens,
|
|
1416
|
+
billable_total_tokens: group.totals.billable_total_tokens ?? group.totals.total_tokens,
|
|
1410
1417
|
conversation_count: group.totals.conversation_count,
|
|
1411
1418
|
}),
|
|
1412
1419
|
);
|
|
@@ -1461,6 +1468,7 @@ async function enqueueTouchedProjectBuckets({
|
|
|
1461
1468
|
output_tokens: totals.output_tokens,
|
|
1462
1469
|
reasoning_output_tokens: totals.reasoning_output_tokens,
|
|
1463
1470
|
total_tokens: totals.total_tokens,
|
|
1471
|
+
billable_total_tokens: totals.billable_total_tokens ?? totals.total_tokens,
|
|
1464
1472
|
conversation_count: totals.conversation_count,
|
|
1465
1473
|
}),
|
|
1466
1474
|
);
|
|
@@ -1716,6 +1724,7 @@ function initTotals() {
|
|
|
1716
1724
|
output_tokens: 0,
|
|
1717
1725
|
reasoning_output_tokens: 0,
|
|
1718
1726
|
total_tokens: 0,
|
|
1727
|
+
billable_total_tokens: 0,
|
|
1719
1728
|
conversation_count: 0,
|
|
1720
1729
|
};
|
|
1721
1730
|
}
|
|
@@ -1727,6 +1736,7 @@ function addTotals(target, delta) {
|
|
|
1727
1736
|
target.output_tokens += delta.output_tokens || 0;
|
|
1728
1737
|
target.reasoning_output_tokens += delta.reasoning_output_tokens || 0;
|
|
1729
1738
|
target.total_tokens += delta.total_tokens || 0;
|
|
1739
|
+
target.billable_total_tokens += delta.billable_total_tokens ?? delta.total_tokens ?? 0;
|
|
1730
1740
|
target.conversation_count += delta.conversation_count || 0;
|
|
1731
1741
|
}
|
|
1732
1742
|
|
|
@@ -1738,6 +1748,7 @@ function totalsKey(totals) {
|
|
|
1738
1748
|
totals.output_tokens || 0,
|
|
1739
1749
|
totals.reasoning_output_tokens || 0,
|
|
1740
1750
|
totals.total_tokens || 0,
|
|
1751
|
+
totals.billable_total_tokens ?? totals.total_tokens ?? 0,
|
|
1741
1752
|
totals.conversation_count || 0,
|
|
1742
1753
|
].join("|");
|
|
1743
1754
|
}
|
|
@@ -2056,7 +2067,9 @@ function normalizeGeminiTokens(tokens) {
|
|
|
2056
2067
|
const output = toNonNegativeInt(tokens.output);
|
|
2057
2068
|
const tool = toNonNegativeInt(tokens.tool);
|
|
2058
2069
|
const thoughts = toNonNegativeInt(tokens.thoughts);
|
|
2059
|
-
const
|
|
2070
|
+
const reportedTotal = toNonNegativeInt(tokens.total);
|
|
2071
|
+
const computedTotal = input + cached + output + tool + thoughts;
|
|
2072
|
+
const total = Math.max(reportedTotal, computedTotal);
|
|
2060
2073
|
|
|
2061
2074
|
return {
|
|
2062
2075
|
input_tokens: input,
|
|
@@ -2150,26 +2163,18 @@ function pickDelta(lastUsage, totalUsage, prevTotals) {
|
|
|
2150
2163
|
const hasTotal = isNonEmptyObject(totalUsage);
|
|
2151
2164
|
const hasPrevTotals = isNonEmptyObject(prevTotals);
|
|
2152
2165
|
|
|
2153
|
-
// NOTE: We used to guard against "duplicate token_count records where
|
|
2154
|
-
// total_token_usage is unchanged" by returning null here. We removed that
|
|
2155
|
-
// guard to align token counts with ccusage exactly (audited against 10 days
|
|
2156
|
-
// of real rollouts). When last_token_usage is present we trust it as the
|
|
2157
|
-
// per-turn delta; when it's absent the cumulative-subtract path naturally
|
|
2158
|
-
// yields an all-zero delta on duplicates and is still filtered below.
|
|
2159
|
-
if (!hasLast && hasTotal && hasPrevTotals && totalsReset(totalUsage, prevTotals)) {
|
|
2160
|
-
const normalized = normalizeUsage(totalUsage);
|
|
2161
|
-
return isAllZeroUsage(normalized) ? null : normalized;
|
|
2162
|
-
}
|
|
2163
|
-
|
|
2164
|
-
if (hasLast) {
|
|
2165
|
-
return normalizeUsage(lastUsage);
|
|
2166
|
-
}
|
|
2167
|
-
|
|
2168
2166
|
if (hasTotal && hasPrevTotals) {
|
|
2167
|
+
if (totalsReset(totalUsage, prevTotals)) {
|
|
2168
|
+
const resetUsage = hasLast ? lastUsage : totalUsage;
|
|
2169
|
+
const normalized = normalizeUsage(resetUsage);
|
|
2170
|
+
return isAllZeroUsage(normalized) ? null : normalized;
|
|
2171
|
+
}
|
|
2172
|
+
|
|
2169
2173
|
const delta = {};
|
|
2170
2174
|
for (const k of [
|
|
2171
2175
|
"input_tokens",
|
|
2172
2176
|
"cached_input_tokens",
|
|
2177
|
+
"cache_creation_input_tokens",
|
|
2173
2178
|
"output_tokens",
|
|
2174
2179
|
"reasoning_output_tokens",
|
|
2175
2180
|
"total_tokens",
|
|
@@ -2182,6 +2187,11 @@ function pickDelta(lastUsage, totalUsage, prevTotals) {
|
|
|
2182
2187
|
return isAllZeroUsage(normalized) ? null : normalized;
|
|
2183
2188
|
}
|
|
2184
2189
|
|
|
2190
|
+
if (hasLast) {
|
|
2191
|
+
const normalized = normalizeUsage(lastUsage);
|
|
2192
|
+
return isAllZeroUsage(normalized) ? null : normalized;
|
|
2193
|
+
}
|
|
2194
|
+
|
|
2185
2195
|
if (hasTotal) {
|
|
2186
2196
|
const normalized = normalizeUsage(totalUsage);
|
|
2187
2197
|
return isAllZeroUsage(normalized) ? null : normalized;
|
|
@@ -2554,21 +2564,31 @@ async function parseCursorApiIncremental({
|
|
|
2554
2564
|
const hourlyState = normalizeHourlyState(cursors?.hourly);
|
|
2555
2565
|
const touchedBuckets = new Set();
|
|
2556
2566
|
|
|
2557
|
-
//
|
|
2567
|
+
// Cursor's CSV is an account-level API export, not an append-only local log.
|
|
2568
|
+
// Treat the fetched CSV as authoritative so historical backfills and row
|
|
2569
|
+
// corrections replace prior local bucket totals instead of being skipped.
|
|
2558
2570
|
const lastTs = cursors?.cursorApi?.lastRecordTimestamp || null;
|
|
2559
2571
|
let latestTs = lastTs;
|
|
2560
2572
|
let eventsAggregated = 0;
|
|
2561
2573
|
const cb = typeof onProgress === "function" ? onProgress : null;
|
|
2562
2574
|
const total = records.length;
|
|
2563
2575
|
|
|
2576
|
+
if (records.length > 0) {
|
|
2577
|
+
for (const [key, bucket] of Object.entries(hourlyState.buckets || {})) {
|
|
2578
|
+
const parsed = parseBucketKey(key);
|
|
2579
|
+
const sourceKey = normalizeSourceInput(parsed.source) || DEFAULT_SOURCE;
|
|
2580
|
+
if (sourceKey !== defaultSource) continue;
|
|
2581
|
+
if (!bucket?.totals) continue;
|
|
2582
|
+
bucket.totals = initTotals();
|
|
2583
|
+
touchedBuckets.add(key);
|
|
2584
|
+
}
|
|
2585
|
+
}
|
|
2586
|
+
|
|
2564
2587
|
for (let i = 0; i < records.length; i++) {
|
|
2565
2588
|
const record = records[i];
|
|
2566
2589
|
const recordDate = record.date;
|
|
2567
2590
|
if (!recordDate) continue;
|
|
2568
2591
|
|
|
2569
|
-
// Skip records we already processed (CSV is ordered newest-first)
|
|
2570
|
-
if (lastTs && recordDate <= lastTs) continue;
|
|
2571
|
-
|
|
2572
2592
|
const { normalizeCursorUsage } = require("./cursor-config");
|
|
2573
2593
|
const delta = normalizeCursorUsage(record);
|
|
2574
2594
|
if (isAllZeroUsage(delta)) continue;
|
|
@@ -4353,6 +4373,260 @@ async function parseCodebuddyIncremental({
|
|
|
4353
4373
|
return { recordsProcessed, eventsAggregated, bucketsQueued };
|
|
4354
4374
|
}
|
|
4355
4375
|
|
|
4376
|
+
// ─────────────────────────────────────────────────────────────────────────────
|
|
4377
|
+
// oh-my-pi (omp) — passive JSONL reader (~/.omp/agent/sessions/**/*.jsonl)
|
|
4378
|
+
//
|
|
4379
|
+
// oh-my-pi writes one append-only JSONL per session:
|
|
4380
|
+
// ~/.omp/agent/sessions/--<cwd-encoded>--/<timestamp>_<sessionId>.jsonl
|
|
4381
|
+
//
|
|
4382
|
+
// Per-line record types: the first line is type:"session" (header).
|
|
4383
|
+
// Only type:"message" lines with message.role=="assistant" carry token usage.
|
|
4384
|
+
// The shape (verbatim from oh-my-pi docs/session.md):
|
|
4385
|
+
//
|
|
4386
|
+
// {
|
|
4387
|
+
// "type": "message",
|
|
4388
|
+
// "id": "a1b2c3d4", ← 8-char dedup key
|
|
4389
|
+
// "parentId": "...",
|
|
4390
|
+
// "timestamp": "2026-02-16T10:21:00.000Z",
|
|
4391
|
+
// "message": {
|
|
4392
|
+
// "role": "assistant",
|
|
4393
|
+
// "provider": "anthropic",
|
|
4394
|
+
// "model": "claude-sonnet-4-5",
|
|
4395
|
+
// "usage": {
|
|
4396
|
+
// "input": 100, "output": 20, "cacheRead": 0, "cacheWrite": 0,
|
|
4397
|
+
// "totalTokens": 120, "reasoningTokens": 0
|
|
4398
|
+
// },
|
|
4399
|
+
// "timestamp": 1760000000000 ← ms epoch, preferred for bucketing
|
|
4400
|
+
// }
|
|
4401
|
+
// }
|
|
4402
|
+
//
|
|
4403
|
+
// oh-my-pi is a router — dispatches to upstream providers (Anthropic, OpenAI,
|
|
4404
|
+
// etc.) and records the upstream model name per message. There is no global
|
|
4405
|
+
// default model setting; model is always per-message (fallback: "omp-unknown").
|
|
4406
|
+
// ─────────────────────────────────────────────────────────────────────────────
|
|
4407
|
+
|
|
4408
|
+
// Resolve oh-my-pi's home directory (~/.omp by default).
// Precedence: TokenTracker's own OMP_HOME override (absolute path), then
// oh-my-pi's PI_CONFIG_DIR (joined under $HOME), then ~/.omp.
function resolveOmpHome(env = process.env) {
  const home = env.HOME || require("node:os").homedir();
  if (env.OMP_HOME) {
    return env.OMP_HOME;
  }
  const subdir = env.PI_CONFIG_DIR || ".omp";
  return path.join(home, subdir);
}
|
|
4415
|
+
|
|
4416
|
+
// Session data lives under <omp-home>/agent unless oh-my-pi's
// PI_CODING_AGENT_DIR points somewhere else explicitly.
function resolveOmpAgentDir(env = process.env) {
  const explicit = env.PI_CODING_AGENT_DIR;
  return explicit ? explicit : path.join(resolveOmpHome(env), "agent");
}
|
|
4420
|
+
|
|
4421
|
+
// Enumerate every session log under <agent-dir>/sessions/<cwd-dir>/*.jsonl,
// sorted lexicographically. Unreadable directories are skipped; a failure
// mid-scan returns whatever was collected so far.
function resolveOmpSessionFiles(env = process.env) {
  const sessionsDir = path.join(resolveOmpAgentDir(env), "sessions");
  if (!fssync.existsSync(sessionsDir)) {
    return [];
  }
  const found = [];
  try {
    const cwdDirs = fssync.readdirSync(sessionsDir);
    for (const cwdDir of cwdDirs) {
      const cwdPath = path.join(sessionsDir, cwdDir);
      let stat;
      try {
        stat = fssync.statSync(cwdPath);
      } catch {
        continue;
      }
      if (!stat.isDirectory()) {
        continue;
      }
      let entries;
      try {
        entries = fssync.readdirSync(cwdPath);
      } catch {
        continue;
      }
      for (const entry of entries) {
        if (entry.endsWith(".jsonl")) {
          found.push(path.join(cwdPath, entry));
        }
      }
    }
  } catch {
    // Partial results are acceptable — callers treat missing files as no data.
  }
  found.sort((a, b) => a.localeCompare(b));
  return found;
}
|
|
4444
|
+
|
|
4445
|
+
// oh-my-pi records the upstream model per message and exposes no global
// default-model setting, so the fallback is a fixed sentinel value.
function resolveOmpDefaultModel() {
  const FALLBACK_MODEL = "omp-unknown";
  return FALLBACK_MODEL;
}
|
|
4449
|
+
|
|
4450
|
+
/**
 * Incrementally ingest oh-my-pi session JSONL files into half-hour usage
 * buckets and enqueue the touched buckets for sync.
 *
 * Each session file is read from its last persisted byte offset (re-read from
 * the start when the file shrank or its inode changed), assistant messages
 * are deduplicated by their entry id, and token usage is aggregated per
 * (source="omp", model, half-hour bucket).
 *
 * @param {object} [opts]
 * @param {string[]} [opts.sessionFiles] - explicit file list; defaults to
 *   resolveOmpSessionFiles(env).
 * @param {object} opts.cursors - mutable cursor state; `cursors.omp` and
 *   `cursors.hourly` are updated in place.
 * @param {string} opts.queuePath - sync queue path; its parent dir is created.
 * @param {function} [opts.onProgress] - optional progress callback, invoked
 *   after each aggregated event.
 * @param {object} [opts.env] - environment override used for path resolution.
 * @param {string} [opts.defaultModel] - fallback model name; defaults to
 *   resolveOmpDefaultModel().
 * @returns {Promise<{recordsProcessed: number, eventsAggregated: number, bucketsQueued: number}>}
 */
async function parseOmpIncremental({
  sessionFiles,
  cursors,
  queuePath,
  onProgress,
  env,
  defaultModel,
} = {}) {
  await ensureDir(path.dirname(queuePath));
  // Persisted omp cursor state: dedup ids plus per-file byte offsets.
  const ompState = cursors.omp && typeof cursors.omp === "object" ? cursors.omp : {};
  const seenIds = new Set(Array.isArray(ompState.seenIds) ? ompState.seenIds : []);
  const fileOffsets =
    ompState.fileOffsets && typeof ompState.fileOffsets === "object"
      ? { ...ompState.fileOffsets }
      : {};

  const files = Array.isArray(sessionFiles)
    ? sessionFiles
    : resolveOmpSessionFiles(env || process.env);
  const fallbackModel = defaultModel || resolveOmpDefaultModel();

  // No session files: persist (possibly refreshed) cursor state and bail out.
  if (files.length === 0) {
    cursors.omp = {
      ...ompState,
      seenIds: Array.from(seenIds),
      fileOffsets,
      updatedAt: new Date().toISOString(),
    };
    return { recordsProcessed: 0, eventsAggregated: 0, bucketsQueued: 0 };
  }

  const hourlyState = normalizeHourlyState(cursors?.hourly);
  const touchedBuckets = new Set();
  const cb = typeof onProgress === "function" ? onProgress : null;
  let recordsProcessed = 0;
  let eventsAggregated = 0;

  for (let fileIdx = 0; fileIdx < files.length; fileIdx++) {
    const filePath = files[fileIdx];
    let stat;
    try { stat = fssync.statSync(filePath); } catch { continue; }

    const prevEntry = fileOffsets[filePath] || {};
    const prevSize = Number(prevEntry.size) || 0;
    const prevIno = prevEntry.ino;
    // Re-read from start if file shrunk (truncate/rewrite) or inode changed.
    const inodeChanged = typeof prevIno === "number" && prevIno !== stat.ino;
    const startOffset = stat.size < prevSize || inodeChanged ? 0 : prevSize;
    // Nothing new appended since the last scan.
    if (stat.size <= startOffset) continue;

    let stream;
    try {
      stream = fssync.createReadStream(filePath, {
        encoding: "utf8",
        start: startOffset,
      });
    } catch { continue; }
    const rl = readline.createInterface({ input: stream, crlfDelay: Infinity });

    for await (const line of rl) {
      if (!line || !line.trim()) continue;
      let entry;
      // Malformed JSON lines (e.g. a partially-written tail) are skipped.
      try { entry = JSON.parse(line); } catch { continue; }

      // First line of each file is type:"session" (header) — skip all
      // non-message records.
      if (!entry || entry.type !== "message") continue;

      // Only assistant messages carry token usage.
      const msg = entry.message;
      if (!msg || msg.role !== "assistant") continue;

      const usage = msg.usage;
      if (!usage || typeof usage !== "object") continue;

      // Dedup by top-level entry id (8-char string assigned by oh-my-pi).
      const entryId = typeof entry.id === "string" && entry.id ? entry.id : null;
      if (!entryId) continue;
      if (seenIds.has(entryId)) continue;

      recordsProcessed++;

      const input = toNonNegativeInt(usage.input);
      const output = toNonNegativeInt(usage.output);
      const cacheRead = toNonNegativeInt(usage.cacheRead);
      const cacheWrite = toNonNegativeInt(usage.cacheWrite);
      const reasoningTokens = toNonNegativeInt(usage.reasoningTokens);

      // All-zero usage still counts as seen so it is never reprocessed.
      if (input === 0 && output === 0 && cacheRead === 0 && cacheWrite === 0) {
        seenIds.add(entryId);
        continue;
      }

      // Prefer message-level timestamp (ms epoch); fall back to entry-level
      // ISO string. Entries with no resolvable timestamp are skipped — they
      // cannot be placed in a bucket.
      let tsMs = null;
      if (Number.isFinite(Number(msg.timestamp)) && Number(msg.timestamp) > 0) {
        tsMs = Number(msg.timestamp);
      } else if (typeof entry.timestamp === "string" && entry.timestamp) {
        const parsed = Date.parse(entry.timestamp);
        if (Number.isFinite(parsed) && parsed > 0) tsMs = parsed;
      }
      if (tsMs == null) {
        seenIds.add(entryId);
        continue;
      }

      const tsIso = new Date(tsMs).toISOString();
      const bucketStart = toUtcHalfHourStart(tsIso);
      if (!bucketStart) continue;

      // Use provided totalTokens when available; otherwise sum all components.
      const totalTokens =
        Number.isFinite(Number(usage.totalTokens)) && Number(usage.totalTokens) > 0
          ? toNonNegativeInt(usage.totalTokens)
          : input + output + cacheRead + cacheWrite + reasoningTokens;

      const model = normalizeModelInput(msg.model) || fallbackModel;

      const delta = {
        input_tokens: input,
        cached_input_tokens: cacheRead,
        cache_creation_input_tokens: cacheWrite,
        output_tokens: output,
        reasoning_output_tokens: reasoningTokens,
        total_tokens: totalTokens,
        conversation_count: 1,
      };

      const bucket = getHourlyBucket(hourlyState, "omp", model, bucketStart);
      addTotals(bucket.totals, delta);
      touchedBuckets.add(bucketKey("omp", model, bucketStart));
      seenIds.add(entryId);
      eventsAggregated++;

      if (cb) {
        cb({
          index: fileIdx + 1,
          total: files.length,
          recordsProcessed,
          eventsAggregated,
          bucketsQueued: touchedBuckets.size,
        });
      }
    }

    // Record the post-read size/inode so the next scan resumes from here.
    let postStat = stat;
    try { postStat = fssync.statSync(filePath); } catch {}
    fileOffsets[filePath] = {
      size: postStat.size,
      mtimeMs: postStat.mtimeMs,
      ino: postStat.ino,
    };
  }

  // Cap dedup set to last 10k IDs to bound cursor state size — same convention
  // as Kimi/CodeBuddy/Copilot so cursors.json doesn't grow unbounded.
  const seenArr = Array.from(seenIds);
  const cappedSeen =
    seenArr.length > 10_000 ? seenArr.slice(seenArr.length - 10_000) : seenArr;

  const bucketsQueued = await enqueueTouchedBuckets({
    queuePath,
    hourlyState,
    touchedBuckets,
  });
  const updatedAt = new Date().toISOString();
  hourlyState.updatedAt = updatedAt;
  cursors.hourly = hourlyState;
  cursors.omp = {
    ...ompState,
    seenIds: cappedSeen,
    fileOffsets,
    updatedAt,
  };

  return { recordsProcessed, eventsAggregated, bucketsQueued };
}
|
|
4629
|
+
|
|
4356
4630
|
// ─────────────────────────────────────────────────────────────────────────────
|
|
4357
4631
|
// GitHub Copilot CLI — OpenTelemetry JSONL exporter
|
|
4358
4632
|
// User must opt in by setting:
|
|
@@ -4573,6 +4847,11 @@ module.exports = {
|
|
|
4573
4847
|
resolveKiroCliSessionFiles,
|
|
4574
4848
|
resolveKiroCliDbPath,
|
|
4575
4849
|
parseKiroCliIncremental,
|
|
4850
|
+
resolveOmpHome,
|
|
4851
|
+
resolveOmpAgentDir,
|
|
4852
|
+
resolveOmpSessionFiles,
|
|
4853
|
+
resolveOmpDefaultModel,
|
|
4854
|
+
parseOmpIncremental,
|
|
4576
4855
|
// Exposed for regression tests covering cache-token accounting.
|
|
4577
4856
|
normalizeGeminiTokens,
|
|
4578
4857
|
normalizeOpencodeTokens,
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
// Sources whose usage rows come from an account-level API export rather than
// this machine's local logs; the default "personal" scope filters them out.
const ACCOUNT_LEVEL_SOURCES = new Set(["cursor"]);

// Canonical source key: trimmed and lower-cased ("" when empty/nullish).
function normalizeSource(value) {
  const raw = String(value || "");
  return raw.trim().toLowerCase();
}

// "account" for account-level sources, "local" for everything else.
function getSourceScope(source) {
  const key = normalizeSource(source);
  if (ACCOUNT_LEVEL_SOURCES.has(key)) return "account";
  return "local";
}

function isAccountLevelSource(source) {
  return getSourceScope(source) === "account";
}

// Accepts "all"/"raw" (case-insensitive) as the unfiltered scope; any other
// value — including empty/nullish — normalizes to "personal".
function normalizeUsageScope(value) {
  const raw = String(value || "").trim().toLowerCase();
  if (raw === "all" || raw === "raw") return "all";
  return "personal";
}

// Rows visible under the given scope: "personal" drops rows from
// account-level sources, "all" passes everything through.
function filterRowsByUsageScope(rows, scope = "personal") {
  const list = Array.isArray(rows) ? rows : [];
  if (normalizeUsageScope(scope) === "all") return list;
  return list.filter((row) => !isAccountLevelSource(row?.source));
}

// Describe which sources the given scope excluded — one entry per distinct
// source, sorted by name. Always empty under scope "all".
function listExcludedSources(rows, scope = "personal") {
  if (normalizeUsageScope(scope) === "all") return [];
  const excluded = new Map();
  for (const row of Array.isArray(rows) ? rows : []) {
    const source = normalizeSource(row?.source);
    if (!source || excluded.has(source)) continue;
    if (!isAccountLevelSource(source)) continue;
    excluded.set(source, {
      source,
      source_scope: getSourceScope(source),
      reason: "account_level_source",
    });
  }
  return [...excluded.values()].sort((a, b) => a.source.localeCompare(b.source));
}
|
|
39
|
+
|
|
40
|
+
// Public API of this module. normalizeSource stays private; callers interact
// through the scope helpers and the row filters.
module.exports = {
  getSourceScope,
  isAccountLevelSource,
  normalizeUsageScope,
  filterRowsByUsageScope,
  listExcludedSources,
};
|