tokentrack 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli.js ADDED
@@ -0,0 +1,203 @@
1
+ #!/usr/bin/env node
2
+ import {
3
+ aggregateByProject,
4
+ aggregateByProvider,
5
+ applyFilters,
6
+ createServer,
7
+ findLiteLLMPricing,
8
+ getPricingTable,
9
+ getProviderInfo,
10
+ getRecords,
11
+ getSummary,
12
+ loadLiteLLMPricing,
13
+ updatePricingCache
14
+ } from "./chunk-KQYHDGZP.js";
15
+
16
+ // src/cli.ts
17
+ import { Command } from "commander";
18
+ import chalk from "chalk";
19
+ import ora from "ora";
20
+ import Table from "cli-table3";
21
+ import open from "open";
22
// Escape one CSV field per RFC 4180: quote the value when it contains a
// comma, double-quote, or newline, doubling any embedded quotes. Without
// this, a project or model name containing "," corrupts the exported rows.
const csvEscape = (value) => {
  const s = String(value);
  return /[",\n\r]/.test(s) ? `"${s.replace(/"/g, '""')}"` : s;
};

// CLI entry point: parses flags, then runs exactly one of
//   --pricing-update  refresh the LiteLLM pricing cache and exit
//   --pricing         print the pricing table and exit
//   (default)         parse usage records, print a summary, then export
//                     (--export), stop (--summary), or open the dashboard.
var program = new Command();
program
  .name("tokentrack")
  .description("Visual analytics dashboard for Claude Code, Codex & Antigravity token usage")
  .version("2.0.0")
  .option("-p, --port <number>", "Server port", "3847")
  .option("--summary", "Show CLI summary only (no browser)")
  .option("--project <name>", "Filter by project")
  .option("--from <date>", "Start date (YYYY-MM-DD)")
  .option("--to <date>", "End date (YYYY-MM-DD)")
  .option("--provider <name>", "Filter by provider (claude-code, codex, antigravity, all)", "all")
  .option("--export <format>", "Export data (csv or json)")
  .option("--output <file>", "Export output file path")
  .option("--pricing", "Display current pricing table (with LiteLLM live data when available)")
  .option("--pricing-update", "Force refresh of LiteLLM pricing cache")
  .option("--verbose", "Show verbose parsing output")
  .option("--dev", "Development mode")
  .action(async (opts) => {
    if (opts.pricingUpdate) {
      const spinner2 = ora("Fetching latest pricing from LiteLLM...").start();
      try {
        const count = await updatePricingCache();
        spinner2.succeed(`Updated LiteLLM pricing cache (${count} models)`);
      } catch (e) {
        spinner2.fail(`Failed to update pricing: ${e.message}`);
        // Signal failure to shells/scripts instead of silently exiting 0.
        process.exitCode = 1;
      }
      return;
    }
    if (opts.pricing) {
      const spinner2 = ora("Loading LiteLLM pricing...").start();
      let litellm;
      let litellmSource = "hardcoded fallback";
      try {
        litellm = await loadLiteLLMPricing();
        if (litellm.size > 0) {
          litellmSource = `LiteLLM (${litellm.size} models)`;
        }
      } catch {
        // Best-effort: fall back to the hardcoded table when live data fails.
        litellm = /* @__PURE__ */ new Map();
      }
      spinner2.succeed(`Pricing source: ${litellmSource}`);
      const table = getPricingTable();
      const seen = /* @__PURE__ */ new Set();
      const ptable = new Table({
        head: ["Model", "Provider", "Input/M", "Output/M", "Cache W/M", "Cache R/M"].map((h) => chalk.bold(h)),
        style: { head: [], border: [] }
      });
      for (const [model, p] of Object.entries(table)) {
        // Dedupe on a dash-normalized id so "x.y" and "x-y" aliases of the
        // same model don't produce duplicate rows.
        const normalized = model.replace(/\./g, "-");
        if (seen.has(normalized)) continue;
        seen.add(normalized);
        let inputDisplay;
        let outputDisplay;
        let cacheWDisplay;
        let cacheRDisplay;
        const live = findLiteLLMPricing(model, litellm);
        if (live) {
          // Live LiteLLM prices are per-token; display as $ per million.
          inputDisplay = `$${(live.input_cost_per_token * 1e6).toFixed(2)}`;
          outputDisplay = `$${(live.output_cost_per_token * 1e6).toFixed(2)}`;
          const isOpenAI = p.provider === "openai";
          if (live.cache_creation_input_token_cost) {
            cacheWDisplay = `$${(live.cache_creation_input_token_cost * 1e6).toFixed(3)}`;
          } else if (isOpenAI) {
            // OpenAI models have no separate cache-write price.
            cacheWDisplay = "-";
          } else {
            cacheWDisplay = p.cacheWrite ? `$${p.cacheWrite}` : "-";
          }
          if (live.cache_read_input_token_cost != null) {
            cacheRDisplay = `$${(live.cache_read_input_token_cost * 1e6).toFixed(3)}`;
          } else {
            cacheRDisplay = p.cacheRead ? `$${p.cacheRead}` : p.cachedInput ? `$${p.cachedInput}` : "-";
          }
        } else {
          // No live data: show the hardcoded per-million prices directly.
          inputDisplay = `$${p.input}`;
          outputDisplay = `$${p.output}`;
          const isOpenAI = p.provider === "openai";
          cacheWDisplay = isOpenAI ? p.cachedInput ? `$${p.cachedInput}` : "-" : p.cacheWrite ? `$${p.cacheWrite}` : "-";
          cacheRDisplay = p.cacheRead ? `$${p.cacheRead}` : "-";
        }
        ptable.push([
          model,
          p.provider,
          inputDisplay,
          outputDisplay,
          cacheWDisplay,
          cacheRDisplay
        ]);
      }
      console.log(chalk.bold("\n TokenTrack \u2014 Pricing Table\n"));
      console.log(ptable.toString());
      console.log(chalk.gray(` Source: ${litellmSource}. Run --pricing-update to refresh.\n`));
      return;
    }
    const spinner = ora("Scanning data files across all providers...").start();
    try {
      const providerInfo = await getProviderInfo();
      const available = providerInfo.filter((p) => p.isAvailable);
      spinner.text = `Found ${available.length} provider(s). Parsing...`;
      const records = await getRecords();
      spinner.succeed(`Parsed ${records.length} usage records from ${available.map((p) => p.name).join(", ") || "no providers"}`);
      const filters = {};
      if (opts.from) filters.from = new Date(opts.from);
      // Make --to inclusive by extending it to the end of that day.
      if (opts.to) filters.to = /* @__PURE__ */ new Date(opts.to + "T23:59:59");
      if (opts.project) filters.project = opts.project;
      if (opts.provider && opts.provider !== "all") filters.provider = opts.provider;
      const filtered = applyFilters(records, filters);
      const summary = getSummary(filtered);
      console.log("");
      console.log(chalk.bold(" TokenTrack v2.0 \u2014 Multi-Provider Usage Summary"));
      console.log(chalk.gray(` ${summary.dateRange.from} \u2192 ${summary.dateRange.to}`));
      console.log("");
      const providerSummaries = aggregateByProvider(filtered);
      if (providerSummaries.length > 0) {
        for (const ps of providerSummaries) {
          // Per-provider accent colors for the terminal legend.
          const color = ps.provider === "claude-code" ? chalk.hex("#d97757") : ps.provider === "codex" ? chalk.hex("#10a37f") : chalk.hex("#4285f4");
          console.log(` ${color("\u25A0")} ${chalk.bold(ps.name.padEnd(16))} ${String(ps.records).padStart(6)} records \u2502 ${String(ps.projects).padStart(2)} projects \u2502 ${chalk.bold(`$${ps.totalCost.toFixed(2)}`)}`);
        }
        console.log("");
      }
      // Compact human-readable token counts: 1.2M / 3.4K / 999.
      const fmt = (n) => n >= 1e6 ? `${(n / 1e6).toFixed(1)}M` : n >= 1e3 ? `${(n / 1e3).toFixed(1)}K` : String(n);
      const table = new Table({
        head: ["Metric", "Value"].map((h) => chalk.bold(h)),
        style: { head: [], border: [] }
      });
      table.push(
        ["Total Tokens", chalk.white.bold(fmt(summary.totalTokens))],
        ["Input Tokens", chalk.hex("#6a9bcc")(fmt(summary.totalInputTokens))],
        ["Output Tokens", chalk.hex("#d97757")(fmt(summary.totalOutputTokens))],
        ["Cache Tokens", chalk.hex("#788c5d")(fmt(summary.totalCacheTokens))],
        ["Est. API Cost", chalk.hex("#d97757").bold(`$${summary.totalCost.toFixed(2)}`)],
        ["Sessions", String(summary.totalSessions)],
        ["Projects", String(summary.totalProjects)]
      );
      console.log(table.toString());
      console.log("");
      const projects = aggregateByProject(filtered);
      if (projects.length > 0) {
        const ptable = new Table({
          head: ["Project", "Provider", "Sessions", "Input", "Output", "Cache", "Cost"].map((h) => chalk.bold(h)),
          style: { head: [], border: [] }
        });
        // Cap at the top 15 projects to keep terminal output readable.
        for (const p of projects.slice(0, 15)) {
          const providerBadges = p.providers.map((pr) => {
            if (pr === "claude-code") return chalk.hex("#d97757")("CC");
            if (pr === "codex") return chalk.hex("#10a37f")("CX");
            return chalk.hex("#4285f4")("AG");
          }).join(" ");
          ptable.push([
            p.project,
            providerBadges,
            String(p.sessions),
            fmt(p.inputTokens),
            fmt(p.outputTokens),
            fmt(p.cacheTokens),
            chalk.hex("#d97757")(`$${p.totalCost.toFixed(2)}`)
          ]);
        }
        console.log(chalk.bold(" Projects"));
        console.log(ptable.toString());
        console.log("");
      }
      if (opts.export) {
        const { writeFileSync } = await import("fs");
        if (opts.export === "csv") {
          const header = "timestamp,session_id,project,provider,model,input_tokens,output_tokens,cache_write_tokens,cache_read_tokens,cost_usd,is_estimated";
          // Escape every field so commas/quotes in names can't break columns.
          const rows = filtered.map(
            (r) => [
              r.timestamp.toISOString(),
              r.sessionId,
              r.project,
              r.provider,
              r.model,
              r.inputTokens,
              r.outputTokens,
              r.cacheWriteTokens,
              r.cacheReadTokens,
              r.costUSD.toFixed(6),
              r.isEstimated
            ].map(csvEscape).join(",")
          );
          // \uFEFF BOM makes Excel detect UTF-8.
          const csv = "\uFEFF" + header + "\n" + rows.join("\n");
          const outPath = opts.output || "tokentrack-export.csv";
          writeFileSync(outPath, csv, "utf-8");
          console.log(chalk.green(` \u2713 Exported to ${outPath}`));
        } else {
          const outPath = opts.output || "tokentrack-export.json";
          writeFileSync(outPath, JSON.stringify(filtered, null, 2), "utf-8");
          console.log(chalk.green(` \u2713 Exported to ${outPath}`));
        }
        return;
      }
      if (opts.summary) return;
      // Always pass the radix: opts.port is a user-supplied string.
      const requestedPort = Number.parseInt(opts.port, 10);
      const { port: actualPort } = await createServer(requestedPort);
      const url = `http://localhost:${actualPort}`;
      console.log(chalk.bold(` \u{1F537} Dashboard \u2192 ${chalk.underline(url)}`));
      console.log(chalk.gray(" Press Ctrl+C to stop\n"));
      if (!opts.dev) {
        await open(url);
      }
    } catch (err) {
      spinner.fail("Failed to parse usage data");
      console.error(err);
      process.exit(1);
    }
  });
program.parse();
package/dist/server.js ADDED
@@ -0,0 +1,6 @@
1
// dist/server.js — re-expose the bundled HTTP server factory for
// programmatic consumers of the package.
export { createServer } from "./chunk-KQYHDGZP.js";