claude-usage-dashboard 1.5.9 → 1.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "claude-usage-dashboard",
3
- "version": "1.5.9",
3
+ "version": "1.6.0",
4
4
  "description": "Claude Code usage dashboard — token costs, quota cycle tracking, cache efficiency, multi-machine sync across all your devices",
5
5
  "main": "server/index.js",
6
6
  "bin": {
@@ -37,8 +37,13 @@ export function renderModelDistribution(container, data) {
37
37
  .style('flex-shrink', '0')
38
38
  .append('g').attr('transform', `translate(${size / 2},${size / 2})`);
39
39
 
40
- const total = d3.sum(data.models, d => d.total_tokens);
41
- const pie = d3.pie().value(d => d.total_tokens).sort(null);
40
+ // Use non-cache tokens (input + output) for both slice size and percentages
41
+ // so the share of each model matches Anthropic's official usage report.
42
+ // Cache reads dominate total_tokens and drown out small-output models like
43
+ // new Opus 4.7, making the distribution misleading.
44
+ const nonCache = m => (m.input_tokens || 0) + (m.output_tokens || 0);
45
+ const total = d3.sum(data.models, nonCache);
46
+ const pie = d3.pie().value(nonCache).sort(null);
42
47
  const arc = d3.arc().innerRadius(innerRadius).outerRadius(radius);
43
48
 
44
49
  svg.selectAll('path').data(pie(data.models)).enter().append('path')
@@ -47,7 +52,7 @@ export function renderModelDistribution(container, data) {
47
52
 
48
53
  const legend = wrapper.append('div');
49
54
  data.models.forEach(m => {
50
- const pct = ((m.total_tokens / total) * 100).toFixed(1);
55
+ const pct = total > 0 ? ((nonCache(m) / total) * 100).toFixed(1) : '0.0';
51
56
  const color = MODEL_COLORS[m.id] || '#64748b';
52
57
  const shortName = MODEL_DISPLAY[m.id] || m.id.replace('claude-', '').replace(/-(\d+)-(\d+)/, ' $1.$2');
53
58
  legend.append('div').style('font-size', '11px').style('color', '#94a3b8').style('margin-bottom', '4px')
package/server/parser.js CHANGED
@@ -61,6 +61,7 @@ export function parseLogFile(filePath) {
61
61
  sessionId: entry.sessionId,
62
62
  timestamp: entry.timestamp,
63
63
  model,
64
+ messageId: entry.message?.id || null,
64
65
  input_tokens: usage.input_tokens || 0,
65
66
  output_tokens: usage.output_tokens || 0,
66
67
  cache_creation_tokens: usage.cache_creation_input_tokens || 0,
@@ -71,6 +72,33 @@ export function parseLogFile(filePath) {
71
72
  return records;
72
73
  }
73
74
 
75
+ /**
76
+ * Deduplicate assistant records by `messageId`.
77
+ *
78
+ * Claude Code JSONL contains one line per streaming snapshot, not one per
79
+ * message. Multiple lines can share the same `message.id`, with the early
80
+ * ones reporting partial cumulative `output_tokens` and the final one the
81
+ * full total. Multi-machine sync further multiplies the same message across
82
+ * machines. Anthropic bills the server-side message once, so we keep the
83
+ * record whose `output_tokens` is largest (the final cumulative snapshot)
84
+ * for each `messageId`. Records without a `messageId` pass through — they
85
+ * predate the id field and are assumed to already be one-per-message.
86
+ */
87
+ export function dedupByMessageId(records) {
88
+ const best = new Map();
89
+ const passthrough = [];
90
+
91
+ for (const r of records) {
92
+ if (!r.messageId) { passthrough.push(r); continue; }
93
+ const prev = best.get(r.messageId);
94
+ if (!prev || r.output_tokens > prev.output_tokens) {
95
+ best.set(r.messageId, r);
96
+ }
97
+ }
98
+
99
+ return [...best.values(), ...passthrough];
100
+ }
101
+
74
102
  export function parseLogDirectory(baseDir) {
75
103
  const allRecords = [];
76
104
 
@@ -127,7 +155,7 @@ export function parseLogDirectory(baseDir) {
127
155
  }
128
156
  }
129
157
 
130
- return allRecords;
158
+ return dedupByMessageId(allRecords);
131
159
  }
132
160
 
133
161
  export function parseMultiMachineDirectory(syncDir) {
@@ -147,5 +175,5 @@ export function parseMultiMachineDirectory(syncDir) {
147
175
  allRecords.push(...records);
148
176
  }
149
177
 
150
- return allRecords;
178
+ return dedupByMessageId(allRecords);
151
179
  }
package/server/pricing.js CHANGED
@@ -5,6 +5,12 @@ export const MODEL_PRICING = {
5
5
  cache_read_price_per_mtok: 0.50,
6
6
  cache_creation_price_per_mtok: 6.25,
7
7
  },
8
+ 'claude-opus-4-7': {
9
+ input_price_per_mtok: 5,
10
+ output_price_per_mtok: 25,
11
+ cache_read_price_per_mtok: 0.50,
12
+ cache_creation_price_per_mtok: 6.25,
13
+ },
8
14
  'claude-sonnet-4-6': {
9
15
  input_price_per_mtok: 3,
10
16
  output_price_per_mtok: 15,
@@ -145,34 +145,42 @@ export function createApiRouter(logBaseDir, options = {}) {
145
145
  options.syncDir || null,
146
146
  options.snapshotDir
147
147
  );
148
- if (data.currentCycle) {
149
- // Recompute current cycle from parsed records — in sync mode this
150
- // includes all machines' data, matching /api/cost and /api/usage.
151
- // The snapshot's utilization % (from the quota API) is preserved.
152
- // Convert UTC cycle dates to local date-only strings (YYYY-MM-DD) so
153
- // filterByDateRange uses local midnight boundaries, matching the date
154
- // picker's range that drives the summary cards and /api/cost.
155
- const toLocalDate = (iso) => {
156
- const d = new Date(iso);
157
- return `${d.getFullYear()}-${String(d.getMonth()+1).padStart(2,'0')}-${String(d.getDate()).padStart(2,'0')}`;
158
- };
159
- const records = refreshRecords();
148
+
149
+ // Recompute token/cost fields from the live parser for every cycle
150
+ // (current + history). The snapshot's utilization % (from the Anthropic
151
+ // quota API) is preserved — that value is Anthropic's truth and cannot
152
+ // be reconstructed locally — but all other fields come from the current
153
+ // parser so historical cycles reflect dedup fixes applied to older logs.
154
+ //
155
+ // Convert UTC cycle dates to local date-only strings (YYYY-MM-DD) so
156
+ // filterByDateRange uses local midnight boundaries, matching the date
157
+ // picker's range that drives the summary cards and /api/cost.
158
+ const toLocalDate = (iso) => {
159
+ const d = new Date(iso);
160
+ return `${d.getFullYear()}-${String(d.getMonth()+1).padStart(2,'0')}-${String(d.getDate()).padStart(2,'0')}`;
161
+ };
162
+ const records = refreshRecords();
163
+ const recompute = (cycle) => {
160
164
  const cycleRecords = filterByDateRange(
161
- records, toLocalDate(data.currentCycle.start), toLocalDate(data.currentCycle.resets_at)
165
+ records, toLocalDate(cycle.start), toLocalDate(cycle.resets_at)
162
166
  );
163
167
  const quotaShim = {
164
- seven_day: { utilization: data.currentCycle.overall.utilization },
165
- seven_day_opus: { utilization: data.currentCycle.models?.opus?.utilization || 0 },
166
- seven_day_sonnet: { utilization: data.currentCycle.models?.sonnet?.utilization || 0 },
168
+ seven_day: { utilization: cycle.overall?.utilization || 0 },
169
+ seven_day_opus: { utilization: cycle.models?.opus?.utilization || 0 },
170
+ seven_day_sonnet: { utilization: cycle.models?.sonnet?.utilization || 0 },
167
171
  };
168
- const fresh = computeCycleData(cycleRecords, quotaShim);
169
- Object.assign(data.currentCycle, fresh);
172
+ Object.assign(cycle, computeCycleData(cycleRecords, quotaShim));
173
+ };
170
174
 
175
+ if (data.currentCycle) {
176
+ recompute(data.currentCycle);
171
177
  const start = new Date(data.currentCycle.start);
172
178
  const now = new Date();
173
179
  data.currentCycle.daysElapsed = Math.round(((now - start) / (1000 * 60 * 60 * 24)) * 10) / 10;
174
180
  data.currentCycle.daysTotal = 7;
175
181
  }
182
+ for (const cycle of data.history || []) recompute(cycle);
183
+
176
184
  res.json(data);
177
185
  } catch (err) {
178
186
  res.status(500).json({ error: err.message });