tokburn 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +397 -0
- package/card.js +134 -0
- package/cli.js +307 -0
- package/config.js +45 -0
- package/costs.js +69 -0
- package/display.js +342 -0
- package/init.js +260 -0
- package/package.json +60 -0
- package/proxy.js +304 -0
- package/statusline.js +205 -0
- package/statusline.sh +48 -0
- package/store.js +121 -0
- package/tracker.js +35 -0
package/proxy.js
ADDED
|
@@ -0,0 +1,304 @@
|
|
|
1
|
+
const http = require('http');
|
|
2
|
+
const https = require('https');
|
|
3
|
+
const { URL } = require('url');
|
|
4
|
+
const { logUsage } = require('./tracker');
|
|
5
|
+
const { getConfig } = require('./config');
|
|
6
|
+
const path = require('path');
|
|
7
|
+
const fs = require('fs');
|
|
8
|
+
const { spawn } = require('child_process');
|
|
9
|
+
|
|
10
|
+
/**
 * Scan a buffered SSE response body for Anthropic usage information.
 *
 * Walks every `data: ` line, parsing each JSON payload and picking up:
 *   - the model name (from `message_start`, or any payload with a top-level
 *     `model` field, which wins),
 *   - input tokens (from `message_start`'s usage block),
 *   - output tokens (from `message_delta`'s usage block).
 *
 * @param {Buffer} buffer - Full SSE stream as captured from upstream.
 * @returns {{model: ?string, input_tokens: number, output_tokens: number}}
 */
function extractUsageFromSSE(buffer) {
  const result = { model: null, input_tokens: 0, output_tokens: 0 };

  for (const rawLine of buffer.toString('utf8').split('\n')) {
    if (!rawLine.startsWith('data: ')) continue;

    const payload = rawLine.slice(6).trim();
    if (payload === '[DONE]') continue;

    let event;
    try {
      event = JSON.parse(payload);
    } catch {
      continue; // Skip unparseable lines
    }
    if (event === null) continue; // JSON `null` payload — nothing to read

    // message_start carries the model name and the initial usage block.
    if (event.type === 'message_start' && event.message) {
      result.model = event.message.model || result.model;
      if (event.message.usage) {
        result.input_tokens = event.message.usage.input_tokens || 0;
      }
    }

    // message_delta carries the final output usage.
    if (event.type === 'message_delta' && event.usage) {
      result.output_tokens = event.usage.output_tokens || 0;
    }

    // Any payload with a top-level model field overrides what we had.
    if (event.model) {
      result.model = event.model;
    }
  }

  return result;
}
|
|
50
|
+
|
|
51
|
+
/**
 * Pull usage information out of a non-streaming JSON response body.
 *
 * @param {Buffer} buffer - Full JSON response body.
 * @returns {?{model: ?string, input_tokens: number, output_tokens: number}}
 *   Parsed usage, or null when the buffer is not usable JSON.
 */
function extractUsageFromJSON(buffer) {
  try {
    const parsed = JSON.parse(buffer.toString('utf8'));
    const usage = parsed.usage || {};
    return {
      model: parsed.model || null,
      input_tokens: usage.input_tokens || 0,
      output_tokens: usage.output_tokens || 0,
    };
  } catch {
    // Not valid JSON (or a null body) — signal "no usage found".
    return null;
  }
}
|
|
68
|
+
|
|
69
|
+
/**
 * Best-effort extraction of a conversation/session identifier.
 *
 * Checks the dedicated request headers first, then falls back to the JSON
 * body's `metadata.conversation_id` / `metadata.session_id` fields.
 *
 * @param {Object} reqHeaders - Incoming request headers (lower-cased keys).
 * @param {?Buffer} reqBody - Raw request body, or null/empty when absent.
 * @returns {?string} The identifier, or null when none could be found.
 */
function extractConversationId(reqHeaders, reqBody) {
  try {
    const headerId = reqHeaders['x-conversation-id'] || reqHeaders['x-session-id'];
    if (headerId) return headerId;

    if (reqBody) {
      const meta = JSON.parse(reqBody.toString('utf8')).metadata;
      if (meta && meta.conversation_id) return meta.conversation_id;
      if (meta && meta.session_id) return meta.session_id;
    }
  } catch {
    // Unparseable body — treat the request as anonymous.
  }
  return null;
}
|
|
84
|
+
|
|
85
|
+
/**
 * Build (but do not start) a localhost HTTP proxy that forwards every
 * request to the configured Anthropic endpoint while tee-ing response
 * bodies to extract token usage for logging.
 *
 * Response chunks are forwarded to the client immediately; a copy is
 * buffered and parsed only after the response ends, on the next tick,
 * so usage extraction never delays the stream.
 * NOTE(review): the whole response body is held in memory per request —
 * acceptable for API-sized payloads, but confirm for very large responses.
 *
 * @param {Object} config - { target?: string, port?: number }.
 * @returns {{server: import('http').Server, port: number}} Unstarted server.
 */
function createProxy(config) {
  const targetUrl = new URL(config.target || 'https://api.anthropic.com');
  const port = config.port || 4088;

  const server = http.createServer((clientReq, clientRes) => {
    const startTime = Date.now();

    // Collect request body (needed both for forwarding and for pulling a
    // conversation ID out of the JSON metadata).
    const reqChunks = [];
    clientReq.on('data', (chunk) => reqChunks.push(chunk));
    clientReq.on('end', () => {
      const reqBody = Buffer.concat(reqChunks);
      const conversationId = extractConversationId(clientReq.headers, reqBody);

      // Build upstream request options
      const isHTTPS = targetUrl.protocol === 'https:';
      const mod = isHTTPS ? https : http;

      const upstreamHeaders = { ...clientReq.headers };
      // Update host header for upstream
      upstreamHeaders.host = targetUrl.host;
      // Remove hop-by-hop headers that shouldn't be forwarded
      delete upstreamHeaders['connection'];
      delete upstreamHeaders['keep-alive'];
      delete upstreamHeaders['transfer-encoding'];

      const options = {
        hostname: targetUrl.hostname,
        port: targetUrl.port || (isHTTPS ? 443 : 80),
        path: clientReq.url,
        method: clientReq.method,
        headers: upstreamHeaders,
      };

      const upstreamReq = mod.request(options, (upstreamRes) => {
        const contentType = upstreamRes.headers['content-type'] || '';
        const isStreaming = contentType.includes('text/event-stream');

        // Forward status and headers to client immediately
        const responseHeaders = { ...upstreamRes.headers };
        // Remove transfer-encoding to avoid conflicts (Node re-chunks on
        // its own when no content-length is set).
        delete responseHeaders['transfer-encoding'];
        clientRes.writeHead(upstreamRes.statusCode, responseHeaders);

        // Buffer for async usage extraction
        const resChunks = [];

        upstreamRes.on('data', (chunk) => {
          // Forward to client immediately
          clientRes.write(chunk);
          // Buffer copy for usage extraction
          resChunks.push(Buffer.from(chunk));
        });

        upstreamRes.on('end', () => {
          clientRes.end();
          const latency = Date.now() - startTime;

          // Async: extract usage and log (never block the response)
          setImmediate(() => {
            try {
              const fullBuffer = Buffer.concat(resChunks);
              let usage = null;

              // SSE and plain-JSON responses are parsed differently; any
              // other content type is ignored entirely.
              if (isStreaming) {
                usage = extractUsageFromSSE(fullBuffer);
              } else if (contentType.includes('application/json')) {
                usage = extractUsageFromJSON(fullBuffer);
              }

              // Only log when at least one token was counted, so error
              // responses and empty bodies don't pollute the usage log.
              if (usage && (usage.input_tokens > 0 || usage.output_tokens > 0)) {
                logUsage({
                  model: usage.model,
                  input_tokens: usage.input_tokens,
                  output_tokens: usage.output_tokens,
                  conversation_id: conversationId,
                  latency_ms: latency,
                });
              }
            } catch {
              // Swallow all errors - never break the proxy
            }
          });
        });

        upstreamRes.on('error', () => {
          try { clientRes.end(); } catch {}
        });
      });

      upstreamReq.on('error', (err) => {
        // Upstream unreachable → report a 502 to the client. writeHead can
        // throw if headers were already sent mid-stream, hence the guard.
        try {
          clientRes.writeHead(502, { 'Content-Type': 'application/json' });
          clientRes.end(JSON.stringify({ error: 'Proxy upstream error', message: err.message }));
        } catch {
          // Swallow
        }
      });

      // Send request body to upstream
      if (reqBody.length > 0) {
        upstreamReq.write(reqBody);
      }
      upstreamReq.end();
    });

    clientReq.on('error', () => {
      try { clientRes.end(); } catch {}
    });
  });

  return { server, port };
}
|
|
198
|
+
|
|
199
|
+
/**
 * Boot the tracking proxy in the foreground using the user's config.
 * Binds to localhost only; exits the process on fatal server errors.
 *
 * @returns {import('http').Server} The listening server instance.
 */
function startServer() {
  const config = getConfig();
  const { server, port } = createProxy(config);
  const upstream = config.target || 'https://api.anthropic.com';

  server.on('error', (err) => {
    if (err.code === 'EADDRINUSE') {
      console.error(`Error: Port ${port} is already in use. Is tokburn already running?`);
    } else {
      console.error('Proxy server error:', err.message);
    }
    process.exit(1);
  });

  server.listen(port, '127.0.0.1', () => {
    console.log(`tokburn proxy listening on http://127.0.0.1:${port}`);
    console.log(`Forwarding to ${upstream}`);
    console.log(`Set ANTHROPIC_BASE_URL=http://127.0.0.1:${port}`);
  });

  return server;
}
|
|
220
|
+
|
|
221
|
+
// Daemon management
// All daemon state lives under ~/.tokburn (HOME on POSIX, USERPROFILE on Windows).
const TOKBURN_DIR = path.join(process.env.HOME || process.env.USERPROFILE, '.tokburn');
// Records the detached proxy's PID so start/stop/status can find it.
const PID_FILE = path.join(TOKBURN_DIR, 'tokburn.pid');
|
|
224
|
+
|
|
225
|
+
/**
 * Launch the proxy as a detached background process.
 *
 * Spawns `node <this file> --serve`, appends the child's stdout/stderr to
 * ~/.tokburn/proxy.log, records the child PID in the PID file, and returns
 * without waiting for the child to bind its port.
 * NOTE(review): the isRunning()-then-spawn sequence is a small TOCTOU
 * window if two starts race — confirm acceptable for a CLI tool.
 *
 * @returns {{success: boolean, pid?: number, message: string}}
 */
function startDaemon() {
  if (!fs.existsSync(TOKBURN_DIR)) {
    fs.mkdirSync(TOKBURN_DIR, { recursive: true });
  }

  // Check if already running
  if (isRunning()) {
    const pid = fs.readFileSync(PID_FILE, 'utf8').trim();
    return { success: false, message: `tokburn is already running (PID ${pid})` };
  }

  const logFile = path.join(TOKBURN_DIR, 'proxy.log');
  // Two independent append handles: one handed to the child as stdout,
  // one as stderr.
  const out = fs.openSync(logFile, 'a');
  const err = fs.openSync(logFile, 'a');

  const child = spawn(process.execPath, [__filename, '--serve'], {
    detached: true,
    stdio: ['ignore', out, err],
    env: { ...process.env },
  });

  // Detach from the event loop so the parent CLI can exit immediately
  // without killing the proxy.
  child.unref();

  fs.writeFileSync(PID_FILE, String(child.pid) + '\n', 'utf8');

  return { success: true, pid: child.pid, message: `tokburn proxy started (PID ${child.pid})` };
}
|
|
252
|
+
|
|
253
|
+
/**
 * Stop the background proxy identified by the PID file.
 * Cleans up stale or invalid PID files as a side effect.
 *
 * @returns {{success: boolean, message: string}} Outcome description.
 */
function stopDaemon() {
  if (!fs.existsSync(PID_FILE)) {
    return { success: false, message: 'tokburn is not running (no PID file)' };
  }

  const daemonPid = parseInt(fs.readFileSync(PID_FILE, 'utf8').trim(), 10);
  if (isNaN(daemonPid)) {
    fs.unlinkSync(PID_FILE);
    return { success: false, message: 'Invalid PID file, cleaned up' };
  }

  try {
    process.kill(daemonPid, 'SIGTERM');
  } catch (e) {
    if (e.code === 'ESRCH') {
      // PID file pointed at a dead process — remove it and report.
      try { fs.unlinkSync(PID_FILE); } catch {}
      return { success: false, message: 'Process was not running, cleaned up stale PID file' };
    }
    return { success: false, message: `Failed to stop process: ${e.message}` };
  }

  // Signal sent successfully; drop the PID file (best effort).
  try { fs.unlinkSync(PID_FILE); } catch {}
  return { success: true, message: `tokburn proxy stopped (PID ${daemonPid})` };
}
|
|
277
|
+
|
|
278
|
+
/**
 * Check whether the daemon recorded in the PID file is alive.
 * Removes the PID file when it points at a dead process.
 *
 * @returns {boolean} True when a live process matches the PID file.
 */
function isRunning() {
  if (!fs.existsSync(PID_FILE)) return false;

  const recordedPid = parseInt(fs.readFileSync(PID_FILE, 'utf8').trim(), 10);
  if (isNaN(recordedPid)) return false;

  try {
    // Signal 0 performs an existence/permission check without killing.
    process.kill(recordedPid, 0);
    return true;
  } catch {
    // Process doesn't exist — drop the stale PID file (best effort).
    try { fs.unlinkSync(PID_FILE); } catch {}
    return false;
  }
}
|
|
293
|
+
|
|
294
|
+
// When run directly, start the server
|
|
295
|
+
// When run directly, start the proxy server. The original branched on
// `--serve` but both branches called startServer() identically, so the
// conditional was redundant. (`--serve` is still passed by startDaemon's
// detached child; it simply needs no special handling here.)
if (require.main === module) {
  startServer();
}
|
|
303
|
+
|
|
304
|
+
module.exports = { createProxy, startServer, startDaemon, stopDaemon, isRunning };
|
package/statusline.js
ADDED
|
@@ -0,0 +1,205 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
/**
|
|
3
|
+
* tokburn — Status line renderer for Claude Code
|
|
4
|
+
* Reads session JSON from stdin, renders configured modules.
|
|
5
|
+
* Configured via ~/.tokburn/config.json → statusline_modules
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
const fs = require('fs');
|
|
9
|
+
const path = require('path');
|
|
10
|
+
const { execFileSync } = require('child_process');
|
|
11
|
+
|
|
12
|
+
// Read stdin synchronously (Claude Code pipes the session JSON in).
// NOTE(review): /dev/stdin is POSIX-specific — likely fails on Windows; confirm.
let input = '';
try {
  input = fs.readFileSync('/dev/stdin', 'utf8');
} catch (_) {}

// Parsed session payload; stays {} when stdin was empty or invalid JSON,
// so every module renderer degrades to its fallback value.
let data = {};
try {
  data = JSON.parse(input);
} catch (_) {}
|
|
22
|
+
|
|
23
|
+
// Load config from ~/.tokburn/config.json; any read/parse failure leaves
// config = {} so the defaults below apply.
const configPath = path.join(process.env.HOME || process.env.USERPROFILE, '.tokburn', 'config.json');
let config = {};
try {
  if (fs.existsSync(configPath)) {
    config = JSON.parse(fs.readFileSync(configPath, 'utf8'));
  }
} catch (_) {}

// Modules to render, in order; falls back to the "recommended" preset.
const enabledModules = config.statusline_modules || [
  'model_context', 'repo_branch', 'current_limit', 'weekly_limit', 'cost'
];
|
|
35
|
+
|
|
36
|
+
// ── Module renderers ────────────────────────────────────────────────────────────
|
|
37
|
+
|
|
38
|
+
/**
 * Render a dot progress bar, e.g. "●●●●●○○○○○" for 50%.
 *
 * @param {number} pct - Percentage 0–100 (clamped; falsy treated as 0).
 * @param {number} [count=10] - Total number of dots.
 * @returns {string} Filled (\u25CF) then empty (\u25CB) dots.
 */
function dotBar(pct, count) {
  const total = count || 10;
  const clamped = Math.min(100, Math.max(0, pct || 0));
  const filledDots = Math.round((clamped / 100) * total);
  const emptyDots = total - filledDots;
  return '\u25CF'.repeat(filledDots) + '\u25CB'.repeat(emptyDots);
}
|
|
44
|
+
|
|
45
|
+
/**
 * Compactly format a count: 1532 → "1.5K", 2100000 → "2.1M".
 *
 * @param {number} n - Count (falsy/missing treated as 0).
 * @returns {string}
 */
function abbreviate(n) {
  const value = n || 0;
  if (value >= 1e6) return (value / 1e6).toFixed(1) + 'M';
  if (value >= 1e3) return (value / 1e3).toFixed(1) + 'K';
  return String(value);
}
|
|
51
|
+
|
|
52
|
+
/**
 * Format a rate-limit reset timestamp as a compact countdown:
 *   <1hr  → "37min"
 *   <24hr → "3hr 12min" (or just "3hr" on the exact hour)
 *   ≥24hr → weekday + local 12-hour time, e.g. "Fri 12:30PM"
 *
 * Fix: the original emitted "3hr " with a dangling trailing space when the
 * remaining minutes were exactly 0.
 *
 * @param {number} resetTimestamp - Unix epoch seconds (falsy → '').
 * @returns {string} Human-readable time until reset; 'now' when already passed.
 */
function formatResetTime(resetTimestamp) {
  if (!resetTimestamp) return '';
  const reset = new Date(resetTimestamp * 1000);
  const now = new Date();
  const diff = reset - now;

  if (diff <= 0) return 'now';

  const mins = Math.floor(diff / 60000);
  if (mins < 60) return mins + 'min';

  const hrs = Math.floor(mins / 60);
  const remainMins = mins % 60;
  if (hrs < 24) {
    // Omit the minutes segment entirely on the exact hour — no trailing space.
    return remainMins > 0 ? hrs + 'hr ' + remainMins + 'min' : hrs + 'hr';
  }

  // ≥24hr out: a countdown is less useful than the actual reset moment.
  const days = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'];
  const day = days[reset.getDay()];
  const h = reset.getHours();
  const m = reset.getMinutes();
  const ampm = h >= 12 ? 'PM' : 'AM';
  const h12 = h % 12 || 12;
  return day + ' ' + h12 + ':' + String(m).padStart(2, '0') + ampm;
}
|
|
76
|
+
|
|
77
|
+
// Status-line module renderers. Each is a zero-arg function returning a
// display string; an empty string hides the module. All read the
// module-level `data` (session JSON parsed from stdin).
const MODULES = {
  // "Opus 4.6 | ctx 13%" — model display name plus context-window usage.
  model_context: function () {
    const model = (data.model && data.model.display_name) || '?';
    const ctxPct = Math.round((data.context_window && data.context_window.used_percentage) || 0);
    return model + ' | ctx ' + ctxPct + '%';
  },

  // "tokburn (master*)" — cwd basename plus git branch; '*' marks a dirty
  // working tree. Both git calls are capped at 500ms and failures leave
  // the branch suffix off entirely.
  repo_branch: function () {
    const cwd = (data.workspace && data.workspace.current_dir) || data.cwd || '';
    const repoName = path.basename(cwd);
    let branch = '';
    try {
      branch = execFileSync('git', ['-C', cwd, 'branch', '--show-current'], {
        encoding: 'utf8', timeout: 500, stdio: ['pipe', 'pipe', 'pipe'],
      }).trim();
      const status = execFileSync('git', ['-C', cwd, 'status', '--porcelain'], {
        encoding: 'utf8', timeout: 500, stdio: ['pipe', 'pipe', 'pipe'],
      }).trim();
      if (status) branch += '*';
    } catch (_) {}

    if (branch) return repoName + ' (' + branch + ')';
    return repoName;
  },

  // 5-hour rate-limit window: dot bar + percent + reset countdown.
  current_limit: function () {
    const rl = data.rate_limits && data.rate_limits.five_hour;
    if (!rl) return 'current ' + dotBar(0) + ' 0%';

    const pct = Math.round(rl.used_percentage || 0);
    const reset = formatResetTime(rl.resets_at);
    return 'current ' + dotBar(pct) + ' ' + pct + '%' + (reset ? ' \u21BB ' + reset : '');
  },

  // 7-day rate-limit window: same shape as current_limit.
  weekly_limit: function () {
    const rl = data.rate_limits && data.rate_limits.seven_day;
    if (!rl) return 'weekly ' + dotBar(0) + ' 0%';

    const pct = Math.round(rl.used_percentage || 0);
    const reset = formatResetTime(rl.resets_at);
    return 'weekly ' + dotBar(pct) + ' ' + pct + '%' + (reset ? ' \u21BB ' + reset : '');
  },

  // Total session tokens (input + output), abbreviated: "142.8K tok".
  token_count: function () {
    const input = (data.context_window && data.context_window.total_input_tokens) || 0;
    const output = (data.context_window && data.context_window.total_output_tokens) || 0;
    return abbreviate(input + output) + ' tok';
  },

  // Session cost estimate straight from the session payload.
  cost: function () {
    const cost = (data.cost && data.cost.total_cost_usd) || 0;
    return '$' + cost.toFixed(2);
  },

  // Tokens-per-minute estimate derived from the proxy's usage log.
  // Returns '' unless the log has at least two entries for today spanning
  // a positive interval.
  burn_rate: function () {
    try {
      const usagePath = path.join(process.env.HOME || '', '.tokburn', 'usage.jsonl');
      if (!fs.existsSync(usagePath)) return '';
      const raw = fs.readFileSync(usagePath, 'utf8').trim();
      if (!raw) return '';
      const lines = raw.split('\n');
      const today = new Date().toISOString().split('T')[0];
      const todayEntries = [];
      for (const l of lines) {
        // Cheap pre-filter: only parse lines for today's date.
        // NOTE(review): assumes tracker.js serializes `timestamp` as the
        // FIRST key of each JSONL record — confirm against tracker.js.
        if (!l.startsWith('{"timestamp":"' + today)) continue;
        try { todayEntries.push(JSON.parse(l)); } catch (_) {}
      }
      if (todayEntries.length < 2) return '';
      const first = new Date(todayEntries[0].timestamp);
      const last = new Date(todayEntries[todayEntries.length - 1].timestamp);
      const elapsed = (last - first) / 60000;
      if (elapsed <= 0) return '';
      let total = 0;
      for (const e of todayEntries) total += (e.input_tokens || 0) + (e.output_tokens || 0);
      return '~' + abbreviate(Math.round(total / elapsed)) + '/min';
    } catch (_) {
      return '';
    }
  },
};
|
|
157
|
+
|
|
158
|
+
// ── Available modules metadata (used by init wizard) ────────────────────────────
|
|
159
|
+
|
|
160
|
+
// Metadata for every available module — consumed by the `tokburn init`
// wizard to present choices. Keys must match MODULES above.
const MODULE_LIST = [
  { key: 'model_context', label: 'Model + context', example: 'Opus 4.6 | ctx 13%' },
  { key: 'repo_branch', label: 'Repo + branch', example: 'tokburn (master*)' },
  { key: 'current_limit', label: 'Current rate limit', example: '\u25CB\u25CB\u25CB\u25CB\u25CB\u25CB\u25CB\u25CB\u25CB\u25CB 9% 3hr 32min' },
  { key: 'weekly_limit', label: 'Weekly rate limit', example: '\u25CF\u25CF\u25CF\u25CF\u25CB\u25CB\u25CB\u25CB\u25CB\u25CB 45% Fri 12:30PM' },
  { key: 'token_count', label: 'Token count', example: '142.8K tok' },
  { key: 'cost', label: 'Cost estimate', example: '$1.95' },
  { key: 'burn_rate', label: 'Burn rate (proxy)', example: '~2.1K/min' },
];
|
|
169
|
+
|
|
170
|
+
// ── Presets ──────────────────────────────────────────────────────────────────────
|
|
171
|
+
|
|
172
|
+
// Named module sets offered by the init wizard. 'recommended' matches the
// fallback used when config.statusline_modules is absent.
const PRESETS = {
  recommended: ['model_context', 'repo_branch', 'current_limit', 'weekly_limit', 'cost'],
  minimal: ['model_context', 'current_limit'],
  full: ['model_context', 'repo_branch', 'current_limit', 'weekly_limit', 'token_count', 'cost', 'burn_rate'],
};
|
|
177
|
+
|
|
178
|
+
// ── Render ───────────────────────────────────────────────────────────────────────
|
|
179
|
+
|
|
180
|
+
// Render pass (script mode only; skipped when required as a library).
// Rate-limit modules each get their own line; everything else joins line 1
// with a vertical-bar separator.
if (require.main === module) {
  const outputLines = [];
  const lineOneModules = [];
  const extraLines = [];

  for (const mod of enabledModules) {
    // Unknown module names in config are silently ignored.
    if (!MODULES[mod]) continue;
    const val = MODULES[mod]();
    // Empty string means "hide this module".
    if (!val) continue;

    if (mod === 'current_limit' || mod === 'weekly_limit') {
      extraLines.push(val);
    } else {
      lineOneModules.push(val);
    }
  }

  if (lineOneModules.length > 0) {
    outputLines.push(lineOneModules.join(' \u2502 '));
  }
  outputLines.push(...extraLines);

  // No trailing newline — Claude Code consumes the raw string.
  process.stdout.write(outputLines.join('\n'));
}
|
|
204
|
+
|
|
205
|
+
module.exports = { MODULE_LIST, PRESETS, MODULES };
|
package/statusline.sh
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
#!/bin/bash
# tokburn — Claude Code status line script
# Receives session JSON on stdin, outputs a compact usage summary.
# Install: tokburn init (or manually set statusLine in ~/.claude/settings.json)
# Requires: jq and bc on PATH.
# NOTE(review): neither tool is checked for; missing tools surface as
# command-not-found errors — confirm acceptable.

input=$(cat)

# NOTE(review): $model is extracted but never used in the output below —
# confirm whether it was meant to appear in the status string.
model=$(echo "$input" | jq -r '.model.display_name // "?"')
# Keep the integer part only (strip anything after the decimal point).
five_hr_pct=$(echo "$input" | jq -r '.rate_limits.five_hour.used_percentage // 0' | cut -d. -f1)
cost=$(echo "$input" | jq -r '.cost.total_cost_usd // 0')
input_tok=$(echo "$input" | jq -r '.context_window.total_input_tokens // 0')
output_tok=$(echo "$input" | jq -r '.context_window.total_output_tokens // 0')

total_tok=$((input_tok + output_tok))

# Abbreviate token count
if [ "$total_tok" -ge 1000000 ]; then
  tok_fmt="$(echo "scale=1; $total_tok / 1000000" | bc)M"
elif [ "$total_tok" -ge 1000 ]; then
  tok_fmt="$(echo "scale=1; $total_tok / 1000" | bc)K"
else
  tok_fmt="$total_tok"
fi

cost_fmt=$(printf '$%.2f' "$cost")

# State indicator: "!!" at >=90% of the 5hr limit, ">>" at >=50%, ":" otherwise.
if [ "$five_hr_pct" -ge 90 ]; then
  state="!! ${tok_fmt} tok ${five_hr_pct}% of 5hr ${cost_fmt}"
elif [ "$five_hr_pct" -ge 50 ]; then
  state=">> ${tok_fmt} tok ${five_hr_pct}% of 5hr ${cost_fmt}"
else
  state=":: ${tok_fmt} tok ${five_hr_pct}% of 5hr ${cost_fmt}"
fi

# Append burn rate from proxy if running
pid_file="$HOME/.tokburn/tokburn.pid"
if [ -f "$pid_file" ]; then
  pid=$(cat "$pid_file" 2>/dev/null)
  # kill -0 checks existence without sending a signal.
  if kill -0 "$pid" 2>/dev/null; then
    rate=$(tokburn _burn-rate 2>/dev/null)
    if [ -n "$rate" ] && [ "$rate" != "0" ]; then
      state="${state} ~${rate}/min"
    fi
  fi
fi

echo "$state"
|
package/store.js
ADDED
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
const fs = require('fs');
|
|
2
|
+
const path = require('path');
|
|
3
|
+
const { getTokburnDir } = require('./config');
|
|
4
|
+
|
|
5
|
+
// Append-only JSONL usage log (one JSON object per line) under ~/.tokburn.
const USAGE_FILE = path.join(getTokburnDir(), 'usage.jsonl');
|
|
6
|
+
|
|
7
|
+
/**
 * Read every usage record from the JSONL log, in file order.
 * Malformed lines are dropped; any I/O failure yields an empty list.
 *
 * @returns {Array<Object>} Parsed usage entries.
 */
function getAllEntries() {
  try {
    if (!fs.existsSync(USAGE_FILE)) return [];
    const raw = fs.readFileSync(USAGE_FILE, 'utf8').trim();
    if (!raw) return [];

    const entries = [];
    for (const line of raw.split('\n')) {
      try {
        const parsed = JSON.parse(line);
        // Only keep truthy values (drops nulls and other degenerate rows).
        if (parsed) entries.push(parsed);
      } catch {
        // Skip corrupt lines rather than failing the whole read.
      }
    }
    return entries;
  } catch {
    return [];
  }
}
|
|
23
|
+
|
|
24
|
+
/**
 * Format a Date as its UTC calendar day, 'YYYY-MM-DD'.
 *
 * @param {Date} d
 * @returns {string}
 */
function dateStr(d) {
  // ISO strings are 'YYYY-MM-DDTHH:MM:SS.sssZ'; the first 10 chars are the date.
  return d.toISOString().slice(0, 10);
}
|
|
27
|
+
|
|
28
|
+
/**
 * Today's UTC calendar day as 'YYYY-MM-DD'.
 *
 * @returns {string}
 */
function todayStr() {
  const now = new Date();
  return dateStr(now);
}
|
|
31
|
+
|
|
32
|
+
/**
 * Usage entries whose timestamp falls on today's UTC date.
 * Entries without a readable timestamp are excluded.
 *
 * @returns {Array<Object>}
 */
function getToday() {
  const prefix = todayStr();
  const isToday = (entry) => {
    try {
      return Boolean(entry.timestamp) && entry.timestamp.startsWith(prefix);
    } catch {
      return false;
    }
  };
  return getAllEntries().filter(isToday);
}
|
|
42
|
+
|
|
43
|
+
/**
 * Usage entries whose timestamp date lies within [startDate, endDate],
 * inclusive. Accepts 'YYYY-MM-DD' strings or Date objects for either bound.
 *
 * @param {string|Date} startDate
 * @param {string|Date} endDate
 * @returns {Array<Object>}
 */
function getRange(startDate, endDate) {
  const lower = typeof startDate === 'string' ? startDate : dateStr(startDate);
  const upper = typeof endDate === 'string' ? endDate : dateStr(endDate);

  return getAllEntries().filter((entry) => {
    let day;
    try {
      day = entry.timestamp.split('T')[0];
    } catch {
      return false;
    }
    // Lexicographic comparison is correct for zero-padded ISO dates.
    return day >= lower && day <= upper;
  });
}
|
|
55
|
+
|
|
56
|
+
/**
 * Usage entries from the trailing 7 calendar days (today inclusive).
 *
 * @returns {Array<Object>}
 */
function getWeek() {
  const end = new Date();
  const start = new Date(end);
  start.setDate(start.getDate() - 6);
  return getRange(start, end);
}
|
|
62
|
+
|
|
63
|
+
/**
 * Rewrite the usage log with today's entries removed.
 * Creates the data directory if needed; any failure is silent.
 */
function clearToday() {
  try {
    const today = todayStr();
    const keep = getAllEntries().filter((entry) => {
      try {
        return !entry.timestamp.startsWith(today);
      } catch {
        // Entries without a readable timestamp are preserved.
        return true;
      }
    });

    const dir = getTokburnDir();
    if (!fs.existsSync(dir)) {
      fs.mkdirSync(dir, { recursive: true });
    }

    const contents = keep.length === 0
      ? ''
      : keep.map((entry) => JSON.stringify(entry)).join('\n') + '\n';
    fs.writeFileSync(USAGE_FILE, contents, 'utf8');
  } catch {
    // Fail silently
  }
}
|
|
86
|
+
|
|
87
|
+
/**
 * Serialize all usage entries as CSV (RFC 4180 quoting).
 *
 * Fix: the original joined raw values with ',', so a comma, double quote,
 * or newline inside a free-form field (model name, conversation ID) would
 * corrupt the row structure. Fields containing those characters are now
 * wrapped in double quotes with embedded quotes doubled.
 *
 * @returns {string} CSV text including the header row and trailing newline.
 */
function exportCSV() {
  // Quote a single field per RFC 4180 only when it needs it.
  const csvField = (value) => {
    const s = String(value);
    return /[",\r\n]/.test(s) ? '"' + s.replace(/"/g, '""') + '"' : s;
  };

  const entries = getAllEntries();
  const header = 'timestamp,model,input_tokens,output_tokens,conversation_id,latency_ms';
  const rows = entries.map((e) => {
    return [
      e.timestamp || '',
      e.model || '',
      e.input_tokens || 0,
      e.output_tokens || 0,
      e.conversation_id || '',
      e.latency_ms || '',
    ].map(csvField).join(',');
  });
  return [header, ...rows].join('\n') + '\n';
}
|
|
102
|
+
|
|
103
|
+
/**
 * Group the trailing week's entries by UTC calendar day.
 * Every one of the 7 days appears as a key, even when it has no entries.
 *
 * @returns {Object<string, Array<Object>>} Map of 'YYYY-MM-DD' → entries.
 */
function getWeekByDay() {
  const byDay = {};

  // Seed all 7 day buckets (oldest first) so empty days are present.
  for (let offset = 6; offset >= 0; offset--) {
    const day = new Date();
    day.setDate(day.getDate() - offset);
    byDay[dateStr(day)] = [];
  }

  for (const entry of getWeek()) {
    const key = entry.timestamp.split('T')[0];
    if (byDay[key]) {
      byDay[key].push(entry);
    }
  }

  return byDay;
}
|
|
120
|
+
|
|
121
|
+
module.exports = { getAllEntries, getToday, getRange, getWeek, clearToday, exportCSV, getWeekByDay };
|