@vibe-cafe/vibe-usage 0.6.0 → 0.6.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -1
- package/package.json +1 -1
- package/src/parsers/copilot-cli.js +128 -0
- package/src/parsers/index.js +2 -0
- package/src/sync.js +9 -6
- package/src/tools.js +5 -0
package/README.md
CHANGED
|
@@ -31,6 +31,7 @@ npx vibe-usage status # Show config & detected tools
|
|
|
31
31
|
|------|---------------|
|
|
32
32
|
| Claude Code | `~/.claude/projects/` (tokens + sessions), `~/.claude/transcripts/` (sessions only) |
|
|
33
33
|
| Codex CLI | `~/.codex/sessions/` |
|
|
34
|
+
| GitHub Copilot CLI | `~/.copilot/session-state/*/events.jsonl` |
|
|
34
35
|
| Gemini CLI | `~/.gemini/tmp/` |
|
|
35
36
|
| OpenCode | `~/.local/share/opencode/opencode.db` (SQLite, `json_extract` query) |
|
|
36
37
|
| OpenClaw | `~/.openclaw/agents/` |
|
|
@@ -41,7 +42,7 @@ npx vibe-usage status # Show config & detected tools
|
|
|
41
42
|
|
|
42
43
|
- Parses local session logs from each AI coding tool
|
|
43
44
|
- Aggregates token usage into 30-minute buckets
|
|
44
|
-
- Extracts session metadata from all
|
|
45
|
+
- Extracts session metadata from all 8 parsers: active time (sum of turn durations), total duration, message counts
|
|
45
46
|
- Uploads buckets + sessions to your vibecafe.ai dashboard
|
|
46
47
|
- Stateless: computes full totals from local logs each sync (idempotent, no state files)
|
|
47
48
|
- For continuous syncing, use `npx vibe-usage daemon` or the [Vibe Usage Mac app](https://github.com/vibe-cafe/vibe-usage-app)
|
package/src/parsers/copilot-cli.js
ADDED
|
@@ -0,0 +1,128 @@
|
|
|
1
|
+
import { existsSync, readdirSync, readFileSync } from 'node:fs';
|
|
2
|
+
import { basename, join } from 'node:path';
|
|
3
|
+
import { homedir } from 'node:os';
|
|
4
|
+
import { aggregateToBuckets, extractSessions } from './index.js';
|
|
5
|
+
|
|
6
|
+
// Root directory where Copilot CLI persists one subdirectory per session,
// each containing an events.jsonl log.
const SESSION_STATE_DIR = join(homedir(), '.copilot', 'session-state');
|
|
7
|
+
|
|
8
|
+
/**
 * Scan baseDir for per-session subdirectories that contain an events.jsonl log.
 *
 * @param {string} baseDir - Directory holding one subdirectory per session.
 * @returns {Array<{filePath: string, sessionId: string}>} One entry per
 *   session directory that has an events.jsonl file. Best-effort: a missing
 *   or unreadable baseDir yields an empty array rather than throwing.
 */
function findEventFiles(baseDir) {
  if (!existsSync(baseDir)) return [];

  let dirEntries;
  try {
    dirEntries = readdirSync(baseDir, { withFileTypes: true });
  } catch {
    // Directory vanished or is unreadable between the existsSync check and
    // the read — treat as "no sessions".
    return [];
  }

  return dirEntries
    .filter((entry) => entry.isDirectory())
    .map((entry) => ({
      filePath: join(baseDir, entry.name, 'events.jsonl'),
      sessionId: entry.name,
    }))
    .filter(({ filePath }) => existsSync(filePath));
}
|
|
27
|
+
|
|
28
|
+
/**
 * Derive a short project name from a session's context object.
 *
 * Prefers the git root over the working directory (same truthiness fallback
 * as the original `gitRoot || cwd`), and returns 'unknown' when neither is
 * present or the chosen path has no usable final segment.
 *
 * @param {{gitRoot?: string, cwd?: string}|null|undefined} context
 * @returns {string} The path's base name, or 'unknown'.
 */
function getProjectFromContext(context) {
  const fallback = 'unknown';

  for (const candidate of [context?.gitRoot, context?.cwd]) {
    if (candidate) {
      return basename(candidate) || fallback;
    }
  }

  return fallback;
}
|
|
34
|
+
|
|
35
|
+
/**
 * Parse GitHub Copilot CLI session logs from ~/.copilot/session-state.
 *
 * Each session directory holds an events.jsonl stream. Token usage is taken
 * from 'session.shutdown' per-model summaries; session metadata comes from
 * timestamped 'user.message' / 'assistant.message' events. Unreadable files
 * and malformed JSON lines are skipped silently (best-effort parsing).
 *
 * @returns {Promise<{buckets: Array, sessions: Array}>} Aggregated usage
 *   buckets plus extracted session records; both empty when no logs exist.
 */
export async function parse() {
  const eventFiles = findEventFiles(SESSION_STATE_DIR);
  if (eventFiles.length === 0) return { buckets: [], sessions: [] };

  // Maps a timestamped message event type to the role recorded in metadata.
  const roleByType = {
    'user.message': 'user',
    'assistant.message': 'assistant',
  };

  const entries = [];
  const sessionEvents = [];

  for (const { filePath, sessionId } of eventFiles) {
    let raw;
    try {
      raw = readFileSync(filePath, 'utf-8');
    } catch {
      continue; // file disappeared or is unreadable — skip this session
    }

    // Project is sticky per file: set by start/resume, read by later events.
    let currentProject = 'unknown';

    for (const line of raw.split('\n')) {
      if (line.trim() === '') continue;

      let obj;
      try {
        obj = JSON.parse(line);
      } catch {
        continue; // malformed JSONL line
      }
      if (obj === null) continue; // a literal JSON 'null' line — nothing to record

      const timestamp = obj.timestamp ? new Date(obj.timestamp) : null;
      const hasTimestamp = Boolean(timestamp) && !Number.isNaN(timestamp.getTime());

      if (obj.type === 'session.start' || obj.type === 'session.resume') {
        currentProject = getProjectFromContext(obj.data?.context);
      }

      const role = roleByType[obj.type];
      if (role && hasTimestamp) {
        sessionEvents.push({
          sessionId,
          source: 'copilot-cli',
          project: currentProject,
          timestamp,
          role,
        });
      }

      if (obj.type !== 'session.shutdown' || !hasTimestamp) continue;

      for (const [model, metrics] of Object.entries(obj.data?.modelMetrics || {})) {
        const usage = metrics?.usage;
        if (!usage) continue;

        const totalInput = usage.inputTokens || 0;
        const cachedRead = usage.cacheReadTokens || 0;
        const cacheWrite = usage.cacheWriteTokens || 0;
        const output = usage.outputTokens || 0;

        // Skip models that report no token movement at all.
        if (totalInput === 0 && cachedRead === 0 && cacheWrite === 0 && output === 0) {
          continue;
        }

        entries.push({
          source: 'copilot-cli',
          model,
          project: currentProject,
          timestamp,
          // Copilot reports cache reads separately, but cache writes are part of
          // regular input for this schema because buckets don't have a dedicated field.
          inputTokens: Math.max(0, totalInput - cachedRead),
          outputTokens: output,
          cachedInputTokens: cachedRead,
          reasoningOutputTokens: 0,
        });
      }
    }
  }

  return {
    buckets: aggregateToBuckets(entries),
    sessions: extractSessions(sessionEvents),
  };
}
|
package/src/parsers/index.js
CHANGED
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
import { createHash } from 'node:crypto';
|
|
2
2
|
import { parse as parseClaudeCode } from './claude-code.js';
|
|
3
3
|
import { parse as parseCodex } from './codex.js';
|
|
4
|
+
import { parse as parseCopilotCli } from './copilot-cli.js';
|
|
4
5
|
import { parse as parseGeminiCli } from './gemini-cli.js';
|
|
5
6
|
import { parse as parseOpencode } from './opencode.js';
|
|
6
7
|
import { parse as parseOpenclaw } from './openclaw.js';
|
|
@@ -10,6 +11,7 @@ import { parse as parseKimiCode } from './kimi-code.js';
|
|
|
10
11
|
export const parsers = {
|
|
11
12
|
'claude-code': parseClaudeCode,
|
|
12
13
|
'codex': parseCodex,
|
|
14
|
+
'copilot-cli': parseCopilotCli,
|
|
13
15
|
'gemini-cli': parseGeminiCli,
|
|
14
16
|
'opencode': parseOpencode,
|
|
15
17
|
'openclaw': parseOpenclaw,
|
package/src/sync.js
CHANGED
|
@@ -4,6 +4,7 @@ import { ingest, fetchSettings } from './api.js';
|
|
|
4
4
|
import { parsers } from './parsers/index.js';
|
|
5
5
|
|
|
6
6
|
const BATCH_SIZE = 100;
|
|
7
|
+
const SESSION_BATCH_SIZE = 500;
|
|
7
8
|
|
|
8
9
|
function formatBytes(bytes) {
|
|
9
10
|
if (bytes < 1024) return `${bytes}B`;
|
|
@@ -85,7 +86,9 @@ export async function runSync({ throws = false, quiet = false } = {}) {
|
|
|
85
86
|
|
|
86
87
|
let totalIngested = 0;
|
|
87
88
|
let totalSessionsSynced = 0;
|
|
88
|
-
const
|
|
89
|
+
const bucketBatches = Math.ceil(allBuckets.length / BATCH_SIZE);
|
|
90
|
+
const sessionBatches = Math.ceil(allSessions.length / SESSION_BATCH_SIZE);
|
|
91
|
+
const totalBatches = Math.max(bucketBatches, sessionBatches, 1);
|
|
89
92
|
|
|
90
93
|
const parts = [];
|
|
91
94
|
if (allBuckets.length > 0) parts.push(`${allBuckets.length} buckets`);
|
|
@@ -93,18 +96,18 @@ export async function runSync({ throws = false, quiet = false } = {}) {
|
|
|
93
96
|
console.log(`Uploading ${parts.join(' + ')} (${totalBatches} batch${totalBatches > 1 ? 'es' : ''})...`);
|
|
94
97
|
|
|
95
98
|
try {
|
|
96
|
-
for (let
|
|
97
|
-
const batch = allBuckets.slice(
|
|
98
|
-
const
|
|
99
|
+
for (let batchIdx = 0; batchIdx < totalBatches; batchIdx++) {
|
|
100
|
+
const batch = allBuckets.slice(batchIdx * BATCH_SIZE, (batchIdx + 1) * BATCH_SIZE);
|
|
101
|
+
const batchSessions = allSessions.slice(batchIdx * SESSION_BATCH_SIZE, (batchIdx + 1) * SESSION_BATCH_SIZE);
|
|
102
|
+
const batchNum = batchIdx + 1;
|
|
99
103
|
const prefix = totalBatches > 1 ? ` [${batchNum}/${totalBatches}] ` : ' ';
|
|
100
|
-
const batchSessions = i === 0 ? allSessions : undefined;
|
|
101
104
|
|
|
102
105
|
const result = await ingest(apiUrl, config.apiKey, batch, {
|
|
103
106
|
onProgress(sent, total) {
|
|
104
107
|
const pct = Math.round((sent / total) * 100);
|
|
105
108
|
process.stdout.write(`\r${prefix}${formatBytes(sent)}/${formatBytes(total)} (${pct}%)\x1b[K`);
|
|
106
109
|
},
|
|
107
|
-
}, batchSessions);
|
|
110
|
+
}, batchSessions.length > 0 ? batchSessions : undefined);
|
|
108
111
|
totalIngested += result.ingested ?? batch.length;
|
|
109
112
|
totalSessionsSynced += result.sessions ?? 0;
|
|
110
113
|
}
|
package/src/tools.js
CHANGED
|
@@ -13,6 +13,11 @@ export const TOOLS = [
|
|
|
13
13
|
id: 'codex',
|
|
14
14
|
dataDir: join(homedir(), '.codex', 'sessions'),
|
|
15
15
|
},
|
|
16
|
+
{
|
|
17
|
+
name: 'GitHub Copilot CLI',
|
|
18
|
+
id: 'copilot-cli',
|
|
19
|
+
dataDir: join(homedir(), '.copilot', 'session-state'),
|
|
20
|
+
},
|
|
16
21
|
{
|
|
17
22
|
name: 'Gemini CLI',
|
|
18
23
|
id: 'gemini-cli',
|