@vibe-cafe/vibe-usage 0.1.5 → 0.1.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -30,7 +30,7 @@ npx vibe-usage status # Show config & detected tools
30
30
  | Claude Code | Yes (session hook) | `~/.claude/projects/` |
31
31
  | Codex CLI | Yes (notify hook) | `~/.codex/sessions/` |
32
32
  | Gemini CLI | Yes (session hook) | `~/.gemini/tmp/` |
33
- | OpenCode | Manual only | `~/.local/share/opencode/` |
33
+ | OpenCode | Manual only | `~/.local/share/opencode/opencode.db` (SQLite) |
34
34
  | OpenClaw | Manual only | `~/.openclaw/agents/` |
35
35
 
36
36
  ## How It Works
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@vibe-cafe/vibe-usage",
3
- "version": "0.1.5",
3
+ "version": "0.1.7",
4
4
  "description": "Track your AI coding tool token usage and sync to vibecafe.ai",
5
5
  "type": "module",
6
6
  "bin": {
package/src/api.js CHANGED
@@ -2,15 +2,39 @@ import https from 'node:https';
2
2
  import http from 'node:http';
3
3
  import { URL } from 'node:url';
4
4
 
5
+ const MAX_RETRIES = 3;
6
+ const INITIAL_DELAY = 1000;
7
+
5
8
  /**
6
9
  * POST buckets to the vibecafe ingest API.
7
10
  * Uses native http/https — zero dependencies.
11
 + * Retries transient failures with exponential backoff (up to 3 attempts total).
8
12
  * @param {string} apiUrl - Base URL (e.g. "https://vibecafe.ai")
9
13
  * @param {string} apiKey - Bearer token (vbu_xxx)
10
14
  * @param {Array} buckets - Array of usage bucket objects
11
15
  * @returns {Promise<{ingested: number}>}
12
16
  */
13
- export function ingest(apiUrl, apiKey, buckets) {
17
+ export async function ingest(apiUrl, apiKey, buckets) {
18
+ let lastError;
19
+ for (let attempt = 0; attempt < MAX_RETRIES; attempt++) {
20
+ try {
21
+ return await _send(apiUrl, apiKey, buckets);
22
+ } catch (err) {
23
+ lastError = err;
24
+ // Don't retry auth errors or client errors
25
+ if (err.message === 'UNAUTHORIZED' || err.statusCode >= 400 && err.statusCode < 500) {
26
+ throw err;
27
+ }
28
+ if (attempt < MAX_RETRIES - 1) {
29
+ const delay = INITIAL_DELAY * 2 ** attempt;
30
+ await new Promise(r => setTimeout(r, delay));
31
+ }
32
+ }
33
+ }
34
+ throw lastError;
35
+ }
36
+
37
+ function _send(apiUrl, apiKey, buckets) {
14
38
  return new Promise((resolve, reject) => {
15
39
  const url = new URL('/api/usage/ingest', apiUrl);
16
40
  const body = JSON.stringify({ buckets });
@@ -33,7 +57,9 @@ export function ingest(apiUrl, apiKey, buckets) {
33
57
  return;
34
58
  }
35
59
  if (res.statusCode < 200 || res.statusCode >= 300) {
36
- reject(new Error(`HTTP ${res.statusCode}: ${data}`));
60
+ const err = new Error(`HTTP ${res.statusCode}: ${data}`);
61
+ err.statusCode = res.statusCode;
62
+ reject(err);
37
63
  return;
38
64
  }
39
65
  try {
@@ -1,12 +1,103 @@
1
+ import { execFileSync } from 'node:child_process';
1
2
  import { readdirSync, readFileSync, statSync, existsSync } from 'node:fs';
2
3
  import { join, basename } from 'node:path';
3
4
  import { homedir } from 'node:os';
4
5
  import { aggregateToBuckets } from './index.js';
5
6
 
6
7
  const DATA_DIR = join(homedir(), '.local', 'share', 'opencode');
8
+ const DB_PATH = join(DATA_DIR, 'opencode.db');
7
9
  const MESSAGES_DIR = join(DATA_DIR, 'storage', 'message');
8
10
 
11
+ /**
12
+ * Parse opencode usage data.
13
+ * Tries SQLite database first (opencode >= v0.2), falls back to legacy JSON files.
14
+ */
9
15
  export async function parse(lastSync) {
16
+ if (existsSync(DB_PATH)) {
17
+ try {
18
+ return parseFromSqlite(lastSync);
19
+ } catch (err) {
20
+ process.stderr.write(`warn: opencode sqlite parse failed (${err.message}), trying legacy json...\n`);
21
+ }
22
+ }
23
+ return parseFromJson(lastSync);
24
+ }
25
+
26
+ function parseFromSqlite(lastSync) {
27
+ // Build WHERE clause: only messages with token data
28
+ const conditions = [
29
+ "(json_extract(data, '$.tokens.input') > 0 OR json_extract(data, '$.tokens.output') > 0)",
30
+ ];
31
+ if (lastSync) {
32
+ const sinceMs = new Date(lastSync).getTime();
33
+ conditions.push(`time_created > ${sinceMs}`);
34
+ }
35
+
36
+ const query = `SELECT data FROM message WHERE ${conditions.join(' AND ')}`;
37
+
38
+ let output;
39
+ try {
40
+ output = execFileSync('sqlite3', [
41
+ '-json',
42
+ DB_PATH,
43
+ query,
44
+ ], { encoding: 'utf-8', maxBuffer: 100 * 1024 * 1024, timeout: 30000 });
45
+ } catch (err) {
46
+ if (err.status === 127 || (err.message && err.message.includes('ENOENT'))) {
47
+ throw new Error('sqlite3 CLI not found. Install sqlite3 to sync opencode data.');
48
+ }
49
+ throw err;
50
+ }
51
+
52
+ output = output.trim();
53
+ if (!output || output === '[]') return [];
54
+
55
+ let rows;
56
+ try {
57
+ rows = JSON.parse(output);
58
+ } catch {
59
+ throw new Error('Failed to parse sqlite3 JSON output');
60
+ }
61
+
62
+ const entries = [];
63
+ for (const row of rows) {
64
+ let data;
65
+ try {
66
+ data = JSON.parse(row.data);
67
+ } catch {
68
+ continue;
69
+ }
70
+
71
+ if (!data.modelID) continue;
72
+
73
+ const tokens = data.tokens;
74
+ if (!tokens) continue;
75
+ if (!tokens.input && !tokens.output) continue;
76
+
77
+ const timestamp = new Date(data.time?.created);
78
+ if (isNaN(timestamp.getTime())) continue;
79
+ if (lastSync && timestamp <= new Date(lastSync)) continue;
80
+
81
+ const rootPath = data.path?.root;
82
+ const project = rootPath ? basename(rootPath) : 'unknown';
83
+
84
+ entries.push({
85
+ source: 'opencode',
86
+ model: data.modelID || 'unknown',
87
+ project,
88
+ timestamp,
89
+ inputTokens: tokens.input || 0,
90
+ outputTokens: tokens.output || 0,
91
+ cachedInputTokens: tokens.cache?.read || 0,
92
+ reasoningOutputTokens: tokens.reasoning || 0,
93
+ });
94
+ }
95
+
96
+ return aggregateToBuckets(entries);
97
+ }
98
+
99
+ /** Legacy parser: reads JSON files from storage/message directories. */
100
+ function parseFromJson(lastSync) {
10
101
  if (!existsSync(MESSAGES_DIR)) return [];
11
102
 
12
103
  const entries = [];
@@ -45,10 +136,8 @@ export async function parse(lastSync) {
45
136
  continue;
46
137
  }
47
138
 
48
-
49
139
  if (!data.modelID) continue;
50
140
 
51
-
52
141
  const tokens = data.tokens;
53
142
  if (!tokens) continue;
54
143
  if (!tokens.input && !tokens.output) continue;
@@ -57,7 +146,6 @@ export async function parse(lastSync) {
57
146
  if (isNaN(timestamp.getTime())) continue;
58
147
  if (lastSync && timestamp <= new Date(lastSync)) continue;
59
148
 
60
-
61
149
  const rootPath = data.path?.root;
62
150
  const project = rootPath ? basename(rootPath) : 'unknown';
63
151
 
package/src/sync.js CHANGED
@@ -32,22 +32,31 @@ export async function runSync() {
32
32
 
33
33
  const apiUrl = config.apiUrl || 'https://vibecafe.ai';
34
34
  let totalIngested = 0;
35
+ const totalBatches = Math.ceil(allBuckets.length / BATCH_SIZE);
36
+
37
+ console.log(`Uploading ${allBuckets.length} buckets (${totalBatches} batch${totalBatches > 1 ? 'es' : ''})...`);
35
38
 
36
39
  try {
37
40
  for (let i = 0; i < allBuckets.length; i += BATCH_SIZE) {
38
41
  const batch = allBuckets.slice(i, i + BATCH_SIZE);
42
+ const batchNum = Math.floor(i / BATCH_SIZE) + 1;
43
+ const uploaded = Math.min(i + BATCH_SIZE, allBuckets.length);
44
+
45
+ if (totalBatches > 1) {
46
+ process.stdout.write(` [${batchNum}/${totalBatches}] ${uploaded}/${allBuckets.length} buckets...\r`);
47
+ }
48
+
39
49
  const result = await ingest(apiUrl, config.apiKey, batch);
40
50
  totalIngested += result.ingested ?? batch.length;
41
51
 
42
52
  // Save progress after each successful batch so partial uploads survive interruptions
43
53
  config.lastSync = new Date().toISOString();
44
54
  saveConfig(config);
45
-
46
- if (allBuckets.length > BATCH_SIZE) {
47
- process.stdout.write(` ${Math.min(i + BATCH_SIZE, allBuckets.length)}/${allBuckets.length} buckets...\r`);
48
- }
49
55
  }
50
56
 
57
+ if (totalBatches > 1) {
58
+ process.stdout.write('\n');
59
+ }
51
60
  console.log(`Synced ${totalIngested} buckets.`);
52
61
  return totalIngested;
53
62
  } catch (err) {