vibeusage 0.2.15 → 0.2.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -0
- package/README.zh-CN.md +2 -0
- package/package.json +7 -3
- package/src/commands/init.js +25 -0
- package/src/commands/sync.js +38 -4
- package/src/lib/project-usage-purge.js +100 -0
- package/src/lib/rollout.js +588 -19
- package/src/lib/uploader.js +106 -8
- package/src/lib/vibeusage-api.js +2 -2
- package/src/lib/vibeusage-public-repo.js +88 -0
package/README.md
CHANGED
package/README.zh-CN.md
CHANGED
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "vibeusage",
|
|
3
|
-
"version": "0.2.
|
|
3
|
+
"version": "0.2.16",
|
|
4
4
|
"description": "Codex CLI token usage tracker (macOS-first, notify-driven).",
|
|
5
5
|
"license": "MIT",
|
|
6
6
|
"publishConfig": {
|
|
@@ -17,13 +17,16 @@
|
|
|
17
17
|
"dashboard:preview": "npm --prefix dashboard run preview",
|
|
18
18
|
"dev:shim": "node scripts/dev-bin-shim.cjs",
|
|
19
19
|
"validate:copy": "node scripts/validate-copy-registry.cjs",
|
|
20
|
+
"validate:ui-hardcode": "node scripts/ops/validate-ui-hardcode.cjs",
|
|
20
21
|
"copy:pull": "node scripts/copy-sync.cjs pull",
|
|
21
22
|
"copy:push": "node scripts/copy-sync.cjs push",
|
|
22
23
|
"architecture:canvas": "node scripts/ops/architecture-canvas.cjs",
|
|
23
24
|
"architecture:canvas:focus": "node scripts/ops/architecture-canvas.cjs --focus",
|
|
24
25
|
"architecture:canvas:list-modules": "node scripts/ops/architecture-canvas.cjs --list-modules",
|
|
25
26
|
"validate:guardrails": "node scripts/validate-architecture-guardrails.cjs",
|
|
26
|
-
"validate:insforge2-db": "node scripts/ops/insforge2-db-validate.cjs"
|
|
27
|
+
"validate:insforge2-db": "node scripts/ops/insforge2-db-validate.cjs",
|
|
28
|
+
"graph:scip": "node scripts/graph/generate-scip.cjs",
|
|
29
|
+
"graph:auto-index": "node scripts/graph/auto-index.cjs"
|
|
27
30
|
},
|
|
28
31
|
"bin": {
|
|
29
32
|
"tracker": "bin/tracker.js",
|
|
@@ -37,9 +40,10 @@
|
|
|
37
40
|
"README.md"
|
|
38
41
|
],
|
|
39
42
|
"engines": {
|
|
40
|
-
"node": "
|
|
43
|
+
"node": "20.x"
|
|
41
44
|
},
|
|
42
45
|
"devDependencies": {
|
|
46
|
+
"@sourcegraph/scip-typescript": "^0.3.6",
|
|
43
47
|
"esbuild": "0.27.2"
|
|
44
48
|
},
|
|
45
49
|
"dependencies": {
|
package/src/commands/init.js
CHANGED
|
@@ -600,12 +600,37 @@ const trackerBinPath = ${JSON.stringify(trackerBinPath)};
|
|
|
600
600
|
const fallbackPkg = ${JSON.stringify(fallbackPkg)};
|
|
601
601
|
const selfPath = path.resolve(__filename);
|
|
602
602
|
const home = os.homedir();
|
|
603
|
+
const debugLogPath = path.join(trackerDir, 'notify.debug.jsonl');
|
|
604
|
+
const debugEnabled = ['1', 'true'].includes((process.env.VIBEUSAGE_NOTIFY_DEBUG || '').toLowerCase());
|
|
605
|
+
const debugMaxBytesRaw = Number.parseInt(process.env.VIBEUSAGE_NOTIFY_DEBUG_MAX_BYTES || '', 10);
|
|
606
|
+
const debugMaxBytes = Number.isFinite(debugMaxBytesRaw) && debugMaxBytesRaw > 0
|
|
607
|
+
? debugMaxBytesRaw
|
|
608
|
+
: 1_000_000;
|
|
603
609
|
|
|
604
610
|
try {
|
|
605
611
|
fs.mkdirSync(trackerDir, { recursive: true });
|
|
606
612
|
fs.writeFileSync(signalPath, new Date().toISOString(), { encoding: 'utf8' });
|
|
607
613
|
} catch (_) {}
|
|
608
614
|
|
|
615
|
+
if (debugEnabled) {
|
|
616
|
+
try {
|
|
617
|
+
let size = 0;
|
|
618
|
+
try {
|
|
619
|
+
size = fs.statSync(debugLogPath).size;
|
|
620
|
+
} catch (err) {
|
|
621
|
+
if (err && err.code !== 'ENOENT') throw err;
|
|
622
|
+
}
|
|
623
|
+
if (size < debugMaxBytes) {
|
|
624
|
+
const entry = {
|
|
625
|
+
ts: new Date().toISOString(),
|
|
626
|
+
source,
|
|
627
|
+
cwd: process.cwd()
|
|
628
|
+
};
|
|
629
|
+
fs.appendFileSync(debugLogPath, JSON.stringify(entry) + os.EOL, 'utf8');
|
|
630
|
+
}
|
|
631
|
+
} catch (_) {}
|
|
632
|
+
}
|
|
633
|
+
|
|
609
634
|
// Throttle spawn: at most once per 20 seconds.
|
|
610
635
|
try {
|
|
611
636
|
const throttlePath = path.join(trackerDir, 'sync.throttle');
|
package/src/commands/sync.js
CHANGED
|
@@ -24,6 +24,7 @@ const {
|
|
|
24
24
|
recordUploadSuccess,
|
|
25
25
|
recordUploadFailure
|
|
26
26
|
} = require('../lib/upload-throttle');
|
|
27
|
+
const { purgeProjectUsage } = require('../lib/project-usage-purge');
|
|
27
28
|
const { resolveTrackerPaths } = require('../lib/tracker-paths');
|
|
28
29
|
const { resolveRuntimeConfig } = require('../lib/runtime-config');
|
|
29
30
|
|
|
@@ -45,6 +46,8 @@ async function cmdSync(argv) {
|
|
|
45
46
|
const cursorsPath = path.join(trackerDir, 'cursors.json');
|
|
46
47
|
const queuePath = path.join(trackerDir, 'queue.jsonl');
|
|
47
48
|
const queueStatePath = path.join(trackerDir, 'queue.state.json');
|
|
49
|
+
const projectQueuePath = path.join(trackerDir, 'project.queue.jsonl');
|
|
50
|
+
const projectQueueStatePath = path.join(trackerDir, 'project.queue.state.json');
|
|
48
51
|
const uploadThrottlePath = path.join(trackerDir, 'upload.throttle.json');
|
|
49
52
|
|
|
50
53
|
const config = await readJson(configPath);
|
|
@@ -85,6 +88,7 @@ async function cmdSync(argv) {
|
|
|
85
88
|
rolloutFiles,
|
|
86
89
|
cursors,
|
|
87
90
|
queuePath,
|
|
91
|
+
projectQueuePath,
|
|
88
92
|
onProgress: (p) => {
|
|
89
93
|
if (!progress?.enabled) return;
|
|
90
94
|
const pct = p.total > 0 ? p.index / p.total : 1;
|
|
@@ -106,6 +110,7 @@ async function cmdSync(argv) {
|
|
|
106
110
|
projectFiles: claudeFiles,
|
|
107
111
|
cursors,
|
|
108
112
|
queuePath,
|
|
113
|
+
projectQueuePath,
|
|
109
114
|
onProgress: (p) => {
|
|
110
115
|
if (!progress?.enabled) return;
|
|
111
116
|
const pct = p.total > 0 ? p.index / p.total : 1;
|
|
@@ -129,6 +134,7 @@ async function cmdSync(argv) {
|
|
|
129
134
|
sessionFiles: geminiFiles,
|
|
130
135
|
cursors,
|
|
131
136
|
queuePath,
|
|
137
|
+
projectQueuePath,
|
|
132
138
|
onProgress: (p) => {
|
|
133
139
|
if (!progress?.enabled) return;
|
|
134
140
|
const pct = p.total > 0 ? p.index / p.total : 1;
|
|
@@ -152,6 +158,7 @@ async function cmdSync(argv) {
|
|
|
152
158
|
messageFiles: opencodeFiles,
|
|
153
159
|
cursors,
|
|
154
160
|
queuePath,
|
|
161
|
+
projectQueuePath,
|
|
155
162
|
onProgress: (p) => {
|
|
156
163
|
if (!progress?.enabled) return;
|
|
157
164
|
const pct = p.total > 0 ? p.index / p.total : 1;
|
|
@@ -165,6 +172,21 @@ async function cmdSync(argv) {
|
|
|
165
172
|
});
|
|
166
173
|
}
|
|
167
174
|
|
|
175
|
+
if (cursors?.projectHourly?.projects && projectQueuePath && projectQueueStatePath) {
|
|
176
|
+
for (const [projectKey, meta] of Object.entries(cursors.projectHourly.projects)) {
|
|
177
|
+
if (!meta || typeof meta !== 'object') continue;
|
|
178
|
+
if (meta.status !== 'blocked' || !meta.purge_pending) continue;
|
|
179
|
+
await purgeProjectUsage({
|
|
180
|
+
projectKey,
|
|
181
|
+
projectQueuePath,
|
|
182
|
+
projectQueueStatePath,
|
|
183
|
+
projectState: cursors.projectHourly
|
|
184
|
+
});
|
|
185
|
+
meta.purge_pending = false;
|
|
186
|
+
meta.purged_at = new Date().toISOString();
|
|
187
|
+
}
|
|
188
|
+
}
|
|
189
|
+
|
|
168
190
|
cursors.updatedAt = new Date().toISOString();
|
|
169
191
|
await writeJson(cursorsPath, cursors);
|
|
170
192
|
|
|
@@ -178,8 +200,12 @@ async function cmdSync(argv) {
|
|
|
178
200
|
let uploadAttempted = false;
|
|
179
201
|
if (deviceToken) {
|
|
180
202
|
const beforeState = (await readJson(queueStatePath)) || { offset: 0 };
|
|
203
|
+
const projectBeforeState = (await readJson(projectQueueStatePath)) || { offset: 0 };
|
|
181
204
|
const queueSize = await safeStatSize(queuePath);
|
|
182
|
-
const
|
|
205
|
+
const projectQueueSize = await safeStatSize(projectQueuePath);
|
|
206
|
+
const pendingBytes =
|
|
207
|
+
Math.max(0, queueSize - Number(beforeState.offset || 0)) +
|
|
208
|
+
Math.max(0, projectQueueSize - Number(projectBeforeState.offset || 0));
|
|
183
209
|
let maxBatches = opts.auto ? 3 : opts.drain ? 10_000 : 10;
|
|
184
210
|
let batchSize = UPLOAD_DEFAULTS.batchSize;
|
|
185
211
|
let allowUpload = pendingBytes > 0;
|
|
@@ -208,9 +234,11 @@ async function cmdSync(argv) {
|
|
|
208
234
|
}
|
|
209
235
|
|
|
210
236
|
if (progress?.enabled && pendingBytes > 0 && allowUpload) {
|
|
211
|
-
const
|
|
237
|
+
const totalSize = queueSize + projectQueueSize;
|
|
238
|
+
const totalOffset = Number(beforeState.offset || 0) + Number(projectBeforeState.offset || 0);
|
|
239
|
+
const pct = totalSize > 0 ? totalOffset / totalSize : 0;
|
|
212
240
|
progress.start(
|
|
213
|
-
`Uploading ${renderBar(pct)} ${formatBytes(
|
|
241
|
+
`Uploading ${renderBar(pct)} ${formatBytes(totalOffset)}/${formatBytes(totalSize)} | inserted 0 skipped 0`
|
|
214
242
|
);
|
|
215
243
|
}
|
|
216
244
|
|
|
@@ -222,6 +250,8 @@ async function cmdSync(argv) {
|
|
|
222
250
|
deviceToken,
|
|
223
251
|
queuePath,
|
|
224
252
|
queueStatePath,
|
|
253
|
+
projectQueuePath,
|
|
254
|
+
projectQueueStatePath,
|
|
225
255
|
maxBatches,
|
|
226
256
|
batchSize,
|
|
227
257
|
onProgress: (u) => {
|
|
@@ -267,7 +297,11 @@ async function cmdSync(argv) {
|
|
|
267
297
|
|
|
268
298
|
const afterState = (await readJson(queueStatePath)) || { offset: 0 };
|
|
269
299
|
const queueSize = await safeStatSize(queuePath);
|
|
270
|
-
const
|
|
300
|
+
const projectAfterState = (await readJson(projectQueueStatePath)) || { offset: 0 };
|
|
301
|
+
const projectQueueSize = await safeStatSize(projectQueuePath);
|
|
302
|
+
const pendingBytes =
|
|
303
|
+
Math.max(0, queueSize - Number(afterState.offset || 0)) +
|
|
304
|
+
Math.max(0, projectQueueSize - Number(projectAfterState.offset || 0));
|
|
271
305
|
|
|
272
306
|
if (pendingBytes <= 0) {
|
|
273
307
|
await clearAutoRetry(trackerDir);
|
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
const fs = require('node:fs');
|
|
2
|
+
const fsp = require('node:fs/promises');
|
|
3
|
+
const path = require('node:path');
|
|
4
|
+
const readline = require('node:readline');
|
|
5
|
+
|
|
6
|
+
/**
 * Remove all queued usage entries and hourly buckets for one project.
 *
 * Streams the JSONL project queue into a temp file without the purged
 * project's entries, atomically renames it over the original, drops the
 * project's buckets from `projectState.buckets` (mutated in place), and
 * rewrites the queue state file so its `offset` points at the equivalent
 * position in the rewritten queue.
 *
 * @param {object} opts
 * @param {string} opts.projectKey - key of the project to purge
 * @param {string} opts.projectQueuePath - path to the JSONL queue file
 * @param {string} opts.projectQueueStatePath - path to the `{offset}` state file
 * @param {object} opts.projectState - state object whose `buckets` keys look like `"<projectKey>|..."`
 * @returns {Promise<{removed: number, kept: number, removedBuckets: number}>}
 */
async function purgeProjectUsage({ projectKey, projectQueuePath, projectQueueStatePath, projectState }) {
  if (!projectKey || !projectQueuePath || !projectQueueStatePath || !projectState) {
    return { removed: 0, kept: 0, removedBuckets: 0 };
  }

  // Read the previous upload offset; a missing or corrupt state file means 0.
  let previousOffset = 0;
  const previousState = await fsp.readFile(projectQueueStatePath, 'utf8').catch(() => null);
  if (previousState) {
    try {
      const parsed = JSON.parse(previousState);
      const offset = Number(parsed?.offset || 0);
      if (Number.isFinite(offset) && offset >= 0) previousOffset = offset;
    } catch (_err) {
      previousOffset = 0;
    }
  }

  // Drop this project's hourly buckets (keys are "<projectKey>|...").
  const buckets = projectState.buckets && typeof projectState.buckets === 'object' ? projectState.buckets : {};
  let removedBuckets = 0;
  const prefix = `${projectKey}|`;
  for (const key of Object.keys(buckets)) {
    if (key.startsWith(prefix)) {
      delete buckets[key];
      removedBuckets += 1;
    }
  }
  projectState.buckets = buckets;

  let removed = 0;
  let kept = 0;
  let nextOffset = 0;
  const st = await fsp.stat(projectQueuePath).catch(() => null);
  if (st && st.isFile()) {
    const tmpPath = `${projectQueuePath}.tmp`;
    await fsp.mkdir(path.dirname(projectQueuePath), { recursive: true });
    const input = fs.createReadStream(projectQueuePath, 'utf8');
    const output = fs.createWriteStream(tmpPath, { encoding: 'utf8' });
    // FIX: register 'error' handlers BEFORE writing. The original attached the
    // listener only after end(), so a write failure during the copy loop would
    // surface as an unhandled 'error' event and crash the process.
    let streamError = null;
    const captureError = (err) => {
      if (!streamError) streamError = err;
    };
    input.on('error', captureError);
    output.on('error', captureError);
    const rl = readline.createInterface({ input, crlfDelay: Infinity });

    let inputOffset = 0;
    let outputOffset = 0;
    try {
      for await (const line of rl) {
        const trimmed = line.trim();
        // +1 accounts for the '\n' readline stripped from this line.
        inputOffset += Buffer.byteLength(line, 'utf8') + 1;
        if (!trimmed) continue; // blank lines are dropped from the rewrite
        let parsed = null;
        try {
          parsed = JSON.parse(trimmed);
        } catch (_err) {
          // Unparseable line: keep it verbatim so no data is lost.
          const entry = trimmed + '\n';
          output.write(entry);
          outputOffset += Buffer.byteLength(entry, 'utf8');
          kept += 1;
          // Track where previousOffset lands in the rewritten file.
          if (inputOffset <= previousOffset) nextOffset = outputOffset;
          continue;
        }
        if (parsed && parsed.project_key === projectKey) {
          removed += 1;
          if (inputOffset <= previousOffset) nextOffset = outputOffset;
          continue;
        }
        const entry = JSON.stringify(parsed) + '\n';
        output.write(entry);
        outputOffset += Buffer.byteLength(entry, 'utf8');
        kept += 1;
        if (inputOffset <= previousOffset) nextOffset = outputOffset;
      }

      // Flush and close the temp file before renaming it into place.
      await new Promise((resolve) => {
        output.end(resolve);
      });
      if (streamError) throw streamError;

      await fsp.rename(tmpPath, projectQueuePath);
    } catch (err) {
      // FIX: don't leak a partial temp file; the original queue stays intact.
      await fsp.unlink(tmpPath).catch(() => {});
      throw err;
    }
    // Offset was at (or past) EOF: point at the end of the rewritten file.
    if (previousOffset >= inputOffset) {
      nextOffset = outputOffset;
    }
  } else {
    // No queue file yet: create an empty one so downstream readers succeed.
    await fsp.writeFile(projectQueuePath, '', 'utf8');
    nextOffset = 0;
  }

  await fsp.writeFile(projectQueueStatePath, JSON.stringify({ offset: nextOffset }), 'utf8');

  return { removed, kept, removedBuckets };
}
|
|
99
|
+
|
|
100
|
+
module.exports = { purgeProjectUsage };
|