vibeusage 0.2.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1317 @@
1
+ const fs = require('node:fs/promises');
2
+ const fssync = require('node:fs');
3
+ const path = require('node:path');
4
+ const readline = require('node:readline');
5
+
6
+ const { ensureDir } = require('./fs');
7
+
8
// Source tag assumed when a file entry does not carry an explicit source.
const DEFAULT_SOURCE = 'codex';
// Model name used when a log entry does not identify which model produced it.
const DEFAULT_MODEL = 'unknown';
// Field separator used when composing/parsing bucket keys (e.g. source|model|hourStart).
const BUCKET_SEPARATOR = '|';
11
+
12
/**
 * Collect every Codex rollout log beneath `sessionsDir`.
 * Expected layout: sessionsDir/YYYY/MM/DD/rollout-*.jsonl — entries that do
 * not match that shape are ignored. Returns paths sorted lexically.
 */
async function listRolloutFiles(sessionsDir) {
  const results = [];
  for (const year of await safeReadDir(sessionsDir)) {
    if (!year.isDirectory() || !/^[0-9]{4}$/.test(year.name)) continue;
    const yearPath = path.join(sessionsDir, year.name);
    for (const month of await safeReadDir(yearPath)) {
      if (!month.isDirectory() || !/^[0-9]{2}$/.test(month.name)) continue;
      const monthPath = path.join(yearPath, month.name);
      for (const day of await safeReadDir(monthPath)) {
        if (!day.isDirectory() || !/^[0-9]{2}$/.test(day.name)) continue;
        const dayPath = path.join(monthPath, day.name);
        for (const file of await safeReadDir(dayPath)) {
          const isRollout =
            file.isFile() &&
            file.name.startsWith('rollout-') &&
            file.name.endsWith('.jsonl');
          if (isRollout) results.push(path.join(dayPath, file.name));
        }
      }
    }
  }
  results.sort((x, y) => x.localeCompare(y));
  return results;
}
39
+
40
/** List Claude project log files beneath `projectsDir`, sorted lexically. */
async function listClaudeProjectFiles(projectsDir) {
  const collected = [];
  await walkClaudeProjects(projectsDir, collected);
  return collected.sort((x, y) => x.localeCompare(y));
}
46
+
47
/**
 * Collect Gemini session files: tmpDir/<project>/chats/session-*.json.
 * Returns the matching paths sorted lexically.
 */
async function listGeminiSessionFiles(tmpDir) {
  const sessions = [];
  for (const projectEntry of await safeReadDir(tmpDir)) {
    if (!projectEntry.isDirectory()) continue;
    const chatsPath = path.join(tmpDir, projectEntry.name, 'chats');
    for (const chatEntry of await safeReadDir(chatsPath)) {
      const matches =
        chatEntry.isFile() &&
        chatEntry.name.startsWith('session-') &&
        chatEntry.name.endsWith('.json');
      if (matches) sessions.push(path.join(chatsPath, chatEntry.name));
    }
  }
  return sessions.sort((x, y) => x.localeCompare(y));
}
63
+
64
/** List opencode message files beneath `storageDir`/message, sorted lexically. */
async function listOpencodeMessageFiles(storageDir) {
  const collected = [];
  await walkOpencodeMessages(path.join(storageDir, 'message'), collected);
  return collected.sort((x, y) => x.localeCompare(y));
}
71
+
72
/**
 * Incrementally aggregate Codex rollout files into half-hour usage buckets.
 *
 * @param {Array<string|{path:string,source?:string}>} rolloutFiles files to scan.
 * @param {object} cursors mutable cursor state; `cursors.files` stores per-file
 *   resume info (inode, byte offset, last totals/model) keyed by path, and
 *   `cursors.hourly` holds the bucket state. Mutated in place.
 * @param {string} queuePath JSONL queue file that receives changed buckets.
 * @param {function} [onProgress] optional per-file progress callback.
 * @param {string} [source] default source tag for entries that carry none.
 * @returns {Promise<{filesProcessed:number, eventsAggregated:number, bucketsQueued:number}>}
 */
async function parseRolloutIncremental({ rolloutFiles, cursors, queuePath, onProgress, source }) {
  await ensureDir(path.dirname(queuePath));
  let filesProcessed = 0;
  let eventsAggregated = 0;

  const cb = typeof onProgress === 'function' ? onProgress : null;
  const totalFiles = Array.isArray(rolloutFiles) ? rolloutFiles.length : 0;
  const hourlyState = normalizeHourlyState(cursors?.hourly);
  // Bucket keys touched during this run; drives what gets queued at the end.
  const touchedBuckets = new Set();
  const defaultSource = normalizeSourceInput(source) || DEFAULT_SOURCE;

  if (!cursors.files || typeof cursors.files !== 'object') {
    cursors.files = {};
  }

  for (let idx = 0; idx < rolloutFiles.length; idx++) {
    const entry = rolloutFiles[idx];
    // Entries may be bare path strings or { path, source } records.
    const filePath = typeof entry === 'string' ? entry : entry?.path;
    if (!filePath) continue;
    const fileSource =
      typeof entry === 'string' ? defaultSource : normalizeSourceInput(entry?.source) || defaultSource;
    const st = await fs.stat(filePath).catch(() => null);
    if (!st || !st.isFile()) continue;

    const key = filePath;
    const prev = cursors.files[key] || null;
    const inode = st.ino || 0;
    // Resume from the saved offset only while the inode matches; a changed
    // inode means the file was replaced/rotated and must be re-read fully.
    const startOffset = prev && prev.inode === inode ? prev.offset || 0 : 0;
    const lastTotal = prev && prev.inode === inode ? prev.lastTotal || null : null;
    const lastModel = prev && prev.inode === inode ? prev.lastModel || null : null;

    const result = await parseRolloutFile({
      filePath,
      startOffset,
      lastTotal,
      lastModel,
      hourlyState,
      touchedBuckets,
      source: fileSource
    });

    // Persist the new resume point for this file.
    cursors.files[key] = {
      inode,
      offset: result.endOffset,
      lastTotal: result.lastTotal,
      lastModel: result.lastModel,
      updatedAt: new Date().toISOString()
    };

    filesProcessed += 1;
    eventsAggregated += result.eventsAggregated;

    if (cb) {
      cb({
        index: idx + 1,
        total: totalFiles,
        filePath,
        filesProcessed,
        eventsAggregated,
        bucketsQueued: touchedBuckets.size
      });
    }
  }

  // Flush every changed bucket to the queue file, then stamp and store state.
  const bucketsQueued = await enqueueTouchedBuckets({ queuePath, hourlyState, touchedBuckets });
  hourlyState.updatedAt = new Date().toISOString();
  cursors.hourly = hourlyState;

  return { filesProcessed, eventsAggregated, bucketsQueued };
}
142
+
143
/**
 * Incrementally aggregate Claude project JSONL files into half-hour buckets.
 * Same cursor scheme as parseRolloutIncremental, except Claude usage records
 * are absolute per-message deltas, so only a byte offset is tracked per file.
 *
 * @param {Array<string|{path:string,source?:string}>} projectFiles files to scan.
 * @param {object} cursors mutable cursor state (mutated in place).
 * @param {string} queuePath JSONL queue file that receives changed buckets.
 * @param {function} [onProgress] optional per-file progress callback.
 * @param {string} [source] default source tag for entries that carry none.
 * @returns {Promise<{filesProcessed:number, eventsAggregated:number, bucketsQueued:number}>}
 */
async function parseClaudeIncremental({ projectFiles, cursors, queuePath, onProgress, source }) {
  await ensureDir(path.dirname(queuePath));
  let filesProcessed = 0;
  let eventsAggregated = 0;

  const cb = typeof onProgress === 'function' ? onProgress : null;
  const files = Array.isArray(projectFiles) ? projectFiles : [];
  const totalFiles = files.length;
  const hourlyState = normalizeHourlyState(cursors?.hourly);
  const touchedBuckets = new Set();
  const defaultSource = normalizeSourceInput(source) || 'claude';

  if (!cursors.files || typeof cursors.files !== 'object') {
    cursors.files = {};
  }

  for (let idx = 0; idx < files.length; idx++) {
    const entry = files[idx];
    const filePath = typeof entry === 'string' ? entry : entry?.path;
    if (!filePath) continue;
    const fileSource =
      typeof entry === 'string' ? defaultSource : normalizeSourceInput(entry?.source) || defaultSource;
    const st = await fs.stat(filePath).catch(() => null);
    if (!st || !st.isFile()) continue;

    const key = filePath;
    const prev = cursors.files[key] || null;
    const inode = st.ino || 0;
    // Resume only while the inode matches; otherwise re-read from offset 0.
    const startOffset = prev && prev.inode === inode ? prev.offset || 0 : 0;

    const result = await parseClaudeFile({
      filePath,
      startOffset,
      hourlyState,
      touchedBuckets,
      source: fileSource
    });

    cursors.files[key] = {
      inode,
      offset: result.endOffset,
      updatedAt: new Date().toISOString()
    };

    filesProcessed += 1;
    eventsAggregated += result.eventsAggregated;

    if (cb) {
      cb({
        index: idx + 1,
        total: totalFiles,
        filePath,
        filesProcessed,
        eventsAggregated,
        bucketsQueued: touchedBuckets.size
      });
    }
  }

  // Queue all changed buckets, then stamp and persist the hourly state.
  const bucketsQueued = await enqueueTouchedBuckets({ queuePath, hourlyState, touchedBuckets });
  hourlyState.updatedAt = new Date().toISOString();
  cursors.hourly = hourlyState;

  return { filesProcessed, eventsAggregated, bucketsQueued };
}
208
+
209
/**
 * Incrementally aggregate Gemini session JSON files into half-hour buckets.
 * Gemini sessions are whole-file JSON documents, so the cursor stores the
 * index of the last processed message plus the running totals/model needed
 * to diff cumulative token counts.
 *
 * @param {Array<string|{path:string,source?:string}>} sessionFiles files to scan.
 * @param {object} cursors mutable cursor state (mutated in place).
 * @param {string} queuePath JSONL queue file that receives changed buckets.
 * @param {function} [onProgress] optional per-file progress callback.
 * @param {string} [source] default source tag for entries that carry none.
 * @returns {Promise<{filesProcessed:number, eventsAggregated:number, bucketsQueued:number}>}
 */
async function parseGeminiIncremental({ sessionFiles, cursors, queuePath, onProgress, source }) {
  await ensureDir(path.dirname(queuePath));
  let filesProcessed = 0;
  let eventsAggregated = 0;

  const cb = typeof onProgress === 'function' ? onProgress : null;
  const files = Array.isArray(sessionFiles) ? sessionFiles : [];
  const totalFiles = files.length;
  const hourlyState = normalizeHourlyState(cursors?.hourly);
  const touchedBuckets = new Set();
  const defaultSource = normalizeSourceInput(source) || 'gemini';

  if (!cursors.files || typeof cursors.files !== 'object') {
    cursors.files = {};
  }

  for (let idx = 0; idx < files.length; idx++) {
    const entry = files[idx];
    const filePath = typeof entry === 'string' ? entry : entry?.path;
    if (!filePath) continue;
    const fileSource =
      typeof entry === 'string' ? defaultSource : normalizeSourceInput(entry?.source) || defaultSource;
    const st = await fs.stat(filePath).catch(() => null);
    if (!st || !st.isFile()) continue;

    const key = filePath;
    const prev = cursors.files[key] || null;
    const inode = st.ino || 0;
    // BUGFIX: the previous `Number(prev.lastIndex || -1)` treated a stored
    // index of 0 as "no cursor" (0 is falsy) and re-scanned from the start.
    // `??` keeps 0 as a valid resume point and only defaults on null/undefined.
    const startIndex = prev && prev.inode === inode ? Number(prev.lastIndex ?? -1) : -1;
    const lastTotals = prev && prev.inode === inode ? prev.lastTotals || null : null;
    const lastModel = prev && prev.inode === inode ? prev.lastModel || null : null;

    const result = await parseGeminiFile({
      filePath,
      startIndex,
      lastTotals,
      lastModel,
      hourlyState,
      touchedBuckets,
      source: fileSource
    });

    cursors.files[key] = {
      inode,
      lastIndex: result.lastIndex,
      lastTotals: result.lastTotals,
      lastModel: result.lastModel,
      updatedAt: new Date().toISOString()
    };

    filesProcessed += 1;
    eventsAggregated += result.eventsAggregated;

    if (cb) {
      cb({
        index: idx + 1,
        total: totalFiles,
        filePath,
        filesProcessed,
        eventsAggregated,
        bucketsQueued: touchedBuckets.size
      });
    }
  }

  // Queue all changed buckets, then stamp and persist the hourly state.
  const bucketsQueued = await enqueueTouchedBuckets({ queuePath, hourlyState, touchedBuckets });
  hourlyState.updatedAt = new Date().toISOString();
  cursors.hourly = hourlyState;

  return { filesProcessed, eventsAggregated, bucketsQueued };
}
280
+
281
/**
 * Incrementally aggregate opencode message files into half-hour buckets.
 * Each message is a small standalone JSON file, so the cursor stores the
 * file's (inode, size, mtimeMs) triple and skips re-parsing when unchanged.
 *
 * @param {Array<string|{path:string,source?:string}>} messageFiles files to scan.
 * @param {object} cursors mutable cursor state (mutated in place).
 * @param {string} queuePath JSONL queue file that receives changed buckets.
 * @param {function} [onProgress] optional per-file progress callback.
 * @param {string} [source] default source tag for entries that carry none.
 * @returns {Promise<{filesProcessed:number, eventsAggregated:number, bucketsQueued:number}>}
 */
async function parseOpencodeIncremental({ messageFiles, cursors, queuePath, onProgress, source }) {
  await ensureDir(path.dirname(queuePath));
  let filesProcessed = 0;
  let eventsAggregated = 0;

  const cb = typeof onProgress === 'function' ? onProgress : null;
  const files = Array.isArray(messageFiles) ? messageFiles : [];
  const totalFiles = files.length;
  const hourlyState = normalizeHourlyState(cursors?.hourly);
  const touchedBuckets = new Set();
  const defaultSource = normalizeSourceInput(source) || 'opencode';

  if (!cursors.files || typeof cursors.files !== 'object') {
    cursors.files = {};
  }

  for (let idx = 0; idx < files.length; idx++) {
    const entry = files[idx];
    const filePath = typeof entry === 'string' ? entry : entry?.path;
    if (!filePath) continue;
    const fileSource =
      typeof entry === 'string' ? defaultSource : normalizeSourceInput(entry?.source) || defaultSource;
    const st = await fs.stat(filePath).catch(() => null);
    if (!st || !st.isFile()) continue;

    const key = filePath;
    const prev = cursors.files[key] || null;
    const inode = st.ino || 0;
    const size = Number.isFinite(st.size) ? st.size : 0;
    const mtimeMs = Number.isFinite(st.mtimeMs) ? st.mtimeMs : 0;
    // Fast path: identical inode/size/mtime means the file cannot have
    // changed, so count it as processed and skip parsing entirely.
    const unchanged = prev && prev.inode === inode && prev.size === size && prev.mtimeMs === mtimeMs;
    if (unchanged) {
      filesProcessed += 1;
      if (cb) {
        cb({
          index: idx + 1,
          total: totalFiles,
          filePath,
          filesProcessed,
          eventsAggregated,
          bucketsQueued: touchedBuckets.size
        });
      }
      continue;
    }

    // Carry forward the last seen cumulative totals so only the delta counts.
    const lastTotals = prev && prev.inode === inode ? prev.lastTotals || null : null;
    const result = await parseOpencodeMessageFile({
      filePath,
      lastTotals,
      hourlyState,
      touchedBuckets,
      source: fileSource
    });

    cursors.files[key] = {
      inode,
      size,
      mtimeMs,
      lastTotals: result.lastTotals,
      updatedAt: new Date().toISOString()
    };

    filesProcessed += 1;
    eventsAggregated += result.eventsAggregated;

    if (cb) {
      cb({
        index: idx + 1,
        total: totalFiles,
        filePath,
        filesProcessed,
        eventsAggregated,
        bucketsQueued: touchedBuckets.size
      });
    }
  }

  // Queue all changed buckets, then stamp and persist the hourly state.
  const bucketsQueued = await enqueueTouchedBuckets({ queuePath, hourlyState, touchedBuckets });
  hourlyState.updatedAt = new Date().toISOString();
  cursors.hourly = hourlyState;

  return { filesProcessed, eventsAggregated, bucketsQueued };
}
365
+
366
/**
 * Parse one Codex rollout JSONL file from `startOffset` to EOF, aggregating
 * token-usage deltas into half-hour buckets.
 *
 * Tracks the active model from `turn_context` records and diffs cumulative
 * `token_count` totals (via pickDelta) so only new usage is counted.
 *
 * @returns {Promise<{endOffset:number, lastTotal:object|null, lastModel:string|null, eventsAggregated:number}>}
 *   `endOffset` is the file size at scan time (the next resume point).
 */
async function parseRolloutFile({
  filePath,
  startOffset,
  lastTotal,
  lastModel,
  hourlyState,
  touchedBuckets,
  source
}) {
  const st = await fs.stat(filePath);
  const endOffset = st.size;
  // Nothing new since the saved offset — return the carried-over state as-is.
  if (startOffset >= endOffset) {
    return { endOffset, lastTotal, lastModel, eventsAggregated: 0 };
  }

  const stream = fssync.createReadStream(filePath, { encoding: 'utf8', start: startOffset });
  const rl = readline.createInterface({ input: stream, crlfDelay: Infinity });

  let model = typeof lastModel === 'string' ? lastModel : null;
  let totals = lastTotal && typeof lastTotal === 'object' ? lastTotal : null;
  let eventsAggregated = 0;

  for await (const line of rl) {
    if (!line) continue;
    // Cheap substring pre-filter so we only JSON.parse relevant lines.
    const maybeTokenCount = line.includes('"token_count"');
    const maybeTurnContext = !maybeTokenCount && line.includes('"turn_context"') && line.includes('"model"');
    if (!maybeTokenCount && !maybeTurnContext) continue;

    let obj;
    try {
      obj = JSON.parse(line);
    } catch (_e) {
      continue; // tolerate partially written / corrupt lines
    }

    // A turn_context record switches the model for subsequent usage events.
    if (obj?.type === 'turn_context' && obj?.payload && typeof obj.payload.model === 'string') {
      model = obj.payload.model;
      continue;
    }

    const token = extractTokenCount(obj);
    if (!token) continue;

    const info = token.info;
    if (!info || typeof info !== 'object') continue;

    const tokenTimestamp = typeof token.timestamp === 'string' ? token.timestamp : null;
    if (!tokenTimestamp) continue;

    const lastUsage = info.last_token_usage;
    const totalUsage = info.total_token_usage;

    const delta = pickDelta(lastUsage, totalUsage, totals);
    if (!delta) continue;

    // Remember the cumulative totals so the next event diffs against them.
    if (totalUsage && typeof totalUsage === 'object') {
      totals = totalUsage;
    }

    const bucketStart = toUtcHalfHourStart(tokenTimestamp);
    if (!bucketStart) continue;

    const bucket = getHourlyBucket(hourlyState, source, model, bucketStart);
    addTotals(bucket.totals, delta);
    touchedBuckets.add(bucketKey(source, model, bucketStart));
    eventsAggregated += 1;
  }

  // FIX: explicitly release the readline interface and underlying stream,
  // matching parseClaudeFile; previously they were left open until GC.
  rl.close();
  stream.close?.();

  return { endOffset, lastTotal: totals, lastModel: model, eventsAggregated };
}
436
+
437
/**
 * Incrementally scan one Claude project JSONL file from `startOffset`,
 * aggregating per-message token usage into half-hour buckets.
 *
 * @returns {Promise<{endOffset:number, eventsAggregated:number}>}
 *   `endOffset` is the file size at scan time (the next resume point).
 */
async function parseClaudeFile({ filePath, startOffset, hourlyState, touchedBuckets, source }) {
  const stats = await fs.stat(filePath).catch(() => null);
  if (!stats || !stats.isFile()) {
    return { endOffset: startOffset, eventsAggregated: 0 };
  }

  const endOffset = stats.size;
  if (startOffset >= endOffset) {
    return { endOffset, eventsAggregated: 0 };
  }

  const input = fssync.createReadStream(filePath, { encoding: 'utf8', start: startOffset });
  const reader = readline.createInterface({ input, crlfDelay: Infinity });

  let aggregated = 0;
  for await (const rawLine of reader) {
    // Cheap substring pre-filter before attempting to parse JSON.
    if (!rawLine || !rawLine.includes('"usage"')) continue;

    let record;
    try {
      record = JSON.parse(rawLine);
    } catch (_err) {
      continue; // skip partially written / corrupt lines
    }

    const usage = record?.message?.usage || record?.usage;
    if (!usage || typeof usage !== 'object') continue;

    const model = normalizeModelInput(record?.message?.model || record?.model) || DEFAULT_MODEL;
    const ts = typeof record?.timestamp === 'string' ? record.timestamp : null;
    if (!ts) continue;

    const delta = normalizeClaudeUsage(usage);
    if (!delta || isAllZeroUsage(delta)) continue;

    const bucketStart = toUtcHalfHourStart(ts);
    if (!bucketStart) continue;

    const bucket = getHourlyBucket(hourlyState, source, model, bucketStart);
    addTotals(bucket.totals, delta);
    touchedBuckets.add(bucketKey(source, model, bucketStart));
    aggregated += 1;
  }

  reader.close();
  input.close?.();
  return { endOffset, eventsAggregated: aggregated };
}
480
+
481
/**
 * Parse one Gemini session JSON file, aggregating per-message token deltas
 * into half-hour buckets starting after `startIndex`.
 *
 * Gemini stores cumulative token counts per message, so each message is
 * diffed against the previous totals to obtain its delta.
 *
 * @returns {Promise<{lastIndex:number, lastTotals:object|null, lastModel:string|null, eventsAggregated:number}>}
 */
async function parseGeminiFile({
  filePath,
  startIndex,
  lastTotals,
  lastModel,
  hourlyState,
  touchedBuckets,
  source
}) {
  const raw = await fs.readFile(filePath, 'utf8').catch(() => '');
  // Unreadable/empty file: keep the cursor exactly where it was.
  if (!raw.trim()) return { lastIndex: startIndex, lastTotals, lastModel, eventsAggregated: 0 };

  let session;
  try {
    session = JSON.parse(raw);
  } catch (_e) {
    return { lastIndex: startIndex, lastTotals, lastModel, eventsAggregated: 0 };
  }

  const messages = Array.isArray(session?.messages) ? session.messages : [];
  // The session shrank (file rewritten): discard the cursor and rescan fully.
  if (startIndex >= messages.length) {
    startIndex = -1;
    lastTotals = null;
    lastModel = null;
  }

  let eventsAggregated = 0;
  let model = typeof lastModel === 'string' ? lastModel : null;
  let totals = lastTotals && typeof lastTotals === 'object' ? lastTotals : null;
  // Resume at the message after the last processed one.
  const begin = Number.isFinite(startIndex) ? startIndex + 1 : 0;

  for (let idx = begin; idx < messages.length; idx++) {
    const msg = messages[idx];
    if (!msg || typeof msg !== 'object') continue;

    const normalizedModel = normalizeModelInput(msg.model);
    if (normalizedModel) model = normalizedModel;

    const timestamp = typeof msg.timestamp === 'string' ? msg.timestamp : null;
    const currentTotals = normalizeGeminiTokens(msg.tokens);
    // Unusable message: still advance the running totals when present so the
    // next diff stays correct.
    if (!timestamp || !currentTotals) {
      totals = currentTotals || totals;
      continue;
    }

    const delta = diffGeminiTotals(currentTotals, totals);
    if (!delta || isAllZeroUsage(delta)) {
      totals = currentTotals;
      continue;
    }

    const bucketStart = toUtcHalfHourStart(timestamp);
    if (!bucketStart) {
      totals = currentTotals;
      continue;
    }

    const bucket = getHourlyBucket(hourlyState, source, model, bucketStart);
    addTotals(bucket.totals, delta);
    touchedBuckets.add(bucketKey(source, model, bucketStart));
    eventsAggregated += 1;
    totals = currentTotals;
  }

  return {
    lastIndex: messages.length - 1,
    lastTotals: totals,
    lastModel: model,
    eventsAggregated
  };
}
552
+
553
/**
 * Parse one opencode message JSON file and aggregate its token delta
 * (relative to `lastTotals`) into a half-hour bucket.
 *
 * Guard ordering matters: when the timestamp/bucket cannot be derived the
 * original `lastTotals` (not `currentTotals`) is returned, so the message is
 * retried with the same baseline on the next scan.
 *
 * @returns {Promise<{lastTotals:object|null, eventsAggregated:number}>}
 */
async function parseOpencodeMessageFile({ filePath, lastTotals, hourlyState, touchedBuckets, source }) {
  const raw = await fs.readFile(filePath, 'utf8').catch(() => '');
  if (!raw.trim()) return { lastTotals, eventsAggregated: 0 };

  let msg;
  try {
    msg = JSON.parse(raw);
  } catch (_e) {
    return { lastTotals, eventsAggregated: 0 };
  }

  const currentTotals = normalizeOpencodeTokens(msg?.tokens);
  if (!currentTotals) return { lastTotals, eventsAggregated: 0 };

  // Only the growth since the last scan counts (totals are cumulative).
  const delta = diffGeminiTotals(currentTotals, lastTotals);
  if (!delta || isAllZeroUsage(delta)) return { lastTotals: currentTotals, eventsAggregated: 0 };

  // Prefer the completion time; fall back to the creation time.
  const timestampMs = coerceEpochMs(msg?.time?.completed) || coerceEpochMs(msg?.time?.created);
  if (!timestampMs) return { lastTotals, eventsAggregated: 0 };

  const tsIso = new Date(timestampMs).toISOString();
  const bucketStart = toUtcHalfHourStart(tsIso);
  if (!bucketStart) return { lastTotals, eventsAggregated: 0 };

  const model = normalizeModelInput(msg?.modelID) || DEFAULT_MODEL;
  const bucket = getHourlyBucket(hourlyState, source, model, bucketStart);
  addTotals(bucket.totals, delta);
  touchedBuckets.add(bucketKey(source, model, bucketStart));
  return { lastTotals: currentTotals, eventsAggregated: 1 };
}
583
+
584
/**
 * Append queue records (JSONL) for every bucket whose totals changed in this
 * run, deduplicating via per-bucket `queuedKey` checksums.
 *
 * Buckets are grouped by (source, hour). Within a touched group:
 *  - if any named model exists, the unknown-model bucket is folded into the
 *    dominant model, and a previously queued unknown record is retracted by
 *    emitting an all-zero record for it;
 *  - otherwise the unknown bucket is emitted as-is, except 'every-code'
 *    groups borrow the nearest codex dominant model as their model name.
 * Groups already present in the legacy `groupQueued` map keep the old
 * one-record-per-group format instead.
 *
 * Mutates `hourlyState` (queuedKey / retractedUnknownKey / alignedModel /
 * groupQueued bookkeeping) and appends to `queuePath`.
 *
 * @returns {Promise<number>} number of queue records appended.
 */
async function enqueueTouchedBuckets({ queuePath, hourlyState, touchedBuckets }) {
  if (!touchedBuckets || touchedBuckets.size === 0) return 0;

  // Reduce touched bucket keys to their (source, hour) group keys.
  const touchedGroups = new Set();
  for (const bucketStart of touchedBuckets) {
    const parsed = parseBucketKey(bucketStart);
    const hourStart = parsed.hourStart;
    if (!hourStart) continue;
    touchedGroups.add(groupBucketKey(parsed.source, hourStart));
  }
  if (touchedGroups.size === 0) return 0;

  // Groups that appear in groupQueued were queued under the legacy
  // one-record-per-group scheme and are handled separately below.
  const groupQueued = hourlyState.groupQueued && typeof hourlyState.groupQueued === 'object' ? hourlyState.groupQueued : {};
  let codexTouched = false;
  const legacyGroups = new Set();
  for (const groupKey of touchedGroups) {
    if (Object.prototype.hasOwnProperty.call(groupQueued, groupKey)) {
      legacyGroups.add(groupKey);
    }
    if (!codexTouched && groupKey.startsWith(`${DEFAULT_SOURCE}${BUCKET_SEPARATOR}`)) {
      codexTouched = true;
    }
  }

  // Collect every bucket belonging to a touched, non-legacy group.
  const groupedBuckets = new Map();
  for (const [key, bucket] of Object.entries(hourlyState.buckets || {})) {
    if (!bucket || !bucket.totals) continue;
    const parsed = parseBucketKey(key);
    const hourStart = parsed.hourStart;
    if (!hourStart) continue;
    const groupKey = groupBucketKey(parsed.source, hourStart);
    if (!touchedGroups.has(groupKey) || legacyGroups.has(groupKey)) continue;

    const source = normalizeSourceInput(parsed.source) || DEFAULT_SOURCE;
    const model = normalizeModelInput(parsed.model) || DEFAULT_MODEL;
    let group = groupedBuckets.get(groupKey);
    if (!group) {
      group = { source, hourStart, buckets: new Map() };
      groupedBuckets.set(groupKey, group);
    }

    // Sanitize non-string queuedKey values left by older state formats.
    if (bucket.queuedKey != null && typeof bucket.queuedKey !== 'string') {
      bucket.queuedKey = null;
    }
    group.buckets.set(model, bucket);
  }

  // When codex usage changed, also re-emit 'every-code' groups whose only
  // bucket is the unknown model — their nearest-codex alignment may change.
  if (codexTouched) {
    const recomputeGroups = new Set();
    for (const [key, bucket] of Object.entries(hourlyState.buckets || {})) {
      if (!bucket || !bucket.totals) continue;
      const parsed = parseBucketKey(key);
      const hourStart = parsed.hourStart;
      if (!hourStart) continue;
      const source = normalizeSourceInput(parsed.source) || DEFAULT_SOURCE;
      if (source !== 'every-code') continue;
      const groupKey = groupBucketKey(source, hourStart);
      if (legacyGroups.has(groupKey) || groupedBuckets.has(groupKey)) continue;
      const model = normalizeModelInput(parsed.model) || DEFAULT_MODEL;
      if (model !== DEFAULT_MODEL) continue;
      recomputeGroups.add(groupKey);
    }

    if (recomputeGroups.size > 0) {
      for (const [key, bucket] of Object.entries(hourlyState.buckets || {})) {
        if (!bucket || !bucket.totals) continue;
        const parsed = parseBucketKey(key);
        const hourStart = parsed.hourStart;
        if (!hourStart) continue;
        const source = normalizeSourceInput(parsed.source) || DEFAULT_SOURCE;
        const groupKey = groupBucketKey(source, hourStart);
        if (!recomputeGroups.has(groupKey)) continue;
        let group = groupedBuckets.get(groupKey);
        if (!group) {
          group = { source, hourStart, buckets: new Map() };
          groupedBuckets.set(groupKey, group);
        }
        if (bucket.queuedKey != null && typeof bucket.queuedKey !== 'string') {
          bucket.queuedKey = null;
        }
        const model = normalizeModelInput(parsed.model) || DEFAULT_MODEL;
        group.buckets.set(model, bucket);
      }
    }
  }

  const codexDominants = collectCodexDominantModels(hourlyState);

  const toAppend = [];
  for (const group of groupedBuckets.values()) {
    const unknownBucket = group.buckets.get(DEFAULT_MODEL) || null;
    const dominantModel = pickDominantModel(group.buckets);
    // alignedModel is the named model the unknown bucket was previously
    // attributed to (if any).
    let alignedModel = null;
    if (unknownBucket?.alignedModel) {
      const normalized = normalizeModelInput(unknownBucket.alignedModel);
      alignedModel = normalized && normalized !== DEFAULT_MODEL ? normalized : null;
    }
    const zeroTotals = initTotals();
    const zeroKey = totalsKey(zeroTotals);

    if (dominantModel) {
      // Retract a stale alignment that points at a model no longer present.
      if (alignedModel && !group.buckets.has(alignedModel)) {
        toAppend.push(
          JSON.stringify({
            source: group.source,
            model: alignedModel,
            hour_start: group.hourStart,
            input_tokens: zeroTotals.input_tokens,
            cached_input_tokens: zeroTotals.cached_input_tokens,
            output_tokens: zeroTotals.output_tokens,
            reasoning_output_tokens: zeroTotals.reasoning_output_tokens,
            total_tokens: zeroTotals.total_tokens
          })
        );
      }
      // Retract a previously queued unknown-model record (emit zeros once).
      if (unknownBucket && !alignedModel && unknownBucket.queuedKey && unknownBucket.queuedKey !== zeroKey) {
        if (unknownBucket.retractedUnknownKey !== zeroKey) {
          toAppend.push(
            JSON.stringify({
              source: group.source,
              model: DEFAULT_MODEL,
              hour_start: group.hourStart,
              input_tokens: zeroTotals.input_tokens,
              cached_input_tokens: zeroTotals.cached_input_tokens,
              output_tokens: zeroTotals.output_tokens,
              reasoning_output_tokens: zeroTotals.reasoning_output_tokens,
              total_tokens: zeroTotals.total_tokens
            })
          );
          unknownBucket.retractedUnknownKey = zeroKey;
        }
      }
      if (unknownBucket) unknownBucket.alignedModel = null;
      // Emit each named model; the dominant one absorbs the unknown totals.
      for (const [model, bucket] of group.buckets.entries()) {
        if (model === DEFAULT_MODEL) continue;
        let totals = bucket.totals;
        if (model === dominantModel && unknownBucket?.totals) {
          totals = cloneTotals(bucket.totals);
          addTotals(totals, unknownBucket.totals);
        }
        const key = totalsKey(totals);
        if (bucket.queuedKey === key) continue; // already queued with these totals
        toAppend.push(
          JSON.stringify({
            source: group.source,
            model,
            hour_start: group.hourStart,
            input_tokens: totals.input_tokens,
            cached_input_tokens: totals.cached_input_tokens,
            output_tokens: totals.output_tokens,
            reasoning_output_tokens: totals.reasoning_output_tokens,
            total_tokens: totals.total_tokens
          })
        );
        bucket.queuedKey = key;
      }
      continue;
    }

    // No named model in the group: emit only the unknown bucket, possibly
    // aligned to the nearest codex dominant model for 'every-code'.
    if (!unknownBucket?.totals) continue;
    let outputModel = DEFAULT_MODEL;
    if (group.source === 'every-code') {
      const aligned = findNearestCodexModel(group.hourStart, codexDominants);
      if (aligned) outputModel = aligned;
    }
    const nextAligned = outputModel !== DEFAULT_MODEL ? outputModel : null;
    // Alignment target changed: retract the old aligned record with zeros.
    if (alignedModel && alignedModel !== nextAligned) {
      toAppend.push(
        JSON.stringify({
          source: group.source,
          model: alignedModel,
          hour_start: group.hourStart,
          input_tokens: zeroTotals.input_tokens,
          cached_input_tokens: zeroTotals.cached_input_tokens,
          output_tokens: zeroTotals.output_tokens,
          reasoning_output_tokens: zeroTotals.reasoning_output_tokens,
          total_tokens: zeroTotals.total_tokens
        })
      );
    }
    // Newly aligned: retract the previously queued unknown-model record.
    if (!alignedModel && nextAligned && unknownBucket.queuedKey && unknownBucket.queuedKey !== zeroKey) {
      if (unknownBucket.retractedUnknownKey !== zeroKey) {
        toAppend.push(
          JSON.stringify({
            source: group.source,
            model: DEFAULT_MODEL,
            hour_start: group.hourStart,
            input_tokens: zeroTotals.input_tokens,
            cached_input_tokens: zeroTotals.cached_input_tokens,
            output_tokens: zeroTotals.output_tokens,
            reasoning_output_tokens: zeroTotals.reasoning_output_tokens,
            total_tokens: zeroTotals.total_tokens
          })
        );
        unknownBucket.retractedUnknownKey = zeroKey;
      }
    }
    if (unknownBucket) unknownBucket.alignedModel = nextAligned;
    const key = totalsKey(unknownBucket.totals);
    // Include the aligned model in the dedupe key so re-alignment re-queues.
    const outputKey = outputModel === DEFAULT_MODEL ? key : `${key}|${outputModel}`;
    if (unknownBucket.queuedKey === outputKey) continue;
    toAppend.push(
      JSON.stringify({
        source: group.source,
        model: outputModel,
        hour_start: group.hourStart,
        input_tokens: unknownBucket.totals.input_tokens,
        cached_input_tokens: unknownBucket.totals.cached_input_tokens,
        output_tokens: unknownBucket.totals.output_tokens,
        reasoning_output_tokens: unknownBucket.totals.reasoning_output_tokens,
        total_tokens: unknownBucket.totals.total_tokens
      })
    );
    unknownBucket.queuedKey = outputKey;
  }

  // Legacy groups: aggregate all models into a single record per group,
  // deduped via the groupQueued checksum map.
  if (legacyGroups.size > 0) {
    const grouped = new Map();
    for (const [key, bucket] of Object.entries(hourlyState.buckets || {})) {
      if (!bucket || !bucket.totals) continue;
      const parsed = parseBucketKey(key);
      const hourStart = parsed.hourStart;
      if (!hourStart) continue;
      const groupKey = groupBucketKey(parsed.source, hourStart);
      if (!legacyGroups.has(groupKey)) continue;

      let group = grouped.get(groupKey);
      if (!group) {
        group = {
          source: normalizeSourceInput(parsed.source) || DEFAULT_SOURCE,
          hourStart,
          models: new Set(),
          totals: initTotals()
        };
        grouped.set(groupKey, group);
      }
      group.models.add(parsed.model || DEFAULT_MODEL);
      addTotals(group.totals, bucket.totals);
    }

    for (const group of grouped.values()) {
      // A single-model group keeps its model name; mixed groups use unknown.
      const model = group.models.size === 1 ? [...group.models][0] : DEFAULT_MODEL;
      const key = totalsKey(group.totals);
      const groupKey = groupBucketKey(group.source, group.hourStart);
      if (groupQueued[groupKey] === key) continue;
      toAppend.push(
        JSON.stringify({
          source: group.source,
          model,
          hour_start: group.hourStart,
          input_tokens: group.totals.input_tokens,
          cached_input_tokens: group.totals.cached_input_tokens,
          output_tokens: group.totals.output_tokens,
          reasoning_output_tokens: group.totals.reasoning_output_tokens,
          total_tokens: group.totals.total_tokens
        })
      );
      groupQueued[groupKey] = key;
    }
  }

  hourlyState.groupQueued = groupQueued;

  if (toAppend.length > 0) {
    await fs.appendFile(queuePath, toAppend.join('\n') + '\n', 'utf8');
  }

  return toAppend.length;
}
853
+
854
/**
 * Choose the model with the highest total_tokens among a group's buckets,
 * ignoring the DEFAULT_MODEL placeholder. Ties break toward the lexically
 * smaller model name. Returns null when only unknown-model buckets exist.
 *
 * @param {Map<string, {totals?: {total_tokens?: number}}>} buckets
 * @returns {string|null}
 */
function pickDominantModel(buckets) {
  let winner = null;
  let winnerTotal = -1;
  for (const [candidate, bucket] of buckets.entries()) {
    if (candidate === DEFAULT_MODEL) continue;
    const candidateTotal = Number(bucket?.totals?.total_tokens || 0);
    const beatsWinner =
      winner == null ||
      candidateTotal > winnerTotal ||
      (candidateTotal === winnerTotal && candidate < winner);
    if (beatsWinner) {
      winner = candidate;
      winnerTotal = candidateTotal;
    }
  }
  return winner;
}
871
+
872
/** Build a fresh totals object seeded with a copy of `totals`' counters. */
function cloneTotals(totals) {
  const copy = initTotals();
  addTotals(copy, totals || {});
  return copy;
}
877
+
878
/**
 * For each hour that has codex (DEFAULT_SOURCE) buckets with a named model,
 * determine the dominant model (highest summed total_tokens; ties break
 * toward the lexically smaller name).
 *
 * @param {object} hourlyState bucket state with a `buckets` map.
 * @returns {Array<{hourStart: string, model: string}>} one entry per hour.
 */
function collectCodexDominantModels(hourlyState) {
  // hourStart -> Map(model -> summed total_tokens)
  const grouped = new Map();
  for (const [key, bucket] of Object.entries(hourlyState.buckets || {})) {
    if (!bucket || !bucket.totals) continue;
    const parsed = parseBucketKey(key);
    const hourStart = parsed.hourStart;
    if (!hourStart) continue;
    const source = normalizeSourceInput(parsed.source) || DEFAULT_SOURCE;
    if (source !== DEFAULT_SOURCE) continue;
    const model = normalizeModelInput(parsed.model) || DEFAULT_MODEL;
    // Placeholder buckets cannot be dominant candidates.
    if (model === DEFAULT_MODEL) continue;

    let models = grouped.get(hourStart);
    if (!models) {
      models = new Map();
      grouped.set(hourStart, models);
    }
    const total = Number(bucket.totals.total_tokens || 0);
    models.set(model, (models.get(model) || 0) + total);
  }

  // Pick the winner per hour (same tie-break rule as pickDominantModel).
  const dominants = [];
  for (const [hourStart, models] of grouped.entries()) {
    let dominantModel = null;
    let dominantTotal = -1;
    for (const [model, total] of models.entries()) {
      if (
        dominantModel == null ||
        total > dominantTotal ||
        (total === dominantTotal && model < dominantModel)
      ) {
        dominantModel = model;
        dominantTotal = total;
      }
    }
    if (dominantModel) {
      dominants.push({ hourStart, model: dominantModel });
    }
  }

  return dominants;
}
920
+
921
// Return the model from `dominants` whose hourStart is closest in time to
// the target `hourStart`; ties go to the chronologically earlier entry.
// Returns null for an empty/missing list, an unparseable target, or when
// no candidate timestamp parses.
function findNearestCodexModel(hourStart, dominants) {
  if (!hourStart || !dominants || dominants.length === 0) return null;
  const targetMs = Date.parse(hourStart);
  if (!Number.isFinite(targetMs)) return null;

  let bestModel = null;
  let bestDiff = Infinity;
  let bestTime = Infinity;
  for (const entry of dominants) {
    const candidateMs = Date.parse(entry.hourStart);
    if (!Number.isFinite(candidateMs)) continue;
    const diff = Math.abs(candidateMs - targetMs);
    if (diff < bestDiff || (diff === bestDiff && candidateMs < bestTime)) {
      bestModel = entry.model;
      bestDiff = diff;
      bestTime = candidateMs;
    }
  }

  return bestModel;
}
938
+
939
// Migrate a persisted hourly-usage state object to schema version 3.
//
// Input shapes handled:
//   - v1 / missing version: bucket keys carried no model segment. Every
//     bucket is re-keyed under the "unknown" model, and any per-bucket
//     queuedKey is lifted into the new per-group dedup map (groupQueued).
//   - v2: keys already carry source|model|hourStart but no groupQueued map
//     exists; keys are re-normalized and groupQueued starts empty.
//   - v3: keys are re-normalized and groupQueued is preserved.
//
// Always returns a fresh { version: 3, buckets, groupQueued, updatedAt }
// object; `raw` itself is not mutated (bucket values are shared by
// reference, not copied).
function normalizeHourlyState(raw) {
  const state = raw && typeof raw === 'object' ? raw : {};
  const version = Number(state.version || 1);
  const rawBuckets = state.buckets && typeof state.buckets === 'object' ? state.buckets : {};
  const buckets = {};
  const groupQueued = {};

  if (!Number.isFinite(version) || version < 2) {
    // Legacy (pre-model) layout: fold every bucket under DEFAULT_MODEL.
    for (const [key, value] of Object.entries(rawBuckets)) {
      const parsed = parseBucketKey(key);
      const hourStart = parsed.hourStart;
      if (!hourStart) continue;
      const source = normalizeSourceInput(parsed.source) || DEFAULT_SOURCE;
      const normalizedKey = bucketKey(source, DEFAULT_MODEL, hourStart);
      buckets[normalizedKey] = value;
      if (value?.queuedKey) {
        // Preserve dedup state so already-queued hours are not re-emitted.
        groupQueued[groupBucketKey(source, hourStart)] = value.queuedKey;
      }
    }
    return {
      version: 3,
      buckets,
      groupQueued,
      updatedAt: typeof state.updatedAt === 'string' ? state.updatedAt : null
    };
  }

  // v2+ layout: keys already include a model segment; rebuild each key so
  // source/model normalization stays consistent with current rules.
  for (const [key, value] of Object.entries(rawBuckets)) {
    const parsed = parseBucketKey(key);
    const hourStart = parsed.hourStart;
    if (!hourStart) continue;
    const normalizedKey = bucketKey(parsed.source, parsed.model, hourStart);
    buckets[normalizedKey] = value;
  }

  const existingGroupQueued =
    state.groupQueued && typeof state.groupQueued === 'object' ? state.groupQueued : {};

  return {
    version: 3,
    buckets,
    // groupQueued was introduced in v3; discard anything older.
    groupQueued: version >= 3 ? existingGroupQueued : {},
    updatedAt: typeof state.updatedAt === 'string' ? state.updatedAt : null
  };
}
984
+
985
// Look up (or lazily create) the bucket for a source/model/hourStart
// triple, repairing malformed persisted entries: a missing totals object
// is re-initialized and a non-string queuedKey is cleared. Always returns
// a bucket of shape { totals, queuedKey } stored in state.buckets.
function getHourlyBucket(state, source, model, hourStart) {
  const key = bucketKey(
    normalizeSourceInput(source) || DEFAULT_SOURCE,
    normalizeModelInput(model) || DEFAULT_MODEL,
    hourStart
  );
  const buckets = state.buckets;
  const existing = buckets[key];
  if (!existing || typeof existing !== 'object') {
    const fresh = { totals: initTotals(), queuedKey: null };
    buckets[key] = fresh;
    return fresh;
  }

  if (!existing.totals || typeof existing.totals !== 'object') {
    existing.totals = initTotals();
  }
  if (existing.queuedKey != null && typeof existing.queuedKey !== 'string') {
    existing.queuedKey = null;
  }
  return existing;
}
1007
+
1008
// Build a zeroed token-usage accumulator with every counter the tracker
// records.
function initTotals() {
  const totals = {};
  for (const field of [
    'input_tokens',
    'cached_input_tokens',
    'output_tokens',
    'reasoning_output_tokens',
    'total_tokens'
  ]) {
    totals[field] = 0;
  }
  return totals;
}
1017
+
1018
// Accumulate delta's counters into target in place; missing or falsy
// fields on delta contribute 0.
function addTotals(target, delta) {
  for (const field of [
    'input_tokens',
    'cached_input_tokens',
    'output_tokens',
    'reasoning_output_tokens',
    'total_tokens'
  ]) {
    target[field] += delta[field] || 0;
  }
}
1025
+
1026
// Build a compact fingerprint of a totals record ("in|cached|out|reason|total")
// used to detect whether totals changed between scans.
function totalsKey(totals) {
  return [
    'input_tokens',
    'cached_input_tokens',
    'output_tokens',
    'reasoning_output_tokens',
    'total_tokens'
  ]
    .map((field) => totals[field] || 0)
    .join('|');
}
1035
+
1036
// Snap a timestamp to the start of its UTC half-hour window (:00 or :30)
// and return it as an ISO-8601 string; returns null when ts does not
// parse to a valid date. UTC half-hour boundaries align with multiples of
// 1,800,000 ms since the epoch, so flooring the epoch value is equivalent
// to zeroing seconds/millis and snapping minutes via UTC components.
function toUtcHalfHourStart(ts) {
  const ms = new Date(ts).getTime();
  if (!Number.isFinite(ms)) return null;
  const HALF_HOUR_MS = 30 * 60 * 1000;
  const snapped = Math.floor(ms / HALF_HOUR_MS) * HALF_HOUR_MS;
  return new Date(snapped).toISOString();
}
1054
+
1055
// Compose the canonical "source|model|hourStart" bucket key, falling back
// to the default source/model for blank or invalid inputs.
function bucketKey(source, model, hourStart) {
  const parts = [
    normalizeSourceInput(source) || DEFAULT_SOURCE,
    normalizeModelInput(model) || DEFAULT_MODEL,
    hourStart
  ];
  return parts.join(BUCKET_SEPARATOR);
}
1060
+
1061
// Compose the model-agnostic "source|hourStart" key used for per-group
// queue deduplication.
function groupBucketKey(source, hourStart) {
  const safeSource = normalizeSourceInput(source) || DEFAULT_SOURCE;
  return [safeSource, hourStart].join(BUCKET_SEPARATOR);
}
1065
+
1066
// Split a bucket key into { source, model, hourStart }. Accepts legacy
// shapes: a bare "<hourStart>" (no separator, or empty source segment)
// and a two-segment "<source>|<hourStart>" without a model; missing
// segments fall back to the defaults.
function parseBucketKey(key) {
  if (typeof key !== 'string') {
    return { source: DEFAULT_SOURCE, model: DEFAULT_MODEL, hourStart: '' };
  }
  const sepA = key.indexOf(BUCKET_SEPARATOR);
  if (sepA <= 0) {
    // No separator (or empty source): the whole key is the hour start.
    return { source: DEFAULT_SOURCE, model: DEFAULT_MODEL, hourStart: key };
  }
  const sepB = key.indexOf(BUCKET_SEPARATOR, sepA + 1);
  if (sepB <= 0) {
    // Legacy two-segment key without a model part.
    return {
      source: key.slice(0, sepA),
      model: DEFAULT_MODEL,
      hourStart: key.slice(sepA + 1)
    };
  }
  return {
    source: key.slice(0, sepA),
    model: key.slice(sepA + 1, sepB),
    hourStart: key.slice(sepB + 1)
  };
}
1080
+
1081
// Canonicalize a source identifier: trim and lower-case it; returns null
// for non-strings and blank/whitespace-only values.
function normalizeSourceInput(value) {
  if (typeof value !== 'string') return null;
  const normalized = value.trim().toLowerCase();
  return normalized === '' ? null : normalized;
}
1086
+
1087
// Canonicalize a model identifier: trim whitespace but preserve case;
// returns null for non-strings and blank values.
function normalizeModelInput(value) {
  if (typeof value !== 'string') return null;
  const trimmed = value.trim();
  return trimmed === '' ? null : trimmed;
}
1092
+
1093
// Map a Gemini session token record onto the canonical usage shape.
// Tool tokens are folded into output tokens; the reported total is
// trusted as-is rather than recomputed. Returns null for non-objects.
function normalizeGeminiTokens(tokens) {
  if (!tokens || typeof tokens !== 'object') return null;
  // Non-negative integer coercion (invalid/negative -> 0, fractions floored).
  const asCount = (v) => {
    const n = Number(v);
    return Number.isFinite(n) && n >= 0 ? Math.floor(n) : 0;
  };
  return {
    input_tokens: asCount(tokens.input),
    cached_input_tokens: asCount(tokens.cached),
    output_tokens: asCount(tokens.output) + asCount(tokens.tool),
    reasoning_output_tokens: asCount(tokens.thoughts),
    total_tokens: asCount(tokens.total)
  };
}
1110
+
1111
// Map an opencode message token record onto the canonical usage shape.
// The total is recomputed as input + output + reasoning (cached reads are
// reported separately and not included). Returns null for non-objects.
function normalizeOpencodeTokens(tokens) {
  if (!tokens || typeof tokens !== 'object') return null;
  // Non-negative integer coercion (invalid/negative -> 0, fractions floored).
  const asCount = (v) => {
    const n = Number(v);
    return Number.isFinite(n) && n >= 0 ? Math.floor(n) : 0;
  };
  const input = asCount(tokens.input);
  const output = asCount(tokens.output);
  const reasoning = asCount(tokens.reasoning);
  return {
    input_tokens: input,
    cached_input_tokens: asCount(tokens.cache?.read),
    output_tokens: output,
    reasoning_output_tokens: reasoning,
    total_tokens: input + output + reasoning
  };
}
1127
+
1128
// Strict field-by-field equality of two canonical usage records (===, so
// 0 and undefined are distinct); false when either side is missing.
function sameGeminiTotals(a, b) {
  if (!a || !b) return false;
  const fields = [
    'input_tokens',
    'cached_input_tokens',
    'output_tokens',
    'reasoning_output_tokens',
    'total_tokens'
  ];
  return fields.every((field) => a[field] === b[field]);
}
1138
+
1139
// Compute the incremental usage between two cumulative Gemini totals.
// Returns:
//   - null when current is missing, totals are unchanged, or the computed
//     delta is all-zero;
//   - `current` itself when there is no previous snapshot, or the counter
//     appears to have reset (total went backwards);
//   - otherwise a field-wise non-negative difference.
function diffGeminiTotals(current, previous) {
  if (!current || typeof current !== 'object') return null;
  if (!previous || typeof previous !== 'object') return current;
  if (sameGeminiTotals(current, previous)) return null;

  if ((current.total_tokens || 0) < (previous.total_tokens || 0)) {
    // Counter reset: count the whole current snapshot as new usage.
    return current;
  }

  const delta = {};
  for (const field of [
    'input_tokens',
    'cached_input_tokens',
    'output_tokens',
    'reasoning_output_tokens',
    'total_tokens'
  ]) {
    delta[field] = Math.max(0, (current[field] || 0) - (previous[field] || 0));
  }

  return isAllZeroUsage(delta) ? null : delta;
}
1157
+
1158
// Pull a token_count record out of a parsed rollout log line. Supports
// both the flat shape ({ payload: { type: 'token_count', info } }) and
// the nested one ({ payload: { msg: { type: 'token_count', info } } }).
// Returns { info, timestamp } or null when the line is not a token count.
function extractTokenCount(obj) {
  const payload = obj?.payload;
  if (!payload) return null;
  const timestamp = obj?.timestamp || null;
  if (payload.type === 'token_count') {
    return { info: payload.info, timestamp };
  }
  const msg = payload.msg;
  if (msg?.type === 'token_count') {
    return { info: msg.info, timestamp };
  }
  return null;
}
1170
+
1171
// Derive the incremental token usage represented by one token_count
// record. Branch order is significant and encodes the heuristics below.
//
// Params (each a usage-shaped object or empty/missing):
//   lastUsage  - the record's per-turn usage (last_token_usage);
//   totalUsage - the record's cumulative session usage (total_token_usage);
//   prevTotals - the previous record's cumulative totals, if any.
//
// Returns a normalized usage delta, or null when the record adds nothing
// (duplicate entry, all-zero delta, or no usable fields).
function pickDelta(lastUsage, totalUsage, prevTotals) {
  const hasLast = isNonEmptyObject(lastUsage);
  const hasTotal = isNonEmptyObject(totalUsage);
  const hasPrevTotals = isNonEmptyObject(prevTotals);

  // Codex rollout logs sometimes emit duplicate token_count records where total_token_usage does not
  // change between adjacent entries. Counting last_token_usage in those cases will double-count.
  if (hasTotal && hasPrevTotals && sameUsage(totalUsage, prevTotals)) {
    return null;
  }

  // Cumulative counter went backwards and there is no per-turn usage to
  // fall back on: treat it as a session reset and count the new total.
  if (!hasLast && hasTotal && hasPrevTotals && totalsReset(totalUsage, prevTotals)) {
    const normalized = normalizeUsage(totalUsage);
    return isAllZeroUsage(normalized) ? null : normalized;
  }

  // Per-turn usage is the most precise signal when present.
  if (hasLast) {
    return normalizeUsage(lastUsage);
  }

  // Otherwise derive the delta from consecutive cumulative totals,
  // clamping each field at 0 to absorb minor counter regressions.
  if (hasTotal && hasPrevTotals) {
    const delta = {};
    for (const k of ['input_tokens', 'cached_input_tokens', 'output_tokens', 'reasoning_output_tokens', 'total_tokens']) {
      const a = Number(totalUsage[k]);
      const b = Number(prevTotals[k]);
      if (Number.isFinite(a) && Number.isFinite(b)) delta[k] = Math.max(0, a - b);
    }
    const normalized = normalizeUsage(delta);
    return isAllZeroUsage(normalized) ? null : normalized;
  }

  // First observation of a session: the cumulative total IS the delta.
  if (hasTotal) {
    const normalized = normalizeUsage(totalUsage);
    return isAllZeroUsage(normalized) ? null : normalized;
  }

  return null;
}
1209
+
1210
// Coerce an arbitrary usage-like object into the canonical shape: every
// known counter becomes a non-negative integer (invalid, negative, or
// missing values become 0; fractions are floored).
//
// Fix: the original dereferenced `u[k]` directly and threw a TypeError
// when passed null/undefined; optional chaining makes a missing input
// yield an all-zero record instead.
function normalizeUsage(u) {
  const out = {};
  for (const k of ['input_tokens', 'cached_input_tokens', 'output_tokens', 'reasoning_output_tokens', 'total_tokens']) {
    const n = Number(u?.[k] || 0);
    out[k] = Number.isFinite(n) && n >= 0 ? Math.floor(n) : 0;
  }
  return out;
}
1218
+
1219
// Map a Claude message usage record onto the canonical shape. Claude does
// not report reasoning tokens here, so that field is always 0. The
// reported total_tokens is used verbatim when the property exists;
// otherwise the total is input + output.
function normalizeClaudeUsage(u) {
  // Non-negative integer coercion (invalid/negative -> 0, fractions floored).
  const asCount = (v) => {
    const n = Number(v);
    return Number.isFinite(n) && n >= 0 ? Math.floor(n) : 0;
  };
  const input = asCount(u?.input_tokens);
  const output = asCount(u?.output_tokens);
  const hasExplicitTotal = u && Object.prototype.hasOwnProperty.call(u, 'total_tokens');
  return {
    input_tokens: input,
    cached_input_tokens: asCount(u?.cache_read_input_tokens),
    output_tokens: output,
    reasoning_output_tokens: 0,
    total_tokens: hasExplicitTotal ? asCount(u?.total_tokens) : input + output
  };
}
1232
+
1233
// True only for non-null, non-array objects that have at least one own
// enumerable key.
function isNonEmptyObject(v) {
  if (v == null || typeof v !== 'object' || Array.isArray(v)) return false;
  return Object.keys(v).length > 0;
}
1236
+
1237
// True when every known counter coerces to zero (missing fields count as
// zero); non-objects are treated as "all zero".
function isAllZeroUsage(u) {
  if (!u || typeof u !== 'object') return true;
  const fields = [
    'input_tokens',
    'cached_input_tokens',
    'output_tokens',
    'reasoning_output_tokens',
    'total_tokens'
  ];
  return fields.every((field) => Number(u[field] || 0) === 0);
}
1244
+
1245
// Compare two usage records after coercing each counter to a non-negative
// integer, so 0, undefined, and negative values all compare equal (as 0).
function sameUsage(a, b) {
  // Same coercion rules as toNonNegativeInt, kept local so the comparison
  // is self-describing.
  const asCount = (v) => {
    const n = Number(v);
    return Number.isFinite(n) && n >= 0 ? Math.floor(n) : 0;
  };
  for (const k of ['input_tokens', 'cached_input_tokens', 'output_tokens', 'reasoning_output_tokens', 'total_tokens']) {
    if (asCount(a?.[k]) !== asCount(b?.[k])) return false;
  }
  return true;
}
1251
+
1252
// True when the cumulative total went backwards between snapshots — the
// signature of a session/counter reset. Both totals must be real finite
// primitive numbers; anything else yields false.
function totalsReset(curr, prev) {
  const currTotal = curr?.total_tokens;
  const prevTotal = prev?.total_tokens;
  const bothNumeric =
    typeof currTotal === 'number' && Number.isFinite(currTotal) &&
    typeof prevTotal === 'number' && Number.isFinite(prevTotal);
  return bothNumeric && currTotal < prevTotal;
}
1258
+
1259
// True for primitive numbers that are neither NaN nor ±Infinity.
//
// Simplification: Number.isFinite performs no coercion and already
// returns false for every non-number value, so the extra `typeof` check
// in the original was redundant.
function isFiniteNumber(v) {
  return Number.isFinite(v);
}
1262
+
1263
// Coerce any value to a non-negative integer: invalid, negative, or
// non-finite inputs become 0; fractional values are floored.
function toNonNegativeInt(v) {
  const n = Number(v);
  return Number.isFinite(n) && n >= 0 ? Math.floor(n) : 0;
}
1268
+
1269
// Coerce a timestamp to epoch milliseconds. Values below 1e12 are
// assumed to be epoch seconds and scaled by 1000; invalid or
// non-positive inputs yield 0.
function coerceEpochMs(v) {
  const n = Number(v);
  if (!Number.isFinite(n) || n <= 0) return 0;
  return n < 1e12 ? Math.floor(n * 1000) : Math.floor(n);
}
1275
+
1276
// Read a directory's entries as Dirent objects, returning [] instead of
// throwing when the path is missing or unreadable — directory walking in
// this module is best-effort.
async function safeReadDir(dir) {
  try {
    return await fs.readdir(dir, { withFileTypes: true });
  } catch {
    return [];
  }
}
1283
+
1284
// Recursively collect every *.jsonl file path under dir into `out`
// (mutated in place). Unreadable directories are silently skipped via
// safeReadDir.
async function walkClaudeProjects(dir, out) {
  for (const entry of await safeReadDir(dir)) {
    const fullPath = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      await walkClaudeProjects(fullPath, out);
    } else if (entry.isFile() && entry.name.endsWith('.jsonl')) {
      out.push(fullPath);
    }
  }
}
1295
+
1296
// Recursively collect every opencode message file (msg_*.json) under dir
// into `out` (mutated in place). Unreadable directories are silently
// skipped via safeReadDir.
async function walkOpencodeMessages(dir, out) {
  for (const entry of await safeReadDir(dir)) {
    const fullPath = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      await walkOpencodeMessages(fullPath, out);
    } else if (entry.isFile() && entry.name.startsWith('msg_') && entry.name.endsWith('.json')) {
      out.push(fullPath);
    }
  }
}
1307
+
1308
// Public surface: per-source session-file discovery helpers plus the
// incremental parsers that consume them. The bucket/totals utilities
// defined above are internal and deliberately not exported.
module.exports = {
  listRolloutFiles,
  listClaudeProjectFiles,
  listGeminiSessionFiles,
  listOpencodeMessageFiles,
  parseRolloutIncremental,
  parseClaudeIncremental,
  parseGeminiIncremental,
  parseOpencodeIncremental
};