tokentracker-cli 0.5.99 → 0.5.101

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4373,6 +4373,591 @@ async function parseCodebuddyIncremental({
4373
4373
  return { recordsProcessed, eventsAggregated, bucketsQueued };
4374
4374
  }
4375
4375
 
4376
+ // ─────────────────────────────────────────────────────────────────────────────
4377
+ // oh-my-pi (omp) — passive JSONL reader (~/.omp/agent/sessions/**/*.jsonl)
4378
+ //
4379
+ // oh-my-pi writes one append-only JSONL per session:
4380
+ // ~/.omp/agent/sessions/--<cwd-encoded>--/<timestamp>_<sessionId>.jsonl
4381
+ //
4382
+ // Per-line record types: the first line is type:"session" (header).
4383
+ // Only type:"message" lines with message.role=="assistant" carry token usage.
4384
+ // The shape (verbatim from oh-my-pi docs/session.md):
4385
+ //
4386
+ // {
4387
+ // "type": "message",
4388
+ // "id": "a1b2c3d4", ← 8-char dedup key
4389
+ // "parentId": "...",
4390
+ // "timestamp": "2026-02-16T10:21:00.000Z",
4391
+ // "message": {
4392
+ // "role": "assistant",
4393
+ // "provider": "anthropic",
4394
+ // "model": "claude-sonnet-4-5",
4395
+ // "usage": {
4396
+ // "input": 100, "output": 20, "cacheRead": 0, "cacheWrite": 0,
4397
+ // "totalTokens": 120, "reasoningTokens": 0
4398
+ // },
4399
+ // "timestamp": 1760000000000 ← ms epoch, preferred for bucketing
4400
+ // }
4401
+ // }
4402
+ //
4403
+ // oh-my-pi is a router — dispatches to upstream providers (Anthropic, OpenAI,
4404
+ // etc.) and records the upstream model name per message. There is no global
4405
+ // default model setting; model is always per-message (fallback: "omp-unknown").
4406
+ // ─────────────────────────────────────────────────────────────────────────────
4407
+
4408
// Resolve the oh-my-pi home directory (default ~/.omp).
//
// Precedence:
//   1. OMP_HOME      — TokenTracker-specific override, used verbatim.
//   2. PI_CONFIG_DIR — oh-my-pi upstream env var. May be absolute or
//                      home-relative; only home-relative values are joined
//                      under $HOME.
//   3. ~/.omp        — default.
function resolveOmpHome(env = process.env) {
  // Honor TokenTracker override first, then oh-my-pi upstream env vars.
  if (env.OMP_HOME) return env.OMP_HOME;
  const home = env.HOME || require("node:os").homedir();
  if (env.PI_CONFIG_DIR) {
    // Fix: an absolute PI_CONFIG_DIR (e.g. /etc/pi) used to be joined under
    // $HOME, yielding a path like /home/u/etc/pi that never exists.
    return path.isAbsolute(env.PI_CONFIG_DIR)
      ? env.PI_CONFIG_DIR
      : path.join(home, env.PI_CONFIG_DIR);
  }
  return path.join(home, ".omp");
}
4415
+
4416
// Directory where the oh-my-pi coding agent keeps its state.
// The upstream PI_CODING_AGENT_DIR env var wins; otherwise <omp-home>/agent.
function resolveOmpAgentDir(env = process.env) {
  const override = env.PI_CODING_AGENT_DIR;
  return override ? override : path.join(resolveOmpHome(env), "agent");
}
4420
+
4421
// Enumerate every *.jsonl session file under <agent-dir>/sessions/<cwd-dir>/.
// Unreadable directories and non-directory entries are skipped silently; the
// result is lexicographically sorted so parse order is stable across runs.
function resolveOmpSessionFiles(env = process.env) {
  const sessionsDir = path.join(resolveOmpAgentDir(env), "sessions");
  if (!fssync.existsSync(sessionsDir)) return [];
  const found = [];
  let cwdDirs = [];
  try {
    cwdDirs = fssync.readdirSync(sessionsDir);
  } catch {
    // ignore — return what we have
  }
  for (const name of cwdDirs) {
    const dirPath = path.join(sessionsDir, name);
    let info;
    try {
      info = fssync.statSync(dirPath);
    } catch {
      continue;
    }
    if (!info.isDirectory()) continue;
    let children;
    try {
      children = fssync.readdirSync(dirPath);
    } catch {
      continue;
    }
    for (const child of children) {
      if (child.endsWith(".jsonl")) found.push(path.join(dirPath, child));
    }
  }
  found.sort((a, b) => a.localeCompare(b));
  return found;
}
4444
+
4445
// oh-my-pi has no global default-model setting — the model is always recorded
// per message — so the fallback label is a fixed sentinel.
function resolveOmpDefaultModel() {
  return "omp-unknown";
}
4449
+
4450
/**
 * Incrementally parse oh-my-pi session JSONL files into half-hour usage
 * buckets.
 *
 * State lives in `cursors.omp`:
 *   - seenIds:     dedup set of entry ids, capped at the most recent 10k.
 *   - fileOffsets: per-file { size, mtimeMs, ino } so each sync resumes at
 *                  the previous byte offset; a shrunken file or a changed
 *                  inode forces a re-read from byte 0.
 *
 * Only `type:"message"` records whose `message.role` is "assistant" carry
 * usage. Records with all-zero usage or no resolvable timestamp are marked
 * seen and skipped.
 *
 * @param {object}   [opts]
 * @param {string[]} [opts.sessionFiles] explicit file list (tests); defaults
 *   to resolveOmpSessionFiles(env).
 * @param {object}   opts.cursors        mutable cursor state; `omp` and
 *   `hourly` keys are updated in place.
 * @param {string}   opts.queuePath      upload-queue path; parent dir is
 *   created if missing.
 * @param {Function} [opts.onProgress]   called after each aggregated event.
 * @param {object}   [opts.env]          env override for file discovery.
 * @param {string}   [opts.defaultModel] fallback model label
 *   (default "omp-unknown").
 * @returns {Promise<{recordsProcessed: number, eventsAggregated: number, bucketsQueued: number}>}
 */
async function parseOmpIncremental({
  sessionFiles,
  cursors,
  queuePath,
  onProgress,
  env,
  defaultModel,
} = {}) {
  await ensureDir(path.dirname(queuePath));
  const ompState = cursors.omp && typeof cursors.omp === "object" ? cursors.omp : {};
  const seenIds = new Set(Array.isArray(ompState.seenIds) ? ompState.seenIds : []);
  const fileOffsets =
    ompState.fileOffsets && typeof ompState.fileOffsets === "object"
      ? { ...ompState.fileOffsets }
      : {};

  const files = Array.isArray(sessionFiles)
    ? sessionFiles
    : resolveOmpSessionFiles(env || process.env);
  const fallbackModel = defaultModel || resolveOmpDefaultModel();

  if (files.length === 0) {
    // Nothing to parse — still persist (possibly normalized) cursor state.
    cursors.omp = {
      ...ompState,
      seenIds: Array.from(seenIds),
      fileOffsets,
      updatedAt: new Date().toISOString(),
    };
    return { recordsProcessed: 0, eventsAggregated: 0, bucketsQueued: 0 };
  }

  const hourlyState = normalizeHourlyState(cursors?.hourly);
  const touchedBuckets = new Set();
  const cb = typeof onProgress === "function" ? onProgress : null;
  let recordsProcessed = 0;
  let eventsAggregated = 0;

  for (let fileIdx = 0; fileIdx < files.length; fileIdx++) {
    const filePath = files[fileIdx];
    let stat;
    try { stat = fssync.statSync(filePath); } catch { continue; }

    const prevEntry = fileOffsets[filePath] || {};
    const prevSize = Number(prevEntry.size) || 0;
    const prevIno = prevEntry.ino;
    // Re-read from start if file shrunk (truncate/rewrite) or inode changed.
    const inodeChanged = typeof prevIno === "number" && prevIno !== stat.ino;
    const startOffset = stat.size < prevSize || inodeChanged ? 0 : prevSize;
    if (stat.size <= startOffset) continue;

    let stream;
    try {
      stream = fssync.createReadStream(filePath, {
        encoding: "utf8",
        start: startOffset,
      });
    } catch { continue; }
    const rl = readline.createInterface({ input: stream, crlfDelay: Infinity });

    for await (const line of rl) {
      if (!line || !line.trim()) continue;
      let entry;
      try { entry = JSON.parse(line); } catch { continue; }

      // First line of each file is type:"session" (header) — skip all
      // non-message records.
      if (!entry || entry.type !== "message") continue;

      // Only assistant messages carry token usage.
      const msg = entry.message;
      if (!msg || msg.role !== "assistant") continue;

      const usage = msg.usage;
      if (!usage || typeof usage !== "object") continue;

      // Dedup by top-level entry id (8-char string assigned by oh-my-pi).
      const entryId = typeof entry.id === "string" && entry.id ? entry.id : null;
      if (!entryId) continue;
      if (seenIds.has(entryId)) continue;

      recordsProcessed++;

      const input = toNonNegativeInt(usage.input);
      const output = toNonNegativeInt(usage.output);
      const cacheRead = toNonNegativeInt(usage.cacheRead);
      const cacheWrite = toNonNegativeInt(usage.cacheWrite);
      const reasoningTokens = toNonNegativeInt(usage.reasoningTokens);

      if (input === 0 && output === 0 && cacheRead === 0 && cacheWrite === 0) {
        seenIds.add(entryId);
        continue;
      }

      // Prefer message-level timestamp (ms epoch); fall back to entry-level
      // ISO string. Entries with no resolvable timestamp are skipped — they
      // cannot be placed in a bucket.
      let tsMs = null;
      if (Number.isFinite(Number(msg.timestamp)) && Number(msg.timestamp) > 0) {
        tsMs = Number(msg.timestamp);
      } else if (typeof entry.timestamp === "string" && entry.timestamp) {
        const parsed = Date.parse(entry.timestamp);
        if (Number.isFinite(parsed) && parsed > 0) tsMs = parsed;
      }
      if (tsMs == null) {
        seenIds.add(entryId);
        continue;
      }

      const tsIso = new Date(tsMs).toISOString();
      const bucketStart = toUtcHalfHourStart(tsIso);
      if (!bucketStart) {
        // Fix: mark the entry as seen like every other skip path so a forced
        // re-read from offset 0 (truncate/inode change) cannot revisit it.
        seenIds.add(entryId);
        continue;
      }

      // Use provided totalTokens when available; otherwise sum all components.
      const totalTokens =
        Number.isFinite(Number(usage.totalTokens)) && Number(usage.totalTokens) > 0
          ? toNonNegativeInt(usage.totalTokens)
          : input + output + cacheRead + cacheWrite + reasoningTokens;

      const model = normalizeModelInput(msg.model) || fallbackModel;

      const delta = {
        input_tokens: input,
        cached_input_tokens: cacheRead,
        cache_creation_input_tokens: cacheWrite,
        output_tokens: output,
        reasoning_output_tokens: reasoningTokens,
        total_tokens: totalTokens,
        conversation_count: 1,
      };

      const bucket = getHourlyBucket(hourlyState, "omp", model, bucketStart);
      addTotals(bucket.totals, delta);
      touchedBuckets.add(bucketKey("omp", model, bucketStart));
      seenIds.add(entryId);
      eventsAggregated++;

      if (cb) {
        cb({
          index: fileIdx + 1,
          total: files.length,
          recordsProcessed,
          eventsAggregated,
          bucketsQueued: touchedBuckets.size,
        });
      }
    }

    let postStat = stat;
    try { postStat = fssync.statSync(filePath); } catch {}
    // NOTE(review): bytes appended between the stream completing and this
    // stat are recorded as consumed without being parsed — confirm the
    // append-only writer makes this window negligible.
    fileOffsets[filePath] = {
      size: postStat.size,
      mtimeMs: postStat.mtimeMs,
      ino: postStat.ino,
    };
  }

  // Cap dedup set to last 10k IDs to bound cursor state size — same convention
  // as Kimi/CodeBuddy/Copilot so cursors.json doesn't grow unbounded.
  const seenArr = Array.from(seenIds);
  const cappedSeen =
    seenArr.length > 10_000 ? seenArr.slice(seenArr.length - 10_000) : seenArr;

  const bucketsQueued = await enqueueTouchedBuckets({
    queuePath,
    hourlyState,
    touchedBuckets,
  });
  const updatedAt = new Date().toISOString();
  hourlyState.updatedAt = updatedAt;
  cursors.hourly = hourlyState;
  cursors.omp = {
    ...ompState,
    seenIds: cappedSeen,
    fileOffsets,
    updatedAt,
  };

  return { recordsProcessed, eventsAggregated, bucketsQueued };
}
4629
+
4630
+ // ─────────────────────────────────────────────────────────────────────────────
4631
+ // Craft Agents (lukilabs/craft-agents-oss) — passive JSONL reader
4632
+ //
4633
+ // Craft is a desktop Electron agent that wraps the Claude Agent SDK plus
4634
+ // multiple LLM backends (Anthropic, OpenAI, Google, GitHub Copilot, OpenRouter,
4635
+ // Groq, Mistral, DeepSeek, xAI, Bedrock, Vertex). It writes per-session JSONL
4636
+ // files with a pre-aggregated SessionTokenUsage block on the FIRST line:
4637
+ //
4638
+ // line 1: SessionHeader
4639
+ // {
4640
+ // "id": "260430-swift-river",
4641
+ // "model": "claude-sonnet-4-6",
4642
+ // "llmConnection": "anthropic-default",
4643
+ // "lastMessageAt": 1745003600000,
4644
+ // "tokenUsage": {
4645
+ // "inputTokens": 1234, ← pure non-cached input
4646
+ // "outputTokens": 567,
4647
+ // "totalTokens": 9876,
4648
+ // "cacheReadTokens": 5500,
4649
+ // "cacheCreationTokens": 1100
4650
+ // }
4651
+ // }
4652
+ // line 2..N: StoredMessage records (we do not need them for token totals)
4653
+ //
4654
+ // Disk layout:
4655
+ // ~/.craft-agent/ ← config dir (override: CRAFT_CONFIG_DIR)
4656
+ // config.json ← workspaces[].rootPath list
4657
+ // workspaces/<id>/sessions/<sid>/session.jsonl (default)
4658
+ // <user-chosen-rootPath>/sessions/<sid>/session.jsonl (custom workspaces)
4659
+ //
4660
+ // Workspaces can be relocated outside ~/.craft-agent, so we MUST read
4661
+ // config.json to enumerate every rootPath rather than just globbing the
4662
+ // default directory.
4663
+ //
4664
+ // Token semantics map directly onto TokenTracker conventions — `inputTokens`
4665
+ // is already pure non-cached input (no Codex-style trap, see
4666
+ // feedback_rollout_input_semantics.md). Re-parses are idempotent: the header
4667
+ // is rewritten as the session grows, and we dedup by sessionId combined with
4668
+ // the most-recent header byte length so a growing total replaces the old
4669
+ // snapshot instead of double-counting.
4670
+ // ─────────────────────────────────────────────────────────────────────────────
4671
+
4672
// Craft Agents config directory: CRAFT_CONFIG_DIR override wins, otherwise
// ~/.craft-agent.
function resolveCraftConfigDir(env = process.env) {
  const override = env.CRAFT_CONFIG_DIR;
  if (override) return override;
  const homeDir = env.HOME || require("node:os").homedir();
  return path.join(homeDir, ".craft-agent");
}
4677
+
4678
// Discover every Craft workspace root directory.
//
// Two sources are merged:
//   1. <config-dir>/workspaces/<id>/ — the default layout, scanned even when
//      config.json does not exist yet (fresh install).
//   2. workspaces[].rootPath entries from <config-dir>/config.json — users
//      can relocate workspaces anywhere on disk, so the config must be
//      consulted rather than only globbing the default directory.
//
// Returns a sorted, de-duplicated list; unreadable or missing paths are
// skipped, and a malformed config.json degrades to default discovery only.
function resolveCraftWorkspaceRoots(env = process.env) {
  const configDir = resolveCraftConfigDir(env);
  const roots = new Set();

  const defaultWorkspaces = path.join(configDir, "workspaces");
  if (fssync.existsSync(defaultWorkspaces)) {
    let names = [];
    try {
      names = fssync.readdirSync(defaultWorkspaces);
    } catch {
      // ignore
    }
    for (const name of names) {
      const candidate = path.join(defaultWorkspaces, name);
      let info;
      try {
        info = fssync.statSync(candidate);
      } catch {
        continue;
      }
      if (info.isDirectory()) roots.add(candidate);
    }
  }

  // Layer in user-relocated workspaces from config.json.
  const configPath = path.join(configDir, "config.json");
  if (fssync.existsSync(configPath)) {
    try {
      const cfg = JSON.parse(fssync.readFileSync(configPath, "utf8"));
      const workspaces = Array.isArray(cfg?.workspaces) ? cfg.workspaces : [];
      for (const ws of workspaces) {
        const root = typeof ws?.rootPath === "string" ? ws.rootPath : null;
        if (root && fssync.existsSync(root)) roots.add(root);
      }
    } catch {
      // malformed config.json — fall back to default discovery only
    }
  }

  return Array.from(roots).sort((a, b) => a.localeCompare(b));
}
4713
+
4714
// Collect <workspace-root>/sessions/<sessionId>/session.jsonl for every
// discovered Craft workspace. Non-directories and unreadable entries are
// skipped; the result is sorted for deterministic parse order.
function resolveCraftSessionFiles(env = process.env) {
  const found = [];
  for (const root of resolveCraftWorkspaceRoots(env)) {
    const sessionsDir = path.join(root, "sessions");
    if (!fssync.existsSync(sessionsDir)) continue;
    let sessionIds;
    try {
      sessionIds = fssync.readdirSync(sessionsDir);
    } catch {
      continue;
    }
    for (const sid of sessionIds) {
      const sessionDir = path.join(sessionsDir, sid);
      let info;
      try {
        info = fssync.statSync(sessionDir);
      } catch {
        continue;
      }
      if (!info.isDirectory()) continue;
      const candidate = path.join(sessionDir, "session.jsonl");
      if (fssync.existsSync(candidate)) found.push(candidate);
    }
  }
  found.sort((a, b) => a.localeCompare(b));
  return found;
}
4735
+
4736
// Craft is a router: each session header records the actual model, so the
// fallback label is a fixed sentinel.
function resolveCraftDefaultModel() {
  return "craft-unknown";
}
4740
+
4741
/**
 * Incrementally aggregate Craft Agents token usage into half-hour buckets.
 *
 * Unlike the per-message parsers, Craft pre-aggregates usage in the
 * SessionHeader (first JSONL line), which is rewritten in place as the
 * session grows. Each sync therefore reads ONLY that header and contributes
 * the delta versus the previous snapshot stored in
 * `cursors.craft.sessionTotals[sessionId]` (capped at 5k sessions, evicted
 * least-recently-seen first).
 *
 * @param {object}   [opts]
 * @param {string[]} [opts.sessionFiles] explicit file list (tests); defaults
 *   to resolveCraftSessionFiles(env).
 * @param {object}   opts.cursors        mutable cursor state; `craft` and
 *   `hourly` keys are updated in place.
 * @param {string}   opts.queuePath      upload-queue path; parent dir is
 *   created if missing.
 * @param {Function} [opts.onProgress]   called after each aggregated session.
 * @param {object}   [opts.env]          env override for file discovery.
 * @param {string}   [opts.defaultModel] fallback model label
 *   (default "craft-unknown").
 * @returns {Promise<{recordsProcessed: number, eventsAggregated: number, bucketsQueued: number}>}
 */
async function parseCraftIncremental({
  sessionFiles,
  cursors,
  queuePath,
  onProgress,
  env,
  defaultModel,
} = {}) {
  await ensureDir(path.dirname(queuePath));
  const craftState = cursors.craft && typeof cursors.craft === "object" ? cursors.craft : {};
  // Per-session previous totals so each re-parse only contributes the delta
  // of the running token totals (the header rewrites in place as the session
  // grows). Shape: { [sessionId]: { input, output, cacheRead, cacheWrite, total } }
  const sessionTotals =
    craftState.sessionTotals && typeof craftState.sessionTotals === "object"
      ? { ...craftState.sessionTotals }
      : {};

  const files = Array.isArray(sessionFiles)
    ? sessionFiles
    : resolveCraftSessionFiles(env || process.env);
  const fallbackModel = defaultModel || resolveCraftDefaultModel();

  if (files.length === 0) {
    // Nothing to parse — still persist (possibly normalized) cursor state.
    cursors.craft = {
      ...craftState,
      sessionTotals,
      updatedAt: new Date().toISOString(),
    };
    return { recordsProcessed: 0, eventsAggregated: 0, bucketsQueued: 0 };
  }

  const hourlyState = normalizeHourlyState(cursors?.hourly);
  const touchedBuckets = new Set();
  const cb = typeof onProgress === "function" ? onProgress : null;
  let recordsProcessed = 0;
  let eventsAggregated = 0;

  for (let fileIdx = 0; fileIdx < files.length; fileIdx++) {
    const filePath = files[fileIdx];
    let stat;
    try { stat = fssync.statSync(filePath); } catch { continue; }

    // Read only the FIRST line — the SessionHeader carries the running totals.
    // Streaming the whole file would be wasted work since we don't use
    // per-message records for token accounting. We cap at 1 MiB to bound
    // memory if the first line is unexpectedly huge; real headers observed
    // in v0.9.0 are ~1–2 KiB so this is generous.
    let header = null;
    let parseError = null;
    let stream;
    try {
      stream = fssync.createReadStream(filePath, {
        encoding: "utf8",
        end: 1024 * 1024 - 1,
      });
    } catch { continue; }
    const rl = readline.createInterface({ input: stream, crlfDelay: Infinity });
    for await (const line of rl) {
      // Skip leading blank lines; parse the first non-blank line only,
      // then stop reading.
      if (!line || !line.trim()) continue;
      try {
        header = JSON.parse(line);
      } catch (e) {
        // A header truncated by the 1 MiB cap also lands here.
        parseError = e;
        header = null;
      }
      break;
    }
    rl.close();
    // Release the fd promptly — we broke out before stream end.
    try { stream.destroy(); } catch {}

    if (!header || typeof header !== "object") {
      // NOTE(review): debug flag read from process.env rather than the `env`
      // parameter — presumably intentional (env only overrides discovery);
      // confirm.
      if (parseError && process.env.TOKENTRACKER_DEBUG) {
        process.stderr.write(
          `[craft] header parse failed for ${filePath}: ${parseError.message}\n`,
        );
      }
      continue;
    }
    const usage = header.tokenUsage;
    if (!usage || typeof usage !== "object") continue;

    // Session identity: header.id, falling back to the Claude Agent SDK
    // session id when present.
    const sessionId =
      typeof header.id === "string" && header.id
        ? header.id
        : (typeof header.sdkSessionId === "string" && header.sdkSessionId
          ? header.sdkSessionId
          : null);
    if (!sessionId) continue;

    recordsProcessed++;

    const totalInput = toNonNegativeInt(usage.inputTokens);
    const totalOutput = toNonNegativeInt(usage.outputTokens);
    const totalCacheRead = toNonNegativeInt(usage.cacheReadTokens);
    const totalCacheWrite = toNonNegativeInt(usage.cacheCreationTokens);
    // Use the reported grand total when positive; otherwise derive it.
    const totalReported =
      Number.isFinite(Number(usage.totalTokens)) && Number(usage.totalTokens) > 0
        ? toNonNegativeInt(usage.totalTokens)
        : totalInput + totalOutput + totalCacheRead + totalCacheWrite;

    // Previous snapshot (all zeros the first time we see this session).
    const prev = sessionTotals[sessionId] || {
      input: 0,
      output: 0,
      cacheRead: 0,
      cacheWrite: 0,
      total: 0,
    };

    // Compute the delta since the last sync. Negative deltas mean the session
    // was reset/truncated — clamp to 0 and replace the snapshot.
    const dInput = Math.max(0, totalInput - prev.input);
    const dOutput = Math.max(0, totalOutput - prev.output);
    const dCacheRead = Math.max(0, totalCacheRead - prev.cacheRead);
    const dCacheWrite = Math.max(0, totalCacheWrite - prev.cacheWrite);
    const dTotal = Math.max(0, totalReported - prev.total);

    const nowMs = Date.now();

    if (dInput === 0 && dOutput === 0 && dCacheRead === 0 && dCacheWrite === 0) {
      // No new usage since last parse — but still update the snapshot in case
      // an earlier truncate left it stale, and refresh lastSeenAt so the
      // eviction policy treats the session as live.
      sessionTotals[sessionId] = {
        input: totalInput,
        output: totalOutput,
        cacheRead: totalCacheRead,
        cacheWrite: totalCacheWrite,
        total: totalReported,
        lastSeenAt: nowMs,
      };
      continue;
    }

    // Bucket on lastMessageAt (preferred) or createdAt — both ms epoch.
    let tsMs = null;
    const tsCandidates = [header.lastMessageAt, header.lastUsedAt, header.createdAt];
    for (const cand of tsCandidates) {
      if (Number.isFinite(Number(cand)) && Number(cand) > 0) {
        tsMs = Number(cand);
        break;
      }
    }
    // Last resort: the file's mtime.
    if (tsMs == null) tsMs = stat.mtimeMs;
    if (!Number.isFinite(tsMs) || tsMs <= 0) continue;

    const tsIso = new Date(tsMs).toISOString();
    const bucketStart = toUtcHalfHourStart(tsIso);
    if (!bucketStart) continue;

    const model = normalizeModelInput(header.model) || fallbackModel;

    // conversation_count: 1 the first time we see a session, 0 on subsequent
    // syncs of the same session. NOTE: this differs from omp/Claude which
    // count one-per-assistant-message. Cross-provider "conversations" totals
    // are therefore not directly comparable — Craft's are per-session.
    const delta = {
      input_tokens: dInput,
      cached_input_tokens: dCacheRead,
      cache_creation_input_tokens: dCacheWrite,
      output_tokens: dOutput,
      reasoning_output_tokens: 0,
      total_tokens: dTotal > 0 ? dTotal : dInput + dOutput + dCacheRead + dCacheWrite,
      conversation_count: prev.total === 0 ? 1 : 0,
    };

    const bucket = getHourlyBucket(hourlyState, "craft", model, bucketStart);
    addTotals(bucket.totals, delta);
    touchedBuckets.add(bucketKey("craft", model, bucketStart));
    eventsAggregated++;

    // Replace the snapshot with the header's current running totals.
    sessionTotals[sessionId] = {
      input: totalInput,
      output: totalOutput,
      cacheRead: totalCacheRead,
      cacheWrite: totalCacheWrite,
      total: totalReported,
      lastSeenAt: nowMs,
    };

    if (cb) {
      cb({
        index: fileIdx + 1,
        total: files.length,
        recordsProcessed,
        eventsAggregated,
        bucketsQueued: touchedBuckets.size,
      });
    }
  }

  // Cap session-totals map at 5k entries to bound cursor state size. Evict by
  // lastSeenAt (least-recently-seen first) so that long-lived sessions stay
  // tracked even when many newer one-shot sessions cycle through. Insertion
  // order would silently re-zero a long-running session and double-count its
  // total on the next sync.
  const entries = Object.entries(sessionTotals);
  let capped = sessionTotals;
  if (entries.length > 5000) {
    // Ascending lastSeenAt, then keep the newest 5000.
    entries.sort((a, b) => (a[1]?.lastSeenAt || 0) - (b[1]?.lastSeenAt || 0));
    capped = Object.fromEntries(entries.slice(entries.length - 5000));
  }

  const bucketsQueued = await enqueueTouchedBuckets({
    queuePath,
    hourlyState,
    touchedBuckets,
  });
  const updatedAt = new Date().toISOString();
  hourlyState.updatedAt = updatedAt;
  cursors.hourly = hourlyState;
  cursors.craft = {
    ...craftState,
    sessionTotals: capped,
    updatedAt,
  };

  return { recordsProcessed, eventsAggregated, bucketsQueued };
}
4960
+
4376
4961
  // ─────────────────────────────────────────────────────────────────────────────
4377
4962
  // GitHub Copilot CLI — OpenTelemetry JSONL exporter
4378
4963
  // User must opt in by setting:
@@ -4593,6 +5178,16 @@ module.exports = {
4593
5178
  resolveKiroCliSessionFiles,
4594
5179
  resolveKiroCliDbPath,
4595
5180
  parseKiroCliIncremental,
5181
+ resolveOmpHome,
5182
+ resolveOmpAgentDir,
5183
+ resolveOmpSessionFiles,
5184
+ resolveOmpDefaultModel,
5185
+ parseOmpIncremental,
5186
+ resolveCraftConfigDir,
5187
+ resolveCraftWorkspaceRoots,
5188
+ resolveCraftSessionFiles,
5189
+ resolveCraftDefaultModel,
5190
+ parseCraftIncremental,
4596
5191
  // Exposed for regression tests covering cache-token accounting.
4597
5192
  normalizeGeminiTokens,
4598
5193
  normalizeOpencodeTokens,