ai-lens 0.8.60 → 0.8.62

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/.commithash CHANGED
@@ -1 +1 @@
1
- ac28725
1
+ ee4f839
package/client/capture.js CHANGED
@@ -494,14 +494,68 @@ function extractFilePath(toolInput) {
494
494
  }
495
495
 
496
496
  /**
497
- * Walk up from a path to find the nearest .git directory.
498
- * Returns the git root (parent of .git) or null.
497
+ * Given a directory that contains a `.git` entry, return the real repo root.
498
+ * If `.git` is a directory, the input is already the repo root. If `.git` is
499
+ * a FILE (git worktree — contains "gitdir: <path-to-main>/.git/worktrees/<n>")
500
+ * we follow the pointer up to the main `.git` directory and return its parent
501
+ * so worktree sessions attribute to the main repo rather than a per-branch
502
+ * pseudo-project (e.g. `agent-a8d9bb19`, `ANL-689`).
503
+ *
504
+ * Submodules also use a `.git` file, but its gitdir points under
505
+ * `<super>/.git/modules/<name>`. We intentionally skip those so edits inside
506
+ * a submodule keep attributing to the submodule, not the super-project —
507
+ * that's outside the scope of ANL-729.
508
+ *
509
+ * Never throws; on any stat/read/parse failure it falls
510
+ * back to the original dir unchanged.
511
+ */
512
+ function resolveWorktreeToMainRepo(dir) {
513
+ try {
514
+ const gitEntry = join(dir, '.git');
515
+ const st = statSync(gitEntry);
516
+ if (st.isDirectory()) return dir;
517
+ if (!st.isFile()) return dir;
518
+
519
+ const contents = readFileSync(gitEntry, 'utf-8');
520
+ const match = contents.match(/^\s*gitdir:\s*(.+?)\s*$/m);
521
+ if (!match) return dir;
522
+
523
+ let gitdir = match[1];
524
+ // Resolve relative pointers against the worktree dir. POSIX-only check
525
+ // (`startsWith('/')`) — AI Lens runs on macOS/Linux dev machines, Windows
526
+ // isn't a supported client target.
527
+ if (!gitdir.startsWith('/')) gitdir = join(dir, gitdir);
528
+
529
+ // Only real worktrees: `<main>/.git/worktrees/<name>`. Submodules
530
+ // (`<super>/.git/modules/<name>`) are left alone on purpose.
531
+ if (!gitdir.includes('/.git/worktrees/')) return dir;
532
+
533
+ // Walk up until we hit the directory literally named `.git`; its parent
534
+ // is the main repo. Depth is bounded: gitdir is `<main>/.git/worktrees/<n>`
535
+ // so the `.git` segment is at most 2 levels up — 4 gives a safety margin.
536
+ let cur = gitdir;
537
+ for (let i = 0; i < 4; i++) {
538
+ const parent = dirname(cur);
539
+ if (parent === cur) break;
540
+ if (cur.endsWith('/.git')) return parent;
541
+ cur = parent;
542
+ }
543
+ return dir;
544
+ } catch {
545
+ return dir;
546
+ }
547
+ }
548
+
549
+ /**
550
+ * Walk up from a path to find the nearest .git entry.
551
+ * Returns the repo root (parent of a `.git` directory, or the main repo root
552
+ * when `.git` is a worktree-pointer file) or null.
499
553
  */
500
554
  function findGitRoot(filePath) {
501
555
  let dir = dirname(filePath);
502
556
  while (dir && dir !== '/' && dir.length > 1) {
503
557
  try {
504
- if (existsSync(join(dir, '.git'))) return dir;
558
+ if (existsSync(join(dir, '.git'))) return resolveWorktreeToMainRepo(dir);
505
559
  } catch {}
506
560
  const parent = dirname(dir);
507
561
  if (parent === dir) break;
@@ -510,6 +564,22 @@ function findGitRoot(filePath) {
510
564
  return null;
511
565
  }
512
566
 
567
+ /**
568
+ * If `dir` is a git worktree checkout, return the main repo root; otherwise
569
+ * return `dir` unchanged. Used at session intake where we have a launcher cwd
570
+ * (SessionStart / workspace_roots) and want to avoid attributing the session
571
+ * to a worktree branch name.
572
+ */
573
+ function canonicalizeProjectPath(dir) {
574
+ if (!dir || typeof dir !== 'string') return dir;
575
+ try {
576
+ if (existsSync(join(dir, '.git'))) {
577
+ return resolveWorktreeToMainRepo(dir);
578
+ }
579
+ } catch {}
580
+ return dir;
581
+ }
582
+
513
583
  /**
514
584
  * Refine project_path using file paths from tool events.
515
585
  * Picks the deepest (most specific) git root — correct for nested repos
@@ -617,8 +687,9 @@ function normalizeClaudeCode(event) {
617
687
  let type = CLAUDE_CODE_TYPE_MAP[hookType] || hookType;
618
688
  const timestamp = new Date().toISOString();
619
689
 
620
- // Extract project path from cwd (SessionStart) or cache
621
- let projectPath = event.cwd || null;
690
+ // Extract project path from cwd (SessionStart) or cache. Canonicalize
691
+ // worktree checkouts to the main repo root (ANL-599 follow-up).
692
+ let projectPath = event.cwd ? canonicalizeProjectPath(event.cwd) : null;
622
693
  if (projectPath && sessionId) {
623
694
  cacheSessionPath(sessionId, projectPath);
624
695
  } else if (sessionId) {
@@ -826,6 +897,8 @@ function normalizeCursor(event) {
826
897
  const type = CURSOR_TYPE_MAP[hookName] || hookName;
827
898
  const timestamp = new Date().toISOString();
828
899
  let projectPath = pickWorkspaceRoot(event.workspace_roots);
900
+ // Canonicalize worktree checkouts to main repo root (ANL-599 follow-up).
901
+ if (projectPath) projectPath = canonicalizeProjectPath(projectPath);
829
902
  // Guard: null sessionId would cache/lookup under key "null", contaminating unrelated sessions
830
903
  if (projectPath && sessionId) {
831
904
  cacheSessionPath(sessionId, projectPath);
@@ -7,7 +7,11 @@ import {
7
7
  unlinkSync,
8
8
  readdirSync,
9
9
  realpathSync,
10
+ renameSync,
10
11
  statSync,
12
+ openSync,
13
+ readSync,
14
+ closeSync,
11
15
  } from 'node:fs';
12
16
  import { join } from 'node:path';
13
17
  import { homedir } from 'node:os';
@@ -20,6 +24,7 @@ import {
20
24
  getMonitoredProjects,
21
25
  isCodexEnabled,
22
26
  captureLog,
27
+ _clearConfigCache,
23
28
  } from './config.js';
24
29
  import { deterministicEventId, resolveIdentity, checkDuplicate, commitDedup, writeToSpool, trySpawnSender } from './capture.js';
25
30
  import { createCodexTrackerState, isProjectMonitored, normalizeCodexHistoryEntry, normalizeCodexSessionEntries } from './codex.js';
@@ -28,9 +33,26 @@ const EXPLICIT_USER_CODEX_DIRS = [
28
33
  process.env.AI_LENS_CODEX_DIR || null,
29
34
  process.env.CODEX_HOME || null,
30
35
  ];
31
- const LOCK_PATH = join(process.env.AI_LENS_DATA_DIR || join(homedir(), '.ai-lens'), 'codex-watcher.lock');
32
- const POLL_MS = 2000;
33
- const DISCOVERY_MS = 30_000;
36
+ const DATA_DIR = process.env.AI_LENS_DATA_DIR || join(homedir(), '.ai-lens');
37
+ const LOCK_PATH = join(DATA_DIR, 'codex-watcher.lock');
38
+ const STATE_PATH = join(DATA_DIR, 'codex-watcher-state.json');
39
+ const STATE_VERSION = 1;
40
+ // A session whose file rotated before synthetic SessionEnd fired stays in the
41
+ // persisted snapshot so a watcher crash inside that window doesn't drop the
42
+ // close event. Cap the age so the file doesn't accumulate forever.
43
+ const PERSIST_MISSING_MAX_AGE_MS = 24 * 60 * 60 * 1000;
44
+ const parseMs = (raw, fallback) => {
45
+ const n = Number.parseInt(raw || '', 10);
46
+ return Number.isFinite(n) && n > 0 ? n : fallback;
47
+ };
48
+ const POLL_MS = parseMs(process.env.AI_LENS_CODEX_POLL_MS, 2000);
49
+ const STATE_SAVE_MS = parseMs(process.env.AI_LENS_CODEX_STATE_SAVE_MS, 10_000);
50
+ // Recursive walkForCodexDirs is expensive on large monorepos; project layout
51
+ // rarely changes, so re-scan for new .codex dirs only every few minutes.
52
+ // Session files inside already-discovered dirs stay snappy via SESSION_LIST_TTL_MS.
53
+ const DISCOVERY_MS = 5 * 60_000;
54
+ const CONFIG_CHECK_MS = parseMs(process.env.AI_LENS_CODEX_CONFIG_CHECK_MS, 30_000);
55
+ const SESSION_LIST_TTL_MS = 30_000;
34
56
  const STOP_IDLE_TIMEOUT_MS = 15_000;
35
57
  const EOF_IDLE_TIMEOUT_MS = 300_000;
36
58
  const REPLAY_DAYS = Math.max(1, Number.parseInt(process.env.AI_LENS_CODEX_REPLAY_DAYS || '30', 10) || 30);
@@ -160,19 +182,70 @@ function listJsonlFiles(dir) {
160
182
  return files.sort();
161
183
  }
162
184
 
163
- function getLinesSince(filePath, linesSeen) {
164
- if (!existsSync(filePath)) return { lines: [], total: 0 };
165
- const raw = readFileSync(filePath, 'utf-8');
166
- const allLines = raw.split(/\r?\n/).filter(Boolean);
167
- return {
168
- lines: allLines.slice(linesSeen),
169
- total: allLines.length,
170
- };
185
+ function fileFingerprint(stat) {
186
+ // dev:ino usually identifies a file uniquely on Unix. Some filesystems
187
+ // (tmpfs-like, WSL) reuse an inode after a fast unlink + create at the same
188
+ // path, so include birthtimeMs as a tiebreaker: the creation time generally
189
+ // advances for a freshly created file.
190
+ return `${stat.dev}:${stat.ino}:${stat.birthtimeMs || 0}`;
171
191
  }
172
192
 
173
- function countNonEmptyLines(filePath) {
174
- if (!existsSync(filePath)) return 0;
175
- return readFileSync(filePath, 'utf-8').split(/\r?\n/).filter(Boolean).length;
193
+ // Incremental JSONL reader: skips work when stat is unchanged, otherwise reads
194
+ // only the bytes appended since the last tracked offset. Holds back a trailing
195
+ // partial line until its terminator arrives so JSON.parse never sees a half row.
196
+ export function readNewLines(filePath, state) {
197
+ let stat;
198
+ try { stat = statSync(filePath); } catch { return { lines: [], state }; }
199
+
200
+ const size = stat.size;
201
+ const mtimeMs = stat.mtimeMs;
202
+ const fingerprint = fileFingerprint(stat);
203
+ const sameFile = state.fingerprint === fingerprint;
204
+
205
+ if (sameFile && state.statSize === size && state.statMtimeMs === mtimeMs) {
206
+ return { lines: [], state };
207
+ }
208
+
209
+ let startOffset = sameFile && Number.isFinite(state.bytesSeen) ? state.bytesSeen : 0;
210
+ // File truncated or rotated underneath us — start over.
211
+ if (size < startOffset) startOffset = 0;
212
+
213
+ const baseState = { ...state, statSize: size, statMtimeMs: mtimeMs, fingerprint };
214
+
215
+ if (size === startOffset) {
216
+ return { lines: [], state: { ...baseState, bytesSeen: size } };
217
+ }
218
+
219
+ const length = size - startOffset;
220
+ const buf = Buffer.alloc(length);
221
+ let fd;
222
+ try {
223
+ fd = openSync(filePath, 'r');
224
+ let total = 0;
225
+ while (total < length) {
226
+ const r = readSync(fd, buf, total, length - total, startOffset + total);
227
+ if (!r) break;
228
+ total += r;
229
+ }
230
+ } catch {
231
+ if (fd !== undefined) { try { closeSync(fd); } catch {} }
232
+ return { lines: [], state: baseState };
233
+ }
234
+ try { closeSync(fd); } catch {}
235
+
236
+ const lastNewlineByte = buf.lastIndexOf(0x0A);
237
+ if (lastNewlineByte === -1) {
238
+ // No complete line yet; wait for the terminator in a later poll.
239
+ return { lines: [], state: baseState };
240
+ }
241
+ const completeBytes = lastNewlineByte + 1;
242
+ const text = buf.slice(0, completeBytes).toString('utf-8');
243
+ const lines = text.split(/\r?\n/).filter(Boolean);
244
+
245
+ return {
246
+ lines,
247
+ state: { ...baseState, bytesSeen: startOffset + completeBytes },
248
+ };
176
249
  }
177
250
 
178
251
  function normalizeExistingDir(dirPath) {
@@ -258,13 +331,111 @@ export function resolveWatchedCodexDirs(monitoredRoots, userCodexDirs = EXPLICIT
258
331
  }
259
332
 
260
333
  export function buildInitialTrackedFileState(filePath, { firstScanCompleted = false, replayExisting = false } = {}) {
334
+ let bytesSeen = 0;
335
+ let statSize = 0;
336
+ let statMtimeMs = 0;
337
+ let fingerprint = null;
338
+ // On cold start, skip existing backlog via stat() instead of reading the file.
339
+ if (!firstScanCompleted && !replayExisting) {
340
+ try {
341
+ const stat = statSync(filePath);
342
+ bytesSeen = stat.size;
343
+ statSize = stat.size;
344
+ statMtimeMs = stat.mtimeMs;
345
+ fingerprint = fileFingerprint(stat);
346
+ } catch { /* file may not exist yet */ }
347
+ }
261
348
  return {
262
- linesSeen: (!firstScanCompleted && !replayExisting) ? countNonEmptyLines(filePath) : 0,
349
+ bytesSeen,
350
+ statSize,
351
+ statMtimeMs,
352
+ fingerprint,
263
353
  sessionEndEmitted: false,
264
354
  lastStopReason: null,
265
355
  };
266
356
  }
267
357
 
358
+ export function loadPersistedWatcherState(path = STATE_PATH) {
359
+ let raw;
360
+ try {
361
+ raw = readFileSync(path, 'utf-8');
362
+ } catch (err) {
363
+ // ENOENT is expected on first run; other I/O errors (permission, bad FS)
364
+ // are worth flagging because silently cold-starting can lose events that
365
+ // accumulated while the watcher was down.
366
+ if (err.code !== 'ENOENT') {
367
+ try { captureLog({ msg: 'codex-watcher-state-read-failed', path, error: err.message }); } catch {}
368
+ }
369
+ return null;
370
+ }
371
+ let data;
372
+ try {
373
+ data = JSON.parse(raw);
374
+ } catch (err) {
375
+ try { captureLog({ msg: 'codex-watcher-state-parse-failed', path, error: err.message }); } catch {}
376
+ return null;
377
+ }
378
+ if (!data || data.version !== STATE_VERSION) {
379
+ try { captureLog({ msg: 'codex-watcher-state-unsupported-version', path, version: data?.version }); } catch {}
380
+ return null;
381
+ }
382
+ const historyFiles = data.historyFiles && typeof data.historyFiles === 'object' ? data.historyFiles : {};
383
+ const sessionFiles = data.sessionFiles && typeof data.sessionFiles === 'object' ? data.sessionFiles : {};
384
+ return { historyFiles, sessionFiles };
385
+ }
386
+
387
+ export function persistWatcherState(runtimeState, path = STATE_PATH, nowMs = Date.now()) {
388
+ // Build a serializable snapshot that excludes entries for files that no
389
+ // longer exist. We must NOT mutate runtimeState.sessionFiles here: the loop
390
+ // still needs in-memory entries for rotated/deleted files so that
391
+ // processSyntheticSessionEnds can emit a SessionEnd/SubagentStop once the
392
+ // idle threshold elapses. Pruning a copy keeps the state file bounded
393
+ // without breaking synthetic end emission.
394
+ //
395
+ // For session files specifically, we also keep entries whose file is gone
396
+ // IF the synthetic close hasn't fired yet and the session is still recent:
397
+ // that way a watcher crash after file rotation but before EOF_IDLE_TIMEOUT_MS
398
+ // elapses doesn't drop the synthetic close across the restart.
399
+ const ageCutoff = nowMs - PERSIST_MISSING_MAX_AGE_MS;
400
+ const snapshotHistory = (map) => {
401
+ const out = {};
402
+ for (const [key, value] of map) {
403
+ if (existsSync(key)) out[key] = value;
404
+ }
405
+ return out;
406
+ };
407
+ const snapshotSessions = (map) => {
408
+ const out = {};
409
+ for (const [key, value] of map) {
410
+ if (existsSync(key)) {
411
+ out[key] = value;
412
+ continue;
413
+ }
414
+ const pendingClose = value
415
+ && value.sessionEndEmitted === false
416
+ && Number.isFinite(value.lastActivityMs)
417
+ && value.lastActivityMs >= ageCutoff;
418
+ if (pendingClose) out[key] = value;
419
+ }
420
+ return out;
421
+ };
422
+ const data = {
423
+ version: STATE_VERSION,
424
+ historyFiles: snapshotHistory(runtimeState.historyFiles),
425
+ sessionFiles: snapshotSessions(runtimeState.sessionFiles),
426
+ };
427
+ const tmp = `${path}.tmp.${process.pid}`;
428
+ try {
429
+ writeFileSync(tmp, JSON.stringify(data));
430
+ renameSync(tmp, path);
431
+ return true;
432
+ } catch (err) {
433
+ try { captureLog({ msg: 'codex-watcher-state-save-failed', error: err.message }); } catch {}
434
+ try { unlinkSync(tmp); } catch {}
435
+ return false;
436
+ }
437
+ }
438
+
268
439
  function ingestUnifiedEvent(unified, rawLine, eventSalt = '') {
269
440
  if (!unified || !unified.session_id) return;
270
441
 
@@ -369,17 +540,23 @@ export function buildSyntheticSubagentStop(fileState, sessionEndEvent) {
369
540
  };
370
541
  }
371
542
 
372
- function createRuntimeState() {
543
+ export function createRuntimeState(seed = null) {
373
544
  const nowMs = Date.now();
545
+ const historyFiles = new Map(seed?.historyFiles ? Object.entries(seed.historyFiles) : []);
546
+ const sessionFiles = new Map(seed?.sessionFiles ? Object.entries(seed.sessionFiles) : []);
374
547
  return {
375
548
  tracker: createCodexTrackerState(),
376
549
  codexDirs: [],
377
550
  lastDiscoveryAt: 0,
378
- firstScanCompleted: false,
551
+ // If we resumed from disk, any file we discover that is NOT in the seed is
552
+ // genuinely new since shutdown, so its content should be processed from
553
+ // byte 0 rather than skipped as backlog.
554
+ firstScanCompleted: Boolean(seed),
379
555
  replayExisting: process.env.AI_LENS_CODEX_REPLAY_EXISTING === '1',
380
556
  replayCutoffMs: nowMs - REPLAY_WINDOW_MS,
381
- historyFiles: new Map(),
382
- sessionFiles: new Map(),
557
+ historyFiles,
558
+ sessionFiles,
559
+ sessionFileLists: new Map(),
383
560
  pendingHistory: new Map(),
384
561
  };
385
562
  }
@@ -428,8 +605,8 @@ function processHistory(runtimeState) {
428
605
  const historyPath = join(codexDir, 'history.jsonl');
429
606
  const current = runtimeState.historyFiles.get(historyPath)
430
607
  || buildInitialTrackedFileState(historyPath, runtimeState);
431
- const { lines, total } = getLinesSince(historyPath, current.linesSeen);
432
- runtimeState.historyFiles.set(historyPath, { linesSeen: total });
608
+ const { lines, state: nextState } = readNewLines(historyPath, current);
609
+ runtimeState.historyFiles.set(historyPath, nextState);
433
610
  for (const line of lines) {
434
611
  try {
435
612
  const entry = JSON.parse(line);
@@ -448,20 +625,28 @@ function processHistory(runtimeState) {
448
625
  }
449
626
  }
450
627
 
628
+ function listSessionFilesCached(runtimeState, sessionsDir) {
629
+ const now = Date.now();
630
+ const cache = runtimeState.sessionFileLists.get(sessionsDir);
631
+ if (cache && now - cache.ts < SESSION_LIST_TTL_MS) return cache.files;
632
+ const files = listJsonlFiles(sessionsDir);
633
+ runtimeState.sessionFileLists.set(sessionsDir, { ts: now, files });
634
+ return files;
635
+ }
636
+
451
637
  function processSessionFiles(runtimeState) {
452
638
  for (const codexDir of refreshCodexDirs(runtimeState)) {
453
639
  const sessionsDir = join(codexDir, 'sessions');
454
- for (const filePath of listJsonlFiles(sessionsDir)) {
640
+ for (const filePath of listSessionFilesCached(runtimeState, sessionsDir)) {
455
641
  const current = runtimeState.sessionFiles.get(filePath)
456
642
  || buildInitialTrackedFileState(filePath, runtimeState);
457
- const { lines, total } = getLinesSince(filePath, current.linesSeen);
458
- current.linesSeen = total;
643
+ const { lines, state: nextState } = readNewLines(filePath, current);
459
644
  for (const line of lines) {
460
645
  try {
461
646
  const entry = JSON.parse(line);
462
647
  const events = normalizeCodexSessionEntries(entry, runtimeState.tracker, filePath);
463
648
  events.forEach((unified, index) => {
464
- trackSessionFileEvent(current, unified, Date.now());
649
+ trackSessionFileEvent(nextState, unified, Date.now());
465
650
  if (isWithinReplayWindow(runtimeState, unified)) {
466
651
  ingestUnifiedEvent(unified, line, `${index}:${unified.type}`);
467
652
  }
@@ -470,7 +655,7 @@ function processSessionFiles(runtimeState) {
470
655
  captureLog({ msg: 'codex-session-parse-failed', filePath, error: err.message });
471
656
  }
472
657
  }
473
- runtimeState.sessionFiles.set(filePath, current);
658
+ runtimeState.sessionFiles.set(filePath, nextState);
474
659
  }
475
660
  }
476
661
  }
@@ -503,14 +688,22 @@ async function main() {
503
688
  ensureDataDir();
504
689
  if (!acquireLock()) process.exit(0);
505
690
 
506
- const runtimeState = createRuntimeState();
691
+ const runtimeState = createRuntimeState(loadPersistedWatcherState());
692
+ const savePersistedState = () => { try { persistWatcherState(runtimeState); } catch {} };
507
693
  const shutdown = () => {
694
+ savePersistedState();
508
695
  releaseLock();
509
696
  process.exit(0);
510
697
  };
511
698
  process.on('SIGINT', shutdown);
512
699
  process.on('SIGTERM', shutdown);
513
- process.on('exit', releaseLock);
700
+ process.on('exit', () => {
701
+ savePersistedState();
702
+ releaseLock();
703
+ });
704
+
705
+ let lastConfigCheckAt = Date.now();
706
+ let lastStateSaveAt = Date.now();
514
707
 
515
708
  while (true) {
516
709
  try {
@@ -523,6 +716,23 @@ async function main() {
523
716
  } catch (err) {
524
717
  captureLog({ msg: 'codex-watcher-loop-failed', error: err.message });
525
718
  }
719
+
720
+ const now = Date.now();
721
+ if (now - lastConfigCheckAt >= CONFIG_CHECK_MS) {
722
+ // Config is cached in-process; drop it so codexEnabled: false is observed.
723
+ _clearConfigCache();
724
+ if (!isCodexEnabled()) {
725
+ captureLog({ msg: 'codex-watcher-disabled', reason: 'config-flag-cleared' });
726
+ shutdown();
727
+ return;
728
+ }
729
+ lastConfigCheckAt = now;
730
+ }
731
+ if (now - lastStateSaveAt >= STATE_SAVE_MS) {
732
+ savePersistedState();
733
+ lastStateSaveAt = now;
734
+ }
735
+
526
736
  await new Promise(resolve => setTimeout(resolve, POLL_MS));
527
737
  }
528
738
  }
package/client/codex.js CHANGED
@@ -1,4 +1,4 @@
1
- import { existsSync, realpathSync } from 'node:fs';
1
+ import { existsSync, realpathSync, statSync, readFileSync } from 'node:fs';
2
2
  import { randomUUID } from 'node:crypto';
3
3
  import { dirname, join } from 'node:path';
4
4
  import { toNumberOrNull } from './token-usage.js';
@@ -139,13 +139,59 @@ function extractFilePath(input) {
139
139
  return null;
140
140
  }
141
141
 
142
+ // Mirrors `resolveWorktreeToMainRepo` in capture.js (ANL-729). If this
143
+ // changes in one place, change it in the other — we keep the copy inline
144
+ // instead of a shared module to avoid cross-file churn in this pipeline.
145
+ function resolveWorktreeToMainRepo(dir) {
146
+ try {
147
+ const gitEntry = join(dir, '.git');
148
+ const st = statSync(gitEntry);
149
+ if (st.isDirectory()) return dir;
150
+ if (!st.isFile()) return dir;
151
+
152
+ const contents = readFileSync(gitEntry, 'utf-8');
153
+ const match = contents.match(/^\s*gitdir:\s*(.+?)\s*$/m);
154
+ if (!match) return dir;
155
+
156
+ let gitdir = match[1];
157
+ // POSIX-only (`startsWith('/')`) — AI Lens clients are macOS/Linux.
158
+ if (!gitdir.startsWith('/')) gitdir = join(dir, gitdir);
159
+
160
+ // Only real worktrees (`<main>/.git/worktrees/<name>`). Submodules
161
+ // (`<super>/.git/modules/<name>`) are left alone.
162
+ if (!gitdir.includes('/.git/worktrees/')) return dir;
163
+
164
+ // `.git` segment sits 2 levels above the gitdir; 4 is a safety margin.
165
+ let cur = gitdir;
166
+ for (let i = 0; i < 4; i++) {
167
+ const parent = dirname(cur);
168
+ if (parent === cur) break;
169
+ if (cur.endsWith('/.git')) return parent;
170
+ cur = parent;
171
+ }
172
+ return dir;
173
+ } catch {
174
+ return dir;
175
+ }
176
+ }
177
+
178
+ function canonicalizeProjectPath(dir) {
179
+ if (!dir || typeof dir !== 'string') return dir;
180
+ try {
181
+ if (existsSync(join(dir, '.git'))) {
182
+ return resolveWorktreeToMainRepo(dir);
183
+ }
184
+ } catch {}
185
+ return dir;
186
+ }
187
+
142
188
  function findGitRoot(filePath) {
143
189
  if (!filePath || typeof filePath !== 'string' || !filePath.startsWith('/')) return null;
144
190
 
145
191
  let dir = filePath;
146
192
  while (dir && dir !== '/' && dir.length > 1) {
147
193
  try {
148
- if (existsSync(join(dir, '.git'))) return dir;
194
+ if (existsSync(join(dir, '.git'))) return resolveWorktreeToMainRepo(dir);
149
195
  } catch {}
150
196
 
151
197
  const parent = dirname(dir);
@@ -327,7 +373,7 @@ export function normalizeCodexSessionEntries(record, state, streamKey = 'default
327
373
 
328
374
  stream.sessionId = logicalSessionId;
329
375
  stream.rawSessionId = sessionId;
330
- stream.projectPath = cwd;
376
+ stream.projectPath = canonicalizeProjectPath(cwd);
331
377
  stream.hasActivity = false;
332
378
  stream.model = null;
333
379
  events.push(buildUnifiedEvent(
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "ai-lens",
3
- "version": "0.8.60",
3
+ "version": "0.8.62",
4
4
  "type": "module",
5
5
  "description": "Centralized session analytics for AI coding tools",
6
6
  "bin": {
@@ -14,7 +14,9 @@
14
14
  "README.md"
15
15
  ],
16
16
  "scripts": {
17
- "prepare": "git rev-parse --short HEAD > .commithash 2>/dev/null || true",
17
+ "prepare": "git rev-parse --short HEAD > .commithash 2>/dev/null || true; git config core.hooksPath .githooks 2>/dev/null || true",
18
+ "version": "node scripts/check-changelog.js && git add CHANGELOG.md",
19
+ "release": "./scripts/release-cli.sh",
18
20
  "init": "node bin/ai-lens.js init",
19
21
  "remove": "node bin/ai-lens.js remove",
20
22
  "status": "node bin/ai-lens.js status",