ai-lens 0.7.5 → 0.8.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/.commithash CHANGED
@@ -1 +1 @@
1
- 7c3d9b5
1
+ 1dfdd25
package/cli/hooks.js CHANGED
@@ -60,7 +60,7 @@ export function shellEscape(str) {
60
60
 
61
61
  function captureCommand() {
62
62
  const escaped = shellEscape(CAPTURE_PATH);
63
- return `${shellEscape(process.execPath)} ${escaped} || node ${escaped}`;
63
+ return `${shellEscape(process.execPath)} ${escaped}`;
64
64
  }
65
65
 
66
66
  // ---------------------------------------------------------------------------
package/cli/init.js CHANGED
@@ -371,7 +371,7 @@ export default async function init() {
371
371
  if (projects) {
372
372
  const home = homedir();
373
373
  projects = projects.split(',').map(p => p.trim()).filter(Boolean)
374
- .map(p => p.startsWith('~/') ? join(home, p.slice(2)) : resolve(p))
374
+ .map(p => p === '~' ? home : p.startsWith('~/') ? join(home, p.slice(2)) : resolve(p))
375
375
  .join(',');
376
376
  }
377
377
  if (projects) {
@@ -402,7 +402,9 @@ export default async function init() {
402
402
  if (tokenStatus === 'valid') {
403
403
  success(' Already authenticated (token verified)');
404
404
  } else if (tokenStatus === 'unknown') {
405
- warn(' Token format not recognized — keeping existing token');
405
+ warn(' Token format not recognized — re-authenticating...');
406
+ currentConfig.authToken = null;
407
+ newConfig.authToken = null;
406
408
  } else if (tokenStatus === 'invalid') {
407
409
  warn(' Existing token is invalid or revoked — re-authenticating...');
408
410
  currentConfig.authToken = null;
package/cli/status.js CHANGED
@@ -84,15 +84,22 @@ function checkCaptureRun(installedTools) {
84
84
  }
85
85
 
86
86
  try {
87
- execSync(`echo '{}' | ${command}`, {
87
+ const testEvent = JSON.stringify({
88
+ hook_event_name: 'Stop',
89
+ session_id: 'status-check-' + Date.now(),
90
+ stop_reason: 'test',
91
+ });
92
+ const testCmd = `echo '${testEvent.replace(/'/g, "'\\''")}' | ${command}`;
93
+ execSync(testCmd, {
88
94
  encoding: 'utf-8',
89
95
  timeout: 10_000,
90
96
  stdio: ['pipe', 'pipe', 'pipe'],
97
+ env: { ...process.env, AI_LENS_PROJECTS: '/ai-lens-status-check-nonexistent' },
91
98
  });
92
99
  return {
93
100
  ok: true,
94
101
  summary: 'capture runs OK',
95
- detail: `Ran: echo '{}' | ${command}\nResult: exit 0`,
102
+ detail: `Ran: ${testCmd}\nResult: exit 0`,
96
103
  };
97
104
  } catch (err) {
98
105
  const stderr = err.stderr?.trim() || err.message;
@@ -264,8 +271,8 @@ function checkSenderLog() {
264
271
  for (const line of last20) {
265
272
  try {
266
273
  const entry = JSON.parse(line);
267
- if (entry.msg === 'batch_sent' || entry.msg === 'send_ok') lastSend = entry.ts;
268
- if (entry.msg === 'send_error' || entry.msg === 'send_fail' || entry.level === 'error') hasErrors = true;
274
+ if (entry.msg === 'sent') lastSend = entry.ts;
275
+ if (entry.msg === 'failed' || entry.msg === 'error' || entry.msg === 'auth-failed') hasErrors = true;
269
276
  } catch { /* non-JSON line */ }
270
277
  }
271
278
 
package/client/capture.js CHANGED
@@ -7,7 +7,7 @@
7
7
  * normalizes to unified event format, appends to queue, spawns sender if needed.
8
8
  */
9
9
 
10
- import { readFileSync, writeFileSync, appendFileSync, existsSync, renameSync } from 'node:fs';
10
+ import { readFileSync, writeFileSync, appendFileSync, existsSync, renameSync, realpathSync } from 'node:fs';
11
11
  import { spawn } from 'node:child_process';
12
12
  import { join, dirname } from 'node:path';
13
13
  import { fileURLToPath } from 'node:url';
@@ -26,15 +26,16 @@ import {
26
26
  getMonitoredProjects,
27
27
  } from './config.js';
28
28
  // Soft import — redact.js may not exist on older client installs
29
- let redactSecrets = (s) => s;
29
+ let redactObject = (o) => o;
30
30
  try {
31
31
  const mod = await import('./redact.js');
32
- redactSecrets = mod.redactSecrets;
32
+ redactObject = mod.redactObject;
33
33
  } catch { /* redact.js not installed yet — skip redaction, server will handle it */ }
34
34
 
35
35
  const __dirname = dirname(fileURLToPath(import.meta.url));
36
36
 
37
37
  function logDrop(reason, meta = {}) {
38
+ if (process.env.AI_LENS_DRY_RUN) return;
38
39
  try {
39
40
  const entry = { ts: new Date().toISOString(), reason, ...meta };
40
41
  appendFileSync(CAPTURE_LOG_PATH, JSON.stringify(entry) + '\n');
@@ -73,13 +74,24 @@ function truncate(text, maxLen) {
73
74
  return text.slice(0, end) + ` [...truncated, ${text.length} chars total]`;
74
75
  }
75
76
 
76
- function truncateToolInput(input, toolName) {
77
+ function truncateToolInput(input, toolName, depth = 0) {
77
78
  if (!input || typeof input !== 'object') return input;
79
+ if (depth > 5) return input;
80
+ if (Array.isArray(input)) {
81
+ return input.map(item => {
82
+ if (typeof item === 'string') return truncate(item, TRUNCATION_LIMITS.toolInput.default);
83
+ if (item && typeof item === 'object') return truncateToolInput(item, toolName, depth + 1);
84
+ return item;
85
+ });
86
+ }
78
87
  const result = { ...input };
79
88
  for (const [key, value] of Object.entries(result)) {
80
- if (typeof value !== 'string') continue;
81
- const limit = TRUNCATION_LIMITS.toolInput[key] || TRUNCATION_LIMITS.toolInput.default;
82
- result[key] = truncate(value, limit);
89
+ if (typeof value === 'string') {
90
+ const limit = TRUNCATION_LIMITS.toolInput[key] || TRUNCATION_LIMITS.toolInput.default;
91
+ result[key] = truncate(value, limit);
92
+ } else if (value && typeof value === 'object') {
93
+ result[key] = truncateToolInput(value, toolName, depth + 1);
94
+ }
83
95
  }
84
96
  return result;
85
97
  }
@@ -280,7 +292,7 @@ function normalizeClaudeCode(event) {
280
292
  data = {
281
293
  tool: failedTool,
282
294
  input: truncateToolInput(event.tool_input || event.input, failedTool),
283
- error: truncate(event.error || '', TRUNCATION_LIMITS.toolResult),
295
+ error: truncate(event.error || '', TRUNCATION_LIMITS.toolResult[failedTool] || TRUNCATION_LIMITS.toolResult.default),
284
296
  };
285
297
  const failMcp = event.mcp_server || (failedTool.startsWith('mcp__') ? failedTool.split('__')[1] : null);
286
298
  if (failMcp) data.mcp_server = failMcp;
@@ -356,9 +368,11 @@ function pickWorkspaceRoot(roots) {
356
368
  if (valid.length === 1) return valid[0];
357
369
  const monitored = getMonitoredProjects();
358
370
  if (monitored) {
359
- const match = valid.find(root =>
360
- monitored.some(p => root === p || root.startsWith(p + '/'))
361
- );
371
+ const match = valid.find(root => {
372
+ let resolved = root;
373
+ try { resolved = realpathSync(root); } catch {}
374
+ return monitored.some(p => resolved === p || resolved.startsWith(p + '/'));
375
+ });
362
376
  if (match) return match;
363
377
  }
364
378
  return valid[0];
@@ -495,13 +509,10 @@ export function normalizeEvent(event) {
495
509
  // =============================================================================
496
510
 
497
511
  function appendToQueue(unified) {
498
- // Redact secrets from data and raw before writing to queue
499
- if (unified.data) {
500
- unified.data = JSON.parse(redactSecrets(JSON.stringify(unified.data)));
501
- }
502
- if (unified.raw) {
503
- unified.raw = JSON.parse(redactSecrets(JSON.stringify(unified.raw)));
504
- }
512
+ // Redact secrets from individual string values (not serialized JSON)
513
+ // to prevent regex patterns from matching across JSON structural boundaries
514
+ if (unified.data) unified.data = redactObject(unified.data);
515
+ if (unified.raw) unified.raw = redactObject(unified.raw);
505
516
  appendFileSync(QUEUE_PATH, JSON.stringify(unified) + '\n');
506
517
  }
507
518
 
@@ -557,10 +568,13 @@ async function main() {
557
568
 
558
569
  // Filter by monitored projects (if configured)
559
570
  const monitored = getMonitoredProjects();
560
- if (monitored && unified.project_path && !monitored.some(p => unified.project_path === p || unified.project_path.startsWith(p + '/'))) {
571
+ let projectPath = unified.project_path;
572
+ try { projectPath = realpathSync(projectPath); } catch {}
573
+ if (monitored && projectPath && !monitored.some(p => projectPath === p || projectPath.startsWith(p + '/'))) {
561
574
  // Fallback: for Cursor multi-root workspaces, check if any raw workspace_roots entry matches
562
575
  const roots = Array.isArray(event.workspace_roots) ? event.workspace_roots : [];
563
- if (!roots.some(root => monitored.some(p => root === p || root.startsWith(p + '/')))) {
576
+ const resolvedRoots = roots.map(r => { try { return realpathSync(r); } catch { return r; } });
577
+ if (!resolvedRoots.some(root => monitored.some(p => root === p || root.startsWith(p + '/')))) {
564
578
  logDrop('project_filter', { type: unified.type, source: unified.source, session_id: unified.session_id, project_path: unified.project_path, monitored });
565
579
  process.exit(0);
566
580
  }
@@ -568,7 +582,7 @@ async function main() {
568
582
 
569
583
  // Resolve identity: git first, then fall back to event payload (e.g. Cursor's user_email)
570
584
  // When auth token is present, server resolves developer from token — email is optional
571
- const identity = getGitIdentity();
585
+ const identity = getGitIdentity(unified.project_path);
572
586
  const hasAuthToken = !!getAuthToken();
573
587
  const resolved = resolveIdentity(identity, event, hasAuthToken);
574
588
  if (!resolved.proceed) {
package/client/config.js CHANGED
@@ -1,4 +1,4 @@
1
- import { mkdirSync, appendFileSync, readFileSync, writeFileSync, existsSync, renameSync } from 'node:fs';
1
+ import { mkdirSync, appendFileSync, readFileSync, writeFileSync, existsSync, renameSync, realpathSync } from 'node:fs';
2
2
  import { join, resolve } from 'node:path';
3
3
  import { homedir } from 'node:os';
4
4
  import { execSync } from 'node:child_process';
@@ -19,6 +19,7 @@ export function log(fields) {
19
19
  }
20
20
 
21
21
  export function captureLog(fields) {
22
+ if (process.env.AI_LENS_DRY_RUN) return;
22
23
  const entry = { ts: new Date().toISOString(), ...fields };
23
24
  try {
24
25
  appendFileSync(CAPTURE_LOG_PATH, JSON.stringify(entry) + '\n');
@@ -64,8 +65,9 @@ export function getMonitoredProjects() {
64
65
  if (paths.length === 0) return null;
65
66
  const home = homedir();
66
67
  return paths
67
- .map(p => p.startsWith('~/') ? join(home, p.slice(2)) : p)
68
+ .map(p => p === '~' ? home : p.startsWith('~/') ? join(home, p.slice(2)) : p)
68
69
  .map(p => resolve(p))
70
+ .map(p => { try { return realpathSync(p); } catch { return p; } })
69
71
  .map(p => p.endsWith('/') ? p.slice(0, -1) : p);
70
72
  }
71
73
 
@@ -73,18 +75,20 @@ export function getAuthToken() {
73
75
  return process.env.AI_LENS_AUTH_TOKEN || loadConfig().authToken || null;
74
76
  }
75
77
 
76
- export function getGitIdentity() {
78
+ export function getGitIdentity(cwd) {
77
79
  let email = null;
78
80
  let name = null;
81
+ const opts = { encoding: 'utf-8', timeout: 3000 };
82
+ if (cwd) opts.cwd = cwd;
79
83
 
80
84
  try {
81
- email = execSync('git config user.email', { encoding: 'utf-8', timeout: 3000 }).trim();
85
+ email = execSync('git config user.email', opts).trim();
82
86
  } catch (err) {
83
87
  captureLog({ msg: 'git-email-failed', error: err.message?.split('\n')[0] });
84
88
  }
85
89
 
86
90
  try {
87
- name = execSync('git config user.name', { encoding: 'utf-8', timeout: 3000 }).trim();
91
+ name = execSync('git config user.name', opts).trim();
88
92
  } catch {
89
93
  // git name missing is non-critical — email or token is sufficient
90
94
  }
package/client/redact.js CHANGED
@@ -75,3 +75,20 @@ export function redactSecrets(str) {
75
75
  }
76
76
  return result;
77
77
  }
78
+
79
+ /**
80
+ * Recursively redact secrets from individual string leaf values in an object.
81
+ * Prevents regex patterns from matching across JSON structural boundaries.
82
+ */
83
+ export function redactObject(obj) {
84
+ if (typeof obj === 'string') return redactSecrets(obj);
85
+ if (Array.isArray(obj)) return obj.map(redactObject);
86
+ if (obj && typeof obj === 'object') {
87
+ const result = {};
88
+ for (const [k, v] of Object.entries(obj)) {
89
+ result[k] = redactObject(v);
90
+ }
91
+ return result;
92
+ }
93
+ return obj;
94
+ }
package/client/sender.js CHANGED
@@ -25,6 +25,15 @@ import {
25
25
 
26
26
  export const MAX_QUEUE_SIZE = 10_000;
27
27
  export const MAX_CHUNK_BYTES = 4 * 1024 * 1024; // 4 MB per POST (Express limit is 50 MB)
28
+ export const LOCK_MAX_AGE_MS = 5 * 60 * 1000; // 5 minutes
29
+
30
+ /**
31
+ * Refresh lock timestamp to prevent false staleness during long sends.
32
+ * Called after each chunk POST to keep the lock fresh.
33
+ */
34
+ export function refreshLock(lockPath) {
35
+ try { writeFileSync(lockPath, `${process.pid}\n${Date.now()}`); } catch {}
36
+ }
28
37
 
29
38
  /**
30
39
  * Parse queue file content into events array.
@@ -93,14 +102,20 @@ export function buildRollbackContent(unsentContent, existingContent) {
93
102
  }
94
103
 
95
104
  /**
96
- * Check if a sender lock file is stale (owner process no longer running).
97
- * Returns true if lock is missing or process is dead.
105
+ * Check if a sender lock file is stale (owner process no longer running or lock too old).
106
+ * Lock format: "<pid>\n<timestamp>" (backward-compat: old "<pid>" treated as stale).
107
+ * Returns true if lock is missing, process is dead, or lock exceeds LOCK_MAX_AGE_MS.
98
108
  */
99
109
  export function isLockStale(lockPath) {
100
110
  try {
101
- const pid = parseInt(readFileSync(lockPath, 'utf-8').trim(), 10);
111
+ const content = readFileSync(lockPath, 'utf-8').trim();
112
+ const lines = content.split('\n');
113
+ const pid = parseInt(lines[0], 10);
114
+ const lockTime = parseInt(lines[1], 10);
115
+ // Old format (no timestamp) or expired → stale regardless of PID liveness
116
+ if (!lockTime || Date.now() - lockTime > LOCK_MAX_AGE_MS) return true;
102
117
  process.kill(pid, 0); // throws if process doesn't exist
103
- return false; // process alive — lock is active
118
+ return false; // process alive and lock fresh — active
104
119
  } catch (err) {
105
120
  if (err.code === 'ESRCH') return true; // process dead — stale
106
121
  if (err.code === 'ENOENT') return true; // no lock file
@@ -124,6 +139,19 @@ export function mergeToQueue(unsentContent, queuePath) {
124
139
  if (err.code !== 'ENOENT') throw err;
125
140
  // No queue existed — nothing to drain
126
141
  }
142
+ // Retry-drain: capture.js may have created a new queue.jsonl between the
143
+ // initial drain and this rename. Loop to absorb any newly appeared file,
144
+ // narrowing the race window to a single renameSync syscall.
145
+ for (let i = 0; i < 3; i++) {
146
+ try {
147
+ renameSync(queuePath, drainPath);
148
+ appendFileSync(tmpPath, readFileSync(drainPath, 'utf-8'));
149
+ unlinkSync(drainPath);
150
+ } catch (err) {
151
+ if (err.code === 'ENOENT') break;
152
+ throw err;
153
+ }
154
+ }
127
155
  renameSync(tmpPath, queuePath);
128
156
  }
129
157
 
@@ -165,8 +193,8 @@ export function acquireQueue(queuePath = QUEUE_PATH, sendingPath = SENDING_PATH)
165
193
  throw err;
166
194
  }
167
195
 
168
- // Write PID lock so other senders know we're active
169
- writeFileSync(lockPath, String(process.pid));
196
+ // Write PID + timestamp lock so other senders know we're active
197
+ writeFileSync(lockPath, `${process.pid}\n${Date.now()}`);
170
198
 
171
199
  const content = readFileSync(sendingPath, 'utf-8');
172
200
  const { events, dropped, overflow } = parseQueueContent(content);
@@ -258,6 +286,31 @@ export function chunkEvents(events, maxBytes = MAX_CHUNK_BYTES) {
258
286
  return chunks;
259
287
  }
260
288
 
289
+ /**
290
+ * Filter out oversized events, salvaging those with a `raw` field by stripping it.
291
+ * Returns { sendable, droppedIds } where droppedIds contains event_ids of permanently dropped events.
292
+ */
293
+ export function filterOversized(batch, maxBytes = MAX_CHUNK_BYTES) {
294
+ const sendable = [];
295
+ const droppedIds = new Set();
296
+ for (const evt of batch) {
297
+ let evtBytes = Buffer.byteLength(JSON.stringify(evt));
298
+ if (evtBytes > maxBytes && evt.raw !== undefined) {
299
+ const originalBytes = evtBytes;
300
+ delete evt.raw;
301
+ evtBytes = Buffer.byteLength(JSON.stringify(evt));
302
+ log({ msg: 'stripped-raw-oversized', event_id: evt.event_id, type: evt.type, original_bytes: originalBytes, stripped_bytes: evtBytes });
303
+ }
304
+ if (evtBytes > maxBytes) {
305
+ log({ msg: 'skip-oversized', event_id: evt.event_id, type: evt.type, bytes: evtBytes, limit: maxBytes });
306
+ if (evt.event_id) droppedIds.add(evt.event_id);
307
+ } else {
308
+ sendable.push(evt);
309
+ }
310
+ }
311
+ return { sendable, droppedIds };
312
+ }
313
+
261
314
  /**
262
315
  * POST events to server using Node.js stdlib.
263
316
  */
@@ -276,12 +329,8 @@ function postEvents(serverUrl, events, identity) {
276
329
  if (identity.name) headers['X-Developer-Name'] = encodeURIComponent(identity.name);
277
330
 
278
331
  const authToken = getAuthToken();
279
- if (authToken) {
280
- if (authToken.startsWith('ailens_dev_')) {
281
- headers['X-Auth-Token'] = authToken;
282
- } else {
283
- headers['Authorization'] = 'Basic ' + Buffer.from(authToken).toString('base64');
284
- }
332
+ if (authToken && authToken.startsWith('ailens_dev_')) {
333
+ headers['X-Auth-Token'] = authToken;
285
334
  }
286
335
 
287
336
  const options = {
@@ -340,21 +389,14 @@ async function main() {
340
389
 
341
390
  try {
342
391
  for (const { identity, events: batch } of byDeveloper.values()) {
343
- // Filter out individual oversized events before chunking
344
- const sendable = [];
345
- for (const evt of batch) {
346
- const evtBytes = Buffer.byteLength(JSON.stringify(evt));
347
- if (evtBytes > MAX_CHUNK_BYTES) {
348
- log({ msg: 'skip-oversized', event_id: evt.event_id, type: evt.type, bytes: evtBytes, limit: MAX_CHUNK_BYTES });
349
- if (evt.event_id) sentEventIds.add(evt.event_id);
350
- } else {
351
- sendable.push(evt);
352
- }
353
- }
392
+ // Filter out individual oversized events before chunking (strip raw to salvage)
393
+ const { sendable, droppedIds } = filterOversized(batch);
394
+ for (const id of droppedIds) sentEventIds.add(id);
354
395
  const chunks = chunkEvents(sendable);
355
396
  let totalReceived = 0;
356
397
  for (const chunk of chunks) {
357
398
  const result = await postEvents(serverUrl, chunk, identity);
399
+ refreshLock(sendingPath + '.lock');
358
400
  totalReceived += result.received;
359
401
  if (result.skipped > 0) {
360
402
  log({ msg: 'server-skipped', skipped: result.skipped, chunk_size: chunk.length, developer: identity.email });
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "ai-lens",
3
- "version": "0.7.5",
3
+ "version": "0.8.2",
4
4
  "type": "module",
5
5
  "description": "Centralized session analytics for AI coding tools",
6
6
  "bin": {