ai-lens 0.7.2 → 0.7.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/.commithash CHANGED
@@ -1 +1 @@
1
- 13ac046
1
+ 8b0424f
package/README.md CHANGED
@@ -11,7 +11,7 @@ Hook fires → capture.js → normalize → queue.jsonl → sender.js → POST /
11
11
  Run the init command on each developer machine:
12
12
 
13
13
  ```bash
14
- npx ai-lens init
14
+ npx -y ai-lens init
15
15
  ```
16
16
 
17
17
  This will:
package/cli/hooks.js CHANGED
@@ -44,11 +44,11 @@ export function readLensConfig() {
44
44
 
45
45
  export function saveLensConfig(config) {
46
46
  mkdirSync(dirname(CONFIG_PATH), { recursive: true });
47
- writeFileSync(CONFIG_PATH, JSON.stringify(config, null, 2) + '\n');
47
+ const tmpPath = CONFIG_PATH + '.tmp.' + process.pid;
48
+ writeFileSync(tmpPath, JSON.stringify(config, null, 2) + '\n');
49
+ renameSync(tmpPath, CONFIG_PATH);
48
50
  }
49
51
 
50
- const DEFAULT_SERVER_URL = 'http://localhost:3000';
51
-
52
52
  /**
53
53
  * Escape a string for safe embedding in a single-quoted shell context.
54
54
  * Standard POSIX approach: replace each ' with '\'' (end quote, escaped quote, start quote).
@@ -59,19 +59,8 @@ export function shellEscape(str) {
59
59
  }
60
60
 
61
61
  function captureCommand() {
62
- const config = readLensConfig();
63
- const envs = [];
64
- if (config.serverUrl && config.serverUrl !== DEFAULT_SERVER_URL) {
65
- envs.push(`AI_LENS_SERVER_URL=${shellEscape(config.serverUrl)}`);
66
- }
67
- if (config.projects) {
68
- envs.push(`AI_LENS_PROJECTS=${shellEscape(config.projects)}`);
69
- }
70
- if (config.authToken) {
71
- envs.push(`AI_LENS_AUTH_TOKEN=${shellEscape(config.authToken)}`);
72
- }
73
- const base = `node ${CAPTURE_PATH}`;
74
- return envs.length > 0 ? `${envs.join(' ')} ${base}` : base;
62
+ const escaped = shellEscape(CAPTURE_PATH);
63
+ return `${shellEscape(process.execPath)} ${escaped} || node ${escaped}`;
75
64
  }
76
65
 
77
66
  // ---------------------------------------------------------------------------
@@ -169,13 +158,10 @@ export const TOOL_CONFIGS = [
169
158
  export function isAiLensHook(entry) {
170
159
  // Flat format (Cursor): { command: "..." }
171
160
  const cmd = entry?.command || '';
172
- if (cmd.includes('ai-lens') && cmd.includes('capture.js')) return true;
161
+ if (cmd.includes(CAPTURE_PATH)) return true;
173
162
  // Nested format (Claude Code): { matcher, hooks: [{ command: "..." }] }
174
163
  if (Array.isArray(entry?.hooks)) {
175
- return entry.hooks.some(h => {
176
- const c = h?.command || '';
177
- return c.includes('ai-lens') && c.includes('capture.js');
178
- });
164
+ return entry.hooks.some(h => (h?.command || '').includes(CAPTURE_PATH));
179
165
  }
180
166
  return false;
181
167
  }
@@ -345,7 +331,9 @@ export function buildStrippedConfig(tool, existingConfig) {
345
331
 
346
332
  export function writeHooksConfig(tool, config) {
347
333
  mkdirSync(dirname(tool.configPath), { recursive: true });
348
- writeFileSync(tool.configPath, JSON.stringify(config, null, 2) + '\n');
334
+ const tmpPath = tool.configPath + '.tmp.' + process.pid;
335
+ writeFileSync(tmpPath, JSON.stringify(config, null, 2) + '\n');
336
+ renameSync(tmpPath, tool.configPath);
349
337
  }
350
338
 
351
339
  // ---------------------------------------------------------------------------
package/cli/init.js CHANGED
@@ -1,7 +1,7 @@
1
1
  import { createInterface } from 'node:readline';
2
2
  import { execSync } from 'node:child_process';
3
- import { existsSync } from 'node:fs';
4
- import { join } from 'node:path';
3
+ import { existsSync, copyFileSync } from 'node:fs';
4
+ import { join, resolve } from 'node:path';
5
5
  import { homedir } from 'node:os';
6
6
  import { request as httpRequest } from 'node:http';
7
7
  import { request as httpsRequest } from 'node:https';
@@ -9,6 +9,7 @@ import {
9
9
  initLogger, info, success, warn, error,
10
10
  heading, detail, blank, getLogPath,
11
11
  } from './logger.js';
12
+ import { getGitIdentity } from '../client/config.js';
12
13
  import {
13
14
  CAPTURE_PATH, detectInstalledTools,
14
15
  analyzeToolHooks, buildMergedConfig, writeHooksConfig, describePlan,
@@ -226,6 +227,41 @@ async function deviceCodeAuth(serverUrl) {
226
227
  throw new Error('Device code expired. Please try again.');
227
228
  }
228
229
 
230
+ /**
231
+ * Validate an existing auth token against the server.
232
+ * Returns 'valid', 'invalid' (401 — revoked/wrong), or 'unreachable' (network error).
233
+ */
234
+ async function validateExistingToken(serverUrl, token) {
235
+ if (!token) return 'invalid';
236
+ if (!token.startsWith('ailens_dev_')) return 'unknown'; // legacy format — keep it
237
+ try {
238
+ const parsed = new URL(`${serverUrl}/api/auth/verify`);
239
+ const isHttps = parsed.protocol === 'https:';
240
+ const requestFn = isHttps ? httpsRequest : httpRequest;
241
+ const status = await new Promise((resolve, reject) => {
242
+ const req = requestFn({
243
+ hostname: parsed.hostname,
244
+ port: parsed.port || (isHttps ? 443 : 80),
245
+ path: parsed.pathname,
246
+ method: 'GET',
247
+ headers: { 'X-Auth-Token': token },
248
+ timeout: 10_000,
249
+ }, (res) => {
250
+ res.resume();
251
+ resolve(res.statusCode);
252
+ });
253
+ req.on('error', reject);
254
+ req.on('timeout', () => { req.destroy(); reject(new Error('timeout')); });
255
+ req.end();
256
+ });
257
+ if (status === 200) return 'valid';
258
+ if (status === 401) return 'invalid';
259
+ return 'unreachable';
260
+ } catch {
261
+ return 'unreachable';
262
+ }
263
+ }
264
+
229
265
  // =============================================================================
230
266
  // CLI flags for non-interactive mode
231
267
  // =============================================================================
@@ -277,7 +313,7 @@ export default async function init() {
277
313
  if (tools.length === 0) {
278
314
  warn('No supported AI tools detected.');
279
315
  info('Looked for ~/.claude/ and ~/.cursor/ directories.');
280
- info('Install Claude Code or Cursor, then re-run: npx ai-lens init');
316
+ info('Install Claude Code or Cursor, then re-run: npx -y ai-lens init');
281
317
  return;
282
318
  }
283
319
 
@@ -303,6 +339,8 @@ export default async function init() {
303
339
  );
304
340
  serverUrl = (serverInput || currentServer).replace(/\/+$/, '');
305
341
  }
342
+ if (!/^https?:\/\//i.test(serverUrl)) serverUrl = `http://${serverUrl}`;
343
+ try { new URL(serverUrl); } catch { error(`Invalid server URL: ${serverUrl}`); process.exit(1); }
306
344
  info(` Server: ${serverUrl}`);
307
345
 
308
346
  // Project filter
@@ -317,7 +355,24 @@ export default async function init() {
317
355
  const projectsInput = await ask(
318
356
  `Projects to track (comma-separated, ~ supported, Enter = ${projectsDefault}): `,
319
357
  );
320
- projects = projectsInput || currentProjects;
358
+ projects = (projectsInput && projectsInput.trim() && projectsInput.trim().toLowerCase() !== 'all')
359
+ ? projectsInput
360
+ : null;
361
+ }
362
+ // Guard: non-string (e.g. array from corrupt config) → treat as unset
363
+ if (projects && typeof projects !== 'string') {
364
+ projects = null;
365
+ }
366
+ // Guard: "all" means monitor everything → null
367
+ if (typeof projects === 'string' && projects.trim().toLowerCase() === 'all') {
368
+ projects = null;
369
+ }
370
+ // Normalize: resolve relative paths to absolute, expand ~
371
+ if (projects) {
372
+ const home = homedir();
373
+ projects = projects.split(',').map(p => p.trim()).filter(Boolean)
374
+ .map(p => p.startsWith('~/') ? join(home, p.slice(2)) : resolve(p))
375
+ .join(',');
321
376
  }
322
377
  if (projects) {
323
378
  info(` Tracking: ${projects}`);
@@ -325,15 +380,37 @@ export default async function init() {
325
380
  info(' Tracking: all projects');
326
381
  }
327
382
 
328
- // Save config if changed
383
+ // Build new config in memory — saved after "Proceed?" confirmation
329
384
  const newConfig = { ...currentConfig, serverUrl, projects };
330
- if (serverUrl !== currentConfig.serverUrl || projects !== currentConfig.projects) {
331
- saveLensConfig(newConfig);
385
+ blank();
386
+
387
+ // Install client files to ~/.ai-lens/client/
388
+ heading('Installing client files...');
389
+ try {
390
+ installClientFiles();
391
+ success(' Copied client files to ~/.ai-lens/client/');
392
+ } catch (err) {
393
+ error(` Failed to install client files: ${err.message}`);
394
+ return;
332
395
  }
333
396
  blank();
334
397
 
335
398
  // Authentication
336
399
  heading('Authentication');
400
+ if (currentConfig.authToken) {
401
+ const tokenStatus = await validateExistingToken(serverUrl, currentConfig.authToken);
402
+ if (tokenStatus === 'valid') {
403
+ success(' Already authenticated (token verified)');
404
+ } else if (tokenStatus === 'unknown') {
405
+ warn(' Token format not recognized — keeping existing token');
406
+ } else if (tokenStatus === 'invalid') {
407
+ warn(' Existing token is invalid or revoked — re-authenticating...');
408
+ currentConfig.authToken = null;
409
+ newConfig.authToken = null;
410
+ } else {
411
+ warn(' Could not reach server to verify token — keeping existing token');
412
+ }
413
+ }
337
414
  if (!currentConfig.authToken) {
338
415
  try {
339
416
  const result = await deviceCodeAuth(serverUrl);
@@ -344,12 +421,22 @@ export default async function init() {
344
421
  if (err.message.includes('not configured')) {
345
422
  warn(` Auth not configured on server — personal mode (events sent via git identity)`);
346
423
  } else {
347
- error(` Authentication failed: ${err.message}`);
348
- return;
424
+ warn(` Authentication failed: ${err.message}`);
425
+ warn(` Run "npx -y ai-lens init" again later to authenticate`);
349
426
  }
350
427
  }
351
- } else {
352
- success(' Already authenticated (token present)');
428
+ }
429
+
430
+ // Validate identity: no token + no git email = events will be dropped
431
+ if (!newConfig.authToken) {
432
+ const { email } = getGitIdentity();
433
+ if (!email) {
434
+ blank();
435
+ error(' No auth token and no git email configured.');
436
+ error(' Events will be silently dropped until one is available.');
437
+ info(' Fix: git config --global user.email "you@example.com"');
438
+ info(' Or re-run init when Auth0 is configured on the server.');
439
+ }
353
440
  }
354
441
  blank();
355
442
 
@@ -382,15 +469,17 @@ export default async function init() {
382
469
  // Filter to tools that need changes
383
470
  const pending = analyses.filter(a => a.analysis.status !== 'current');
384
471
 
385
- // Clean up legacy hook locations (always, even if current hooks are up-to-date)
386
- for (const { tool } of analyses) {
387
- for (const lr of cleanupLegacyHooks(tool)) {
388
- success(` ${tool.name}: ${lr.action} legacy hooks in ${lr.path}`);
472
+ if (pending.length === 0) {
473
+ saveLensConfig(newConfig);
474
+
475
+ // Clean up legacy hook locations (safe: hooks are already current)
476
+ for (const { tool } of analyses) {
477
+ for (const lr of cleanupLegacyHooks(tool)) {
478
+ success(` ${tool.name}: ${lr.action} legacy hooks in ${lr.path}`);
479
+ }
389
480
  }
390
- }
391
481
 
392
- if (pending.length === 0) {
393
- success('Everything is up-to-date. Nothing to do.');
482
+ success('Hooks are up-to-date.');
394
483
  } else {
395
484
  // Show plan
396
485
  heading('Plan:');
@@ -412,16 +501,15 @@ export default async function init() {
412
501
  }
413
502
  blank();
414
503
 
415
- // Install client files to ~/.ai-lens/client/
416
- heading('Installing client files...');
417
- try {
418
- installClientFiles();
419
- success(' Copied client files to ~/.ai-lens/client/');
420
- } catch (err) {
421
- error(` Failed to install client files: ${err.message}`);
422
- return;
504
+ // Persist config only after confirmation
505
+ saveLensConfig(newConfig);
506
+
507
+ // Clean up legacy hook locations before applying new ones
508
+ for (const { tool } of analyses) {
509
+ for (const lr of cleanupLegacyHooks(tool)) {
510
+ success(` ${tool.name}: ${lr.action} legacy hooks in ${lr.path}`);
511
+ }
423
512
  }
424
- blank();
425
513
 
426
514
  // Apply
427
515
  heading('Applying changes...');
@@ -429,6 +517,10 @@ export default async function init() {
429
517
 
430
518
  for (const { tool, analysis } of pending) {
431
519
  try {
520
+ // Backup malformed shared configs (e.g. ~/.claude/settings.json) before overwriting
521
+ if (analysis.status === 'malformed' && tool.sharedConfig) {
522
+ try { copyFileSync(tool.configPath, tool.configPath + '.bak'); } catch { /* file may be gone */ }
523
+ }
432
524
  const existingConfig = analysis.config || null;
433
525
  const merged = buildMergedConfig(tool, existingConfig);
434
526
  writeHooksConfig(tool, merged);
@@ -443,12 +535,14 @@ export default async function init() {
443
535
 
444
536
  // Verify hooks were written correctly
445
537
  heading('Verifying hooks...');
538
+ let verifyFailed = false;
446
539
  for (const { tool } of pending) {
447
540
  const recheck = analyzeToolHooks(tool);
448
541
  if (recheck.status === 'current') {
449
542
  success(` ${tool.name}: hooks verified`);
450
543
  } else {
451
544
  error(` ${tool.name}: hooks not current (status: ${recheck.status})`);
545
+ verifyFailed = true;
452
546
  }
453
547
  }
454
548
  blank();
@@ -462,6 +556,9 @@ export default async function init() {
462
556
  error(` ${r.tool}: failed (${r.error})`);
463
557
  }
464
558
  }
559
+ if (results.some(r => !r.ok) || verifyFailed) {
560
+ process.exitCode = 1;
561
+ }
465
562
  blank();
466
563
  }
467
564
 
@@ -594,6 +691,64 @@ export default async function init() {
594
691
  } catch (err) {
595
692
  warn(` Token: could not verify — ${err.message}`);
596
693
  }
694
+
695
+ // 3. E2E: POST a test event
696
+ try {
697
+ const { name: gitName, email: gitEmail } = getGitIdentity();
698
+ const testEvent = [{
699
+ source: 'cli',
700
+ session_id: `e2e-test-${Date.now()}`,
701
+ type: 'E2eTest',
702
+ timestamp: new Date().toISOString(),
703
+ project_path: process.cwd(),
704
+ data: { trigger: 'init' },
705
+ }];
706
+ const headers = {
707
+ 'Content-Type': 'application/json',
708
+ 'X-Auth-Token': finalConfig.authToken,
709
+ };
710
+ if (gitEmail) headers['X-Developer-Git-Email'] = gitEmail;
711
+ if (gitName) headers['X-Developer-Name'] = gitName;
712
+
713
+ const parsed = new URL(`${verifyUrl}/api/events`);
714
+ const isHttps = parsed.protocol === 'https:';
715
+ const requestFn = isHttps ? httpsRequest : httpRequest;
716
+ const data = JSON.stringify(testEvent);
717
+ headers['Content-Length'] = String(Buffer.byteLength(data));
718
+
719
+ const e2eBody = await new Promise((resolve, reject) => {
720
+ const req = requestFn({
721
+ hostname: parsed.hostname,
722
+ port: parsed.port || (isHttps ? 443 : 80),
723
+ path: parsed.pathname,
724
+ method: 'POST',
725
+ headers,
726
+ timeout: 10_000,
727
+ }, (res) => {
728
+ let buf = '';
729
+ res.on('data', (chunk) => { buf += chunk; });
730
+ res.on('end', () => {
731
+ try {
732
+ resolve({ status: res.statusCode, data: JSON.parse(buf) });
733
+ } catch {
734
+ reject(new Error(`Server responded ${res.statusCode}: ${buf}`));
735
+ }
736
+ });
737
+ });
738
+ req.on('error', reject);
739
+ req.on('timeout', () => { req.destroy(); reject(new Error('timeout')); });
740
+ req.write(data);
741
+ req.end();
742
+ });
743
+
744
+ if (e2eBody.status === 200 && e2eBody.data.received >= 1) {
745
+ success(' E2E: event accepted by server');
746
+ } else {
747
+ error(` E2E: unexpected response (HTTP ${e2eBody.status})`);
748
+ }
749
+ } catch (err) {
750
+ error(` E2E: ${err.message}`);
751
+ }
597
752
  }
598
753
  } else {
599
754
  warn(' No server URL configured — skipping verification');
package/cli/status.js CHANGED
@@ -4,7 +4,7 @@ import { join } from 'node:path';
4
4
  import { homedir } from 'node:os';
5
5
 
6
6
  import { getVersionInfo, readLensConfig, detectInstalledTools, analyzeToolHooks, CAPTURE_PATH, TOOL_CONFIGS } from './hooks.js';
7
- import { DATA_DIR, QUEUE_PATH, LOG_PATH, getGitIdentity } from '../client/config.js';
7
+ import { DATA_DIR, QUEUE_PATH, LOG_PATH, CAPTURE_LOG_PATH, getGitIdentity } from '../client/config.js';
8
8
  import { initLogger, info, success, warn, error, heading, blank } from './logger.js';
9
9
 
10
10
  // ANSI helpers
@@ -286,6 +286,48 @@ function checkSenderLog() {
286
286
  };
287
287
  }
288
288
 
289
+ function checkCaptureLog() {
290
+ if (!existsSync(CAPTURE_LOG_PATH)) {
291
+ return { ok: true, summary: 'no drops logged', detail: 'Capture log does not exist (no events dropped)' };
292
+ }
293
+
294
+ let lines;
295
+ try {
296
+ lines = readFileSync(CAPTURE_LOG_PATH, 'utf-8').split('\n').filter(Boolean);
297
+ } catch (err) {
298
+ return { ok: false, summary: `error reading log: ${err.message}`, detail: `Error: ${err.message}` };
299
+ }
300
+
301
+ // Count entries by category (reason for drops, msg for errors)
302
+ const counts = {};
303
+ let lastTs = null;
304
+ let hasErrors = false;
305
+ for (const line of lines) {
306
+ try {
307
+ const entry = JSON.parse(line);
308
+ const category = entry.reason || entry.msg || 'unknown';
309
+ counts[category] = (counts[category] || 0) + 1;
310
+ lastTs = entry.ts;
311
+ if (entry.msg) hasErrors = true;
312
+ } catch { /* non-JSON line */ }
313
+ }
314
+
315
+ const total = lines.length;
316
+ const breakdown = Object.entries(counts).map(([r, n]) => `${r}: ${n}`).join(', ');
317
+
318
+ let summary = `${total} entries`;
319
+ if (breakdown) summary += ` (${breakdown})`;
320
+ if (lastTs) summary += `, last ${relativeTime(lastTs)}`;
321
+
322
+ const last10 = lines.slice(-10);
323
+
324
+ return {
325
+ ok: !hasErrors,
326
+ summary,
327
+ detail: `Log: ${CAPTURE_LOG_PATH}\nTotal: ${total}\n\nLast 10 entries:\n${last10.join('\n')}`,
328
+ };
329
+ }
330
+
289
331
  async function checkServer(serverUrl) {
290
332
  if (!serverUrl) {
291
333
  return { ok: false, summary: 'no server URL configured', detail: 'Cannot check server: no serverUrl in config' };
@@ -340,6 +382,44 @@ async function checkToken(serverUrl, authToken) {
340
382
  }
341
383
  }
342
384
 
385
+ async function checkE2e(serverUrl, authToken) {
386
+ if (!serverUrl || !authToken) {
387
+ const missing = !serverUrl ? 'server URL' : 'auth token';
388
+ return { ok: false, summary: `no ${missing} configured`, detail: `Cannot run E2E test: missing ${missing}` };
389
+ }
390
+
391
+ const { name, email } = getGitIdentity();
392
+ const testEvent = [{
393
+ source: 'cli',
394
+ session_id: `e2e-test-${Date.now()}`,
395
+ type: 'E2eTest',
396
+ timestamp: new Date().toISOString(),
397
+ project_path: process.cwd(),
398
+ data: { trigger: 'status' },
399
+ }];
400
+
401
+ const url = `${serverUrl}/api/events`;
402
+ try {
403
+ const res = await fetch(url, {
404
+ method: 'POST',
405
+ headers: {
406
+ 'Content-Type': 'application/json',
407
+ 'X-Auth-Token': authToken,
408
+ ...(email && { 'X-Developer-Git-Email': email }),
409
+ ...(name && { 'X-Developer-Name': name }),
410
+ },
411
+ body: JSON.stringify(testEvent),
412
+ });
413
+ const body = await res.json();
414
+ if (res.ok && body.received >= 1) {
415
+ return { ok: true, summary: 'event accepted', detail: `POST ${url} → ${res.status}, received: ${body.received}` };
416
+ }
417
+ return { ok: false, summary: `unexpected response (${res.status})`, detail: `POST ${url} → ${res.status}\nBody: ${JSON.stringify(body)}` };
418
+ } catch (err) {
419
+ return { ok: false, summary: `failed (${err.message})`, detail: `POST ${url}\nError: ${err.message}` };
420
+ }
421
+ }
422
+
343
423
  // ---------------------------------------------------------------------------
344
424
  // Report file generation
345
425
  // ---------------------------------------------------------------------------
@@ -423,6 +503,20 @@ function buildReport(results, timestamp) {
423
503
  }
424
504
  lines.push('');
425
505
 
506
+ // Capture drops log (last 100 lines)
507
+ lines.push(`${'='.repeat(60)}`);
508
+ lines.push(`Capture drops (${CAPTURE_LOG_PATH}):`);
509
+ try {
510
+ const capLines = readFileSync(CAPTURE_LOG_PATH, 'utf-8').split('\n').filter(Boolean);
511
+ lines.push(`Total: ${capLines.length} drops`);
512
+ for (const cl of capLines.slice(-100)) {
513
+ lines.push(cl);
514
+ }
515
+ } catch {
516
+ lines.push('(not found)');
517
+ }
518
+ lines.push('');
519
+
426
520
  return lines.join('\n');
427
521
  }
428
522
 
@@ -498,6 +592,9 @@ export default async function status() {
498
592
  // 8. Sender log
499
593
  printLine('Sender log', checkSenderLog());
500
594
 
595
+ // 8b. Capture drops
596
+ printLine('Capture drops', checkCaptureLog());
597
+
501
598
  // 9. Server connectivity
502
599
  const serverUrl = configResult.serverUrl || readLensConfig().serverUrl;
503
600
  const serverResult = await checkServer(serverUrl);
@@ -508,6 +605,10 @@ export default async function status() {
508
605
  const tokenResult = await checkToken(serverUrl, authToken);
509
606
  printLine('Token', tokenResult);
510
607
 
608
+ // 11. E2E connectivity test
609
+ const e2eResult = await checkE2e(serverUrl, authToken);
610
+ printLine('E2E test', e2eResult);
611
+
511
612
  // Write report file
512
613
  const timestamp = new Date().toISOString();
513
614
  const report = buildReport(results, timestamp);
package/client/capture.js CHANGED
@@ -17,7 +17,10 @@ import {
17
17
  QUEUE_PATH,
18
18
  SESSION_PATHS_PATH,
19
19
  LAST_EVENTS_PATH,
20
+ CAPTURE_LOG_PATH,
21
+ captureLog,
20
22
  getServerUrl,
23
+ getAuthToken,
21
24
  getGitIdentity,
22
25
  getGitMetadata,
23
26
  getMonitoredProjects,
@@ -31,6 +34,24 @@ try {
31
34
 
32
35
  const __dirname = dirname(fileURLToPath(import.meta.url));
33
36
 
37
+ function logDrop(reason, meta = {}) {
38
+ try {
39
+ const entry = { ts: new Date().toISOString(), reason, ...meta };
40
+ appendFileSync(CAPTURE_LOG_PATH, JSON.stringify(entry) + '\n');
41
+ } catch { /* best-effort */ }
42
+ }
43
+
44
+ // =============================================================================
45
+ // Identity Resolution
46
+ // =============================================================================
47
+
48
+ export function resolveIdentity(gitIdentity, event, hasAuthToken) {
49
+ const email = gitIdentity.email || event.user_email || null;
50
+ if (!email && !hasAuthToken) return { proceed: false, email: null, name: null };
51
+ const name = gitIdentity.name || event.user_name || email || null;
52
+ return { proceed: true, email, name };
53
+ }
54
+
34
55
  // =============================================================================
35
56
  // Truncation (reused from ai-session-lens prompts.js approach)
36
57
  // =============================================================================
@@ -117,7 +138,9 @@ function cacheSessionPath(sessionId, projectPath) {
117
138
  const paths = loadSessionPaths();
118
139
  if (paths[sessionId] !== projectPath) {
119
140
  paths[sessionId] = projectPath;
120
- saveSessionPaths(paths);
141
+ try {
142
+ saveSessionPaths(paths);
143
+ } catch { /* cache write failed — event proceeds without cached path */ }
121
144
  }
122
145
  }
123
146
 
@@ -150,18 +173,27 @@ function saveLastEvents(cache) {
150
173
  }
151
174
 
152
175
  /**
153
- * Returns true if this event is a duplicate that should be dropped.
154
- * Updates the cache with the current event type.
176
+ * Pure check — returns true if this event is a duplicate that should be dropped.
177
+ * Does NOT update the cache. Call commitDedup() after successful queue write.
155
178
  */
156
- export function isDuplicateEvent(sessionId, type) {
179
+ export function checkDuplicate(sessionId, source, type) {
157
180
  const cache = loadLastEvents();
158
- const prev = cache[sessionId];
159
- const dominated = DEDUP_TYPES.has(type) && prev === type;
160
- if (prev !== type) {
161
- cache[sessionId] = type;
162
- saveLastEvents(cache);
181
+ const key = `${source}:${sessionId}`;
182
+ const prev = cache[key];
183
+ return DEDUP_TYPES.has(type) && prev === type;
184
+ }
185
+
186
+ /**
187
+ * Commit the event type to the dedup cache.
188
+ * Call only after successful queue write to avoid cache poisoning.
189
+ */
190
+ export function commitDedup(sessionId, source, type) {
191
+ const cache = loadLastEvents();
192
+ const key = `${source}:${sessionId}`;
193
+ if (cache[key] !== type) {
194
+ cache[key] = type;
195
+ try { saveLastEvents(cache); } catch { /* best effort */ }
163
196
  }
164
- return dominated;
165
197
  }
166
198
 
167
199
  // =============================================================================
@@ -317,12 +349,32 @@ const CURSOR_TYPE_MAP = {
317
349
  sessionEnd: 'SessionEnd',
318
350
  };
319
351
 
352
+ function pickWorkspaceRoot(roots) {
353
+ if (!Array.isArray(roots) || roots.length === 0) return null;
354
+ const valid = roots.filter(r => typeof r === 'string' && r.length > 0);
355
+ if (valid.length === 0) return null;
356
+ if (valid.length === 1) return valid[0];
357
+ const monitored = getMonitoredProjects();
358
+ if (monitored) {
359
+ const match = valid.find(root =>
360
+ monitored.some(p => root === p || root.startsWith(p + '/'))
361
+ );
362
+ if (match) return match;
363
+ }
364
+ return valid[0];
365
+ }
366
+
320
367
  function normalizeCursor(event) {
321
368
  const sessionId = event.conversation_id || null;
322
369
  const hookName = event.hook_event_name;
323
370
  const type = CURSOR_TYPE_MAP[hookName] || hookName;
324
371
  const timestamp = new Date().toISOString();
325
- const projectPath = Array.isArray(event.workspace_roots) ? event.workspace_roots[0] : null;
372
+ let projectPath = pickWorkspaceRoot(event.workspace_roots);
373
+ if (projectPath) {
374
+ cacheSessionPath(sessionId, projectPath);
375
+ } else {
376
+ projectPath = getCachedSessionPath(sessionId);
377
+ }
326
378
 
327
379
  let data = {};
328
380
  switch (hookName) {
@@ -339,10 +391,12 @@ function normalizeCursor(event) {
339
391
  input: truncateToolInput(event.tool_input, toolName),
340
392
  result: truncateToolResult(event.tool_result, toolName),
341
393
  };
394
+ const mcpServer = event.mcp_server || (toolName.startsWith('mcp__') ? toolName.split('__')[1] : null);
395
+ if (mcpServer) data.mcp_server = mcpServer;
342
396
  break;
343
397
  }
344
398
  case 'afterFileEdit':
345
- data = { file_path: event.file_path, edits: event.edits };
399
+ data = { file_path: event.file_path, edits: truncateToolResult(event.edits, 'default') };
346
400
  break;
347
401
  case 'afterShellExecution':
348
402
  data = {
@@ -353,16 +407,18 @@ function normalizeCursor(event) {
353
407
  case 'postToolUseFailure': {
354
408
  const failToolName = event.tool_name || 'unknown';
355
409
  data = {
356
- tool_name: failToolName,
357
- tool_input: truncateToolInput(event.tool_input, failToolName),
358
- error_message: truncate(event.error_message || '', 300),
359
- failure_type: event.failure_type || null,
410
+ tool: failToolName,
411
+ input: truncateToolInput(event.tool_input, failToolName),
412
+ error: truncate(event.error_message || '', 300),
413
+ failure_type: event.failure_type ?? null,
360
414
  duration: event.duration ?? null,
361
415
  };
416
+ const failMcp = event.mcp_server || (failToolName.startsWith('mcp__') ? failToolName.split('__')[1] : null);
417
+ if (failMcp) data.mcp_server = failMcp;
362
418
  break;
363
419
  }
364
420
  case 'afterMCPExecution':
365
- data = { mcp_server: event.mcp_server, result: event.result };
421
+ data = { mcp_server: event.mcp_server, result: truncateToolResult(event.result, 'default') };
366
422
  break;
367
423
  case 'subagentStart':
368
424
  data = {
@@ -468,7 +524,7 @@ async function main() {
468
524
  // Check server is configured
469
525
  const serverUrl = getServerUrl();
470
526
  if (!serverUrl) {
471
- // No server configured — silently exit
527
+ logDrop('no_server_url');
472
528
  process.exit(0);
473
529
  }
474
530
 
@@ -481,6 +537,7 @@ async function main() {
481
537
  }
482
538
 
483
539
  if (!input.trim()) {
540
+ logDrop('empty_stdin');
484
541
  process.exit(0);
485
542
  }
486
543
 
@@ -488,35 +545,45 @@ async function main() {
488
545
  try {
489
546
  event = JSON.parse(input);
490
547
  } catch {
491
- // Malformed stdin exit gracefully
548
+ logDrop('malformed_json', { input_length: input.length, first_chars: input.slice(0, 100) });
492
549
  process.exit(0);
493
550
  }
494
551
 
495
552
  const unified = normalizeEvent(event);
496
553
  if (!unified || !unified.session_id) {
497
- // Unknown source or no session_id — drop
498
- process.exit(0);
499
- }
500
-
501
- // Deduplicate consecutive identical event types (e.g. repeated Stop from idle sessions)
502
- if (isDuplicateEvent(unified.session_id, unified.type)) {
554
+ logDrop('normalize_failed', { hook: event.hook_event_name });
503
555
  process.exit(0);
504
556
  }
505
557
 
506
558
  // Filter by monitored projects (if configured)
507
559
  const monitored = getMonitoredProjects();
508
- if (monitored && !monitored.some(p => unified.project_path === p || unified.project_path?.startsWith(p + '/'))) {
509
- process.exit(0);
560
+ if (monitored && unified.project_path && !monitored.some(p => unified.project_path === p || unified.project_path.startsWith(p + '/'))) {
561
+ // Fallback: for Cursor multi-root workspaces, check if any raw workspace_roots entry matches
562
+ const roots = Array.isArray(event.workspace_roots) ? event.workspace_roots : [];
563
+ if (!roots.some(root => monitored.some(p => root === p || root.startsWith(p + '/')))) {
564
+ logDrop('project_filter', { type: unified.type, source: unified.source, session_id: unified.session_id, project_path: unified.project_path, monitored });
565
+ process.exit(0);
566
+ }
510
567
  }
511
568
 
512
569
  // Resolve identity: git first, then fall back to event payload (e.g. Cursor's user_email)
570
+ // When auth token is present, server resolves developer from token — email is optional
513
571
  const identity = getGitIdentity();
514
- const email = identity.email || event.user_email || null;
515
- if (!email) {
572
+ const hasAuthToken = !!getAuthToken();
573
+ const resolved = resolveIdentity(identity, event, hasAuthToken);
574
+ if (!resolved.proceed) {
575
+ logDrop('no_email', { type: unified.type, session_id: unified.session_id });
516
576
  process.exit(0);
517
577
  }
518
- unified.developer_email = email;
519
- unified.developer_name = identity.name || event.user_name || email;
578
+
579
+ // Deduplicate consecutive identical event types (e.g. repeated Stop from idle sessions)
580
+ // Placed after project_filter and no_email checks so dropped events don't poison the cache
581
+ if (checkDuplicate(unified.session_id, unified.source, unified.type)) {
582
+ logDrop('duplicate', { type: unified.type, session_id: unified.session_id });
583
+ process.exit(0);
584
+ }
585
+ unified.developer_email = resolved.email;
586
+ unified.developer_name = resolved.name;
520
587
 
521
588
  // Attach git metadata (remote, branch, commit)
522
589
  const gitMeta = getGitMetadata(unified.project_path);
@@ -525,14 +592,30 @@ async function main() {
525
592
  unified.git_commit = gitMeta.git_commit;
526
593
 
527
594
  // Append to queue
528
- appendToQueue(unified);
595
+ try {
596
+ appendToQueue(unified);
597
+ } catch (err) {
598
+ captureLog({ msg: 'queue-write-failed', error: err.message, type: unified.type, session_id: unified.session_id });
599
+ process.exit(1);
600
+ }
601
+
602
+ // Commit dedup cache only after successful queue write (avoids cache poisoning on write failure)
603
+ commitDedup(unified.session_id, unified.source, unified.type);
529
604
 
530
605
  // Always try to spawn sender — atomic rename in sender handles dedup
531
- trySpawnSender();
606
+ try {
607
+ trySpawnSender();
608
+ } catch (err) {
609
+ captureLog({ msg: 'sender-spawn-failed', error: err.message });
610
+ // event is queued — sender will be spawned on next capture
611
+ }
532
612
  }
533
613
 
534
614
  // Only run main when executed directly (not when imported for testing)
535
615
  const isDirectRun = process.argv[1] && fileURLToPath(import.meta.url) === process.argv[1];
536
616
  if (isDirectRun) {
537
- main().catch(() => process.exit(0));
617
+ main().catch((err) => {
618
+ try { captureLog({ msg: 'capture-error', error: err.message }); } catch {}
619
+ process.exit(1);
620
+ });
538
621
  }
package/client/config.js CHANGED
@@ -1,5 +1,5 @@
1
1
  import { mkdirSync, appendFileSync, readFileSync, writeFileSync, existsSync, renameSync } from 'node:fs';
2
- import { join } from 'node:path';
2
+ import { join, resolve } from 'node:path';
3
3
  import { homedir } from 'node:os';
4
4
  import { execSync } from 'node:child_process';
5
5
 
@@ -11,18 +11,30 @@ export const SESSION_PATHS_PATH = join(DATA_DIR, 'session-paths.json');
11
11
  export const GIT_REMOTES_PATH = join(DATA_DIR, 'git-remotes.json');
12
12
  export const LAST_EVENTS_PATH = join(DATA_DIR, 'last-events.json');
13
13
  export const LOG_PATH = join(DATA_DIR, 'sender.log');
14
+ export const CAPTURE_LOG_PATH = join(DATA_DIR, 'capture.log');
14
15
 
15
16
/**
 * Append one JSON line to the sender log (sender.log).
 * An ISO `ts` timestamp is added; a caller-supplied `ts` field wins.
 * Best-effort: a failed append (missing dir, full disk, permissions) must not
 * crash the sender mid-send, so errors are swallowed — mirrors captureLog().
 * @param {object} fields - key/value pairs to record
 */
export function log(fields) {
  const entry = { ts: new Date().toISOString(), ...fields };
  try {
    appendFileSync(LOG_PATH, JSON.stringify(entry) + '\n');
  } catch { /* logging is best-effort — never take down the caller */ }
}
19
20
 
21
/**
 * Append one JSON line to the capture-side log (capture.log).
 * Best-effort by design: if the append fails (e.g. disk full), the error is
 * ignored — logging must never break event capture.
 * @param {object} fields - key/value pairs to record alongside a ts timestamp
 */
export function captureLog(fields) {
  const record = JSON.stringify({ ts: new Date().toISOString(), ...fields });
  try {
    appendFileSync(CAPTURE_LOG_PATH, `${record}\n`);
  } catch {
    // last resort — disk may be full
  }
}
27
+
20
28
  let _configCache;
21
29
  function loadConfig() {
22
30
  if (_configCache !== undefined) return _configCache;
23
31
  try {
24
32
  _configCache = JSON.parse(readFileSync(CONFIG_PATH, 'utf-8'));
25
- } catch {
33
+ } catch (err) {
34
+ if (err.code !== 'ENOENT') {
35
+ captureLog({ msg: 'config-parse-error', path: CONFIG_PATH, error: err.message });
36
+ console.error(`[ai-lens] config.json corrupt, falling back to defaults: ${err.message}`);
37
+ }
26
38
  _configCache = {};
27
39
  }
28
40
  return _configCache;
@@ -46,16 +58,19 @@ export function getServerUrl() {
46
58
/**
 * Resolve the list of monitored project roots.
 * Source: AI_LENS_PROJECTS env var, falling back to config.json `projects`.
 * Format: comma-separated paths; `~` and `~/...` expand to the home directory;
 * each entry is resolved to an absolute path and trailing slashes are stripped.
 * NOTE(review): relative entries resolve against the current process cwd,
 * which varies between hook invocations — prefer absolute or `~` paths.
 * @returns {string[]|null} absolute paths, or null meaning "monitor everything"
 *   (unset, non-string value, the literal "all", or an effectively-empty list)
 */
export function getMonitoredProjects() {
  const val = process.env.AI_LENS_PROJECTS || loadConfig().projects;
  if (!val) return null; // null = monitor everything
  if (typeof val !== 'string') return null; // non-string (e.g. array) = treat as unset
  if (val.trim().toLowerCase() === 'all') return null;
  const paths = val.split(',').map(p => p.trim()).filter(Boolean);
  if (paths.length === 0) return null;
  const home = homedir();
  return paths
    // Expand a bare "~" as well as a "~/" prefix — a lone "~" previously
    // fell through to resolve() and produced a cwd-relative garbage path.
    .map(p => p === '~' ? home : (p.startsWith('~/') ? join(home, p.slice(2)) : p))
    .map(p => resolve(p))
    .map(p => p.endsWith('/') ? p.slice(0, -1) : p);
}
54
71
 
55
- const DEFAULT_AUTH_TOKEN = 'collector:secret-collector-token-2026-ai-lens';
56
-
57
72
/**
 * Resolve the collector auth token: environment variable wins, then
 * config.json's `authToken`, then null (no token configured).
 * @returns {string|null}
 */
export function getAuthToken() {
  const envToken = process.env.AI_LENS_AUTH_TOKEN;
  if (envToken) return envToken;
  return loadConfig().authToken || null;
}
60
75
 
61
76
  export function getGitIdentity() {
@@ -64,14 +79,14 @@ export function getGitIdentity() {
64
79
 
65
80
  try {
66
81
  email = execSync('git config user.email', { encoding: 'utf-8', timeout: 3000 }).trim();
67
- } catch {
68
- // git not configured
82
+ } catch (err) {
83
+ captureLog({ msg: 'git-email-failed', error: err.message?.split('\n')[0] });
69
84
  }
70
85
 
71
86
  try {
72
87
  name = execSync('git config user.name', { encoding: 'utf-8', timeout: 3000 }).trim();
73
88
  } catch {
74
- // git not configured
89
+ // git name missing is non-critical — email or token is sufficient
75
90
  }
76
91
 
77
92
  return { email, name };
@@ -106,7 +121,9 @@ function cacheRemote(projectPath, remote) {
106
121
  const remotes = loadGitRemotes();
107
122
  if (remotes[projectPath] !== remote) {
108
123
  remotes[projectPath] = remote;
109
- saveGitRemotes(remotes);
124
+ try {
125
+ saveGitRemotes(remotes);
126
+ } catch { /* cache write failed — event proceeds without cached remote */ }
110
127
  }
111
128
  }
112
129
 
package/client/redact.js CHANGED
@@ -34,11 +34,20 @@ const PATTERNS = [
34
34
  { type: 'JWT', re: /eyJ[A-Za-z0-9_-]{10,}\.eyJ[A-Za-z0-9_-]{10,}\.[A-Za-z0-9_\-.+/=]{10,}/g },
35
35
 
36
36
  // PEM private keys — full block or truncated (just the header + base64 content)
37
- { type: 'PRIVATE_KEY', re: /-----BEGIN[A-Z ]*PRIVATE KEY-----[\s\S]*?(?:-----END[A-Z ]*PRIVATE KEY-----|$)/g },
37
+ { type: 'PRIVATE_KEY', re: /-----BEGIN[A-Z ]*PRIVATE KEY-----[A-Za-z0-9+/=\s\\.]*(?:-----END[A-Z ]*PRIVATE KEY-----)?/g },
38
38
 
39
39
  // Connection string password (://user:password@host) — redacts password only
40
40
  { type: 'CONNECTION_STRING', re: /:\/\/([^:@\s]+):([^@\s]{3,})@/g, replacer: (m, user, _pw) => `://${user}:[REDACTED:CONNECTION_STRING]@` },
41
41
 
42
+ // Environment variables: UPPER_CASE_VAR with secret keyword = value
43
+ // Catches AI_LENS_AUTH_TOKEN=..., PGPASSWORD=..., AWS_SECRET_ACCESS_KEY=..., etc.
44
+ // Skips template refs like ${VAR} since value excludes { } chars.
45
+ {
46
+ type: 'ENV_VAR',
47
+ re: /([A-Z_]*(?:SECRET|TOKEN|PASSWORD|PASSWD|API_KEY|APIKEY)[A-Z_]*\s*=\s*["']?)([^\s"';\\\`|>{}\[\]]{8,})/g,
48
+ replacer: (m, prefix, _val) => `${prefix}[REDACTED:ENV_VAR]`,
49
+ },
50
+
42
51
  // Key-value pairs: password=..., token: ..., etc.
43
52
  {
44
53
  type: 'KEY_VALUE',
package/client/sender.js CHANGED
@@ -10,7 +10,7 @@
10
10
  * - Rollback on failure: unsent events prepended back to queue.jsonl
11
11
  */
12
12
 
13
- import { readFileSync, writeFileSync, unlinkSync, renameSync } from 'node:fs';
13
+ import { readFileSync, writeFileSync, appendFileSync, unlinkSync, renameSync } from 'node:fs';
14
14
  import { request as httpsRequest } from 'node:https';
15
15
  import { request as httpRequest } from 'node:http';
16
16
  import { fileURLToPath } from 'node:url';
@@ -56,19 +56,32 @@ export function parseQueueContent(content) {
56
56
 
57
57
/**
 * Group events by developer_email.
 * Events without developer_email are skipped unless hasAuthToken is true,
 * in which case they are grouped under a special '__token_auth__' key
 * (server resolves identity from the token).
 * Returns Map<email|'__token_auth__', { identity: { email, name }, events: [] }>
 */
export function groupByDeveloper(events, hasAuthToken = false) {
  const TOKEN_KEY = '__token_auth__';
  const groups = new Map();
  let skippedNoEmail = 0;

  // Fetch-or-create the bucket for a grouping key; the identity is fixed by
  // the first event that creates the bucket.
  const bucketFor = (key, identity) => {
    let bucket = groups.get(key);
    if (bucket === undefined) {
      bucket = { identity, events: [] };
      groups.set(key, bucket);
    }
    return bucket;
  };

  for (const evt of events) {
    const email = evt.developer_email;
    if (email) {
      bucketFor(email, { email, name: evt.developer_name || email }).events.push(evt);
    } else if (hasAuthToken) {
      bucketFor(TOKEN_KEY, { email: null, name: null }).events.push(evt);
    } else {
      skippedNoEmail += 1;
    }
  }

  if (skippedNoEmail > 0) {
    log({ msg: 'skip-no-email', skipped: skippedNoEmail, total: events.length });
  }
  return groups;
}
74
87
 
@@ -79,29 +92,93 @@ export function buildRollbackContent(unsentContent, existingContent) {
79
92
  return unsentContent + existingContent;
80
93
  }
81
94
 
95
/**
 * Check whether a sender lock file is stale (its owner is no longer running).
 * A lock is stale when the file is missing, unreadable as a positive PID
 * (empty or corrupt content), or names a process that no longer exists.
 * @param {string} lockPath - path to the PID lock file
 * @returns {boolean} true when it is safe to recover the orphaned sending file
 */
export function isLockStale(lockPath) {
  let pid;
  try {
    pid = parseInt(readFileSync(lockPath, 'utf-8').trim(), 10);
  } catch (err) {
    if (err.code === 'ENOENT') return true; // no lock file at all
    return false; // unreadable for another reason — assume active
  }
  // Empty/corrupt lock content must count as stale, otherwise a junk file
  // would block orphan recovery forever (kill(NaN) throws a non-ESRCH error).
  // pid <= 0 is also rejected: kill(0)/kill(-pg) signal process groups and
  // would falsely report the owner as alive.
  if (!Number.isInteger(pid) || pid <= 0) return true;
  try {
    process.kill(pid, 0); // signal 0 = existence probe; throws if gone
    return false; // process alive — lock is active
  } catch (err) {
    if (err.code === 'ESRCH') return true; // process dead — stale
    return false; // e.g. EPERM: process exists under another user — assume active
  }
}
110
+
111
/**
 * Merge unsent content back to the front of the queue file.
 * Strategy: write the unsent lines to a temp file, atomically drain the
 * current queue via rename, append the drained lines after the unsent ones,
 * then move the merged file into place — unsent events regain queue priority.
 * NOTE(review): a capture appending between the drain rename and the final
 * rename creates a fresh queue file that the final rename replaces — a small
 * loss window that would need real file locking to close fully.
 * @param {string} unsentContent - newline-terminated JSONL to re-queue first
 * @param {string} queuePath - path of the queue file to merge into
 * @throws propagates any non-ENOENT fs error (the temp file is removed first)
 */
export function mergeToQueue(unsentContent, queuePath) {
  const tmpPath = queuePath + '.rollback.' + process.pid;
  writeFileSync(tmpPath, unsentContent);
  const drainPath = queuePath + '.drain.' + process.pid;
  try {
    renameSync(queuePath, drainPath);
    appendFileSync(tmpPath, readFileSync(drainPath, 'utf-8'));
    unlinkSync(drainPath);
  } catch (err) {
    if (err.code !== 'ENOENT') {
      // Don't leave the temp file behind on unexpected failures
      try { unlinkSync(tmpPath); } catch { /* best effort */ }
      throw err;
    }
    // ENOENT: no queue existed — nothing to drain
  }
  renameSync(tmpPath, queuePath);
}
129
+
82
130
/**
 * Atomically acquire the queue for sending.
 * renameSync is atomic on POSIX — acts as a mutex: the sender that wins the
 * rename of the queue file to the sending file owns the batch.
 *
 * Before acquiring, recovers an orphaned sending file left by a crashed
 * sender. The orphan is claimed with an atomic rename so that two senders
 * racing on the same stale lock cannot both merge it back (which would
 * duplicate events); only the winner recovers, the loser sees ENOENT.
 *
 * @param {string} [queuePath] - Override queue path (for testing); defaults to QUEUE_PATH
 * @param {string} [sendingPath] - Override sending path (for testing); defaults to SENDING_PATH
 * @returns {{events: object[], sendingPath: string}|null} acquired batch, or
 *   null when there is nothing to send or another sender is active
 */
export function acquireQueue(queuePath = QUEUE_PATH, sendingPath = SENDING_PATH) {
  const lockPath = sendingPath + '.lock';

  // Recover an orphaned sending file from a previously crashed sender.
  // Only recover if the lock is stale (owner process is dead).
  try {
    readFileSync(sendingPath, 'utf-8'); // existence probe — throws ENOENT when no orphan
    if (!isLockStale(lockPath)) {
      // Another sender is actively working — exit without touching its file
      return null;
    }
    // Atomic claim: if a concurrent sender also saw the stale lock, only one
    // rename succeeds; the loser gets ENOENT and falls through to the normal path.
    const claimPath = sendingPath + '.recover.' + process.pid;
    renameSync(sendingPath, claimPath);
    const orphaned = readFileSync(claimPath, 'utf-8');
    if (orphaned.trim()) {
      mergeToQueue(orphaned, queuePath);
      log({ msg: 'orphan-recovered', bytes: Buffer.byteLength(orphaned) });
    }
    unlinkSync(claimPath);
    try { unlinkSync(lockPath); } catch { /* stale lock already gone */ }
  } catch (err) {
    if (err.code !== 'ENOENT') throw err;
    // No orphan — or another sender claimed it first. Normal path.
  }

  try {
    renameSync(queuePath, sendingPath);
  } catch (err) {
    if (err.code === 'ENOENT') return null; // no queue or another sender got it
    throw err;
  }

  // Write PID lock so other senders know we're active.
  // NOTE(review): between the rename above and this write the lock does not
  // exist yet and isLockStale treats "missing" as stale — the atomic claim
  // rename above is what keeps a racing recoverer from duplicating events.
  writeFileSync(lockPath, String(process.pid));

  const content = readFileSync(sendingPath, 'utf-8');
  const { events, dropped, overflow } = parseQueueContent(content);
  if (dropped > 0) log({ msg: 'queue-corruption', dropped });
  if (overflow > 0) log({ msg: 'queue-overflow', dropped: overflow, kept: MAX_QUEUE_SIZE });
  if (events.length === 0) {
    unlinkSync(sendingPath);
    try { unlinkSync(lockPath); } catch { /* lock already released */ }
    return null;
  }

  return { events, sendingPath };
}
106
183
 
107
184
  /**
@@ -109,6 +186,7 @@ function acquireQueue() {
109
186
  */
110
187
/**
 * Mark a send as complete by deleting the sending file and then its PID lock.
 * Both deletes tolerate the file already being gone.
 */
function commitQueue(sendingPath) {
  for (const victim of [sendingPath, sendingPath + '.lock']) {
    try {
      unlinkSync(victim);
    } catch {
      // already gone — nothing to do
    }
  }
}
113
191
 
114
192
  /**
@@ -118,10 +196,9 @@ function commitQueue(sendingPath) {
118
196
/**
 * Full rollback after a failed send: push every event from the sending file
 * back onto the front of the queue, then release the sending file and lock.
 * Best-effort — on any error the sending file stays in place and is later
 * recovered as an orphan by the next sender run.
 */
function rollbackQueue(sendingPath, eventCount) {
  try {
    const unsentContent = readFileSync(sendingPath, 'utf-8');
    mergeToQueue(unsentContent, QUEUE_PATH);
    unlinkSync(sendingPath);
    try { unlinkSync(sendingPath + '.lock'); } catch { /* lock already gone */ }
    log({ msg: 'rollback', events: eventCount });
  } catch {
    // best effort — orphan recovery handles leftovers
  }
}
@@ -136,13 +213,12 @@ function rollbackQueue(sendingPath, eventCount) {
136
213
  */
137
214
  export function partialRollback(sendingPath, unsentEvents, totalCount, queuePath = QUEUE_PATH) {
138
215
  try {
139
- let existing = '';
140
- try { existing = readFileSync(queuePath, 'utf-8'); } catch { /* no new events */ }
141
216
  if (unsentEvents.length > 0) {
142
217
  const unsentContent = unsentEvents.map(e => JSON.stringify(e)).join('\n') + '\n';
143
- writeFileSync(queuePath, unsentContent + existing);
218
+ mergeToQueue(unsentContent, queuePath);
144
219
  }
145
220
  try { unlinkSync(sendingPath); } catch { /* already gone */ }
221
+ try { unlinkSync(sendingPath + '.lock'); } catch { /* already gone */ }
146
222
  log({ msg: 'partial-rollback', sent: totalCount - unsentEvents.length, unsent: unsentEvents.length });
147
223
  } catch (rollbackErr) {
148
224
  log({ msg: 'rollback-failed', error: rollbackErr.message });
@@ -188,16 +264,16 @@ export function chunkEvents(events, maxBytes = MAX_CHUNK_BYTES) {
188
264
  function postEvents(serverUrl, events, identity) {
189
265
  return new Promise((resolve, reject) => {
190
266
  const body = JSON.stringify(events);
191
- const url = new URL('/api/events', serverUrl);
267
+ const url = new URL(`${serverUrl}/api/events`);
192
268
  const isHttps = url.protocol === 'https:';
193
269
  const requestFn = isHttps ? httpsRequest : httpRequest;
194
270
 
195
271
  const headers = {
196
272
  'Content-Type': 'application/json',
197
273
  'Content-Length': Buffer.byteLength(body),
198
- 'X-Developer-Git-Email': identity.email,
199
- 'X-Developer-Name': encodeURIComponent(identity.name),
200
274
  };
275
+ if (identity.email) headers['X-Developer-Git-Email'] = identity.email;
276
+ if (identity.name) headers['X-Developer-Name'] = encodeURIComponent(identity.name);
201
277
 
202
278
  const authToken = getAuthToken();
203
279
  if (authToken) {
@@ -250,9 +326,12 @@ async function main() {
250
326
  const { events, sendingPath } = acquired;
251
327
 
252
328
  // Group events by developer_email (baked in at capture time)
253
- const byDeveloper = groupByDeveloper(events);
329
+ // When auth token is present, null-email events are grouped for token-based identity
330
+ const hasAuthToken = !!getAuthToken();
331
+ const byDeveloper = groupByDeveloper(events, hasAuthToken);
254
332
 
255
333
  if (byDeveloper.size === 0) {
334
+ log({ msg: 'queue-empty-after-grouping', total_events: events.length, has_auth_token: hasAuthToken });
256
335
  commitQueue(sendingPath);
257
336
  process.exit(0);
258
337
  }
@@ -277,6 +356,9 @@ async function main() {
277
356
  for (const chunk of chunks) {
278
357
  const result = await postEvents(serverUrl, chunk, identity);
279
358
  totalReceived += result.received;
359
+ if (result.skipped > 0) {
360
+ log({ msg: 'server-skipped', skipped: result.skipped, chunk_size: chunk.length, developer: identity.email });
361
+ }
280
362
  for (const evt of chunk) {
281
363
  if (evt.event_id) sentEventIds.add(evt.event_id);
282
364
  }
@@ -289,6 +371,9 @@ async function main() {
289
371
  const unsentEvents = events.filter(e => !sentEventIds.has(e.event_id));
290
372
  partialRollback(sendingPath, unsentEvents, events.length);
291
373
  log({ msg: 'failed', error: err.message, sent: events.length - unsentEvents.length, unsent: unsentEvents.length, server: serverUrl });
374
+ if (err.message.includes('401')) {
375
+ log({ msg: 'auth-failed', error: 'Token invalid or revoked. Run: npx -y ai-lens init' });
376
+ }
292
377
  }
293
378
  }
294
379
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "ai-lens",
3
- "version": "0.7.2",
3
+ "version": "0.7.4",
4
4
  "type": "module",
5
5
  "description": "Centralized session analytics for AI coding tools",
6
6
  "bin": {