litmus-cli 1.0.20 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61) hide show
  1. package/README.md +2 -2
  2. package/dist/commands/init.d.ts.map +1 -1
  3. package/dist/commands/init.js +4 -3
  4. package/dist/commands/init.js.map +1 -1
  5. package/dist/commands/status.js +1 -1
  6. package/dist/commands/status.js.map +1 -1
  7. package/dist/commands/submit.d.ts.map +1 -1
  8. package/dist/commands/submit.js +35 -11
  9. package/dist/commands/submit.js.map +1 -1
  10. package/dist/index.js +1 -1
  11. package/dist/index.js.map +1 -1
  12. package/dist/lib/api.d.ts +6 -16
  13. package/dist/lib/api.d.ts.map +1 -1
  14. package/dist/lib/api.js +67 -5
  15. package/dist/lib/api.js.map +1 -1
  16. package/dist/lib/config.d.ts +0 -3
  17. package/dist/lib/config.d.ts.map +1 -1
  18. package/dist/lib/config.js +0 -3
  19. package/dist/lib/config.js.map +1 -1
  20. package/dist/lib/detect-project.d.ts.map +1 -0
  21. package/dist/lib/detect-project.js.map +1 -0
  22. package/dist/lib/env-hosts.d.ts +12 -0
  23. package/dist/lib/env-hosts.d.ts.map +1 -0
  24. package/dist/lib/env-hosts.js +19 -0
  25. package/dist/lib/env-hosts.js.map +1 -0
  26. package/dist/{utils → lib}/errors.d.ts +1 -1
  27. package/dist/lib/errors.d.ts.map +1 -0
  28. package/dist/{utils → lib}/errors.js +2 -3
  29. package/dist/lib/errors.js.map +1 -0
  30. package/dist/lib/extract.d.ts +1 -1
  31. package/dist/lib/extract.d.ts.map +1 -1
  32. package/dist/lib/extract.js +19 -3
  33. package/dist/lib/extract.js.map +1 -1
  34. package/dist/lib/hook-logger.cjs +6 -4
  35. package/dist/lib/platform.d.ts +37 -0
  36. package/dist/lib/platform.d.ts.map +1 -0
  37. package/dist/lib/platform.js +69 -0
  38. package/dist/lib/platform.js.map +1 -0
  39. package/dist/lib/schemas.d.ts +24 -0
  40. package/dist/lib/schemas.d.ts.map +1 -0
  41. package/dist/lib/schemas.js +21 -0
  42. package/dist/lib/schemas.js.map +1 -0
  43. package/dist/lib/tracker.d.ts +1 -1
  44. package/dist/lib/tracker.js +2 -2
  45. package/dist/lib/tracker.js.map +1 -1
  46. package/dist/lib/watcher.d.ts +17 -0
  47. package/dist/lib/watcher.d.ts.map +1 -0
  48. package/dist/lib/watcher.js +752 -0
  49. package/dist/lib/watcher.js.map +1 -0
  50. package/package.json +23 -8
  51. package/dist/lib/watcher.cjs +0 -592
  52. package/dist/staging.d.ts +0 -2
  53. package/dist/staging.d.ts.map +0 -1
  54. package/dist/staging.js +0 -6
  55. package/dist/staging.js.map +0 -1
  56. package/dist/utils/detect-project.d.ts.map +0 -1
  57. package/dist/utils/detect-project.js.map +0 -1
  58. package/dist/utils/errors.d.ts.map +0 -1
  59. package/dist/utils/errors.js.map +0 -1
  60. /package/dist/{utils → lib}/detect-project.d.ts +0 -0
  61. /package/dist/{utils → lib}/detect-project.js +0 -0
@@ -0,0 +1,752 @@
1
+ #!/usr/bin/env node
2
+ /**
3
+ * Litmus file activity watcher.
4
+ * Runs as a detached background process started by `litmus init`.
5
+ * Usage: node watcher.js <projectDir> <activityLogPath> [cliBinPath]
6
+ *
7
+ * Tracks:
8
+ * - File changes with size deltas and large-paste detection (debounced 500ms per file)
9
+ * - Heartbeat events every 5 minutes
10
+ * - Git commit events (polled every 30s)
11
+ * - Editor and AI tool detection (polled every 10 min)
12
+ * - Auto-submit at deadline (if deadline/timeLimit configured)
13
+ *
14
+ * Writes newline-delimited JSON to the activity log.
15
+ */
16
+ import fs from "fs";
17
+ import path from "path";
18
+ import crypto from "crypto";
19
+ import { execSync, execFile } from "child_process";
20
+ import dns from "dns";
21
+ import https from "https";
22
+ import http from "http";
23
+ import { getEffectiveDeadline } from "./config.js";
24
+ import { getProcessList, getConnections, extractConnectionIP } from "./platform.js";
25
// ── Startup: CLI arguments, validation, and activity log ─────────
// Usage: node watcher.js <projectDir> <activityLogPath> [cliBinPath]
const [, , projectDir, activityLogPath, cliBinPath] = process.argv;

process.stderr.write(`[watcher] starting: projectDir=${projectDir} log=${activityLogPath}\n`);
process.stderr.write(`[watcher] node ${process.version}, platform=${process.platform}, arch=${process.arch}\n`);

// Both positional arguments are required; cliBinPath is optional
// (only needed for the auto-submit timer).
if (!projectDir || !activityLogPath) {
  process.stderr.write("[watcher] missing arguments\n");
  process.exit(1);
}
if (!fs.existsSync(projectDir)) {
  process.stderr.write(`[watcher] projectDir does not exist: ${projectDir}\n`);
  process.exit(1);
}

// Ensure the log directory exists, then open the activity log for appending.
fs.mkdirSync(path.dirname(activityLogPath), { recursive: true });
const log = fs.createWriteStream(activityLogPath, { flags: "a" });
40
// ── Config ──────────────────────────────────────────────────────
// Best-effort read of .litmus/config.json; the watcher still runs
// (local logging only) when the config is missing or unreadable.
let litmusConfig = null;
try {
  const configPath = path.join(projectDir, ".litmus", "config.json");
  litmusConfig = JSON.parse(fs.readFileSync(configPath, "utf8"));
} catch {
  /* config not available */
}

// Server upload is enabled only when both a token and a backend URL exist.
let uploadConfig = null;
let uploadBuffer = [];
const MAX_BUFFER_SIZE = 10000;
if (litmusConfig?.token && litmusConfig?.backendUrl) {
  uploadConfig = { token: litmusConfig.token, backendUrl: litmusConfig.backendUrl };
  process.stderr.write("[watcher] upload enabled\n");
}
54
/**
 * Upload buffered events to the server (POST {backendUrl}/cli/activity).
 *
 * Returns a Promise that resolves when the HTTP request completes (awaited
 * by shutdown). During normal operation the returned promise is ignored
 * (fire-and-forget). The promise never rejects.
 *
 * Retry policy:
 *  - 5xx / network error / timeout: in-flight events go back to the front
 *    of the buffer (capped at MAX_BUFFER_SIZE, keeping the newest) and are
 *    retried on the next heartbeat.
 *  - 4xx: events are dropped (the server rejected them).
 *  - Success: response body is parsed for server signals (pendingQuestions),
 *    which toggle the .litmus/QUESTION.md nudge file.
 */
function uploadEvents() {
  if (!uploadConfig || uploadBuffer.length === 0)
    return Promise.resolve();
  const events = uploadBuffer.splice(0); // take all, clear buffer
  const body = JSON.stringify({ events });
  const url = new URL(`${uploadConfig.backendUrl}/cli/activity`);
  const reqFn = url.protocol === "https:" ? https.request : http.request;

  // Put the in-flight events back so they are retried on the next heartbeat.
  const requeue = () => {
    uploadBuffer = events.concat(uploadBuffer);
    if (uploadBuffer.length > MAX_BUFFER_SIZE) {
      uploadBuffer = uploadBuffer.slice(-MAX_BUFFER_SIZE); // keep the newest
    }
  };

  return new Promise((resolve) => {
    const req = reqFn(url, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "Content-Length": Buffer.byteLength(body),
        "Authorization": `Bearer ${uploadConfig.token}`,
      },
      timeout: 10000,
    }, (res) => {
      // Collect response body (for pendingQuestions and other server signals)
      let responseData = "";
      res.on("data", (chunk) => { responseData += chunk.toString(); });
      res.on("end", () => {
        if (res.statusCode >= 500) {
          process.stderr.write(`[watcher] upload failed (will retry): ${res.statusCode}\n`);
          requeue();
        }
        else if (res.statusCode >= 400) {
          process.stderr.write(`[watcher] upload rejected: ${res.statusCode}\n`);
        }
        else {
          handleServerSignals(responseData);
        }
        resolve();
      });
    });
    req.on("error", () => {
      // Network error — retry on next heartbeat
      requeue();
      resolve();
    });
    // BUG FIX: destroy with an explicit error so the "error" handler above is
    // guaranteed to run. A plain req.destroy() may tear the request down
    // without emitting "error", which would drop the spliced-out events and
    // leave this promise pending forever (hanging shutdown's flush).
    req.on("timeout", () => { req.destroy(new Error("upload timeout")); });
    req.end(body);
  });
}

/**
 * Interpret a successful upload response: when the server reports pending
 * interview questions, write a nudge file into .litmus/; otherwise remove
 * any stale nudge. All filesystem and parse failures are non-critical.
 */
function handleServerSignals(responseData) {
  try {
    const parsed = JSON.parse(responseData);
    const nudgePath = path.join(projectDir, ".litmus", "QUESTION.md");
    if (parsed.pendingQuestions && parsed.pendingQuestions > 0) {
      const nudge = `# You have ${parsed.pendingQuestions} unanswered question${parsed.pendingQuestions > 1 ? "s" : ""}\n\nPlease check your Litmus browser tab to view and answer.\n`;
      try {
        fs.writeFileSync(nudgePath, nudge, "utf8");
      }
      catch { /* non-critical */ }
      process.stderr.write(`[litmus] ${parsed.pendingQuestions} unanswered question(s) — check your browser tab\n`);
    }
    else {
      // Clean up nudge file if no pending questions
      try {
        fs.unlinkSync(nudgePath);
      }
      catch { /* may not exist */ }
    }
  }
  catch { /* response parse failed, non-critical */ }
}
129
// ── Ignore patterns ──────────────────────────────────────────────
// Paths matching any of these are excluded from file-event tracking.
const IGNORED = [
  /(?:^|[/\\])\./, // dotfiles and dotdirs (including .git, .litmus)
  /node_modules/,
  /__pycache__/,
  /\.pyc$/,
  /\.class$/,
  /(?:^|[/\\])venv[/\\]/,
  /(?:^|[/\\])\.venv[/\\]/,
];

/**
 * Whether a project-relative path should be ignored by the file watcher.
 * @param {string} relPath - path relative to the project root
 * @returns {boolean}
 */
function shouldIgnore(relPath) {
  for (const pattern of IGNORED) {
    if (pattern.test(relPath)) return true;
  }
  return false;
}
142
// ── Hash chain for tamper evidence ──────────────────────────────
// Each event carries a sequence number and a SHA-256 link to the previous
// event's hash, so gaps or rewrites in the log are detectable afterwards.
let prevHash = "0".repeat(64); // genesis
let nextSeq = 0;

/**
 * Stamp an event into the hash chain, append it to the activity log, and
 * (when uploading is enabled and the buffer has room) queue it for upload.
 * Mutates `event`: adds _seq, _prevHash and _hash.
 */
function emit(event) {
  event._seq = nextSeq;
  nextSeq += 1;
  event._prevHash = prevHash;
  // Hash is computed over the event *before* _hash itself is attached.
  prevHash = crypto.createHash("sha256").update(JSON.stringify(event)).digest("hex");
  event._hash = prevHash;
  log.write(`${JSON.stringify(event)}\n`);
  if (uploadConfig && uploadBuffer.length < MAX_BUFFER_SIZE) {
    uploadBuffer.push(event);
  }
}
156
// ── File size tracking ───────────────────────────────────────────
const LARGE_PASTE_THRESHOLD = 5000; // bytes — matches backend constant
const fileSizes = new Map();      // relPath -> last observed size in bytes
const debounceTimers = new Map(); // relPath -> pending debounce timer

/**
 * Emit a file event enriched with size metadata.
 * The size delta is computed against the last observed size of the same
 * file; growth above LARGE_PASTE_THRESHOLD in one event is flagged as a
 * probable paste. If stat fails (file deleted/unreadable) the event is
 * still logged, just without size data.
 */
function emitFileEvent(type, relPath) {
  const event = { ts: new Date().toISOString(), type, path: relPath };
  try {
    const { size } = fs.statSync(path.join(projectDir, relPath));
    event.fileSize = size;
    const previous = fileSizes.get(relPath);
    const delta = previous === undefined ? size : size - previous;
    event.sizeDelta = delta;
    fileSizes.set(relPath, size);
    if (delta > LARGE_PASTE_THRESHOLD) {
      event.largePaste = true;
      event.bytesAdded = delta;
    }
  }
  catch {
    // File was deleted or unreadable — still log the event, just without size data
    fileSizes.delete(relPath);
  }
  emit(event);
}
182
// ── Burst-edit detection (AI-driven multi-file edits) ────────────
const BURST_WINDOW_MS = 2000; // sliding window length
const BURST_MIN_FILES = 3;    // 3+ distinct files inside the window = burst
const recentEdits = [];       // { file, ts } in chronological order

/**
 * Record a raw (pre-debounce) edit and emit a `burst_edit` event when
 * BURST_MIN_FILES or more distinct files changed within BURST_WINDOW_MS.
 * The window is cleared after a burst fires so one burst emits one event.
 */
function checkBurstEdit(filename) {
  const now = Date.now();
  recentEdits.push({ file: filename, ts: now });
  // Drop edits that have aged out of the window.
  while (recentEdits.length && now - recentEdits[0].ts > BURST_WINDOW_MS) {
    recentEdits.shift();
  }
  const distinct = [...new Set(recentEdits.map(({ file }) => file))];
  if (distinct.length < BURST_MIN_FILES) return;
  emit({
    ts: new Date().toISOString(),
    type: "burst_edit",
    fileCount: distinct.length,
    files: distinct.slice(0, 10), // cap payload at 10 filenames
  });
  recentEdits.length = 0; // reset to avoid duplicate bursts
}
206
// ── File system watcher (debounced) ──────────────────────────────
// Fail loudly on any unexpected error so the parent can notice the crash.
process.on("uncaughtException", (err) => {
  process.stderr.write(`[watcher] uncaughtException: ${err.stack || err}\n`);
  process.exit(1);
});

try {
  const watcher = fs.watch(projectDir, { recursive: true }, (eventType, filename) => {
    if (!filename) return;
    // AI artifact files are dotfiles, so check them before the ignore filter.
    checkArtifact(filename);
    if (shouldIgnore(filename)) return;
    // Burst detection needs real (pre-debounce) event timing.
    checkBurstEdit(filename);
    // Debounce per file: collapse multiple events within 500ms into one.
    const pending = debounceTimers.get(filename);
    if (pending !== undefined) clearTimeout(pending);
    debounceTimers.set(filename, setTimeout(() => {
      debounceTimers.delete(filename);
      emitFileEvent(eventType, filename);
    }, 500));
  });
  watcher.on("error", (err) => {
    process.stderr.write(`[watcher] fs.watch error: ${err}\n`);
  });
  process.stderr.write("[watcher] file watcher started\n");
}
catch (err) {
  // fs.watch with recursive: true is not supported on Linux < Node 22.
  // Continue without file watching — heartbeats and git commit polling still work.
  process.stderr.write(`[watcher] file watching unavailable: ${err.message} (heartbeat + git tracking still active)\n`);
}
240
// ── Heartbeats ───────────────────────────────────────────────────
/** Log a heartbeat event and flush any buffered events to the server. */
function emitHeartbeat() {
  emit({ ts: new Date().toISOString(), type: "heartbeat" });
  // Fire-and-forget flush; failures are requeued and retried next beat.
  uploadEvents();
}
emitHeartbeat(); // immediate heartbeat + initial upload
setInterval(emitHeartbeat, 5 * 60 * 1000); // every 5 minutes
247
// ── Git commit polling ───────────────────────────────────────────
let lastCommitHash = null;

/**
 * Resolve the current HEAD commit hash by reading .git directly (no git
 * subprocess): follow the symbolic ref through loose refs, then packed-refs;
 * a detached HEAD is returned as-is. Returns null when the repository is
 * absent or unreadable.
 */
function readCurrentCommitHash() {
  try {
    const head = fs.readFileSync(path.join(projectDir, ".git", "HEAD"), "utf8").trim();
    if (!head.startsWith("ref: ")) {
      return head; // Detached HEAD — the hash itself
    }
    const ref = head.slice(5);
    // Try loose ref first, then fall back to packed-refs
    const loosePath = path.join(projectDir, ".git", ref);
    if (fs.existsSync(loosePath)) {
      return fs.readFileSync(loosePath, "utf8").trim();
    }
    const packedPath = path.join(projectDir, ".git", "packed-refs");
    if (fs.existsSync(packedPath)) {
      const match = fs.readFileSync(packedPath, "utf8")
        .split("\n")
        .find((line) => line.endsWith(" " + ref));
      if (match) return match.split(" ")[0];
    }
  }
  catch { /* .git may not exist */ }
  return null;
}

// Seed with the commit present at startup so it is not reported as new.
lastCommitHash = readCurrentCommitHash();

/**
 * Emit a `commit` event when HEAD has moved since the last poll.
 * Line/file stats come from `git log -1 --numstat`; binary files report
 * "-" in numstat and are counted as 0 lines.
 */
function checkGitCommits() {
  try {
    const currentHash = readCurrentCommitHash();
    if (!currentHash || currentHash === lastCommitHash) return;
    lastCommitHash = currentHash;
    const info = execSync('git log -1 --numstat --pretty=format:"%H|%aI|%s"', {
      cwd: projectDir,
      timeout: 5000,
      encoding: "utf8",
    });
    const [headerLine, ...statLines] = info.trim().split("\n");
    // Header: "<hash>|<author-date>|<subject>" (shell may leave quotes).
    const header = headerLine.replace(/^"|"$/g, "");
    const firstPipe = header.indexOf("|");
    const secondPipe = header.indexOf("|", firstPipe + 1);
    const hash = header.slice(0, firstPipe);
    const message = secondPipe >= 0 ? header.slice(secondPipe + 1) : "";
    let linesAdded = 0;
    let linesDeleted = 0;
    let filesChanged = 0;
    for (const raw of statLines) {
      const line = raw.trim();
      if (!line) continue;
      const [added, deleted] = line.split("\t");
      if (added !== "-") linesAdded += parseInt(added, 10) || 0;
      if (deleted !== "-") linesDeleted += parseInt(deleted, 10) || 0;
      filesChanged += 1;
    }
    emit({
      ts: new Date().toISOString(),
      type: "commit",
      hash: hash ? hash.slice(0, 12) : currentHash.slice(0, 12),
      message: message || "",
      linesAdded,
      linesDeleted,
      filesChanged,
    });
  }
  catch { /* git not available or command failed — skip */ }
}
setInterval(checkGitCommits, 30 * 1000); // every 30 seconds
316
// ── Editor & AI tool detection ───────────────────────────────────
// Word-boundary patterns matched against the full process listing.
const EDITOR_PATTERNS = {
  vscode: /\bcode\b/,
  cursor: /\bcursor\b/,
  vim: /\b[gn]?vim\b/,
  intellij: /\bidea\b/,
  webstorm: /\bwebstorm\b/,
  sublime: /\bsubl\b/,
  emacs: /\bemacs\b/,
  zed: /\bzed\b/,
};
const AI_TOOL_PATTERNS = {
  claude: /\bclaude\b/,
  aider: /\baider\b/,
  codeium: /\bcodeium\b/,
  codex: /\bcodex\b/,
  copilot: /\bcopilot\b/,
  windsurf: /\bwindsurf\b/,
  ollama: /\bollama\b/,
  tabnine: /\btabnine\b/,
  supermaven: /\bsupermaven\b/,
  cline: /\bcline\b/,
  cody: /\bsourcegraph-cody\b|\bcody-agent\b/,
  "amazon-q": /\bamazon-q\b/,
  "roo-code": /\broo-code\b|\broo_code\b/,
  pearai: /\bpearai\b/,
  ghostwriter: /\bghostwriter\b/,
};

/**
 * Scan the process list for known editors and AI tools and emit a single
 * `env_detected` event when anything matches. Process-listing failures are
 * swallowed (best-effort detection).
 */
function detectEnvironment() {
  try {
    const ps = getProcessList();
    const matchNames = (patterns) =>
      Object.entries(patterns)
        .filter(([, re]) => re.test(ps))
        .map(([name]) => name);
    const editors = matchNames(EDITOR_PATTERNS);
    const aiTools = matchNames(AI_TOOL_PATTERNS);
    // Cursor is both an editor and an AI tool
    if (editors.includes("cursor") && !aiTools.includes("cursor")) {
      aiTools.push("cursor");
    }
    if (editors.length === 0 && aiTools.length === 0) return;
    const event = { ts: new Date().toISOString(), type: "env_detected" };
    if (editors.length) event.editors = editors;
    if (aiTools.length) event.aiTools = aiTools;
    emit(event);
  }
  catch { /* process listing may fail, skip */ }
}
detectEnvironment(); // run immediately on startup
setInterval(detectEnvironment, 10 * 60 * 1000); // every 10 minutes
370
// ── AI artifact file detection ──────────────────────────────────
// Files/directories that known AI coding tools drop into a project.
const AI_ARTIFACT_PATTERNS = [
  { tool: "cursor", paths: [".cursorrules", ".cursorignore"] },
  { tool: "windsurf", paths: [".windsurfrules"] },
  { tool: "aider", paths: [".aider.chat.history.md", ".aider.input.history", ".aider.tags.cache"] },
  { tool: "continue", paths: [".continue"] },
  { tool: "codeium", paths: [".codeium"] },
  { tool: "tabnine", paths: [".tabnine"] },
  { tool: "codex", paths: ["codex.md", ".codex"] },
];
const seenArtifacts = new Set(); // paths already reported once

/**
 * Emit `ai_artifact_detected` the first time a watched path matches a known
 * artifact — either exactly, or as a child of an artifact directory (both
 * "/" and "\" separators are checked for Windows).
 */
function checkArtifact(relPath) {
  if (seenArtifacts.has(relPath)) return;
  for (const { tool, paths } of AI_ARTIFACT_PATTERNS) {
    const hit = paths.find(
      (p) => relPath === p || relPath.startsWith(`${p}/`) || relPath.startsWith(`${p}\\`),
    );
    if (hit !== undefined) {
      seenArtifacts.add(relPath);
      emit({
        ts: new Date().toISOString(),
        type: "ai_artifact_detected",
        tool,
        file: relPath,
      });
      return;
    }
  }
}

// Startup scan for artifacts that already exist on disk.
for (const { tool, paths } of AI_ARTIFACT_PATTERNS) {
  for (const p of paths) {
    try {
      if (!fs.existsSync(path.join(projectDir, p))) continue;
      seenArtifacts.add(p);
      emit({
        ts: new Date().toISOString(),
        type: "ai_artifact_detected",
        tool,
        file: p,
      });
    }
    catch { /* skip */ }
  }
}
416
// ── Network monitoring (AI API endpoint detection) ──────────────
// Known AI service hostnames mapped to a short tool label.
const AI_NETWORK_HOSTS = {
  "api.openai.com": "openai",
  "chat.openai.com": "chatgpt",
  "chatgpt.com": "chatgpt",
  "api.anthropic.com": "claude_api",
  "claude.ai": "claude_web",
  "generativelanguage.googleapis.com": "gemini_api",
  "gemini.google.com": "gemini_web",
  "copilot-proxy.githubusercontent.com": "copilot",
  "githubcopilot.com": "copilot",
  "api2.cursor.sh": "cursor",
  "cursor.sh": "cursor",
  "server.codeium.com": "codeium",
  "api.perplexity.ai": "perplexity",
  "perplexity.ai": "perplexity",
  "api.deepseek.com": "deepseek",
  "api.groq.com": "groq",
  "api.mistral.ai": "mistral",
};

// IP -> { tool, host } cache, rebuilt from DNS every 30 minutes.
const ipToTool = new Map();
let lastDnsRefresh = 0;
const DNS_REFRESH_MS = 30 * 60 * 1000;

/**
 * Re-resolve every known AI host (IPv4) and repopulate the IP lookup cache.
 * Resolution is asynchronous and best-effort: failed lookups are ignored.
 */
function refreshDnsCache() {
  lastDnsRefresh = Date.now();
  for (const [host, tool] of Object.entries(AI_NETWORK_HOSTS)) {
    dns.resolve4(host, (err, addresses) => {
      if (err || !addresses) return;
      for (const ip of addresses) {
        ipToTool.set(ip, { tool, host });
      }
    });
  }
}
refreshDnsCache(); // resolve on startup
453
let lastNetworkTools = ""; // dedup key: sorted tool list from the previous check

/**
 * Poll open network connections and hand the listing to
 * parseNetworkOutput(). No-ops until the DNS cache has at least one entry.
 * Triggers a DNS cache refresh when the cache is stale.
 */
function detectNetworkAI() {
  if (Date.now() - lastDnsRefresh > DNS_REFRESH_MS) refreshDnsCache();
  if (ipToTool.size === 0) return; // DNS hasn't resolved yet
  getConnections((err, stdout) => {
    if (err || !stdout) return;
    parseNetworkOutput(stdout);
  });
}

/**
 * Match connection-listing output against the AI IP cache and emit one
 * `network_ai_detected` event per *change* in the detected tool set
 * (identical consecutive detections are suppressed).
 */
function parseNetworkOutput(output) {
  const detected = new Map(); // tool -> Set<host>
  for (const line of output.split("\n")) {
    const ipMatch = extractConnectionIP(line);
    if (!ipMatch) continue;
    const entry = ipToTool.get(ipMatch[1]);
    if (!entry) continue;
    if (!detected.has(entry.tool)) detected.set(entry.tool, new Set());
    detected.get(entry.tool).add(entry.host);
  }
  if (detected.size === 0) return;
  // Dedup: don't re-emit if the same tools were detected last check.
  const toolKey = JSON.stringify([...detected.keys()].sort());
  if (toolKey === lastNetworkTools) return;
  lastNetworkTools = toolKey;
  const tools = [...detected.keys()];
  const hosts = [...detected.values()].flatMap((hostSet) => [...hostSet]);
  emit({
    ts: new Date().toISOString(),
    type: "network_ai_detected",
    tools,
    hosts,
  });
}
detectNetworkAI(); // run immediately
setInterval(detectNetworkAI, 5 * 60 * 1000); // every 5 minutes
502
// ── Codebase snapshot upload (for live interview agent) ──────────
const SNAPSHOT_INTERVAL_MS = 5 * 60 * 1000; // every 5 minutes
const MAX_FILE_SIZE_BYTES = 50000;          // skip files > 50KB
const MAX_TOTAL_SNAPSHOT_BYTES = 2000000;   // cap total snapshot at 2MB

// Paths excluded from snapshots: hidden files, dependency/build output,
// compiled artifacts, media, archives, and lockfiles.
const SNAPSHOT_IGNORE = [
  /(?:^|[/\\])\./, // dotfiles/dotdirs (.git, .litmus, .env)
  /node_modules/,
  /__pycache__/,
  /\.pyc$/,
  /\.class$/,
  /\.jar$/,
  /\.exe$/,
  /\.dll$/,
  /\.so$/,
  /\.dylib$/,
  /\.wasm$/,
  /\.png$/,
  /\.jpg$/,
  /\.jpeg$/,
  /\.gif$/,
  /\.svg$/,
  /\.ico$/,
  /\.mp4$/,
  /\.mp3$/,
  /\.zip$/,
  /\.tar$/,
  /\.gz$/,
  /(?:^|[/\\])venv[/\\]/,
  /(?:^|[/\\])\.venv[/\\]/,
  /(?:^|[/\\])dist[/\\]/,
  /(?:^|[/\\])build[/\\]/,
  /package-lock\.json$/,
  /yarn\.lock$/,
  /pnpm-lock\.yaml$/,
];

/**
 * Whether a project-relative path is excluded from codebase snapshots.
 * @param {string} relPath
 * @returns {boolean}
 */
function shouldIgnoreForSnapshot(relPath) {
  for (const re of SNAPSHOT_IGNORE) {
    if (re.test(relPath)) return true;
  }
  return false;
}
540
/**
 * Recursively collect project text files into a { relPath: content } map.
 * Skips snapshot-ignored paths and files over MAX_FILE_SIZE_BYTES, and
 * stops adding once MAX_TOTAL_SNAPSHOT_BYTES is reached. Unreadable
 * entries are silently skipped.
 */
function collectProjectFiles() {
  const files = {};
  let totalBytes = 0;
  const walk = (dir, prefix) => {
    if (totalBytes >= MAX_TOTAL_SNAPSHOT_BYTES) return;
    let entries;
    try {
      entries = fs.readdirSync(dir, { withFileTypes: true });
    } catch {
      return;
    }
    for (const entry of entries) {
      if (totalBytes >= MAX_TOTAL_SNAPSHOT_BYTES) break;
      const relPath = prefix ? `${prefix}/${entry.name}` : entry.name;
      if (shouldIgnoreForSnapshot(relPath)) continue;
      const fullPath = path.join(dir, entry.name);
      if (entry.isDirectory()) {
        walk(fullPath, relPath);
        continue;
      }
      if (!entry.isFile()) continue;
      try {
        const { size } = fs.statSync(fullPath);
        if (size > MAX_FILE_SIZE_BYTES) continue;
        if (totalBytes + size > MAX_TOTAL_SNAPSHOT_BYTES) continue;
        files[relPath] = fs.readFileSync(fullPath, "utf8");
        totalBytes += size;
      }
      catch { /* binary or unreadable, skip */ }
    }
  };
  walk(projectDir, "");
  return files;
}
580
/**
 * Best-effort git diff for the snapshot: the last commit's diff, falling
 * back to the staged diff when there is no previous commit. Output is
 * capped at 200KB. Returns "" when git is unavailable entirely.
 */
function getGitDiff() {
  const run = (cmd) =>
    execSync(cmd, { cwd: projectDir, maxBuffer: 500000, timeout: 10000 })
      .toString()
      .slice(0, 200000);
  try {
    return run("git diff HEAD~1 HEAD --stat --patch");
  }
  catch {
    // No previous commit or git not available
    try {
      return run("git diff --cached --stat --patch");
    }
    catch {
      return "";
    }
  }
}
602
/**
 * Parse the last 10 commits from `git log --oneline --shortstat` into
 * [{ hash, message, linesAdded, linesDeleted }].
 * Returns [] when git is unavailable or the command fails.
 */
function getRecentCommits() {
  try {
    const raw = execSync("git log --oneline --shortstat -10", {
      cwd: projectDir,
      maxBuffer: 100000,
      timeout: 10000,
    }).toString();
    const commits = [];
    let current = null;
    for (const line of raw.split("\n")) {
      const trimmed = line.trim();
      if (!trimmed) continue;
      // Commit line: "abc1234 commit message"
      const commitMatch = trimmed.match(/^([0-9a-f]{7,}) (.+)$/);
      if (commitMatch) {
        if (current) commits.push(current);
        current = { hash: commitMatch[1], message: commitMatch[2], linesAdded: 0, linesDeleted: 0 };
        continue;
      }
      // Stats line: "3 files changed, 45 insertions(+), 12 deletions(-)"
      if (current && trimmed.includes("file")) {
        const addMatch = trimmed.match(/(\d+) insertion/);
        const delMatch = trimmed.match(/(\d+) deletion/);
        // FIX: always pass an explicit radix to parseInt (the original
        // omitted it here, unlike the rest of the file).
        if (addMatch) current.linesAdded = parseInt(addMatch[1], 10);
        if (delMatch) current.linesDeleted = parseInt(delMatch[1], 10);
      }
    }
    if (current) commits.push(current);
    return commits;
  }
  catch {
    return [];
  }
}
642
/**
 * Upload a full codebase snapshot (files + git diff + recent commits) to
 * POST {backendUrl}/cli/snapshot. No-op when uploading is disabled or the
 * project has no snapshot-eligible files. Fire-and-forget: failures are
 * logged to stderr and the next interval simply retries.
 */
function uploadSnapshot() {
  if (!uploadConfig) return;
  const files = collectProjectFiles();
  const fileCount = Object.keys(files).length;
  if (fileCount === 0) return;
  const body = JSON.stringify({
    files,
    git_diff: getGitDiff(),
    recent_commits: getRecentCommits(),
  });
  const url = new URL(`${uploadConfig.backendUrl}/cli/snapshot`);
  const reqFn = url.protocol === "https:" ? https.request : http.request;
  const req = reqFn(url, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      "Content-Length": Buffer.byteLength(body),
      "Authorization": `Bearer ${uploadConfig.token}`,
    },
    timeout: 30000, // 30s — snapshots can be larger
  }, (res) => {
    res.resume(); // drain response
    res.on("end", () => {
      if (res.statusCode >= 400) {
        process.stderr.write(`[watcher] snapshot upload failed: ${res.statusCode}\n`);
      }
      else {
        process.stderr.write(`[watcher] snapshot uploaded: ${fileCount} files, ${(Buffer.byteLength(body) / 1024).toFixed(0)}KB\n`);
      }
    });
  });
  req.on("error", () => {
    process.stderr.write("[watcher] snapshot upload error (network)\n");
  });
  req.on("timeout", () => { req.destroy(); });
  req.end(body);
}

// First snapshot after 60s, then on interval.
setTimeout(() => {
  uploadSnapshot();
  setInterval(uploadSnapshot, SNAPSHOT_INTERVAL_MS);
}, 60 * 1000);
684
// ── Auto-submit at deadline ──────────────────────────────────────
// When a deadline/time limit is configured and the CLI binary path was
// passed, schedule a one-shot `submit --yes` at the effective deadline.
if (cliBinPath && litmusConfig) {
  const effectiveDeadline = getEffectiveDeadline(litmusConfig);
  if (effectiveDeadline !== null) {
    const msUntilDeadline = effectiveDeadline - Date.now();
    const MAX_TIMEOUT = 2147483647; // setTimeout max (2^31 - 1 ms, ~24.8 days)
    if (msUntilDeadline > MAX_TIMEOUT) {
      process.stderr.write(`[watcher] deadline too far in the future (${Math.round(msUntilDeadline / 86400000)}d), skipping auto-submit timer\n`);
    }
    else if (msUntilDeadline <= 0) {
      process.stderr.write("[watcher] deadline already passed, skipping auto-submit\n");
    }
    else {
      process.stderr.write(`[watcher] deadline timer set for ${new Date(effectiveDeadline).toISOString()} (${Math.round(msUntilDeadline / 1000)}s)\n`);
      setTimeout(() => {
        process.stderr.write("[watcher] deadline reached, auto-submitting\n");
        execFile(process.execPath, [cliBinPath, "submit", "--yes"], { cwd: projectDir, timeout: 5 * 60000 }, (err, _stdout, stderr) => {
          if (err) {
            process.stderr.write(`[watcher] auto-submit failed: ${stderr || err.message}\n`);
            emit({ ts: new Date().toISOString(), type: "auto_submit_failed", error: (stderr || err.message).slice(0, 500) });
          }
          else {
            process.stderr.write("[watcher] auto-submit succeeded\n");
            emit({ ts: new Date().toISOString(), type: "auto_submit" });
          }
        });
      }, msUntilDeadline);
    }
  }
}
714
// ── Graceful shutdown ────────────────────────────────────────────
let shuttingDown = false;
const shutdownFile = path.join(projectDir, ".litmus", "shutdown");

/**
 * Flush buffered events to the server, close the activity log, then exit 0.
 * Idempotent: repeated signals/sentinel hits are ignored after the first.
 * An unref'd 5s fallback timer guarantees exit even if the upload or the
 * log-stream close hangs.
 */
function shutdown() {
  if (shuttingDown) return;
  shuttingDown = true;
  // Clean up sentinel file if it exists
  try {
    fs.unlinkSync(shutdownFile);
  }
  catch { /* may not exist */ }
  // Flush buffered events to server, then close log and exit.
  const done = () => log.end(() => process.exit(0));
  // FIX: uploadEvents() always returns a Promise (it never returns a falsy
  // value), so the original non-promise fallback branch was dead code; the
  // promise path — including the 5s hard-exit guard — now runs
  // unconditionally.
  uploadEvents().then(done, done);
  setTimeout(() => process.exit(0), 5000).unref();
}

// SIGTERM/SIGINT: works on Unix; SIGINT also fires on Windows via Ctrl+C
process.on("SIGTERM", shutdown);
process.on("SIGINT", shutdown);

// Sentinel file: cross-platform shutdown mechanism.
// submit.ts writes .litmus/shutdown → watcher detects it and flushes gracefully.
// Primary shutdown path on Windows where SIGTERM kills without invoking handlers.
const sentinelPoll = setInterval(() => {
  if (fs.existsSync(shutdownFile)) {
    process.stderr.write("[watcher] shutdown sentinel detected\n");
    shutdown();
  }
}, 1000);
sentinelPoll.unref(); // don't keep process alive just for this

process.stderr.write("[watcher] all monitors started\n");
752
+ //# sourceMappingURL=watcher.js.map