context-mode 1.0.111 → 1.0.113

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (153) hide show
  1. package/.claude-plugin/marketplace.json +2 -2
  2. package/.claude-plugin/plugin.json +1 -1
  3. package/.openclaw-plugin/index.ts +3 -2
  4. package/.openclaw-plugin/openclaw.plugin.json +1 -1
  5. package/.openclaw-plugin/package.json +1 -1
  6. package/README.md +152 -34
  7. package/bin/statusline.mjs +144 -127
  8. package/build/adapters/base.d.ts +8 -5
  9. package/build/adapters/base.js +8 -18
  10. package/build/adapters/claude-code/index.d.ts +24 -3
  11. package/build/adapters/claude-code/index.js +44 -11
  12. package/build/adapters/codex/hooks.d.ts +10 -5
  13. package/build/adapters/codex/hooks.js +10 -5
  14. package/build/adapters/codex/index.d.ts +17 -5
  15. package/build/adapters/codex/index.js +337 -37
  16. package/build/adapters/codex/paths.d.ts +1 -0
  17. package/build/adapters/codex/paths.js +12 -0
  18. package/build/adapters/cursor/index.d.ts +6 -0
  19. package/build/adapters/cursor/index.js +83 -2
  20. package/build/adapters/detect.d.ts +1 -1
  21. package/build/adapters/detect.js +29 -6
  22. package/build/adapters/omp/index.d.ts +65 -0
  23. package/build/adapters/omp/index.js +182 -0
  24. package/build/adapters/omp/plugin.d.ts +75 -0
  25. package/build/adapters/omp/plugin.js +220 -0
  26. package/build/adapters/openclaw/mcp-tools.d.ts +54 -0
  27. package/build/adapters/openclaw/mcp-tools.js +198 -0
  28. package/build/adapters/openclaw/plugin.d.ts +130 -0
  29. package/build/adapters/openclaw/plugin.js +629 -0
  30. package/build/adapters/openclaw/workspace-router.d.ts +29 -0
  31. package/build/adapters/openclaw/workspace-router.js +64 -0
  32. package/build/adapters/opencode/plugin.d.ts +145 -0
  33. package/build/adapters/opencode/plugin.js +457 -0
  34. package/build/adapters/pi/extension.d.ts +26 -0
  35. package/build/adapters/pi/extension.js +552 -0
  36. package/build/adapters/pi/index.d.ts +57 -0
  37. package/build/adapters/pi/index.js +173 -0
  38. package/build/adapters/pi/mcp-bridge.d.ts +113 -0
  39. package/build/adapters/pi/mcp-bridge.js +251 -0
  40. package/build/adapters/types.d.ts +11 -6
  41. package/build/cli.js +186 -170
  42. package/build/db-base.d.ts +15 -2
  43. package/build/db-base.js +50 -5
  44. package/build/executor.d.ts +2 -0
  45. package/build/executor.js +15 -2
  46. package/build/runPool.d.ts +36 -0
  47. package/build/runPool.js +51 -0
  48. package/build/runtime.js +64 -5
  49. package/build/search/auto-memory.js +6 -4
  50. package/build/security.js +30 -10
  51. package/build/server.d.ts +23 -1
  52. package/build/server.js +662 -182
  53. package/build/session/analytics.d.ts +404 -1
  54. package/build/session/analytics.js +1347 -42
  55. package/build/session/db.d.ts +114 -5
  56. package/build/session/db.js +275 -27
  57. package/build/session/event-emit.d.ts +48 -0
  58. package/build/session/event-emit.js +101 -0
  59. package/build/session/extract.d.ts +1 -0
  60. package/build/session/extract.js +79 -12
  61. package/build/session/purge.d.ts +111 -0
  62. package/build/session/purge.js +138 -0
  63. package/build/store.d.ts +7 -0
  64. package/build/store.js +69 -6
  65. package/build/util/claude-config.d.ts +26 -0
  66. package/build/util/claude-config.js +91 -0
  67. package/build/util/hook-config.d.ts +4 -0
  68. package/build/util/hook-config.js +39 -0
  69. package/build/util/project-dir.d.ts +49 -0
  70. package/build/util/project-dir.js +67 -0
  71. package/cli.bundle.mjs +411 -208
  72. package/configs/antigravity/GEMINI.md +0 -3
  73. package/configs/claude-code/CLAUDE.md +1 -4
  74. package/configs/codex/AGENTS.md +1 -4
  75. package/configs/codex/config.toml +3 -0
  76. package/configs/codex/hooks.json +8 -0
  77. package/configs/cursor/context-mode.mdc +0 -3
  78. package/configs/gemini-cli/GEMINI.md +0 -3
  79. package/configs/jetbrains-copilot/copilot-instructions.md +0 -3
  80. package/configs/kilo/AGENTS.md +0 -3
  81. package/configs/kiro/KIRO.md +0 -3
  82. package/configs/omp/SYSTEM.md +85 -0
  83. package/configs/omp/mcp.json +7 -0
  84. package/configs/openclaw/AGENTS.md +0 -3
  85. package/configs/opencode/AGENTS.md +0 -3
  86. package/configs/pi/AGENTS.md +0 -3
  87. package/configs/qwen-code/QWEN.md +1 -4
  88. package/configs/vscode-copilot/copilot-instructions.md +0 -3
  89. package/configs/zed/AGENTS.md +0 -3
  90. package/hooks/codex/posttooluse.mjs +9 -2
  91. package/hooks/codex/precompact.mjs +69 -0
  92. package/hooks/codex/sessionstart.mjs +13 -9
  93. package/hooks/codex/stop.mjs +1 -2
  94. package/hooks/codex/userpromptsubmit.mjs +1 -2
  95. package/hooks/core/routing.mjs +237 -18
  96. package/hooks/cursor/afteragentresponse.mjs +1 -1
  97. package/hooks/cursor/hooks.json +31 -0
  98. package/hooks/cursor/posttooluse.mjs +1 -1
  99. package/hooks/cursor/sessionstart.mjs +5 -5
  100. package/hooks/cursor/stop.mjs +1 -1
  101. package/hooks/ensure-deps.mjs +12 -13
  102. package/hooks/gemini-cli/aftertool.mjs +1 -1
  103. package/hooks/gemini-cli/beforeagent.mjs +1 -1
  104. package/hooks/gemini-cli/precompress.mjs +3 -2
  105. package/hooks/gemini-cli/sessionstart.mjs +9 -9
  106. package/hooks/jetbrains-copilot/posttooluse.mjs +1 -1
  107. package/hooks/jetbrains-copilot/precompact.mjs +3 -2
  108. package/hooks/jetbrains-copilot/sessionstart.mjs +9 -9
  109. package/hooks/kiro/agentspawn.mjs +5 -5
  110. package/hooks/kiro/posttooluse.mjs +2 -2
  111. package/hooks/kiro/userpromptsubmit.mjs +1 -1
  112. package/hooks/posttooluse.mjs +45 -0
  113. package/hooks/precompact.mjs +17 -0
  114. package/hooks/pretooluse.mjs +23 -0
  115. package/hooks/routing-block.mjs +0 -12
  116. package/hooks/run-hook.mjs +16 -3
  117. package/hooks/session-db.bundle.mjs +27 -18
  118. package/hooks/session-extract.bundle.mjs +2 -2
  119. package/hooks/session-helpers.mjs +101 -64
  120. package/hooks/sessionstart.mjs +51 -2
  121. package/hooks/vscode-copilot/posttooluse.mjs +1 -1
  122. package/hooks/vscode-copilot/precompact.mjs +3 -2
  123. package/hooks/vscode-copilot/sessionstart.mjs +9 -9
  124. package/openclaw.plugin.json +1 -1
  125. package/package.json +14 -8
  126. package/server.bundle.mjs +349 -147
  127. package/start.mjs +16 -4
  128. package/skills/UPSTREAM-CREDITS.md +0 -51
  129. package/skills/context-mode-ops/SKILL.md +0 -299
  130. package/skills/context-mode-ops/agent-teams.md +0 -198
  131. package/skills/context-mode-ops/communication.md +0 -224
  132. package/skills/context-mode-ops/marketing.md +0 -124
  133. package/skills/context-mode-ops/release.md +0 -214
  134. package/skills/context-mode-ops/review-pr.md +0 -269
  135. package/skills/context-mode-ops/tdd.md +0 -329
  136. package/skills/context-mode-ops/triage-issue.md +0 -266
  137. package/skills/context-mode-ops/validation.md +0 -307
  138. package/skills/diagnose/SKILL.md +0 -122
  139. package/skills/diagnose/scripts/hitl-loop.template.sh +0 -41
  140. package/skills/grill-me/SKILL.md +0 -15
  141. package/skills/grill-with-docs/ADR-FORMAT.md +0 -47
  142. package/skills/grill-with-docs/CONTEXT-FORMAT.md +0 -77
  143. package/skills/grill-with-docs/SKILL.md +0 -93
  144. package/skills/improve-codebase-architecture/DEEPENING.md +0 -37
  145. package/skills/improve-codebase-architecture/INTERFACE-DESIGN.md +0 -44
  146. package/skills/improve-codebase-architecture/LANGUAGE.md +0 -53
  147. package/skills/improve-codebase-architecture/SKILL.md +0 -76
  148. package/skills/tdd/SKILL.md +0 -114
  149. package/skills/tdd/deep-modules.md +0 -33
  150. package/skills/tdd/interface-design.md +0 -31
  151. package/skills/tdd/mocking.md +0 -59
  152. package/skills/tdd/refactoring.md +0 -10
  153. package/skills/tdd/tests.md +0 -61
package/build/server.js CHANGED
@@ -2,29 +2,34 @@
2
2
  import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
3
3
  import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
4
4
  import { createRequire } from "node:module";
5
- import { createHash } from "node:crypto";
6
5
  import { existsSync, unlinkSync, readdirSync, readFileSync, writeFileSync, renameSync, rmSync, mkdirSync, cpSync, statSync, symlinkSync, lstatSync } from "node:fs";
7
- import { execSync } from "node:child_process";
6
+ import { execSync, spawnSync } from "node:child_process";
8
7
  import { join, dirname, resolve, sep, isAbsolute } from "node:path";
9
8
  import { fileURLToPath } from "node:url";
10
9
  import { homedir, tmpdir, cpus } from "node:os";
11
10
  import { request as httpsRequest } from "node:https";
12
11
  import { z } from "zod";
13
12
  import { PolyglotExecutor } from "./executor.js";
14
- import { runPool } from "./concurrency/runPool.js";
13
+ import { runPool } from "./runPool.js";
15
14
  import { ContentStore, cleanupStaleDBs, cleanupStaleContentDBs } from "./store.js";
16
15
  import { composeFetchCacheKey } from "./fetch-cache.js";
17
16
  import { readBashPolicies, evaluateCommandDenyOnly, extractShellCommands, readToolDenyPatterns, evaluateFilePath, } from "./security.js";
18
17
  import { detectRuntimes, getRuntimeSummary, getAvailableLanguages, hasBunRuntime, } from "./runtime.js";
19
18
  import { classifyNonZeroExit } from "./exit-classify.js";
20
19
  import { startLifecycleGuard } from "./lifecycle.js";
21
- import { getWorktreeSuffix, SessionDB } from "./session/db.js";
20
+ import { hashProjectDirCanonical, hashProjectDirLegacy, resolveContentStorePath, resolveSessionDbPath, SessionDB } from "./session/db.js";
21
+ import { purgeSession } from "./session/purge.js";
22
+ import { emitCacheHitEvent, emitIndexWriteEvent, emitSandboxExecuteEvent, } from "./session/event-emit.js";
22
23
  import { persistToolCallCounter, restoreSessionStats } from "./session/persist-tool-calls.js";
23
24
  import { searchAllSources } from "./search/unified.js";
24
25
  import { buildNodeCommand } from "./adapters/types.js";
25
26
  import { detectPlatform, getSessionDirSegments } from "./adapters/detect.js";
27
+ import { resolveCodexConfigDir } from "./adapters/codex/paths.js";
28
+ import { getHookScriptPaths } from "./util/hook-config.js";
29
+ import { resolveClaudeConfigDir } from "./util/claude-config.js";
30
+ import { resolveProjectDir } from "./util/project-dir.js";
26
31
  import { loadDatabase } from "./db-base.js";
27
- import { AnalyticsEngine, formatReport, getLifetimeStats, OPUS_INPUT_PRICE_PER_TOKEN } from "./session/analytics.js";
32
+ import { AnalyticsEngine, formatReport, getConversationStats, getLifetimeStats, getMultiAdapterLifetimeStats, getRealBytesStats, OPUS_INPUT_PRICE_PER_TOKEN } from "./session/analytics.js";
28
33
  const __pkg_dir = dirname(fileURLToPath(import.meta.url));
29
34
  const VERSION = (() => {
30
35
  for (const rel of ["../package.json", "./package.json"]) {
@@ -106,9 +111,34 @@ let _detectedAdapter = null;
106
111
  // Tracks the ctx_insight dashboard child so shutdown can terminate it.
107
112
  // See ctx_insight handler + shutdown() in main().
108
113
  let _insightChild = null;
114
+ /**
115
+ * Resolve the Claude Code config root, honoring `CLAUDE_CONFIG_DIR` (incl.
116
+ * leading `~`) before falling back to `~/.claude`. Mirrors
117
+ * `hooks/session-helpers.mjs::resolveConfigDir` and
118
+ * `ClaudeCodeAdapter.getConfigDir` so the pre-detection path agrees with
119
+ * hooks/adapter on where Claude Code session data lives. See issue #453.
120
+ *
121
+ * Issue #460 round-3: delegates to the canonical util so empty/whitespace
122
+ * env values fall back instead of poisoning downstream `join()` calls.
123
+ */
124
+ function resolveClaudeConfigRoot() {
125
+ return resolveClaudeConfigDir();
126
+ }
127
+ async function getDiagnosticAdapter() {
128
+ if (_detectedAdapter)
129
+ return _detectedAdapter;
130
+ try {
131
+ const { getAdapter } = await import("./adapters/detect.js");
132
+ const signal = detectPlatform();
133
+ return await getAdapter(signal.platform);
134
+ }
135
+ catch {
136
+ return null;
137
+ }
138
+ }
109
139
  /**
110
140
  * Get the platform-specific sessions directory from the detected adapter.
111
- * Falls back to ~/.claude/context-mode/sessions/ before adapter detection.
141
+ * Falls back to the detected platform config root before adapter detection.
112
142
  */
113
143
  function getSessionDir() {
114
144
  if (_detectedAdapter)
@@ -116,18 +146,27 @@ function getSessionDir() {
116
146
  // Pre-detection path (race window before MCP `initialize` completes):
117
147
  // call detectPlatform() (sync, env-var-based) and look up segments via
118
148
  // getSessionDirSegments() (sync map, no adapter instantiation). This keeps
119
- // non-Claude platforms from spilling sessions into ~/.claude/.
149
+ // non-Claude platforms from spilling sessions into ~/.claude/. For Claude
150
+ // Code/Codex (single-segment roots), reroute through their config-dir
151
+ // contracts so the pre-detection window does not split-state with hooks.
120
152
  try {
121
153
  const signal = detectPlatform();
122
154
  const segments = getSessionDirSegments(signal.platform);
123
155
  if (segments) {
124
- const dir = join(homedir(), ...segments, "context-mode", "sessions");
156
+ let root = join(homedir(), ...segments);
157
+ if (segments.length === 1 && segments[0] === ".claude") {
158
+ root = resolveClaudeConfigRoot();
159
+ }
160
+ else if (segments.length === 1 && segments[0] === ".codex") {
161
+ root = resolveCodexConfigDir();
162
+ }
163
+ const dir = join(root, "context-mode", "sessions");
125
164
  mkdirSync(dir, { recursive: true });
126
165
  return dir;
127
166
  }
128
167
  }
129
- catch { /* fall through to .claude fallback */ }
130
- const dir = join(homedir(), ".claude", "context-mode", "sessions");
168
+ catch { /* fall through to claude fallback */ }
169
+ const dir = join(resolveClaudeConfigRoot(), "context-mode", "sessions");
131
170
  mkdirSync(dir, { recursive: true });
132
171
  return dir;
133
172
  }
@@ -143,14 +182,15 @@ function getSessionDir() {
143
182
  * that don't set their own env var (Cursor, OpenClaw, Codex, Kiro, Zed).
144
183
  */
145
184
  function getProjectDir() {
146
- return process.env.CLAUDE_PROJECT_DIR
147
- || process.env.GEMINI_PROJECT_DIR
148
- || process.env.VSCODE_CWD
149
- || process.env.OPENCODE_PROJECT_DIR
150
- || process.env.PI_PROJECT_DIR
151
- || process.env.IDEA_INITIAL_DIRECTORY
152
- || process.env.CONTEXT_MODE_PROJECT_DIR
153
- || process.cwd();
185
+ // Delegated to the shared resolver so the env-var chain rejects plugin
186
+ // install paths (set by a prior MCP boot's start.mjs after `/ctx-upgrade`)
187
+ // and prefers the shell-set PWD before the chdir'd cwd. See
188
+ // src/util/project-dir.ts for the rationale + safety rules.
189
+ return resolveProjectDir({
190
+ env: process.env,
191
+ cwd: process.cwd(),
192
+ pwd: process.env.PWD,
193
+ });
154
194
  }
155
195
  /**
156
196
  * Resolve a possibly-relative path against the project directory (full env cascade),
@@ -161,23 +201,17 @@ function resolveProjectPath(filePath) {
161
201
  return isAbsolute(filePath) ? filePath : resolve(getProjectDir(), filePath);
162
202
  }
163
203
  /**
164
- * Consistent project dir hashing across all DB paths.
165
- * Normalizes Windows backslashes before hashing so the same project
166
- * always produces the same hash regardless of path separator.
167
- */
168
- function hashProjectDir() {
169
- const projectDir = getProjectDir();
170
- const normalized = projectDir.replace(/\\/g, "/");
171
- return createHash("sha256").update(normalized).digest("hex").slice(0, 16);
172
- }
173
- /**
174
- * Resolve the per-project SessionDB path the way 4742160 originally did
175
- * for `persistToolCallCounter`. Centralized so the write-back, the
176
- * restore-on-startup, and any future SessionDB consumer all hash to the
177
- * same file under worktree isolation.
204
+ * Resolve the per-project SessionDB path. Delegates to
205
+ * {@link resolveSessionDbPath} so casing-only variants of the same
206
+ * physical worktree on macOS / Windows hit ONE DB, not two — and any
207
+ * pre-existing legacy raw-casing DB gets migrated in place on first
208
+ * resolve. Linux is a no-op.
178
209
  */
179
210
  function getSessionDbPath() {
180
- return join(getSessionDir(), `${hashProjectDir()}${getWorktreeSuffix()}.db`);
211
+ return resolveSessionDbPath({
212
+ projectDir: getProjectDir(),
213
+ sessionsDir: getSessionDir(),
214
+ });
181
215
  }
182
216
  /**
183
217
  * Compute a per-project, per-platform persistent path for the ContentStore.
@@ -189,12 +223,14 @@ function getSessionDbPath() {
189
223
  * ~/.cursor/context-mode/content/87c28c41ddb64d38.db
190
224
  */
191
225
  function getStorePath() {
192
- const hash = hashProjectDir();
193
226
  // Derive content dir from session dir: .../sessions/ → .../content/
194
- const sessDir = getSessionDir();
195
- const dir = join(dirname(sessDir), "content");
227
+ const dir = join(dirname(getSessionDir()), "content");
196
228
  mkdirSync(dir, { recursive: true });
197
- return join(dir, `${hash}.db`);
229
+ // Delegate to resolveContentStorePath: same case-fold + one-shot legacy
230
+ // rename behavior as resolveSessionDbPath. On macOS / Windows, an
231
+ // existing legacy raw-casing FTS5 db (with -wal/-shm sidecars) is
232
+ // migrated in place on first call. On Linux it's a no-op.
233
+ return resolveContentStorePath({ projectDir: getProjectDir(), contentDir: dir });
198
234
  }
199
235
  function getStore() {
200
236
  if (!_store) {
@@ -202,6 +238,21 @@ function getStore() {
202
238
  // Server just opens whatever DB exists (or creates new if hook deleted it).
203
239
  const dbPath = getStorePath();
204
240
  _store = new ContentStore(dbPath);
241
+ // Wire deny-policy hook: store re-checks the Read deny list before
242
+ // re-reading any file_path during auto-refresh. Catches policy edits
243
+ // made after a file was originally indexed. See #442 round-3.
244
+ _store.setDenyChecker((filePath) => {
245
+ try {
246
+ const projectDir = getProjectDir();
247
+ const denyGlobs = readToolDenyPatterns("Read", projectDir);
248
+ const r = evaluateFilePath(filePath, denyGlobs, process.platform === "win32", projectDir);
249
+ return r.denied;
250
+ }
251
+ catch {
252
+ // Fail-closed for refresh: skip on error rather than re-read.
253
+ return true;
254
+ }
255
+ });
205
256
  // One-time startup cleanup: remove stale content DBs (>14 days)
206
257
  try {
207
258
  const contentDir = dirname(getStorePath());
@@ -310,11 +361,15 @@ function healCacheMidSession() {
310
361
  return;
311
362
  _cacheHealDone = true;
312
363
  try {
313
- const ipPath = resolve(homedir(), ".claude", "plugins", "installed_plugins.json");
364
+ // Issue #460 round-3: honor $CLAUDE_CONFIG_DIR so users who relocate
365
+ // their CC config root don't have plugin cache healing operate against
366
+ // the wrong tree (and silently miss dangling-symlink cleanup).
367
+ const claudeRoot = resolveClaudeConfigDir();
368
+ const ipPath = resolve(claudeRoot, "plugins", "installed_plugins.json");
314
369
  if (!existsSync(ipPath))
315
370
  return;
316
371
  const ip = JSON.parse(readFileSync(ipPath, "utf-8"));
317
- const cacheRoot = resolve(homedir(), ".claude", "plugins", "cache");
372
+ const cacheRoot = resolve(claudeRoot, "plugins", "cache");
318
373
  // Plugin root: build/ for tsc, plugin root for bundle
319
374
  const pluginRoot = existsSync(resolve(__pkg_dir, "package.json")) ? __pkg_dir : dirname(__pkg_dir);
320
375
  for (const [key, entries] of Object.entries((ip.plugins ?? {}))) {
@@ -368,11 +423,34 @@ function trackResponse(toolName, response) {
368
423
  // setImmediate keeps this off the response hot path; the helper itself
369
424
  // is best-effort (never throws).
370
425
  setImmediate(() => persistToolCallCounter(getSessionDbPath(), toolName, bytes));
426
+ // D2 Phase 5/7 — sandbox-execute event emission. Tracks the bytes the
427
+ // user actually saw from sandboxed runs so getRealBytesStats() can
428
+ // replace the conservative `events × 256` estimate. Best-effort and
429
+ // off the hot path, same shape as persistToolCallCounter above.
430
+ if (toolName === "ctx_execute"
431
+ || toolName === "ctx_execute_file"
432
+ || toolName === "ctx_batch_execute") {
433
+ setImmediate(() => emitSandboxExecuteEvent({
434
+ sessionDbPath: getSessionDbPath(),
435
+ toolName,
436
+ bytesReturned: bytes,
437
+ }));
438
+ }
371
439
  return response;
372
440
  }
373
- function trackIndexed(bytes) {
441
+ function trackIndexed(bytes, source = "unknown") {
374
442
  sessionStats.bytesIndexed += bytes;
375
443
  persistStats();
444
+ // D2 Phase 5/7 — index-write event emission. `bytes_avoided` because
445
+ // these are bytes that would have flooded context if the user had
446
+ // Read'd the source instead of indexing.
447
+ if (bytes > 0) {
448
+ setImmediate(() => emitIndexWriteEvent({
449
+ sessionDbPath: getSessionDbPath(),
450
+ source,
451
+ bytesAvoided: bytes,
452
+ }));
453
+ }
376
454
  }
377
455
  // ─────────────────────────────────────────────────────────
378
456
  // Stats persistence — written after every tool call so
@@ -805,7 +883,7 @@ export async function runBatchCommands(commands, opts, executor) {
805
883
  // ─────────────────────────────────────────────────────────
806
884
  server.registerTool("ctx_execute", {
807
885
  title: "Execute Code",
808
- description: `MANDATORY: Use for any command where output exceeds 20 lines. Execute code in a sandboxed subprocess. Only stdout enters context — raw data stays in the subprocess.${bunNote} Available: ${langList}.\n\nPREFER THIS OVER BASH for: API calls (gh, curl, aws), test runners (npm test, pytest), git queries (git log, git diff), data processing, and ANY CLI command that may produce large output. Bash should only be used for file mutations, git writes, and navigation.\n\nTHINK IN CODE: When you need to analyze, count, filter, compare, or process data — write code that does the work and console.log() only the answer. Do NOT read raw data into context to process mentally. Program the analysis, don't compute it in your reasoning. Write robust, pure JavaScript (no npm dependencies). Use only Node.js built-ins (fs, path, child_process). Always wrap in try/catch. Handle null/undefined. Works on both Node.js and Bun.\n\nWhen reporting results — terse like caveman. Technical substance exact. Only fluff die. Pattern: [thing] [action] [reason]. [next step].`,
886
+ description: `MANDATORY: Use for any command where output exceeds 20 lines. Execute code in a sandboxed subprocess. Only stdout enters context — raw data stays in the subprocess.${bunNote} Available: ${langList}.\n\nPREFER THIS OVER BASH for: API calls (gh, curl, aws), test runners (npm test, pytest), git queries (git log, git diff), data processing, and ANY CLI command that may produce large output. Bash should only be used for file mutations, git writes, and navigation.\n\nTHINK IN CODE: When you need to analyze, count, filter, compare, or process data — write code that does the work and console.log() only the answer. Do NOT read raw data into context to process mentally. Program the analysis, don't compute it in your reasoning. Write robust, pure JavaScript (no npm dependencies). Use only Node.js built-ins (fs, path, child_process). Always wrap in try/catch. Handle null/undefined. Works on both Node.js and Bun.`,
809
887
  inputSchema: z.object({
810
888
  language: z
811
889
  .enum([
@@ -1097,7 +1175,7 @@ function intentSearch(stdout, intent, source, maxResults = 5) {
1097
1175
  // ─────────────────────────────────────────────────────────
1098
1176
  server.registerTool("ctx_execute_file", {
1099
1177
  title: "Execute File Processing",
1100
- description: "Read a file and process it without loading contents into context. The file is read into a FILE_CONTENT variable inside the sandbox. Only your printed summary enters context.\n\nPREFER THIS OVER Read/cat for: log files, data files (CSV, JSON, XML), large source files for analysis, and any file where you need to extract specific information rather than read the entire content.\n\nTHINK IN CODE: Write code that processes FILE_CONTENT and console.log() only the answer. Don't read files into context to analyze mentally. Write robust, pure JavaScript — no npm deps, try/catch, null-safe. Node.js + Bun compatible.\n\nWhen reporting results — terse like caveman. Technical substance exact. Only fluff die. Pattern: [thing] [action] [reason]. [next step].",
1178
+ description: "Read a file and process it without loading contents into context. The file is read into a FILE_CONTENT variable inside the sandbox. Only your printed summary enters context.\n\nPREFER THIS OVER Read/cat for: log files, data files (CSV, JSON, XML), large source files for analysis, and any file where you need to extract specific information rather than read the entire content.\n\nTHINK IN CODE: Write code that processes FILE_CONTENT and console.log() only the answer. Don't read files into context to analyze mentally. Write robust, pure JavaScript — no npm deps, try/catch, null-safe. Node.js + Bun compatible.",
1101
1179
  inputSchema: z.object({
1102
1180
  path: z
1103
1181
  .string()
@@ -1132,7 +1210,7 @@ server.registerTool("ctx_execute_file", {
1132
1210
  }),
1133
1211
  }, async ({ path, language, code, timeout, intent }) => {
1134
1212
  // Security: check file path against Read deny patterns
1135
- const pathDenied = checkFilePathDenyPolicy(path, "execute_file");
1213
+ const pathDenied = checkFilePathDenyPolicy(path, "ctx_execute_file");
1136
1214
  if (pathDenied)
1137
1215
  return pathDenied;
1138
1216
  // Security: check code parameter against Bash deny patterns
@@ -1267,6 +1345,15 @@ server.registerTool("ctx_index", {
1267
1345
  isError: true,
1268
1346
  });
1269
1347
  }
1348
+ // Apply Read deny-policy to prevent indexing sensitive files into the
1349
+ // FTS5 store, which would otherwise be queryable via ctx_search and
1350
+ // exfiltrate content into the model's context (issue #442). Mirrors the
1351
+ // check ctx_execute_file already performs.
1352
+ if (path) {
1353
+ const pathDenied = checkFilePathDenyPolicy(path, "ctx_index");
1354
+ if (pathDenied)
1355
+ return pathDenied;
1356
+ }
1270
1357
  try {
1271
1358
  const resolvedPath = path ? resolveProjectPath(path) : undefined;
1272
1359
  // Track the raw bytes being indexed (content or file)
@@ -1343,8 +1430,7 @@ server.registerTool("ctx_search", {
1343
1430
  "Pass ALL search questions as queries array in ONE call. " +
1344
1431
  "File-backed sources are auto-refreshed when the source file changes.\n\n" +
1345
1432
  "TIPS: 2-4 specific terms per query. Use 'source' to scope results.\n\n" +
1346
- "SESSION STATE: If skills, roles, or decisions were set earlier in this conversation, they are still active. Do not discard or contradict them.\n\n" +
1347
- "When reporting results — terse like caveman. Technical substance exact. Only fluff die. Pattern: [thing] [action] [reason]. [next step].",
1433
+ "SESSION STATE: If skills, roles, or decisions were set earlier in this conversation, they are still active. Do not discard or contradict them.",
1348
1434
  inputSchema: z.object({
1349
1435
  queries: z.preprocess(coerceJsonArray, z
1350
1436
  .array(z.string())
@@ -1438,14 +1524,15 @@ server.registerTool("ctx_search", {
1438
1524
  if (sort === "timeline") {
1439
1525
  try {
1440
1526
  const sessionsDir = getSessionDir();
1441
- const dbFile = join(sessionsDir, `${hashProjectDir()}${getWorktreeSuffix()}.db`);
1527
+ const projectDir = getProjectDir();
1528
+ const dbFile = resolveSessionDbPath({ projectDir, sessionsDir });
1442
1529
  if (existsSync(dbFile)) {
1443
1530
  timelineDB = new SessionDB({ dbPath: dbFile });
1444
1531
  }
1445
1532
  }
1446
1533
  catch { /* SessionDB unavailable — search ContentStore + auto-memory only */ }
1447
1534
  }
1448
- const configDir = _detectedAdapter?.getConfigDir() ?? (process.env.CLAUDE_CONFIG_DIR || join(homedir(), ".claude"));
1535
+ const configDir = _detectedAdapter?.getConfigDir() ?? resolveClaudeConfigRoot();
1449
1536
  try {
1450
1537
  for (const q of queryList) {
1451
1538
  if (totalSize > MAX_TOTAL) {
@@ -1551,17 +1638,151 @@ function resolveGfmPluginPath() {
1551
1638
  // Subprocess code that fetches a URL, detects Content-Type, and outputs a
1552
1639
  // __CM_CT__:<type> marker on the first line so the handler can route to the
1553
1640
  // appropriate indexing strategy. HTML is converted to markdown via Turndown.
1554
- function buildFetchCode(url, outputPath) {
1641
+ export function buildFetchCode(url, outputPath) {
1555
1642
  const turndownPath = JSON.stringify(resolveTurndownPath());
1556
1643
  const gfmPath = JSON.stringify(resolveGfmPluginPath());
1557
1644
  const escapedOutputPath = JSON.stringify(outputPath);
1645
+ // Embed classifyIp into the subprocess so the connect-time DNS lookup is
1646
+ // re-validated with the same policy as ssrfGuard. Without this, an attacker
1647
+ // can serve a public IP for the parent's pre-flight ssrfGuard lookup and
1648
+ // then a blocked IP (e.g. 169.254.169.254 IMDS) for the subprocess fetch's
1649
+ // own lookup — classic DNS rebinding across the parent/child boundary.
1650
+ const classifyIpSrc = classifyIp.toString();
1651
+ const strictMode = process.env.CTX_FETCH_STRICT === "1";
1558
1652
  return `
1559
1653
  const TurndownService = require(${turndownPath});
1560
1654
  const { gfm } = require(${gfmPath});
1561
1655
  const fs = require('fs');
1656
+ const dns = require('node:dns');
1657
+ const dnsPromises = require('node:dns/promises');
1562
1658
  const url = ${JSON.stringify(url)};
1563
1659
  const outputPath = ${escapedOutputPath};
1564
1660
 
1661
+ // Strip proxy env vars from this subprocess only. A configured outbound
1662
+ // proxy (HTTP_PROXY / HTTPS_PROXY / ALL_PROXY) would route fetch through
1663
+ // an arbitrary target — DNS resolution happens at the proxy and the
1664
+ // in-subprocess DNS rebinding guard never sees the rebound IP. The
1665
+ // sandbox fetch path has no legitimate need for an upstream proxy.
1666
+ delete process.env.HTTP_PROXY;
1667
+ delete process.env.HTTPS_PROXY;
1668
+ delete process.env.ALL_PROXY;
1669
+ delete process.env.http_proxy;
1670
+ delete process.env.https_proxy;
1671
+ delete process.env.all_proxy;
1672
+ delete process.env.npm_config_proxy;
1673
+ delete process.env.npm_config_https_proxy;
1674
+
1675
+ ${classifyIpSrc}
1676
+
1677
+ const STRICT = ${JSON.stringify(strictMode)};
1678
+
1679
+ // SSRF rebinding defense: every dns.lookup call inside this subprocess
1680
+ // (including the one undici performs to connect the fetch socket) is
1681
+ // re-validated against the same policy ssrfGuard runs in the parent.
1682
+ // Even if a hostname rebinds between the parent's pre-flight check and
1683
+ // the subprocess's actual connect, the connect-time lookup re-classifies
1684
+ // every returned record and aborts before TCP if any verdict is "block".
1685
+ const _origLookup = dns.lookup;
1686
+ dns.lookup = function patchedLookup(hostname, options, callback) {
1687
+ if (typeof options === 'function') { callback = options; options = {}; }
1688
+ if (typeof options === 'number') { options = { family: options }; }
1689
+ const wantAll = options && options.all;
1690
+ const opts = Object.assign({}, options || {}, { all: true, verbatim: true });
1691
+ _origLookup(hostname, opts, function(err, records) {
1692
+ if (err) return callback(err);
1693
+ if (!Array.isArray(records)) {
1694
+ records = [{ address: records, family: (options && options.family) || 4 }];
1695
+ }
1696
+ for (var i = 0; i < records.length; i++) {
1697
+ var verdict = classifyIp(records[i].address);
1698
+ if (verdict === 'block' || (STRICT && verdict === 'private')) {
1699
+ return callback(new Error(
1700
+ 'SSRF blocked at connect-time: ' + hostname +
1701
+ ' resolves to ' + records[i].address +
1702
+ ' (' + verdict + ')'
1703
+ ));
1704
+ }
1705
+ }
1706
+ if (wantAll) callback(null, records);
1707
+ else callback(null, records[0].address, records[0].family);
1708
+ });
1709
+ };
1710
+
1711
+ // dns/promises is a separate function reference. Patching dns.lookup does
1712
+ // NOT affect dnsPromises.lookup. Today undici's connect path uses callback
1713
+ // dns.lookup so default fetch is covered, but the invariant is fragile —
1714
+ // any future undici switch (or user code calling dnsPromises.lookup
1715
+ // directly) would bypass the guard. Patch both to keep the contract.
1716
+ const _origPromisesLookup = dnsPromises.lookup;
1717
+ dnsPromises.lookup = async function patchedPromisesLookup(hostname, options) {
1718
+ const opts = Object.assign({}, options || {}, { all: true, verbatim: true });
1719
+ const records = await _origPromisesLookup(hostname, opts);
1720
+ const list = Array.isArray(records) ? records : [records];
1721
+ for (var i = 0; i < list.length; i++) {
1722
+ var verdict = classifyIp(list[i].address);
1723
+ if (verdict === 'block' || (STRICT && verdict === 'private')) {
1724
+ throw new Error(
1725
+ 'SSRF blocked at connect-time: ' + hostname +
1726
+ ' resolves to ' + list[i].address + ' (' + verdict + ')'
1727
+ );
1728
+ }
1729
+ }
1730
+ return options && options.all
1731
+ ? list
1732
+ : { address: list[0].address, family: list[0].family };
1733
+ };
1734
+
1735
+ // dns.resolve4 / dns.resolve6 use a different code path (no getaddrinfo,
1736
+ // no /etc/hosts) than dns.lookup — they must be patched separately or the
1737
+ // guard is trivially bypassed by any caller using dns.resolve* directly.
1738
+ ['resolve4', 'resolve6'].forEach(function patchResolve(name) {
1739
+ const _origResolve = dns[name];
1740
+ dns[name] = function patchedResolve(hostname, options, cb) {
1741
+ if (typeof options === 'function') { cb = options; options = undefined; }
1742
+ _origResolve.call(dns, hostname, options || {}, function(err, addrs) {
1743
+ if (err) return cb(err);
1744
+ var withTtl = options && options.ttl;
1745
+ for (var i = 0; i < addrs.length; i++) {
1746
+ var ip = withTtl ? addrs[i].address : addrs[i];
1747
+ var v = classifyIp(ip);
1748
+ if (v === 'block' || (STRICT && v === 'private')) {
1749
+ return cb(new Error(
1750
+ 'SSRF blocked at connect-time: ' + hostname +
1751
+ ' resolves to ' + ip + ' (' + v + ')'
1752
+ ));
1753
+ }
1754
+ }
1755
+ cb(null, addrs);
1756
+ });
1757
+ };
1758
+ });
1759
+
1760
+ // Generic dns.resolve is a polymorphic dispatcher (rrtype-driven). Internally
1761
+ // Node delegates to dns.resolve4/dns.resolve6 for A/AAAA, but the patches
1762
+ // above hook the *exported* references — Node's internal dispatcher holds
1763
+ // captured originals and bypasses our patch. Patch the wrapper explicitly:
1764
+ // classify A/AAAA records the same way; pass through CNAME/MX/TXT/SRV/etc.
1765
+ const _origResolveGeneric = dns.resolve;
1766
+ dns.resolve = function patchedResolveGeneric(hostname, rrtype, cb) {
1767
+ if (typeof rrtype === 'function') { cb = rrtype; rrtype = 'A'; }
1768
+ _origResolveGeneric.call(dns, hostname, rrtype, function(err, records) {
1769
+ if (err) return cb(err);
1770
+ if ((rrtype === 'A' || rrtype === 'AAAA') && Array.isArray(records)) {
1771
+ for (var i = 0; i < records.length; i++) {
1772
+ var ip = records[i];
1773
+ var v = classifyIp(ip);
1774
+ if (v === 'block' || (STRICT && v === 'private')) {
1775
+ return cb(new Error(
1776
+ 'SSRF blocked at connect-time: ' + hostname +
1777
+ ' resolves to ' + ip + ' (' + v + ')'
1778
+ ));
1779
+ }
1780
+ }
1781
+ }
1782
+ cb(null, records);
1783
+ });
1784
+ };
1785
+
1565
1786
  function emit(ct, content) {
1566
1787
  // Write content to file to bypass executor stdout truncation (100KB limit).
1567
1788
  // Only the content-type marker goes to stdout.
@@ -1569,8 +1790,60 @@ function emit(ct, content) {
1569
1790
  console.log('__CM_CT__:' + ct);
1570
1791
  }
1571
1792
 
1793
+ // Manual redirect handling: a 3xx Location header can rebind the subprocess
1794
+ // fetch to an alternate host the parent's pre-flight ssrfGuard never saw.
1795
+ // Even with the connect-time DNS patch, a redirect target that is a literal
1796
+ // IP (e.g. http://169.254.169.254/) skips getaddrinfo entirely. Walk the
1797
+ // chain manually so every hop runs through classifyIp before the next fetch.
1798
+ const MAX_REDIRECTS = 5;
1799
+ async function fetchWithManualRedirect(initialUrl) {
1800
+ let currentUrl = initialUrl;
1801
+ for (let redirectCount = 0; redirectCount <= MAX_REDIRECTS; redirectCount++) {
1802
+ const resp = await fetch(currentUrl, { redirect: 'manual' });
1803
+ if (resp.status < 300 || resp.status >= 400) return resp;
1804
+ const location = resp.headers.get('location') || resp.headers.get('Location');
1805
+ if (!location) return resp;
1806
+ if (redirectCount === MAX_REDIRECTS) {
1807
+ throw new Error('SSRF blocked: redirect chain exceeded ' + MAX_REDIRECTS + ' hops');
1808
+ }
1809
+ let nextParsed;
1810
+ try { nextParsed = new URL(location, currentUrl); } catch (e) {
1811
+ throw new Error('SSRF blocked: invalid redirect Location: ' + location);
1812
+ }
1813
+ if (nextParsed.protocol !== 'http:' && nextParsed.protocol !== 'https:') {
1814
+ throw new Error('SSRF blocked: redirect to non-http(s) scheme ' + nextParsed.protocol);
1815
+ }
1816
+ // If the redirect target is a literal IP, classify it directly — no DNS
1817
+ // lookup will fire and the connect-time guard would never see it.
1818
+ const hostname = nextParsed.hostname.replace(/^\[|\]$/g, '');
1819
+ const isIpLiteral = /^[0-9.]+$/.test(hostname) || hostname.includes(':');
1820
+ if (isIpLiteral) {
1821
+ const verdict = classifyIp(hostname);
1822
+ if (verdict === 'block' || (STRICT && verdict === 'private')) {
1823
+ throw new Error('SSRF blocked: redirect to ' + hostname + ' (' + verdict + ')');
1824
+ }
1825
+ } else {
1826
+ // Hostname target: resolve and classify every record. The patched
1827
+ // dns.lookup also fires on the next fetch's connect, but checking
1828
+ // here gives a clearer error and short-circuits before TCP setup.
1829
+ const records = await dnsPromises.lookup(hostname, { all: true, verbatim: true });
1830
+ for (const rec of records) {
1831
+ const verdict = classifyIp(rec.address);
1832
+ if (verdict === 'block' || (STRICT && verdict === 'private')) {
1833
+ throw new Error(
1834
+ 'SSRF blocked: redirect target ' + hostname +
1835
+ ' resolves to ' + rec.address + ' (' + verdict + ')'
1836
+ );
1837
+ }
1838
+ }
1839
+ }
1840
+ currentUrl = nextParsed.toString();
1841
+ }
1842
+ throw new Error('SSRF blocked: redirect chain exceeded ' + MAX_REDIRECTS + ' hops');
1843
+ }
1844
+
1572
1845
  async function main() {
1573
- const resp = await fetch(url);
1846
+ const resp = await fetchWithManualRedirect(url);
1574
1847
  if (!resp.ok) { console.error("HTTP " + resp.status); process.exit(1); }
1575
1848
  const contentType = resp.headers.get('content-type') || '';
1576
1849
 
@@ -1699,7 +1972,14 @@ async function ssrfGuard(rawUrl) {
1699
1972
  *
1700
1973
  * Exported (via the function name) so SSRF tests can exercise the matcher directly.
1701
1974
  */
1702
- export function classifyIp(ip) {
1975
+ export function classifyIp(rawIp) {
1976
+ // RFC 6874 zone identifiers (`fe80::1%eth0`, URL-encoded `%25eth0`) must
1977
+ // be stripped BEFORE any prefix/equality classification. Without the strip,
1978
+ // a loopback `::1%eth0` no longer matches `lower === "::1"` and falls
1979
+ // through to "public" — silently bypassing the SSRF guard. Strip first,
1980
+ // classify second.
1981
+ const pctIdx = rawIp.indexOf("%");
1982
+ const ip = pctIdx === -1 ? rawIp : rawIp.slice(0, pctIdx);
1703
1983
  const lower = ip.toLowerCase();
1704
1984
  // IPv6 takes priority — check for `:` first so IPv4-mapped addresses
1705
1985
  // (`::ffff:127.0.0.1`) don't get incorrectly routed through the IPv4 parser.
@@ -1857,8 +2137,7 @@ server.registerTool("ctx_fetch_and_index", {
1857
2137
  " ✅ Use concurrency: 4-8 for: library docs sweep, multi-changelog scan, competitive pricing pages, multi-region docs, GitHub raw file pulls.\n" +
1858
2138
  " ❌ Single URL → use the legacy {url, source} shape (concurrency irrelevant).\n" +
1859
2139
  " Example: requests: [{url: 'https://react.dev/...', source: 'react'}, {url: 'https://vuejs.org/...', source: 'vue'}], concurrency: 5.\n" +
1860
- " Indexing is serial regardless of concurrency fetches race, FTS5 writes don't (avoids SQLite WAL contention).\n\n" +
1861
- "When reporting results — terse like caveman. Technical substance exact. Only fluff die. Pattern: [thing] [action] [reason]. [next step].",
2140
+ " Fetches parallelize up to your concurrency setting; FTS5 indexing serializes the writes after (SQLite single-writer rule).",
1862
2141
  inputSchema: z.object({
1863
2142
  url: z.string().optional().describe("Single URL to fetch and index (legacy single-shape)"),
1864
2143
  source: z
@@ -1930,6 +2209,16 @@ server.registerTool("ctx_fetch_and_index", {
1930
2209
  if (v.kind === "cached") {
1931
2210
  sessionStats.cacheHits++;
1932
2211
  sessionStats.cacheBytesSaved += v.estimatedBytes;
2212
+ // D2 Phase 5/7 — cache-hit event emission. `bytes_avoided` is the
2213
+ // size of the cached payload that would have re-entered context
2214
+ // had the TTL window missed. Best-effort, off the hot path.
2215
+ const cachedBytes = v.estimatedBytes;
2216
+ const cachedLabel = v.label;
2217
+ setImmediate(() => emitCacheHitEvent({
2218
+ sessionDbPath: getSessionDbPath(),
2219
+ source: cachedLabel,
2220
+ bytesAvoided: cachedBytes,
2221
+ }));
1933
2222
  finalized.push({ kind: "cached", label: v.label, chunkCount: v.chunkCount, ageStr: v.ageStr });
1934
2223
  }
1935
2224
  else if (v.kind === "fetch_error") {
@@ -2018,8 +2307,8 @@ server.registerTool("ctx_fetch_and_index", {
2018
2307
  const cappedNote = capped
2019
2308
  ? ` cap=${effectiveConcurrency}/${cpus().length}cpu`
2020
2309
  : "";
2021
- // Caveman style — terse status line: counts + sections + size.
2022
- // Singular forms used at count=1 to avoid grammar drift ("1 errors" "1 error").
2310
+ // Status line: counts + sections + size, with singular/plural agreement
2311
+ // (count=1 "1 error" not "1 errors") so the line stays grammatical.
2023
2312
  const fmt = (n, sing, plur) => `${n} ${n === 1 ? sing : plur}`;
2024
2313
  const headerLine = `fetched ${batch.length} c=${effectiveConcurrency}${cappedNote}. ` +
2025
2314
  `ok=${fetchedCount} cache=${cachedCount} err=${errorCount}. ` +
@@ -2052,8 +2341,7 @@ server.registerTool("ctx_batch_execute", {
2052
2341
  " ❌ Keep concurrency: 1 for: npm test, build, lint, image processing (CPU-bound), or commands sharing state (ports, lock files, same-repo writes).\n" +
2053
2342
  " Example: [gh issue view 1, gh issue view 2, gh issue view 3] → concurrency: 3.\n" +
2054
2343
  " Speedup depends on workload — applies to I/O wait, not CPU work.\n\n" +
2055
- "THINK IN CODE — NON-NEGOTIABLE: When commands produce data you need to analyze, count, filter, compare, or transform — add a processing command that runs JavaScript and console.log() ONLY the answer. NEVER pull raw output into context to reason over. Concurrency parallelizes the FETCH; THINK IN CODE owns the PROCESSING. One programmed analysis replaces ten read-and-reason rounds. Pure JavaScript, Node.js built-ins (fs, path, child_process), try/catch, null-safe.\n\n" +
2056
- "When reporting results — terse like caveman. Technical substance exact. Only fluff die. Pattern: [thing] [action] [reason]. [next step].",
2344
+ "THINK IN CODE — NON-NEGOTIABLE: When commands produce data you need to analyze, count, filter, compare, or transform — add a processing command that runs JavaScript and console.log() ONLY the answer. NEVER pull raw output into context to reason over. Concurrency parallelizes the FETCH; THINK IN CODE owns the PROCESSING. One programmed analysis replaces ten read-and-reason rounds. Pure JavaScript, Node.js built-ins (fs, path, child_process), try/catch, null-safe.",
2057
2345
  inputSchema: z.object({
2058
2346
  commands: z.preprocess(coerceCommandsArray, z
2059
2347
  .array(z.object({
@@ -2203,9 +2491,16 @@ server.registerTool("ctx_stats", {
2203
2491
  // ONE call, ONE source — AnalyticsEngine.queryAll()
2204
2492
  let text;
2205
2493
  try {
2206
- const dbHash = hashProjectDir();
2207
- const worktreeSuffix = getWorktreeSuffix();
2208
- const sessionDbPath = join(getSessionDir(), `${dbHash}${worktreeSuffix}.db`);
2494
+ const projectDir = getProjectDir();
2495
+ // Canonical hash + migration-aware path. The downstream
2496
+ // getConversationStats / getRealBytesStats reconstruct the DB
2497
+ // filename from worktreeHash; pass the SAME canonical hash that
2498
+ // resolveSessionDbPath used so they hit the same file.
2499
+ const dbHash = hashProjectDirCanonical(projectDir);
2500
+ const sessionDbPath = resolveSessionDbPath({
2501
+ projectDir,
2502
+ sessionsDir: getSessionDir(),
2503
+ });
2209
2504
  if (existsSync(sessionDbPath)) {
2210
2505
  const Database = loadDatabase();
2211
2506
  const sdb = new Database(sessionDbPath, { readonly: true });
@@ -2217,8 +2512,43 @@ server.registerTool("ctx_stats", {
2217
2512
  // Lifetime stats span every project's SessionDB + auto-memory dir
2218
2513
  // (Bugs #3/#4); failures are absorbed inside getLifetimeStats so a
2219
2514
  // corrupt sidecar can never break ctx_stats.
2220
- const lifetime = getLifetimeStats();
2221
- text = formatReport(report, VERSION, _latestVersion, { lifetime, mcpUsage });
2515
+ // B3b Slice 3.1: scope to active adapter via getSessionDir() so
2516
+ // non-Claude platforms (Cursor, OpenCode, JetBrains, ...) read
2517
+ // from THEIR sessions dir — not the hardcoded ~/.claude/ default.
2518
+ // Mirrors the statusline contract at src/server.ts:540.
2519
+ const lifetime = getLifetimeStats({ sessionsDir: getSessionDir() });
2520
+ // B3b Slices 3.2-3.6: cross-adapter aggregation so the renderer
2521
+ // can show "Where it came from" + the "across N AI tools"
2522
+ // headline. Best-effort — failures absorbed so a corrupt
2523
+ // sidecar in any adapter dir cannot break ctx_stats.
2524
+ let multiAdapter;
2525
+ try {
2526
+ multiAdapter = getMultiAdapterLifetimeStats();
2527
+ }
2528
+ catch { /* never block ctx_stats */ }
2529
+ // F1: wire conversation + realBytes opts so formatReport renders the
2530
+ // narrative 5-section "kitap gibi" layout (timeline, ladder, receipt,
2531
+ // example cost, auto-memory). Without these, formatReport falls back
2532
+ // to the legacy active-session header. Best-effort — failures absorbed.
2533
+ // Resolve session_id: prefer env (CLAUDE_SESSION_ID), else most-recent
2534
+ // UUID session_id from session_events in this DB.
2535
+ let conversation;
2536
+ let realBytes;
2537
+ try {
2538
+ let sid = process.env.CLAUDE_SESSION_ID;
2539
+ if (!sid) {
2540
+ const row = sdb.prepare("SELECT session_id FROM session_events WHERE session_id LIKE '________-____-____-____-____________' ORDER BY created_at DESC LIMIT 1").get();
2541
+ sid = row?.session_id;
2542
+ }
2543
+ if (sid) {
2544
+ conversation = getConversationStats({ sessionId: sid, sessionsDir: getSessionDir(), worktreeHash: dbHash });
2545
+ const convReal = getRealBytesStats({ sessionId: sid, sessionsDir: getSessionDir(), worktreeHash: dbHash });
2546
+ const lifeReal = getRealBytesStats({ sessionsDir: getSessionDir() });
2547
+ realBytes = { conversation: convReal, lifetime: lifeReal };
2548
+ }
2549
+ }
2550
+ catch { /* never block ctx_stats */ }
2551
+ text = formatReport(report, VERSION, _latestVersion, { lifetime, mcpUsage, multiAdapter, conversation, realBytes });
2222
2552
  }
2223
2553
  finally {
2224
2554
  sdb.close();
@@ -2229,8 +2559,13 @@ server.registerTool("ctx_stats", {
2229
2559
  // Lifetime still meaningful (other projects, auto-memory) so include it.
2230
2560
  const engine = new AnalyticsEngine(createMinimalDb());
2231
2561
  const report = engine.queryAll(sessionStats);
2232
- const lifetime = getLifetimeStats();
2233
- text = formatReport(report, VERSION, _latestVersion, { lifetime });
2562
+ const lifetime = getLifetimeStats({ sessionsDir: getSessionDir() });
2563
+ let multiAdapter;
2564
+ try {
2565
+ multiAdapter = getMultiAdapterLifetimeStats();
2566
+ }
2567
+ catch { /* never block ctx_stats */ }
2568
+ text = formatReport(report, VERSION, _latestVersion, { lifetime, multiAdapter });
2234
2569
  }
2235
2570
  }
2236
2571
  catch {
@@ -2239,10 +2574,15 @@ server.registerTool("ctx_stats", {
2239
2574
  const report = engine.queryAll(sessionStats);
2240
2575
  let lifetime;
2241
2576
  try {
2242
- lifetime = getLifetimeStats();
2577
+ lifetime = getLifetimeStats({ sessionsDir: getSessionDir() });
2578
+ }
2579
+ catch { /* never block ctx_stats */ }
2580
+ let multiAdapter;
2581
+ try {
2582
+ multiAdapter = getMultiAdapterLifetimeStats();
2243
2583
  }
2244
2584
  catch { /* never block ctx_stats */ }
2245
- text = formatReport(report, VERSION, _latestVersion, lifetime ? { lifetime } : undefined);
2585
+ text = formatReport(report, VERSION, _latestVersion, (lifetime || multiAdapter) ? { lifetime, multiAdapter } : undefined);
2246
2586
  }
2247
2587
  return trackResponse("ctx_stats", {
2248
2588
  content: [{ type: "text", text }],
@@ -2323,13 +2663,30 @@ server.registerTool("ctx_doctor", {
2323
2663
  catch { /* best effort */ }
2324
2664
  }
2325
2665
  }
2326
- // Hook script
2327
- const hookPath = resolve(pluginRoot, "hooks", "pretooluse.mjs");
2328
- if (existsSync(hookPath)) {
2329
- lines.push(`[OK] Hook script: PASS — ${hookPath}`);
2666
+ // Hooks
2667
+ const diagnosticAdapter = await getDiagnosticAdapter();
2668
+ if (diagnosticAdapter) {
2669
+ for (const result of diagnosticAdapter.validateHooks(pluginRoot)) {
2670
+ const prefix = result.status === "pass" ? "[OK]" : result.status === "warn" ? "[WARN]" : "[FAIL]";
2671
+ const fix = result.fix ? ` — fix: ${result.fix}` : "";
2672
+ lines.push(`${prefix} ${result.check}: ${result.message}${fix}`);
2673
+ }
2674
+ const hookScriptPaths = getHookScriptPaths(diagnosticAdapter, pluginRoot);
2675
+ if (hookScriptPaths.length === 0) {
2676
+ lines.push("[OK] Hook scripts: no direct .mjs script paths to verify");
2677
+ }
2678
+ for (const scriptPath of hookScriptPaths) {
2679
+ const hookPath = resolve(pluginRoot, scriptPath);
2680
+ if (existsSync(hookPath)) {
2681
+ lines.push(`[OK] Hook script: PASS — ${hookPath}`);
2682
+ }
2683
+ else {
2684
+ lines.push(`[FAIL] Hook script: FAIL — not found at ${hookPath}`);
2685
+ }
2686
+ }
2330
2687
  }
2331
2688
  else {
2332
- lines.push(`[FAIL] Hook script: FAIL not found at ${hookPath}`);
2689
+ lines.push("[WARN] Hooks: adapter detection unavailable");
2333
2690
  }
2334
2691
  // Version
2335
2692
  lines.push(`[OK] Version: v${VERSION}`);
@@ -2355,16 +2712,11 @@ server.registerTool("ctx_upgrade", {
2355
2712
  const sessDir = getSessionDir();
2356
2713
  const insightCacheDir = join(dirname(sessDir), "insight-cache");
2357
2714
  if (existsSync(insightCacheDir)) {
2358
- // Kill any running insight server first
2359
- try {
2360
- if (process.platform === "win32") {
2361
- execSync('for /f "tokens=5" %a in (\'netstat -ano ^| findstr :4747\') do taskkill /F /PID %a', { stdio: "pipe" });
2362
- }
2363
- else {
2364
- execSync("lsof -ti:4747 | xargs kill 2>/dev/null", { stdio: "pipe" });
2365
- }
2366
- }
2367
- catch { /* no process to kill */ }
2715
+ // Kill any running insight server first via the shared helper —
2716
+ // this is locale-independent on Windows (PR #469) and isolates per-pid
2717
+ // failures. We ignore the structured result: cache cleanup is
2718
+ // best-effort and must never block ctx_upgrade.
2719
+ killProcessOnPort(4747);
2368
2720
  rmSync(insightCacheDir, { recursive: true, force: true });
2369
2721
  }
2370
2722
  }
@@ -2380,13 +2732,11 @@ server.registerTool("ctx_upgrade", {
2380
2732
  // Inline fallback: neither CLI file exists (e.g. marketplace installs).
2381
2733
  // Generate a self-contained node -e script that performs the upgrade.
2382
2734
  const repoUrl = "https://github.com/mksglu/context-mode.git";
2383
- const copyDirs = ["build", "hooks", "skills", "scripts", ".claude-plugin"];
2384
- const copyFiles = ["start.mjs", "server.bundle.mjs", "cli.bundle.mjs", "package.json"];
2385
2735
  // Write inline script to a temp .mjs file — avoids quote-escaping issues
2386
2736
  // across cmd.exe, PowerShell, and bash (node -e '...' breaks on Windows).
2387
2737
  const scriptLines = [
2388
2738
  `import{execFileSync}from"node:child_process";`,
2389
- `import{cpSync,rmSync,existsSync,mkdtempSync}from"node:fs";`,
2739
+ `import{cpSync,rmSync,existsSync,mkdtempSync,readFileSync,writeFileSync}from"node:fs";`,
2390
2740
  `import{join}from"node:path";`,
2391
2741
  `import{tmpdir}from"node:os";`,
2392
2742
  `const P=${JSON.stringify(pluginRoot)};`,
@@ -2398,9 +2748,11 @@ server.registerTool("ctx_upgrade", {
2398
2748
  `execFileSync(process.platform==="win32"?"npm.cmd":"npm",["install"],{cwd:T,stdio:"inherit",shell:process.platform==="win32"});`,
2399
2749
  `execFileSync(process.platform==="win32"?"npm.cmd":"npm",["run","build"],{cwd:T,stdio:"inherit",shell:process.platform==="win32"});`,
2400
2750
  `console.log("- [x] Built from source");`,
2401
- ...copyDirs.map((d) => `if(existsSync(join(T,${JSON.stringify(d)})))cpSync(join(T,${JSON.stringify(d)}),join(P,${JSON.stringify(d)}),{recursive:true,force:true});`),
2402
- ...copyFiles.map((f) => `if(existsSync(join(T,${JSON.stringify(f)})))cpSync(join(T,${JSON.stringify(f)}),join(P,${JSON.stringify(f)}),{force:true});`),
2403
- `console.log("- [x] Copied build artifacts");`,
2751
+ `const pkg=JSON.parse(readFileSync(join(T,"package.json"),"utf8"));`,
2752
+ `const items=[...(Array.isArray(pkg.files)?pkg.files:[]),"src","package.json"];`,
2753
+ `for(const item of items){const from=join(T,item);const to=join(P,item);if(existsSync(from)){rmSync(to,{recursive:true,force:true});cpSync(from,to,{recursive:true,force:true});}}`,
2754
+ `writeFileSync(join(P,".mcp.json"),JSON.stringify({mcpServers:{"context-mode":{command:"node",args:["\${CLAUDE_PLUGIN_ROOT}/start.mjs"]}}},null,2)+"\\n");`,
2755
+ `console.log("- [x] Copied package files");`,
2404
2756
  `execFileSync(process.platform==="win32"?"npm.cmd":"npm",["install","--production"],{cwd:P,stdio:"inherit",shell:process.platform==="win32"});`,
2405
2757
  `console.log("- [x] Installed production dependencies");`,
2406
2758
  `console.log("## context-mode upgrade complete");`,
@@ -2462,76 +2814,40 @@ server.registerTool("ctx_purge", {
2462
2814
  }],
2463
2815
  });
2464
2816
  }
2465
- const deleted = [];
2466
- // 1. Wipe the persistent FTS5 content store
2817
+ // Close the persistent FTS5 content store handle BEFORE delegating to
2818
+ // purgeSession so the store's lock is released on Windows. The handle
2819
+ // is recreated lazily on the next getStore() call.
2820
+ let storePathForPurge;
2821
+ try {
2822
+ storePathForPurge = getStorePath();
2823
+ }
2824
+ catch { /* best effort — store path may be unresolvable on fresh install */ }
2467
2825
  if (_store) {
2468
- let storeFound = false;
2469
2826
  try {
2470
2827
  _store.cleanup();
2471
- storeFound = true;
2472
2828
  }
2473
2829
  catch { /* best effort */ }
2474
2830
  _store = null;
2475
- if (storeFound)
2476
- deleted.push("knowledge base (FTS5)");
2477
2831
  }
2478
- else {
2479
- const dbPath = getStorePath();
2480
- let found = false;
2481
- for (const suffix of ["", "-wal", "-shm"]) {
2482
- try {
2483
- unlinkSync(dbPath + suffix);
2484
- found = true;
2485
- }
2486
- catch { /* file may not exist */ }
2487
- }
2488
- if (found)
2489
- deleted.push("knowledge base (FTS5)");
2490
- }
2491
- // 2. Wipe legacy shared content DB (~/.context-mode/content/<hash>.db)
2492
- try {
2493
- const legacyPath = join(homedir(), ".context-mode", "content", `${hashProjectDir()}.db`);
2494
- for (const suffix of ["", "-wal", "-shm"]) {
2495
- try {
2496
- unlinkSync(legacyPath + suffix);
2497
- }
2498
- catch { /* ignore */ }
2499
- }
2500
- }
2501
- catch { /* best effort */ }
2502
- // 3. Wipe session events DB (analytics, metadata, resume snapshots)
2503
- try {
2504
- const dbHash = hashProjectDir();
2505
- const worktreeSuffix = getWorktreeSuffix();
2506
- const sessDir = getSessionDir();
2507
- const sessDbPath = join(sessDir, `${dbHash}${worktreeSuffix}.db`);
2508
- const eventsPath = join(sessDir, `${dbHash}${worktreeSuffix}-events.md`);
2509
- const cleanupFlag = join(sessDir, `${dbHash}${worktreeSuffix}.cleanup`);
2510
- let sessDbFound = false;
2511
- for (const suffix of ["", "-wal", "-shm"]) {
2512
- try {
2513
- unlinkSync(sessDbPath + suffix);
2514
- sessDbFound = true;
2515
- }
2516
- catch { /* ignore */ }
2517
- }
2518
- if (sessDbFound)
2519
- deleted.push("session events DB");
2520
- let eventsFound = false;
2521
- try {
2522
- unlinkSync(eventsPath);
2523
- eventsFound = true;
2524
- }
2525
- catch { /* ignore */ }
2526
- if (eventsFound)
2527
- deleted.push("session events markdown");
2528
- try {
2529
- unlinkSync(cleanupFlag);
2530
- }
2531
- catch { /* ignore */ }
2532
- }
2533
- catch { /* best effort */ }
2534
- // 3. Reset in-memory session stats
2832
+ // FTS5 store: pass contentDir so purgeSession sweeps BOTH canonical
2833
+ // and legacy raw-casing variants (dual-hash, mirrors session events).
2834
+ // storePath is also passed for the rare case where the resolver picked
2835
+ // an absolute path that differs from the dual-hash pair (e.g. caller
2836
+ // pre-migrated). Both paths are de-duped during unlink.
2837
+ const contentDir = storePathForPurge ? dirname(storePathForPurge) : undefined;
2838
+ const { deleted } = purgeSession({
2839
+ projectDir: getProjectDir(),
2840
+ sessionsDir: getSessionDir(),
2841
+ storePath: storePathForPurge,
2842
+ contentDir,
2843
+ legacyContentDir: join(homedir(), ".context-mode", "content"),
2844
+ // hashProjectDirLegacy mirrors the deployed (≤ v1.0.111) raw-casing
2845
+ // hash that named files under ~/.context-mode/content/. Using the
2846
+ // legacy hash here is correct: that pre-pre-legacy directory was
2847
+ // never migrated and still uses raw casing.
2848
+ contentHash: hashProjectDirLegacy(getProjectDir()),
2849
+ });
2850
+ // Reset in-memory session stats
2535
2851
  sessionStats.calls = {};
2536
2852
  sessionStats.bytesReturned = {};
2537
2853
  sessionStats.bytesIndexed = 0;
@@ -2554,6 +2870,173 @@ server.registerTool("ctx_purge", {
2554
2870
  }],
2555
2871
  });
2556
2872
  });
2873
// Upper bound (ms) applied to every spawnSync invocation made by these
// helpers. If an external binary wedges (xdg-open waiting on a display,
// lsof stalled on /proc, taskkill blocked by an unresponsive process),
// the spawn is cut off after this long so the MCP tool can surface a
// diagnostic instead of blocking the agent loop. Five seconds sits well
// above the worst-case completion time of every command we shell out to;
// anything slower is considered hung.
const HELPER_SPAWN_TIMEOUT_MS = 5000;
2880
// Produces the ordered list of { cmd, args } attempts for launching the
// default browser at `url` on the given `platform`. Earlier entries are
// tried first; later entries are fallbacks. Pure data — performs no I/O.
export function browserOpenArgv(url, platform) {
    switch (platform) {
        case "darwin":
            return [{ cmd: "open", args: [url] }];
        case "win32":
            // `start` exists only as a cmd.exe builtin. The empty string is
            // the window-title argument; without it `start` would consume
            // the URL as the title instead of opening it.
            return [{ cmd: "cmd", args: ["/c", "start", "", url] }];
        default:
            // linux / *bsd: xdg-open first, then Debian/Ubuntu's
            // sensible-browser as a fallback.
            return [
                { cmd: "xdg-open", args: [url] },
                { cmd: "sensible-browser", args: [url] },
            ];
    }
}
2896
// Synchronously tries each candidate browser command for `url`, in order,
// until one exits cleanly. Returns { ok: true, method } on success, or
// { ok: false, method: "none", reason } describing every failed attempt so
// the caller can report an auto-open failure instead of falsely claiming
// success. `platform` and `runner` are injectable for testing.
export function openBrowserSync(url, platform = process.platform, runner = spawnSync) {
    const failures = [];
    for (const attempt of browserOpenArgv(url, platform)) {
        const { cmd, args } = attempt;
        try {
            const res = runner(cmd, args, { stdio: "ignore", timeout: HELPER_SPAWN_TIMEOUT_MS });
            if (!res.error && res.status === 0)
                return { ok: true, method: cmd };
            // status === null means the child died from a signal (e.g. our
            // timeout); record it like a non-zero exit so the next fallback
            // command gets a chance.
            const why = res.error?.message ?? `status=${res.status === null ? "signaled" : res.status}`;
            failures.push(`${cmd}: ${why}`);
        }
        catch (err) {
            failures.push(`${cmd}: ${err instanceof Error ? err.message : String(err)}`);
        }
    }
    return { ok: false, method: "none", reason: failures.join("; ") };
}
2918
// Kills whatever is listening on `port`. The structured result lets the
// caller distinguish (a) port already free (nothing attempted), (b) kill
// succeeded, and (c) kill failed — permissions, missing binary, or a
// per-pid failure mid-loop.
//
// Windows note: the netstat STATE column is locale-translated
// ("LISTENING" becomes "À l'écoute" on FR, "ABHÖREN" on DE, ...), but the
// REMOTE ADDRESS column is not. A listening TCP socket always reports a
// wildcard remote of "0.0.0.0:0" (IPv4) or "[::]:0" (IPv6), while a
// connected socket reports a real addr:port — so the parser keys off the
// remote column rather than the state string. This also avoids matching a
// remote :port of an outbound connection and taskkill'ing an unrelated
// process (a bug the earlier port-number-only match had).
export function killProcessOnPort(port, platform = process.platform, runner = spawnSync) {
    const outcome = { killedPids: [], attemptedPids: [], errors: [] };
    if (!Number.isInteger(port) || port < 1 || port > 65535) {
        outcome.errors.push(`invalid port: ${port}`);
        return outcome;
    }
    // Runs one kill command for `pid`, recording the attempt and its
    // success/failure on `outcome`. `label` prefixes any error message.
    const attemptKill = (label, cmd, args, pid) => {
        outcome.attemptedPids.push(pid);
        try {
            const res = runner(cmd, args, { stdio: "ignore", timeout: HELPER_SPAWN_TIMEOUT_MS });
            if (res.error || res.status !== 0) {
                outcome.errors.push(`${label} ${pid}: ${res.error?.message ?? `status=${res.status}`}`);
            }
            else {
                outcome.killedPids.push(pid);
            }
        }
        catch (err) {
            outcome.errors.push(`${label} ${pid}: ${err instanceof Error ? err.message : String(err)}`);
        }
    };
    try {
        if (platform === "win32") {
            const probe = runner("netstat", ["-ano"], {
                encoding: "utf-8",
                stdio: ["ignore", "pipe", "ignore"],
                timeout: HELPER_SPAWN_TIMEOUT_MS,
            });
            if (probe.error) {
                outcome.errors.push(`netstat: ${probe.error.message}`);
                return outcome;
            }
            if (probe.status !== 0 || typeof probe.stdout !== "string")
                return outcome;
            const wantedLocal = `:${port}`;
            const listeners = new Set();
            for (const raw of probe.stdout.split(/\r?\n/)) {
                const line = raw.trim();
                if (!line)
                    continue;
                // en-US LISTENING row: "TCP 0.0.0.0:4747 0.0.0.0:0 LISTENING 1234".
                // The STATE column may itself split into multiple tokens on
                // localized Windows (FR "À l'écoute"), so STATE cannot be
                // indexed by position. PROTO/LOCAL/REMOTE are always the
                // first three columns and PID is always the last; everything
                // is anchored on those plus the locale-independent
                // wildcard-remote check below.
                const cols = line.split(/\s+/);
                if (cols.length < 5)
                    continue;
                const [proto, local, remote] = cols;
                const pid = cols[cols.length - 1];
                if (proto !== "TCP")
                    continue;
                if (!local.endsWith(wantedLocal))
                    continue;
                // Only wildcard remotes are listeners; anything else is an
                // established connection and killing it would hit an
                // unrelated process.
                if (remote !== "0.0.0.0:0" && remote !== "[::]:0")
                    continue;
                if (!/^\d+$/.test(pid))
                    continue;
                listeners.add(pid);
            }
            for (const pid of listeners)
                attemptKill("taskkill", "taskkill", ["/F", "/PID", pid], pid);
        }
        else {
            const probe = runner("lsof", ["-ti", `:${port}`], {
                encoding: "utf-8",
                stdio: ["ignore", "pipe", "ignore"],
                timeout: HELPER_SPAWN_TIMEOUT_MS,
            });
            if (probe.error) {
                // ENOENT (lsof not installed) is a real diagnostic; surface it.
                outcome.errors.push(`lsof: ${probe.error.message}`);
                return outcome;
            }
            // lsof exits 1 with empty stdout when the port is free — not an error.
            if (probe.status !== 0 || typeof probe.stdout !== "string")
                return outcome;
            const pids = probe.stdout.split(/\r?\n/).filter((p) => /^\d+$/.test(p));
            for (const pid of pids)
                attemptKill("kill", "kill", [pid], pid);
        }
    }
    catch (err) {
        outcome.errors.push(err instanceof Error ? err.message : String(err));
    }
    return outcome;
}
2557
3040
  // ── ctx-insight: analytics dashboard ──────────────────────────────────────────
2558
3041
  server.registerTool("ctx_insight", {
2559
3042
  title: "Open Insight Dashboard",
@@ -2562,7 +3045,7 @@ server.registerTool("ctx_insight", {
2562
3045
  "parallel work patterns, project focus, and actionable insights. " +
2563
3046
  "First run installs dependencies (~30s). Subsequent runs open instantly.",
2564
3047
  inputSchema: z.object({
2565
- port: z.coerce.number().optional().describe("Port to serve on (default: 4747)"),
3048
+ port: z.coerce.number().int().min(1).max(65535).optional().describe("Port to serve on (default: 4747)"),
2566
3049
  sessionDir: z.string().optional().describe("Override INSIGHT_SESSION_DIR: directory containing context-mode session .db files"),
2567
3050
  contentDir: z.string().optional().describe("Override INSIGHT_CONTENT_DIR: directory containing context-mode content/index .db files"),
2568
3051
  insightSessionDir: z.string().optional().describe("Alias for sessionDir / INSIGHT_SESSION_DIR"),
@@ -2656,34 +3139,39 @@ server.registerTool("ctx_insight", {
2656
3139
  if (portOccupied && sourceUpdated) {
2657
3140
  // Source was updated but stale server is running on port — kill it so fresh code runs
2658
3141
  steps.push("Killing stale dashboard server (source updated)...");
2659
- try {
2660
- if (process.platform === "win32") {
2661
- execSync(`for /f "tokens=5" %a in ('netstat -ano ^| findstr :${port}') do taskkill /F /PID %a`, { stdio: "pipe" });
2662
- }
2663
- else {
2664
- execSync(`lsof -ti:${port} | xargs kill 2>/dev/null`, { stdio: "pipe" });
2665
- }
2666
- await new Promise(r => setTimeout(r, 500)); // Wait for port to free
3142
+ const kill = killProcessOnPort(port);
3143
+ if (kill.attemptedPids.length > 0 && kill.killedPids.length === 0) {
3144
+ // Tried to kill, every attempt failed (perms, race, missing binary).
3145
+ // Surface so the agent doesn't loop on the same port forever.
3146
+ return trackResponse("ctx_insight", {
3147
+ content: [{
3148
+ type: "text",
3149
+ text: `Could not free port ${port} (kill failed for ${kill.attemptedPids.join(", ")}: ${kill.errors.join("; ")}). Try ctx_insight({ port: ${port + 1} }) or stop the process manually.`,
3150
+ }],
3151
+ });
2667
3152
  }
2668
- catch { /* no process to kill proceed anyway */ }
2669
- steps.push("Stale server killed.");
3153
+ if (kill.errors.length > 0 && kill.attemptedPids.length === 0) {
3154
+ // Couldn't even probe the port (e.g. lsof not installed).
3155
+ return trackResponse("ctx_insight", {
3156
+ content: [{
3157
+ type: "text",
3158
+ text: `Cannot reclaim port ${port}: ${kill.errors.join("; ")}. Stop the process manually or pick another port.`,
3159
+ }],
3160
+ });
3161
+ }
3162
+ await new Promise(r => setTimeout(r, 500)); // Wait for port to free
3163
+ steps.push(`Stale server killed (${kill.killedPids.length} pid${kill.killedPids.length === 1 ? "" : "s"}).`);
2670
3164
  }
2671
3165
  else if (portOccupied) {
2672
3166
  // Source unchanged, server is running fine — just open browser
2673
3167
  steps.push("Dashboard already running.");
2674
3168
  const url = `http://localhost:${port}`;
2675
- const platform = process.platform;
2676
- try {
2677
- if (platform === "darwin")
2678
- execSync(`open "${url}"`, { stdio: "pipe" });
2679
- else if (platform === "win32")
2680
- execSync(`start "" "${url}"`, { stdio: "pipe" });
2681
- else
2682
- execSync(`xdg-open "${url}" 2>/dev/null || sensible-browser "${url}" 2>/dev/null`, { stdio: "pipe" });
2683
- }
2684
- catch { /* browser open is best-effort */ }
3169
+ const open = openBrowserSync(url);
3170
+ const tail = open.ok
3171
+ ? ""
3172
+ : ` (auto-open failed: ${open.reason}; navigate manually)`;
2685
3173
  return trackResponse("ctx_insight", {
2686
- content: [{ type: "text", text: `Dashboard already running at http://localhost:${port}` }],
3174
+ content: [{ type: "text", text: `Dashboard already running at ${url}${tail}` }],
2687
3175
  });
2688
3176
  }
2689
3177
  // Kill any previous insight child this MCP spawned (e.g. re-invocation).
@@ -2739,17 +3227,9 @@ server.registerTool("ctx_insight", {
2739
3227
  }
2740
3228
  // Open browser (cross-platform)
2741
3229
  const url = `http://localhost:${port}`;
2742
- const platform = process.platform;
2743
- try {
2744
- if (platform === "darwin")
2745
- execSync(`open "${url}"`, { stdio: "pipe" });
2746
- else if (platform === "win32")
2747
- execSync(`start "" "${url}"`, { stdio: "pipe" });
2748
- else
2749
- execSync(`xdg-open "${url}" 2>/dev/null || sensible-browser "${url}" 2>/dev/null`, { stdio: "pipe" });
2750
- }
2751
- catch { /* browser open is best-effort */ }
2752
- steps.push(`Dashboard running at ${url}`);
3230
+ const open = openBrowserSync(url);
3231
+ const openTail = open.ok ? "" : ` (auto-open failed: ${open.reason}; navigate manually)`;
3232
+ steps.push(`Dashboard running at ${url}${openTail}`);
2753
3233
  return trackResponse("ctx_insight", {
2754
3234
  content: [{
2755
3235
  type: "text",