cccmemory 1.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (216)
  1. package/LICENSE +21 -0
  2. package/README.md +349 -0
  3. package/dist/ConversationMemory.d.ts +231 -0
  4. package/dist/ConversationMemory.d.ts.map +1 -0
  5. package/dist/ConversationMemory.js +357 -0
  6. package/dist/ConversationMemory.js.map +1 -0
  7. package/dist/cache/QueryCache.d.ts +215 -0
  8. package/dist/cache/QueryCache.d.ts.map +1 -0
  9. package/dist/cache/QueryCache.js +294 -0
  10. package/dist/cache/QueryCache.js.map +1 -0
  11. package/dist/cli/commands.d.ts +9 -0
  12. package/dist/cli/commands.d.ts.map +1 -0
  13. package/dist/cli/commands.js +954 -0
  14. package/dist/cli/commands.js.map +1 -0
  15. package/dist/cli/help.d.ts +16 -0
  16. package/dist/cli/help.d.ts.map +1 -0
  17. package/dist/cli/help.js +361 -0
  18. package/dist/cli/help.js.map +1 -0
  19. package/dist/cli/index.d.ts +30 -0
  20. package/dist/cli/index.d.ts.map +1 -0
  21. package/dist/cli/index.js +111 -0
  22. package/dist/cli/index.js.map +1 -0
  23. package/dist/context/ContextInjector.d.ts +38 -0
  24. package/dist/context/ContextInjector.d.ts.map +1 -0
  25. package/dist/context/ContextInjector.js +235 -0
  26. package/dist/context/ContextInjector.js.map +1 -0
  27. package/dist/documentation/CodeAnalyzer.d.ts +29 -0
  28. package/dist/documentation/CodeAnalyzer.d.ts.map +1 -0
  29. package/dist/documentation/CodeAnalyzer.js +122 -0
  30. package/dist/documentation/CodeAnalyzer.js.map +1 -0
  31. package/dist/documentation/ConversationAnalyzer.d.ts +19 -0
  32. package/dist/documentation/ConversationAnalyzer.d.ts.map +1 -0
  33. package/dist/documentation/ConversationAnalyzer.js +157 -0
  34. package/dist/documentation/ConversationAnalyzer.js.map +1 -0
  35. package/dist/documentation/CrossReferencer.d.ts +67 -0
  36. package/dist/documentation/CrossReferencer.d.ts.map +1 -0
  37. package/dist/documentation/CrossReferencer.js +247 -0
  38. package/dist/documentation/CrossReferencer.js.map +1 -0
  39. package/dist/documentation/DocumentationGenerator.d.ts +22 -0
  40. package/dist/documentation/DocumentationGenerator.d.ts.map +1 -0
  41. package/dist/documentation/DocumentationGenerator.js +57 -0
  42. package/dist/documentation/DocumentationGenerator.js.map +1 -0
  43. package/dist/documentation/MarkdownFormatter.d.ts +26 -0
  44. package/dist/documentation/MarkdownFormatter.d.ts.map +1 -0
  45. package/dist/documentation/MarkdownFormatter.js +301 -0
  46. package/dist/documentation/MarkdownFormatter.js.map +1 -0
  47. package/dist/documentation/types.d.ts +176 -0
  48. package/dist/documentation/types.d.ts.map +1 -0
  49. package/dist/documentation/types.js +5 -0
  50. package/dist/documentation/types.js.map +1 -0
  51. package/dist/embeddings/ConfigManager.d.ts +46 -0
  52. package/dist/embeddings/ConfigManager.d.ts.map +1 -0
  53. package/dist/embeddings/ConfigManager.js +177 -0
  54. package/dist/embeddings/ConfigManager.js.map +1 -0
  55. package/dist/embeddings/EmbeddingConfig.d.ts +39 -0
  56. package/dist/embeddings/EmbeddingConfig.d.ts.map +1 -0
  57. package/dist/embeddings/EmbeddingConfig.js +132 -0
  58. package/dist/embeddings/EmbeddingConfig.js.map +1 -0
  59. package/dist/embeddings/EmbeddingGenerator.d.ts +51 -0
  60. package/dist/embeddings/EmbeddingGenerator.d.ts.map +1 -0
  61. package/dist/embeddings/EmbeddingGenerator.js +157 -0
  62. package/dist/embeddings/EmbeddingGenerator.js.map +1 -0
  63. package/dist/embeddings/EmbeddingProvider.d.ts +34 -0
  64. package/dist/embeddings/EmbeddingProvider.d.ts.map +1 -0
  65. package/dist/embeddings/EmbeddingProvider.js +6 -0
  66. package/dist/embeddings/EmbeddingProvider.js.map +1 -0
  67. package/dist/embeddings/ModelRegistry.d.ts +48 -0
  68. package/dist/embeddings/ModelRegistry.d.ts.map +1 -0
  69. package/dist/embeddings/ModelRegistry.js +170 -0
  70. package/dist/embeddings/ModelRegistry.js.map +1 -0
  71. package/dist/embeddings/VectorStore.d.ts +114 -0
  72. package/dist/embeddings/VectorStore.d.ts.map +1 -0
  73. package/dist/embeddings/VectorStore.js +393 -0
  74. package/dist/embeddings/VectorStore.js.map +1 -0
  75. package/dist/embeddings/providers/OllamaEmbeddings.d.ts +38 -0
  76. package/dist/embeddings/providers/OllamaEmbeddings.d.ts.map +1 -0
  77. package/dist/embeddings/providers/OllamaEmbeddings.js +125 -0
  78. package/dist/embeddings/providers/OllamaEmbeddings.js.map +1 -0
  79. package/dist/embeddings/providers/OpenAIEmbeddings.d.ts +40 -0
  80. package/dist/embeddings/providers/OpenAIEmbeddings.d.ts.map +1 -0
  81. package/dist/embeddings/providers/OpenAIEmbeddings.js +129 -0
  82. package/dist/embeddings/providers/OpenAIEmbeddings.js.map +1 -0
  83. package/dist/embeddings/providers/TransformersEmbeddings.d.ts +38 -0
  84. package/dist/embeddings/providers/TransformersEmbeddings.d.ts.map +1 -0
  85. package/dist/embeddings/providers/TransformersEmbeddings.js +115 -0
  86. package/dist/embeddings/providers/TransformersEmbeddings.js.map +1 -0
  87. package/dist/handoff/SessionHandoffStore.d.ts +80 -0
  88. package/dist/handoff/SessionHandoffStore.d.ts.map +1 -0
  89. package/dist/handoff/SessionHandoffStore.js +314 -0
  90. package/dist/handoff/SessionHandoffStore.js.map +1 -0
  91. package/dist/index.d.ts +7 -0
  92. package/dist/index.d.ts.map +1 -0
  93. package/dist/index.js +115 -0
  94. package/dist/index.js.map +1 -0
  95. package/dist/mcp-server.d.ts +27 -0
  96. package/dist/mcp-server.d.ts.map +1 -0
  97. package/dist/mcp-server.js +157 -0
  98. package/dist/mcp-server.js.map +1 -0
  99. package/dist/memory/WorkingMemoryStore.d.ts +83 -0
  100. package/dist/memory/WorkingMemoryStore.d.ts.map +1 -0
  101. package/dist/memory/WorkingMemoryStore.js +318 -0
  102. package/dist/memory/WorkingMemoryStore.js.map +1 -0
  103. package/dist/memory/types.d.ts +192 -0
  104. package/dist/memory/types.d.ts.map +1 -0
  105. package/dist/memory/types.js +8 -0
  106. package/dist/memory/types.js.map +1 -0
  107. package/dist/parsers/CodexConversationParser.d.ts +51 -0
  108. package/dist/parsers/CodexConversationParser.d.ts.map +1 -0
  109. package/dist/parsers/CodexConversationParser.js +301 -0
  110. package/dist/parsers/CodexConversationParser.js.map +1 -0
  111. package/dist/parsers/ConversationParser.d.ts +286 -0
  112. package/dist/parsers/ConversationParser.d.ts.map +1 -0
  113. package/dist/parsers/ConversationParser.js +795 -0
  114. package/dist/parsers/ConversationParser.js.map +1 -0
  115. package/dist/parsers/DecisionExtractor.d.ts +144 -0
  116. package/dist/parsers/DecisionExtractor.d.ts.map +1 -0
  117. package/dist/parsers/DecisionExtractor.js +434 -0
  118. package/dist/parsers/DecisionExtractor.js.map +1 -0
  119. package/dist/parsers/GitIntegrator.d.ts +156 -0
  120. package/dist/parsers/GitIntegrator.d.ts.map +1 -0
  121. package/dist/parsers/GitIntegrator.js +348 -0
  122. package/dist/parsers/GitIntegrator.js.map +1 -0
  123. package/dist/parsers/MistakeExtractor.d.ts +151 -0
  124. package/dist/parsers/MistakeExtractor.d.ts.map +1 -0
  125. package/dist/parsers/MistakeExtractor.js +460 -0
  126. package/dist/parsers/MistakeExtractor.js.map +1 -0
  127. package/dist/parsers/RequirementsExtractor.d.ts +166 -0
  128. package/dist/parsers/RequirementsExtractor.d.ts.map +1 -0
  129. package/dist/parsers/RequirementsExtractor.js +338 -0
  130. package/dist/parsers/RequirementsExtractor.js.map +1 -0
  131. package/dist/realtime/ConversationWatcher.d.ts +87 -0
  132. package/dist/realtime/ConversationWatcher.d.ts.map +1 -0
  133. package/dist/realtime/ConversationWatcher.js +204 -0
  134. package/dist/realtime/ConversationWatcher.js.map +1 -0
  135. package/dist/realtime/IncrementalParser.d.ts +83 -0
  136. package/dist/realtime/IncrementalParser.d.ts.map +1 -0
  137. package/dist/realtime/IncrementalParser.js +232 -0
  138. package/dist/realtime/IncrementalParser.js.map +1 -0
  139. package/dist/realtime/LiveExtractor.d.ts +72 -0
  140. package/dist/realtime/LiveExtractor.d.ts.map +1 -0
  141. package/dist/realtime/LiveExtractor.js +288 -0
  142. package/dist/realtime/LiveExtractor.js.map +1 -0
  143. package/dist/search/SemanticSearch.d.ts +121 -0
  144. package/dist/search/SemanticSearch.d.ts.map +1 -0
  145. package/dist/search/SemanticSearch.js +823 -0
  146. package/dist/search/SemanticSearch.js.map +1 -0
  147. package/dist/storage/BackupManager.d.ts +58 -0
  148. package/dist/storage/BackupManager.d.ts.map +1 -0
  149. package/dist/storage/BackupManager.js +223 -0
  150. package/dist/storage/BackupManager.js.map +1 -0
  151. package/dist/storage/ConversationStorage.d.ts +341 -0
  152. package/dist/storage/ConversationStorage.d.ts.map +1 -0
  153. package/dist/storage/ConversationStorage.js +792 -0
  154. package/dist/storage/ConversationStorage.js.map +1 -0
  155. package/dist/storage/DeletionService.d.ts +70 -0
  156. package/dist/storage/DeletionService.d.ts.map +1 -0
  157. package/dist/storage/DeletionService.js +253 -0
  158. package/dist/storage/DeletionService.js.map +1 -0
  159. package/dist/storage/GlobalIndex.d.ts +133 -0
  160. package/dist/storage/GlobalIndex.d.ts.map +1 -0
  161. package/dist/storage/GlobalIndex.js +310 -0
  162. package/dist/storage/GlobalIndex.js.map +1 -0
  163. package/dist/storage/SQLiteManager.d.ts +114 -0
  164. package/dist/storage/SQLiteManager.d.ts.map +1 -0
  165. package/dist/storage/SQLiteManager.js +636 -0
  166. package/dist/storage/SQLiteManager.js.map +1 -0
  167. package/dist/storage/migrations.d.ts +54 -0
  168. package/dist/storage/migrations.d.ts.map +1 -0
  169. package/dist/storage/migrations.js +285 -0
  170. package/dist/storage/migrations.js.map +1 -0
  171. package/dist/storage/schema.sql +436 -0
  172. package/dist/tools/ToolDefinitions.d.ts +946 -0
  173. package/dist/tools/ToolDefinitions.d.ts.map +1 -0
  174. package/dist/tools/ToolDefinitions.js +937 -0
  175. package/dist/tools/ToolDefinitions.js.map +1 -0
  176. package/dist/tools/ToolHandlers.d.ts +791 -0
  177. package/dist/tools/ToolHandlers.d.ts.map +1 -0
  178. package/dist/tools/ToolHandlers.js +3262 -0
  179. package/dist/tools/ToolHandlers.js.map +1 -0
  180. package/dist/types/ToolTypes.d.ts +824 -0
  181. package/dist/types/ToolTypes.d.ts.map +1 -0
  182. package/dist/types/ToolTypes.js +6 -0
  183. package/dist/types/ToolTypes.js.map +1 -0
  184. package/dist/utils/Logger.d.ts +70 -0
  185. package/dist/utils/Logger.d.ts.map +1 -0
  186. package/dist/utils/Logger.js +131 -0
  187. package/dist/utils/Logger.js.map +1 -0
  188. package/dist/utils/McpConfig.d.ts +54 -0
  189. package/dist/utils/McpConfig.d.ts.map +1 -0
  190. package/dist/utils/McpConfig.js +136 -0
  191. package/dist/utils/McpConfig.js.map +1 -0
  192. package/dist/utils/ProjectMigration.d.ts +82 -0
  193. package/dist/utils/ProjectMigration.d.ts.map +1 -0
  194. package/dist/utils/ProjectMigration.js +416 -0
  195. package/dist/utils/ProjectMigration.js.map +1 -0
  196. package/dist/utils/constants.d.ts +75 -0
  197. package/dist/utils/constants.d.ts.map +1 -0
  198. package/dist/utils/constants.js +105 -0
  199. package/dist/utils/constants.js.map +1 -0
  200. package/dist/utils/safeJson.d.ts +37 -0
  201. package/dist/utils/safeJson.d.ts.map +1 -0
  202. package/dist/utils/safeJson.js +48 -0
  203. package/dist/utils/safeJson.js.map +1 -0
  204. package/dist/utils/sanitization.d.ts +45 -0
  205. package/dist/utils/sanitization.d.ts.map +1 -0
  206. package/dist/utils/sanitization.js +153 -0
  207. package/dist/utils/sanitization.js.map +1 -0
  208. package/dist/utils/worktree.d.ts +15 -0
  209. package/dist/utils/worktree.d.ts.map +1 -0
  210. package/dist/utils/worktree.js +86 -0
  211. package/dist/utils/worktree.js.map +1 -0
  212. package/package.json +98 -0
  213. package/scripts/changelog-check.sh +62 -0
  214. package/scripts/check-node.js +17 -0
  215. package/scripts/dev-config.js +56 -0
  216. package/scripts/postinstall.js +117 -0
@@ -0,0 +1,3262 @@
1
+ /**
2
+ * MCP Tool Handlers - Implementation of all 22 tools for the cccmemory MCP server.
3
+ *
4
+ * This class provides the implementation for all MCP (Model Context Protocol) tools
5
+ * that allow Claude to interact with conversation history and memory.
6
+ *
7
+ * Tools are organized into categories:
8
+ * - Indexing: index_conversations
9
+ * - Search: search_conversations, searchDecisions, search_mistakes
10
+ * - File Context: check_before_modify, get_file_evolution
11
+ * - History: get_tool_history, link_commits_to_conversations
12
+ * - Discovery: find_similar_sessions, get_requirements
13
+ * - Recall: recall_and_apply
14
+ * - Documentation: generate_documentation
15
+ * - Migration: discover_old_conversations, migrate_project
16
+ *
17
+ * @example
18
+ * ```typescript
19
+ * const handlers = new ToolHandlers(memory, db, '/path/to/projects');
20
+ * const result = await handlers.indexConversations({
21
+ * project_path: '/Users/me/my-project'
22
+ * });
23
+ * ```
24
+ */
25
+ import { sanitizeForLike } from "../utils/sanitization.js";
26
+ import { getCanonicalProjectPath, getWorktreeInfo } from "../utils/worktree.js";
27
+ import { DocumentationGenerator } from "../documentation/DocumentationGenerator.js";
28
+ import { ProjectMigration } from "../utils/ProjectMigration.js";
29
+ import { pathToProjectFolderName } from "../utils/sanitization.js";
30
+ import { DeletionService } from "../storage/DeletionService.js";
31
+ import { readdirSync } from "fs";
32
+ import { join, resolve } from "path";
33
+ import { safeJsonParse } from "../utils/safeJson.js";
34
+ /**
35
+ * Pagination Patterns:
36
+ *
37
+ * This codebase uses two different pagination patterns based on data source:
38
+ *
39
+ * 1. SQL-based pagination (fetch+1):
40
+ * - Fetch limit+1 records from database
41
+ * - hasMore = results.length > limit
42
+ * - Slice to limit if hasMore is true
43
+ * - Use case: Single-database SQL queries (searchMistakes, linkCommitsToConversations)
44
+ * - Advantage: Efficient, minimal data transfer
45
+ *
46
+ * 2. In-memory pagination (slice):
47
+ * - Fetch all needed results (or limit+offset)
48
+ * - Slice to get paginated subset: results.slice(offset, offset + limit)
49
+ * - has_more = offset + limit < results.length
50
+ * - Use case: Semantic search, cross-project aggregation
51
+ * - Advantage: Allows sorting/filtering before pagination
52
+ *
53
+ * Both patterns are correct and optimized for their respective use cases.
54
+ */
55
+ /**
56
+ * Tool handlers for the cccmemory MCP server.
57
+ *
58
+ * Provides methods for indexing, searching, and managing conversation history.
59
+ */
60
+ export class ToolHandlers {
61
+ memory;
62
+ db;
63
+ migration;
64
+ lastAutoIndex = 0;
65
+ autoIndexPromise = null;
66
+ AUTO_INDEX_COOLDOWN = 60000; // 1 minute
67
+ /**
68
+ * Create a new ToolHandlers instance.
69
+ *
70
+ * @param memory - ConversationMemory instance for core operations
71
+ * @param db - SQLiteManager for database access
72
+ * @param projectsDir - Optional directory for storing project data
73
+ */
74
+ constructor(memory, db, projectsDir) {
75
+ this.memory = memory;
76
+ this.db = db;
77
+ this.migration = new ProjectMigration(db, projectsDir);
78
+ }
79
+ resolveProjectPath(input) {
80
+ const rawPath = input || process.cwd();
81
+ return getCanonicalProjectPath(rawPath).canonicalPath;
82
+ }
83
+ resolveOptionalProjectPath(input) {
84
+ if (!input) {
85
+ return undefined;
86
+ }
87
+ return this.resolveProjectPath(input);
88
+ }
89
+ inferProjectPathFromMessages(messages) {
90
+ const counts = new Map();
91
+ for (const message of messages) {
92
+ const cwd = message.cwd;
93
+ if (!cwd || typeof cwd !== "string") {
94
+ continue;
95
+ }
96
+ const trimmed = cwd.trim();
97
+ if (!trimmed) {
98
+ continue;
99
+ }
100
+ counts.set(trimmed, (counts.get(trimmed) || 0) + 1);
101
+ }
102
+ let bestPath = null;
103
+ let bestCount = 0;
104
+ for (const [path, count] of counts) {
105
+ if (count > bestCount) {
106
+ bestCount = count;
107
+ bestPath = path;
108
+ }
109
+ }
110
+ return bestPath;
111
+ }
112
+ /**
113
+ * Automatically run incremental indexing if cooldown has expired.
114
+ * Uses a mutex (autoIndexPromise) to coalesce concurrent calls and prevent stampede.
115
+ * This ensures search results include recent conversations without
116
+ * requiring manual indexing.
117
+ */
118
+ async maybeAutoIndex() {
119
+ const now = Date.now();
120
+ // If indexing is already in progress, wait for it
121
+ if (this.autoIndexPromise) {
122
+ await this.autoIndexPromise;
123
+ return;
124
+ }
125
+ // Check cooldown
126
+ if (now - this.lastAutoIndex <= this.AUTO_INDEX_COOLDOWN) {
127
+ return;
128
+ }
129
+ // Update timestamp immediately to prevent concurrent triggers
130
+ this.lastAutoIndex = now;
131
+ try {
132
+ // Create the indexing promise and store it for coalescing
133
+ this.autoIndexPromise = this.indexAllProjects({ incremental: true }).then(() => { });
134
+ await this.autoIndexPromise;
135
+ }
136
+ catch (error) {
137
+ // Log but don't fail - search should still work with existing index
138
+ console.error('Auto-indexing failed:', error);
139
+ }
140
+ finally {
141
+ this.autoIndexPromise = null;
142
+ }
143
+ }
144
+ /**
145
+ * Index conversation history for a project.
146
+ *
147
+ * Parses conversation files from Claude Code's conversation history, extracts
148
+ * decisions, mistakes, and requirements, links git commits, and generates
149
+ * semantic embeddings for search.
150
+ *
151
+ * @param args - Indexing arguments:
152
+ * - `project_path`: Path to the project (defaults to cwd)
153
+ * - `session_id`: Optional specific session to index
154
+ * - `include_thinking`: Include thinking blocks (default: false)
155
+ * - `enable_git`: Enable git integration (default: true)
156
+ * - `exclude_mcp_conversations`: Exclude MCP tool conversations (default: 'self-only')
157
+ * - `exclude_mcp_servers`: List of specific MCP servers to exclude
158
+ *
159
+ * @returns Result containing:
160
+ * - `success`: Whether indexing succeeded
161
+ * - `stats`: Counts of conversations, messages, decisions, etc.
162
+ * - `indexed_folders`: List of folders that were indexed
163
+ * - `database_path`: Path to the SQLite database
164
+ * - `embeddings_generated`: Whether embeddings were created
165
+ * - `embedding_error`: Error message if embeddings failed
166
+ * - `message`: Human-readable status message
167
+ *
168
+ * @example
169
+ * ```typescript
170
+ * const result = await handlers.indexConversations({
171
+ * project_path: '/Users/me/my-project',
172
+ * enable_git: true,
173
+ * exclude_mcp_conversations: 'self-only'
174
+ * });
175
+ * console.error(result.message); // "Indexed 5 conversation(s) with 245 messages..."
176
+ * ```
177
+ */
178
+ async indexConversations(args) {
179
+ const typedArgs = args;
180
+ const rawProjectPath = typedArgs.project_path || process.cwd();
181
+ const { canonicalPath, worktreePaths } = getWorktreeInfo(rawProjectPath);
182
+ const projectPath = canonicalPath;
183
+ const sessionId = typedArgs.session_id;
184
+ const includeThinking = typedArgs.include_thinking ?? false;
185
+ const enableGit = typedArgs.enable_git ?? true;
186
+ const excludeMcpConversations = typedArgs.exclude_mcp_conversations ?? 'self-only';
187
+ const excludeMcpServers = typedArgs.exclude_mcp_servers;
188
+ const { GlobalIndex } = await import("../storage/GlobalIndex.js");
189
+ const globalIndex = new GlobalIndex();
190
+ try {
191
+ let lastIndexedMs;
192
+ if (!sessionId) {
193
+ const existingProject = globalIndex.getProject(projectPath);
194
+ if (existingProject) {
195
+ lastIndexedMs = existingProject.last_indexed;
196
+ }
197
+ }
198
+ // Check if we need to use a project-specific database
199
+ // This is needed when indexing a different project than where the MCP server is running
200
+ const currentDbPath = this.db.getDbPath();
201
+ const targetProjectFolderName = pathToProjectFolderName(projectPath);
202
+ // Use exact path segment match to avoid false positives with substring matching
203
+ // e.g., "my-project" should not match "my-project-v2"
204
+ const isCurrentProject = currentDbPath.endsWith(`/${targetProjectFolderName}/`) ||
205
+ currentDbPath.endsWith(`\\${targetProjectFolderName}\\`) ||
206
+ currentDbPath.includes(`/${targetProjectFolderName}/`) ||
207
+ currentDbPath.includes(`\\${targetProjectFolderName}\\`);
208
+ let indexResult;
209
+ let stats;
210
+ if (!isCurrentProject) {
211
+ // Create a project-specific database for the target project
212
+ const { SQLiteManager } = await import("../storage/SQLiteManager.js");
213
+ const { ConversationStorage } = await import("../storage/ConversationStorage.js");
214
+ const { ConversationParser } = await import("../parsers/ConversationParser.js");
215
+ const { DecisionExtractor } = await import("../parsers/DecisionExtractor.js");
216
+ const { MistakeExtractor } = await import("../parsers/MistakeExtractor.js");
217
+ const { SemanticSearch } = await import("../search/SemanticSearch.js");
218
+ const { homedir } = await import("os");
219
+ // Create dedicated database in the target project's .claude folder
220
+ const projectDbPath = join(homedir(), ".claude", "projects", targetProjectFolderName, ".cccmemory.db");
221
+ console.error(`\n📂 Using project-specific database for: ${projectPath}`);
222
+ console.error(` Database path: ${projectDbPath}`);
223
+ const projectDb = new SQLiteManager({ dbPath: projectDbPath });
224
+ try {
225
+ const projectStorage = new ConversationStorage(projectDb);
226
+ // Parse conversations from the target project
227
+ const parser = new ConversationParser();
228
+ let parseResult = parser.parseProjects(worktreePaths, sessionId, projectPath, lastIndexedMs);
229
+ // Filter MCP conversations if requested
230
+ if (excludeMcpConversations || excludeMcpServers) {
231
+ parseResult = this.filterMcpConversationsHelper(parseResult, {
232
+ excludeMcpConversations,
233
+ excludeMcpServers,
234
+ });
235
+ }
236
+ // Store basic entities
237
+ await projectStorage.storeConversations(parseResult.conversations);
238
+ await projectStorage.storeMessages(parseResult.messages);
239
+ await projectStorage.storeToolUses(parseResult.tool_uses);
240
+ await projectStorage.storeToolResults(parseResult.tool_results);
241
+ await projectStorage.storeFileEdits(parseResult.file_edits);
242
+ if (includeThinking !== false) {
243
+ await projectStorage.storeThinkingBlocks(parseResult.thinking_blocks);
244
+ }
245
+ // Extract and store decisions
246
+ const decisionExtractor = new DecisionExtractor();
247
+ const decisions = decisionExtractor.extractDecisions(parseResult.messages, parseResult.thinking_blocks);
248
+ await projectStorage.storeDecisions(decisions);
249
+ // Extract and store mistakes
250
+ const mistakeExtractor = new MistakeExtractor();
251
+ const mistakes = mistakeExtractor.extractMistakes(parseResult.messages, parseResult.tool_results);
252
+ await projectStorage.storeMistakes(mistakes);
253
+ // Generate embeddings for semantic search
254
+ let embeddingError;
255
+ try {
256
+ const semanticSearch = new SemanticSearch(projectDb);
257
+ await semanticSearch.indexMessages(parseResult.messages);
258
+ await semanticSearch.indexDecisions(decisions);
259
+ await semanticSearch.indexMistakes(mistakes);
260
+ // Also index any decisions/mistakes in DB that are missing embeddings
261
+ // (catches items created before embeddings were available)
262
+ await semanticSearch.indexMissingDecisionEmbeddings();
263
+ await semanticSearch.indexMissingMistakeEmbeddings();
264
+ console.error(`✓ Generated embeddings for project: ${projectPath}`);
265
+ }
266
+ catch (embedError) {
267
+ embeddingError = embedError.message;
268
+ console.error(`⚠️ Embedding generation failed:`, embeddingError);
269
+ console.error(" FTS fallback will be used for search");
270
+ }
271
+ // Get stats
272
+ stats = projectStorage.getStats();
273
+ indexResult = {
274
+ embeddings_generated: !embeddingError,
275
+ embedding_error: embeddingError,
276
+ indexed_folders: parseResult.indexed_folders,
277
+ database_path: projectDbPath,
278
+ };
279
+ }
280
+ finally {
281
+ // Close the project database
282
+ projectDb.close();
283
+ }
284
+ }
285
+ else {
286
+ // Use the existing memory instance for the current project
287
+ indexResult = await this.memory.indexConversations({
288
+ projectPath,
289
+ sessionId,
290
+ includeThinking,
291
+ enableGitIntegration: enableGit,
292
+ excludeMcpConversations,
293
+ excludeMcpServers,
294
+ lastIndexedMs,
295
+ });
296
+ stats = this.memory.getStats();
297
+ }
298
+ const dbPathForIndex = indexResult.database_path || this.db.getDbPath();
299
+ globalIndex.registerProject({
300
+ project_path: projectPath,
301
+ source_type: "claude-code",
302
+ db_path: dbPathForIndex,
303
+ message_count: stats.messages.count,
304
+ conversation_count: stats.conversations.count,
305
+ decision_count: stats.decisions.count,
306
+ mistake_count: stats.mistakes.count,
307
+ metadata: {
308
+ indexed_folders: indexResult.indexed_folders || [],
309
+ },
310
+ });
311
+ const sessionInfo = sessionId ? ` (session: ${sessionId})` : ' (all sessions)';
312
+ let message = `Indexed ${stats.conversations.count} conversation(s) with ${stats.messages.count} messages${sessionInfo}`;
313
+ // Add indexed folders info
314
+ if (indexResult.indexed_folders && indexResult.indexed_folders.length > 0) {
315
+ message += `\n📁 Indexed from: ${indexResult.indexed_folders.join(', ')}`;
316
+ }
317
+ // Add database location info
318
+ if (indexResult.database_path) {
319
+ message += `\n💾 Database: ${indexResult.database_path}`;
320
+ }
321
+ // Add embedding status to message
322
+ if (indexResult.embeddings_generated) {
323
+ message += '\n✅ Semantic search enabled (embeddings generated)';
324
+ }
325
+ else if (indexResult.embedding_error) {
326
+ message += `\n⚠️ Semantic search unavailable: ${indexResult.embedding_error}`;
327
+ message += '\n Falling back to full-text search';
328
+ }
329
+ return {
330
+ success: true,
331
+ project_path: projectPath,
332
+ indexed_folders: indexResult.indexed_folders,
333
+ database_path: indexResult.database_path,
334
+ stats,
335
+ embeddings_generated: indexResult.embeddings_generated,
336
+ embedding_error: indexResult.embedding_error,
337
+ message,
338
+ };
339
+ }
340
+ finally {
341
+ globalIndex.close();
342
+ }
343
+ }
344
+ /**
345
+ * Helper method to filter MCP conversations from parse results.
346
+ * Extracted to be usable in both the main indexConversations and project-specific indexing.
347
+ */
348
+ filterMcpConversationsHelper(result, options) {
349
+ // Determine which MCP servers to exclude
350
+ const serversToExclude = new Set();
351
+ if (options.excludeMcpServers && options.excludeMcpServers.length > 0) {
352
+ options.excludeMcpServers.forEach(s => serversToExclude.add(s));
353
+ }
354
+ else if (options.excludeMcpConversations === 'self-only') {
355
+ serversToExclude.add('cccmemory');
356
+ }
357
+ else if (options.excludeMcpConversations === 'all-mcp' || options.excludeMcpConversations === true) {
358
+ for (const toolUse of result.tool_uses) {
359
+ if (toolUse.tool_name.startsWith('mcp__')) {
360
+ const parts = toolUse.tool_name.split('__');
361
+ if (parts.length >= 2) {
362
+ serversToExclude.add(parts[1]);
363
+ }
364
+ }
365
+ }
366
+ }
367
+ if (serversToExclude.size === 0) {
368
+ return result;
369
+ }
370
+ // Build set of excluded tool_use IDs
371
+ const excludedToolUseIds = new Set();
372
+ for (const toolUse of result.tool_uses) {
373
+ if (toolUse.tool_name.startsWith('mcp__')) {
374
+ const parts = toolUse.tool_name.split('__');
375
+ if (parts.length >= 2 && serversToExclude.has(parts[1])) {
376
+ excludedToolUseIds.add(toolUse.id);
377
+ }
378
+ }
379
+ }
380
+ // Build set of excluded message IDs
381
+ const excludedMessageIds = new Set();
382
+ for (const toolUse of result.tool_uses) {
383
+ if (excludedToolUseIds.has(toolUse.id)) {
384
+ excludedMessageIds.add(toolUse.message_id);
385
+ }
386
+ }
387
+ for (const toolResult of result.tool_results) {
388
+ if (excludedToolUseIds.has(toolResult.tool_use_id)) {
389
+ excludedMessageIds.add(toolResult.message_id);
390
+ }
391
+ }
392
+ if (excludedMessageIds.size > 0) {
393
+ console.error(`\n⚠️ Excluding ${excludedMessageIds.size} message(s) containing MCP tool calls from: ${Array.from(serversToExclude).join(', ')}`);
394
+ }
395
+ const remainingMessageIds = new Set(result.messages
396
+ .filter(m => !excludedMessageIds.has(m.id))
397
+ .map(m => m.id));
398
+ return {
399
+ ...result,
400
+ messages: result.messages.filter(m => !excludedMessageIds.has(m.id)),
401
+ tool_uses: result.tool_uses.filter(t => !excludedToolUseIds.has(t.id)),
402
+ tool_results: result.tool_results.filter(tr => !excludedToolUseIds.has(tr.tool_use_id)),
403
+ file_edits: result.file_edits.filter(fe => remainingMessageIds.has(fe.message_id)),
404
+ thinking_blocks: result.thinking_blocks.filter(tb => remainingMessageIds.has(tb.message_id)),
405
+ };
406
+ }
407
+ /**
408
+ * Search conversation history using natural language queries.
409
+ *
410
+ * Uses semantic search with embeddings if available, otherwise falls back
411
+ * to full-text search. Returns relevant messages with context and similarity scores.
412
+ *
413
+ * @param args - Search arguments:
414
+ * - `query`: Natural language search query (required)
415
+ * - `limit`: Maximum number of results (default: 10)
416
+ * - `date_range`: Optional [start_timestamp, end_timestamp] filter
417
+ *
418
+ * @returns Search results containing:
419
+ * - `query`: The search query used
420
+ * - `results`: Array of matching messages with:
421
+ * - `conversation_id`: Conversation containing the message
422
+ * - `message_id`: Message identifier
423
+ * - `timestamp`: When the message was created
424
+ * - `similarity`: Relevance score (0-1)
425
+ * - `snippet`: Text excerpt from the message
426
+ * - `git_branch`: Git branch at the time
427
+ * - `message_type`: Type of message
428
+ * - `role`: Message role (user/assistant)
429
+ * - `total_found`: Number of results returned
430
+ *
431
+ * @example
432
+ * ```typescript
433
+ * const result = await handlers.searchConversations({
434
+ * query: 'authentication bug fix',
435
+ * limit: 5
436
+ * });
437
+ * result.results.forEach(r => {
438
+ * console.error(`${r.similarity.toFixed(2)}: ${r.snippet}`);
439
+ * });
440
+ * ```
441
+ */
442
+ async searchConversations(args) {
443
+ await this.maybeAutoIndex();
444
+ const typedArgs = args;
445
+ const { query, limit = 10, offset = 0, date_range, scope = 'all', conversation_id } = typedArgs;
446
+ // Handle global scope by delegating to searchAllConversations
447
+ if (scope === 'global') {
448
+ const { GlobalIndex } = await import("../storage/GlobalIndex.js");
449
+ const { SQLiteManager } = await import("../storage/SQLiteManager.js");
450
+ const { SemanticSearch } = await import("../search/SemanticSearch.js");
451
+ const { getEmbeddingGenerator } = await import("../embeddings/EmbeddingGenerator.js");
452
+ const globalIndex = new GlobalIndex();
453
+ const projects = globalIndex.getAllProjects();
454
+ const allResults = [];
455
+ // Pre-compute query embedding once for all projects
456
+ let queryEmbedding;
457
+ try {
458
+ const embedder = await getEmbeddingGenerator();
459
+ if (embedder.isAvailable()) {
460
+ queryEmbedding = await embedder.embed(query);
461
+ }
462
+ }
463
+ catch (_embeddingError) {
464
+ // Fall back to FTS
465
+ }
466
+ for (const project of projects) {
467
+ let projectDb = null;
468
+ try {
469
+ projectDb = new SQLiteManager({ dbPath: project.db_path, readOnly: true });
470
+ const semanticSearch = new SemanticSearch(projectDb);
471
+ const localResults = await semanticSearch.searchConversations(query, limit + offset, undefined, queryEmbedding);
472
+ const filteredResults = date_range
473
+ ? localResults.filter((r) => {
474
+ const timestamp = r.message.timestamp;
475
+ return timestamp >= date_range[0] && timestamp <= date_range[1];
476
+ })
477
+ : localResults;
478
+ for (const result of filteredResults) {
479
+ allResults.push({
480
+ conversation_id: result.conversation.id,
481
+ message_id: result.message.id,
482
+ timestamp: new Date(result.message.timestamp).toISOString(),
483
+ similarity: result.similarity,
484
+ snippet: result.snippet,
485
+ git_branch: result.conversation.git_branch,
486
+ message_type: result.message.message_type,
487
+ role: result.message.role,
488
+ });
489
+ }
490
+ }
491
+ catch (error) {
492
+ // Track failed projects for debugging - don't silently ignore
493
+ console.error(`Search failed for project ${project.db_path}:`, error.message);
494
+ continue;
495
+ }
496
+ finally {
497
+ if (projectDb) {
498
+ projectDb.close();
499
+ }
500
+ }
501
+ }
502
+ allResults.sort((a, b) => b.similarity - a.similarity);
503
+ const paginatedResults = allResults.slice(offset, offset + limit);
504
+ return {
505
+ query,
506
+ results: paginatedResults,
507
+ total_found: paginatedResults.length,
508
+ has_more: offset + limit < allResults.length,
509
+ offset,
510
+ scope: 'global',
511
+ };
512
+ }
513
+ // Handle current session scope
514
+ if (scope === 'current') {
515
+ if (!conversation_id) {
516
+ throw new Error("conversation_id is required when scope='current'");
517
+ }
518
+ // Overfetch to account for post-query filtering (conversation_id, date_range)
519
+ // Use 4x multiplier to ensure we have enough results after filtering
520
+ const overfetchMultiplier = 4;
521
+ const fetchLimit = (limit + offset) * overfetchMultiplier;
522
+ const results = await this.memory.search(query, fetchLimit);
523
+ const filteredResults = results.filter(r => r.conversation.id === conversation_id);
524
+ const dateFilteredResults = date_range
525
+ ? filteredResults.filter(r => {
526
+ const timestamp = r.message.timestamp;
527
+ return timestamp >= date_range[0] && timestamp <= date_range[1];
528
+ })
529
+ : filteredResults;
530
+ const paginatedResults = dateFilteredResults.slice(offset, offset + limit);
531
+ return {
532
+ query,
533
+ results: paginatedResults.map((r) => ({
534
+ conversation_id: r.conversation.id,
535
+ message_id: r.message.id,
536
+ timestamp: new Date(r.message.timestamp).toISOString(),
537
+ similarity: r.similarity,
538
+ snippet: r.snippet,
539
+ git_branch: r.conversation.git_branch,
540
+ message_type: r.message.message_type,
541
+ role: r.message.role,
542
+ })),
543
+ total_found: paginatedResults.length,
544
+ has_more: offset + limit < dateFilteredResults.length,
545
+ offset,
546
+ scope: 'current',
547
+ };
548
+ }
549
+ // Handle 'all' scope (default) - all sessions in current project
550
+ const results = await this.memory.search(query, limit + offset);
551
+ const filteredResults = date_range
552
+ ? results.filter(r => {
553
+ const timestamp = r.message.timestamp;
554
+ return timestamp >= date_range[0] && timestamp <= date_range[1];
555
+ })
556
+ : results;
557
+ const paginatedResults = filteredResults.slice(offset, offset + limit);
558
+ return {
559
+ query,
560
+ results: paginatedResults.map((r) => ({
561
+ conversation_id: r.conversation.id,
562
+ message_id: r.message.id,
563
+ timestamp: new Date(r.message.timestamp).toISOString(),
564
+ similarity: r.similarity,
565
+ snippet: r.snippet,
566
+ git_branch: r.conversation.git_branch,
567
+ message_type: r.message.message_type,
568
+ role: r.message.role,
569
+ })),
570
+ total_found: paginatedResults.length,
571
+ has_more: offset + limit < filteredResults.length,
572
+ offset,
573
+ scope: 'all',
574
+ };
575
+ }
576
+ /**
577
+ * Find decisions made about a specific topic, file, or component.
578
+ *
579
+ * Searches through extracted decisions to find relevant architectural choices,
580
+ * technical decisions, and their rationale. Shows alternatives considered and
581
+ * rejected approaches.
582
+ *
583
+ * @param args - Decision search arguments:
584
+ * - `query`: Topic or keyword to search for (required)
585
+ * - `file_path`: Optional filter for decisions related to a specific file
586
+ * - `limit`: Maximum number of results (default: 10)
587
+ *
588
+ * @returns Decision search results containing:
589
+ * - `query`: The search query used
590
+ * - `file_path`: File filter if applied
591
+ * - `decisions`: Array of matching decisions with:
592
+ * - `decision_id`: Decision identifier
593
+ * - `decision_text`: The decision that was made
594
+ * - `rationale`: Why this decision was made
595
+ * - `alternatives_considered`: Other options that were considered
596
+ * - `rejected_reasons`: Why alternatives were rejected
597
+ * - `context`: Context in which the decision was made
598
+ * - `related_files`: Files affected by this decision
599
+ * - `related_commits`: Git commits implementing this decision
600
+ * - `timestamp`: When the decision was made
601
+ * - `similarity`: Relevance score
602
+ * - `total_found`: Number of decisions returned
603
+ *
604
+ * @example
605
+ * ```typescript
606
+ * const result = await handlers.getDecisions({
607
+ * query: 'database',
608
+ * file_path: 'src/storage/SQLiteManager.ts',
609
+ * limit: 5
610
+ * });
611
+ * result.decisions.forEach(d => {
612
+ * console.error(`Decision: ${d.decision_text}`);
613
+ * console.error(`Rationale: ${d.rationale}`);
614
+ * });
615
+ * ```
616
+ */
617
+ async getDecisions(args) {
618
+ await this.maybeAutoIndex();
619
+ const typedArgs = args;
620
+ const { query, file_path, limit = 10, offset = 0, scope = 'all', conversation_id } = typedArgs;
621
+ // Handle global scope
622
+ if (scope === 'global') {
623
+ const globalResponse = await this.getAllDecisions({ query, file_path, limit, offset, source_type: 'all' });
624
+ return {
625
+ query,
626
+ file_path,
627
+ decisions: globalResponse.decisions.map(d => ({
628
+ decision_id: d.decision_id,
629
+ decision_text: d.decision_text,
630
+ rationale: d.rationale,
631
+ alternatives_considered: d.alternatives_considered,
632
+ rejected_reasons: d.rejected_reasons,
633
+ context: d.context,
634
+ related_files: d.related_files,
635
+ related_commits: d.related_commits,
636
+ timestamp: d.timestamp,
637
+ similarity: d.similarity,
638
+ })),
639
+ total_found: globalResponse.total_found,
640
+ has_more: globalResponse.has_more,
641
+ offset: globalResponse.offset,
642
+ scope: 'global',
643
+ };
644
+ }
645
+ // Overfetch to account for post-query filtering (file_path, conversation_id)
646
+ // Use 4x multiplier to ensure we have enough results after filtering
647
+ const overfetchMultiplier = (file_path || scope === 'current') ? 4 : 1;
648
+ const fetchLimit = (limit + offset) * overfetchMultiplier;
649
+ const results = await this.memory.searchDecisions(query, fetchLimit);
650
+ // Filter by file if specified
651
+ let filteredResults = results;
652
+ if (file_path) {
653
+ filteredResults = results.filter((r) => r.decision.related_files.includes(file_path));
654
+ }
655
+ // Filter by conversation_id if scope is 'current'
656
+ if (scope === 'current') {
657
+ if (!conversation_id) {
658
+ throw new Error("conversation_id is required when scope='current'");
659
+ }
660
+ filteredResults = filteredResults.filter((r) => r.decision.conversation_id === conversation_id);
661
+ }
662
+ const paginatedResults = filteredResults.slice(offset, offset + limit);
663
+ return {
664
+ query,
665
+ file_path,
666
+ decisions: paginatedResults.map((r) => ({
667
+ decision_id: r.decision.id,
668
+ decision_text: r.decision.decision_text,
669
+ rationale: r.decision.rationale,
670
+ alternatives_considered: r.decision.alternatives_considered,
671
+ rejected_reasons: r.decision.rejected_reasons,
672
+ context: r.decision.context,
673
+ related_files: r.decision.related_files,
674
+ related_commits: r.decision.related_commits,
675
+ timestamp: new Date(r.decision.timestamp).toISOString(),
676
+ similarity: r.similarity,
677
+ })),
678
+ total_found: paginatedResults.length,
679
+ has_more: offset + limit < filteredResults.length,
680
+ offset,
681
+ scope,
682
+ };
683
+ }
684
+ /**
685
+ * Check important context before modifying a file.
686
+ *
687
+ * Shows recent changes, related decisions, commits, and past mistakes to avoid
688
+ * when working on a file. Use this before making significant changes to understand
689
+ * the file's history and context.
690
+ *
691
+ * @param args - Check arguments:
692
+ * - `file_path`: Path to the file you want to modify (required)
693
+ *
694
+ * @returns Context information containing:
695
+ * - `file_path`: The file being checked
696
+ * - `warning`: Warning message if important context found
697
+ * - `recent_changes`: Recent edits and commits to this file
698
+ * - `edits`: Recent file edits with timestamps and conversation IDs
699
+ * - `commits`: Recent git commits affecting this file
700
+ * - `related_decisions`: Decisions that affect this file
701
+ * - `mistakes_to_avoid`: Past mistakes related to this file
702
+ *
703
+ * @example
704
+ * ```typescript
705
+ * const context = await handlers.checkBeforeModify({
706
+ * file_path: 'src/storage/SQLiteManager.ts'
707
+ * });
708
+ * console.error(context.warning);
709
+ * console.error(`${context.related_decisions.length} decisions affect this file`);
710
+ * console.error(`${context.mistakes_to_avoid.length} mistakes to avoid`);
711
+ * ```
712
+ */
713
+ async checkBeforeModify(args) {
714
+ const typedArgs = args;
715
+ const { file_path } = typedArgs;
716
+ // Validate required parameter
717
+ if (!file_path || typeof file_path !== 'string' || file_path.trim() === '') {
718
+ throw new Error("file_path is required and must be a non-empty string");
719
+ }
720
+ const timeline = this.memory.getFileTimeline(file_path);
721
+ // Get recent mistakes affecting this file
722
+ const sanitized = sanitizeForLike(file_path);
723
+ const mistakes = this.db
724
+ .prepare("SELECT * FROM mistakes WHERE files_affected LIKE ? ESCAPE '\\' ORDER BY timestamp DESC LIMIT 5")
725
+ .all(`%"${sanitized}"%`);
726
+ return {
727
+ file_path,
728
+ warning: timeline.edits.length > 0 || timeline.decisions.length > 0
729
+ ? "⚠️ Important context found for this file"
730
+ : "No significant history found",
731
+ recent_changes: {
732
+ edits: timeline.edits.slice(0, 5).map((e) => ({
733
+ timestamp: new Date(e.snapshot_timestamp).toISOString(),
734
+ conversation_id: e.conversation_id,
735
+ })),
736
+ commits: timeline.commits.slice(0, 5).map((c) => ({
737
+ hash: c.hash.substring(0, 7),
738
+ message: c.message,
739
+ timestamp: new Date(c.timestamp).toISOString(),
740
+ })),
741
+ },
742
+ related_decisions: timeline.decisions.slice(0, 3).map((d) => ({
743
+ decision_text: d.decision_text,
744
+ rationale: d.rationale,
745
+ timestamp: new Date(d.timestamp).toISOString(),
746
+ })),
747
+ mistakes_to_avoid: mistakes.map((m) => ({
748
+ what_went_wrong: m.what_went_wrong,
749
+ correction: m.correction,
750
+ mistake_type: m.mistake_type,
751
+ })),
752
+ };
753
+ }
754
+ /**
755
+ * Show complete timeline of changes to a file.
756
+ *
757
+ * Returns a chronological timeline of all edits, commits, and related decisions
758
+ * for a specific file across all conversations and git history.
759
+ *
760
+ * @param args - Evolution arguments:
761
+ * - `file_path`: Path to the file (required)
762
+ * - `include_decisions`: Include related decisions (default: true)
763
+ * - `include_commits`: Include git commits (default: true)
764
+ *
765
+ * @returns File evolution timeline containing:
766
+ * - `file_path`: The file being analyzed
767
+ * - `total_edits`: Total number of edits to this file
768
+ * - `timeline`: Chronological array of events (most recent first):
769
+ * - `type`: Event type ('edit', 'commit', or 'decision')
770
+ * - `timestamp`: When the event occurred
771
+ * - `data`: Event-specific data (conversation_id, commit hash, decision text, etc.)
772
+ *
773
+ * @example
774
+ * ```typescript
775
+ * const evolution = await handlers.getFileEvolution({
776
+ * file_path: 'src/index.ts',
777
+ * include_decisions: true,
778
+ * include_commits: true
779
+ * });
780
+ * console.error(`${evolution.total_edits} edits across ${evolution.timeline.length} events`);
781
+ * evolution.timeline.forEach(event => {
782
+ * console.error(`${event.timestamp}: ${event.type}`);
783
+ * });
784
+ * ```
785
+ */
786
+ async getFileEvolution(args) {
787
+ const typedArgs = args;
788
+ const { file_path, include_decisions = true, include_commits = true, limit = 50, offset = 0 } = typedArgs;
789
+ const timeline = this.memory.getFileTimeline(file_path);
790
+ const events = [];
791
+ timeline.edits.forEach((edit) => {
792
+ events.push({
793
+ type: "edit",
794
+ timestamp: new Date(edit.snapshot_timestamp).toISOString(),
795
+ data: {
796
+ conversation_id: edit.conversation_id,
797
+ backup_version: edit.backup_version,
798
+ },
799
+ });
800
+ });
801
+ if (include_commits) {
802
+ timeline.commits.forEach((commit) => {
803
+ events.push({
804
+ type: "commit",
805
+ timestamp: new Date(commit.timestamp).toISOString(),
806
+ data: {
807
+ hash: commit.hash.substring(0, 7),
808
+ message: commit.message,
809
+ author: commit.author,
810
+ },
811
+ });
812
+ });
813
+ }
814
+ if (include_decisions) {
815
+ timeline.decisions.forEach((decision) => {
816
+ events.push({
817
+ type: "decision",
818
+ timestamp: new Date(decision.timestamp).toISOString(),
819
+ data: {
820
+ decision_text: decision.decision_text,
821
+ rationale: decision.rationale,
822
+ },
823
+ });
824
+ });
825
+ }
826
+ // Sort by timestamp (descending - most recent first)
827
+ events.sort((a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime());
828
+ // Apply pagination
829
+ const paginatedEvents = events.slice(offset, offset + limit);
830
+ return {
831
+ file_path,
832
+ total_edits: timeline.edits.length,
833
+ timeline: paginatedEvents,
834
+ has_more: offset + limit < events.length,
835
+ };
836
+ }
837
+ /**
838
+ * Link git commits to the conversations where they were made or discussed.
839
+ *
840
+ * Finds git commits that are associated with specific conversations, showing
841
+ * which code changes were made during which conversations. Helps answer "WHY
842
+ * was this code changed?"
843
+ *
844
+ * @param args - Link arguments:
845
+ * - `query`: Optional search query for commit messages
846
+ * - `conversation_id`: Optional filter for specific conversation
847
+ * - `limit`: Maximum number of commits (default: 20)
848
+ *
849
+ * @returns Commit links containing:
850
+ * - `query`: Search query if provided
851
+ * - `conversation_id`: Conversation filter if provided
852
+ * - `commits`: Array of linked commits with:
853
+ * - `hash`: Short commit hash (7 chars)
854
+ * - `full_hash`: Full commit hash
855
+ * - `message`: Commit message
856
+ * - `author`: Commit author
857
+ * - `timestamp`: When commit was made
858
+ * - `branch`: Git branch
859
+ * - `files_changed`: List of files changed
860
+ * - `conversation_id`: Conversation where this was discussed/made
861
+ * - `total_found`: Number of commits returned
862
+ *
863
+ * @example
864
+ * ```typescript
865
+ * const links = await handlers.linkCommitsToConversations({
866
+ * query: 'fix authentication',
867
+ * limit: 10
868
+ * });
869
+ * links.commits.forEach(c => {
870
+ * console.error(`${c.hash}: ${c.message}`);
871
+ * console.error(` Conversation: ${c.conversation_id}`);
872
+ * });
873
+ * ```
874
+ */
875
+ async linkCommitsToConversations(args) {
876
+ const typedArgs = args;
877
+ const { query, conversation_id, limit = 20, offset = 0, scope = 'all' } = typedArgs;
878
+ // Global scope not supported for git commits (project-specific)
879
+ if (scope === 'global') {
880
+ throw new Error("Global scope is not supported for linkCommitsToConversations (git commits are project-specific)");
881
+ }
882
+ let sql = "SELECT * FROM git_commits WHERE 1=1";
883
+ const params = [];
884
+ if (conversation_id || scope === 'current') {
885
+ const targetId = conversation_id || typedArgs.conversation_id;
886
+ if (!targetId) {
887
+ throw new Error("conversation_id is required when scope='current'");
888
+ }
889
+ sql += " AND conversation_id = ?";
890
+ params.push(targetId);
891
+ }
892
+ if (query) {
893
+ sql += " AND message LIKE ?";
894
+ params.push(`%${sanitizeForLike(query)}%`);
895
+ }
896
+ sql += ` ORDER BY timestamp DESC LIMIT ? OFFSET ?`;
897
+ params.push(limit + 1); // Fetch one extra to determine has_more
898
+ params.push(offset);
899
+ const commits = this.db.prepare(sql).all(...params);
900
+ const hasMore = commits.length > limit;
901
+ const results = hasMore ? commits.slice(0, limit) : commits;
902
+ return {
903
+ query,
904
+ conversation_id,
905
+ commits: results.map((c) => ({
906
+ hash: c.hash.substring(0, 7),
907
+ full_hash: c.hash,
908
+ message: c.message,
909
+ author: c.author,
910
+ timestamp: new Date(c.timestamp).toISOString(),
911
+ branch: c.branch,
912
+ files_changed: safeJsonParse(c.files_changed, []),
913
+ conversation_id: c.conversation_id,
914
+ })),
915
+ total_found: results.length,
916
+ has_more: hasMore,
917
+ offset,
918
+ scope,
919
+ };
920
+ }
921
+ /**
922
+ * Find past mistakes to avoid repeating them.
923
+ *
924
+ * Searches through extracted mistakes to find documented errors, bugs, and
925
+ * wrong approaches. Shows what went wrong and how it was corrected.
926
+ *
927
+ * @param args - Mistake search arguments:
928
+ * - `query`: Search query for mistakes (required)
929
+ * - `mistake_type`: Optional filter by type (logic_error, wrong_approach, misunderstanding, tool_error, syntax_error)
930
+ * - `limit`: Maximum number of results (default: 10)
931
+ *
932
+ * @returns Mistake search results containing:
933
+ * - `query`: Search query used
934
+ * - `mistake_type`: Type filter if applied
935
+ * - `mistakes`: Array of matching mistakes with:
936
+ * - `mistake_id`: Mistake identifier
937
+ * - `mistake_type`: Type of mistake
938
+ * - `what_went_wrong`: Description of the mistake
939
+ * - `correction`: How it was fixed
940
+ * - `user_correction_message`: User's correction message if available
941
+ * - `files_affected`: List of files involved
942
+ * - `timestamp`: When the mistake occurred
943
+ * - `total_found`: Number of mistakes returned
944
+ *
945
+ * @example
946
+ * ```typescript
947
+ * const mistakes = await handlers.searchMistakes({
948
+ * query: 'database transaction',
949
+ * mistake_type: 'logic_error',
950
+ * limit: 5
951
+ * });
952
+ * mistakes.mistakes.forEach(m => {
953
+ * console.error(`${m.mistake_type}: ${m.what_went_wrong}`);
954
+ * console.error(`Fix: ${m.correction}`);
955
+ * });
956
+ * ```
957
+ */
958
+ async searchMistakes(args) {
959
+ await this.maybeAutoIndex();
960
+ const typedArgs = args;
961
+ const { query, mistake_type, limit = 10, offset = 0, scope = 'all', conversation_id } = typedArgs;
962
+ // Handle global scope
963
+ if (scope === 'global') {
964
+ const globalResponse = await this.searchAllMistakes({ query, mistake_type, limit, offset, source_type: 'all' });
965
+ return {
966
+ query,
967
+ mistake_type,
968
+ mistakes: globalResponse.mistakes.map(m => ({
969
+ mistake_id: m.mistake_id,
970
+ mistake_type: m.mistake_type,
971
+ what_went_wrong: m.what_went_wrong,
972
+ correction: m.correction,
973
+ user_correction_message: m.user_correction_message,
974
+ files_affected: m.files_affected,
975
+ timestamp: m.timestamp,
976
+ })),
977
+ total_found: globalResponse.total_found,
978
+ has_more: globalResponse.has_more,
979
+ offset: globalResponse.offset,
980
+ scope: 'global',
981
+ };
982
+ }
983
+ // Try semantic search first for better results
984
+ try {
985
+ const { SemanticSearch } = await import("../search/SemanticSearch.js");
986
+ const semanticSearch = new SemanticSearch(this.db);
987
+ // Fetch more than needed to allow for filtering and pagination
988
+ const semanticResults = await semanticSearch.searchMistakes(query, limit + offset + 10);
989
+ // Apply additional filters
990
+ let filtered = semanticResults;
991
+ if (mistake_type) {
992
+ filtered = filtered.filter(r => r.mistake.mistake_type === mistake_type);
993
+ }
994
+ if (scope === 'current') {
995
+ if (!conversation_id) {
996
+ throw new Error("conversation_id is required when scope='current'");
997
+ }
998
+ filtered = filtered.filter(r => r.mistake.conversation_id === conversation_id);
999
+ }
1000
+ // Apply pagination
1001
+ const paginated = filtered.slice(offset, offset + limit + 1);
1002
+ const hasMore = paginated.length > limit;
1003
+ const results = hasMore ? paginated.slice(0, limit) : paginated;
1004
+ if (results.length > 0) {
1005
+ return {
1006
+ query,
1007
+ mistake_type,
1008
+ mistakes: results.map(r => ({
1009
+ mistake_id: r.mistake.id,
1010
+ mistake_type: r.mistake.mistake_type,
1011
+ what_went_wrong: r.mistake.what_went_wrong,
1012
+ correction: r.mistake.correction,
1013
+ user_correction_message: r.mistake.user_correction_message,
1014
+ files_affected: r.mistake.files_affected,
1015
+ timestamp: new Date(r.mistake.timestamp).toISOString(),
1016
+ })),
1017
+ total_found: results.length,
1018
+ has_more: hasMore,
1019
+ offset,
1020
+ scope,
1021
+ };
1022
+ }
1023
+ // Fall through to LIKE search if semantic returned no results
1024
+ }
1025
+ catch (_e) {
1026
+ // Semantic search failed, fall back to LIKE search
1027
+ console.error("Semantic mistake search failed, using LIKE fallback");
1028
+ }
1029
+ // Fallback to LIKE search
1030
+ const sanitized = sanitizeForLike(query);
1031
+ let sql = "SELECT * FROM mistakes WHERE what_went_wrong LIKE ? ESCAPE '\\'";
1032
+ const params = [`%${sanitized}%`];
1033
+ if (mistake_type) {
1034
+ sql += " AND mistake_type = ?";
1035
+ params.push(mistake_type);
1036
+ }
1037
+ // Filter by conversation_id if scope is 'current'
1038
+ if (scope === 'current') {
1039
+ if (!conversation_id) {
1040
+ throw new Error("conversation_id is required when scope='current'");
1041
+ }
1042
+ sql += " AND conversation_id = ?";
1043
+ params.push(conversation_id);
1044
+ }
1045
+ sql += ` ORDER BY timestamp DESC LIMIT ? OFFSET ?`;
1046
+ params.push(limit + 1); // Fetch one extra to determine has_more
1047
+ params.push(offset);
1048
+ const mistakes = this.db.prepare(sql).all(...params);
1049
+ const hasMore = mistakes.length > limit;
1050
+ const results = hasMore ? mistakes.slice(0, limit) : mistakes;
1051
+ return {
1052
+ query,
1053
+ mistake_type,
1054
+ mistakes: results.map((m) => ({
1055
+ mistake_id: m.id,
1056
+ mistake_type: m.mistake_type,
1057
+ what_went_wrong: m.what_went_wrong,
1058
+ correction: m.correction,
1059
+ user_correction_message: m.user_correction_message,
1060
+ files_affected: safeJsonParse(m.files_affected, []),
1061
+ timestamp: new Date(m.timestamp).toISOString(),
1062
+ })),
1063
+ total_found: results.length,
1064
+ has_more: hasMore,
1065
+ offset,
1066
+ scope,
1067
+ };
1068
+ }
1069
+ /**
1070
+ * Look up requirements and constraints for a component or feature.
1071
+ *
1072
+ * Finds documented requirements, dependencies, performance constraints, and
1073
+ * compatibility requirements that affect a component or feature.
1074
+ *
1075
+ * @param args - Requirements search arguments:
1076
+ * - `component`: Component or feature name (required)
1077
+ * - `type`: Optional filter by requirement type (dependency, performance, compatibility, business)
1078
+ *
1079
+ * @returns Requirements results containing:
1080
+ * - `component`: Component searched
1081
+ * - `type`: Type filter if applied
1082
+ * - `requirements`: Array of matching requirements with:
1083
+ * - `requirement_id`: Requirement identifier
1084
+ * - `type`: Requirement type
1085
+ * - `description`: Requirement description
1086
+ * - `rationale`: Why this requirement exists
1087
+ * - `affects_components`: List of affected components
1088
+ * - `timestamp`: When requirement was documented
1089
+ * - `total_found`: Number of requirements returned
1090
+ *
1091
+ * @example
1092
+ * ```typescript
1093
+ * const reqs = await handlers.getRequirements({
1094
+ * component: 'authentication',
1095
+ * type: 'security'
1096
+ * });
1097
+ * reqs.requirements.forEach(r => {
1098
+ * console.error(`${r.type}: ${r.description}`);
1099
+ * console.error(`Rationale: ${r.rationale}`);
1100
+ * });
1101
+ * ```
1102
+ */
1103
+ async getRequirements(args) {
1104
+ const typedArgs = args;
1105
+ const { component, type } = typedArgs;
1106
+ const sanitized = sanitizeForLike(component);
1107
+ // Wrap OR group in parentheses to ensure AND type=? applies to both conditions
1108
+ let sql = "SELECT * FROM requirements WHERE (description LIKE ? ESCAPE '\\' OR affects_components LIKE ? ESCAPE '\\')";
1109
+ const params = [`%${sanitized}%`, `%${sanitized}%`];
1110
+ if (type) {
1111
+ sql += " AND type = ?";
1112
+ params.push(type);
1113
+ }
1114
+ sql += " ORDER BY timestamp DESC";
1115
+ const requirements = this.db.prepare(sql).all(...params);
1116
+ return {
1117
+ component,
1118
+ type,
1119
+ requirements: requirements.map((r) => ({
1120
+ requirement_id: r.id,
1121
+ type: r.type,
1122
+ description: r.description,
1123
+ rationale: r.rationale,
1124
+ affects_components: safeJsonParse(r.affects_components, []),
1125
+ timestamp: new Date(r.timestamp).toISOString(),
1126
+ })),
1127
+ total_found: requirements.length,
1128
+ };
1129
+ }
1130
    /**
     * Query history of tool uses (bash commands, file edits, reads, etc.) with pagination and filtering.
     *
     * Shows what tools were used during conversations and their results. Useful
     * for understanding what commands were run, what files were edited, and
     * whether operations succeeded or failed.
     *
     * @param args - Tool history arguments:
     * - `tool_name`: Optional filter by tool name (Bash, Edit, Write, Read)
     * - `file_path`: Optional filter by file path
     * - `limit`: Maximum number of results (default: 20)
     * - `offset`: Skip N results for pagination (default: 0)
     * - `include_content`: Include tool content in response (default: false for security, set true to include)
     * - `max_content_length`: Maximum characters per content field (default: 500)
     * - `date_range`: Filter by timestamp range [start, end]
     * - `conversation_id`: Filter by specific conversation
     * - `errors_only`: Show only failed tool uses (default: false)
     *
     * @returns Tool history containing:
     * - `tool_name`: Tool filter if applied
     * - `file_path`: File filter if applied
     * - `tool_uses`: Array of tool uses (may have truncated content)
     * - `total_found`: Number of results returned in this page
     * - `total_in_database`: Total matching records in database
     * - `has_more`: Whether more results exist beyond current page
     * - `offset`: Current offset position
     *
     * @example
     * ```typescript
     * // Get first page of Bash commands
     * const page1 = await handlers.getToolHistory({
     *   tool_name: 'Bash',
     *   limit: 20,
     *   offset: 0
     * });
     *
     * // Get metadata only (no content)
     * const metadata = await handlers.getToolHistory({
     *   include_content: false,
     *   limit: 50
     * });
     *
     * // Get errors from last 24 hours
     * const errors = await handlers.getToolHistory({
     *   errors_only: true,
     *   date_range: [Date.now() - 86400000, Date.now()]
     * });
     * ```
     */
    async getToolHistory(args) {
        const typedArgs = args;
        const { tool_name, file_path, limit = 20, offset = 0, include_content = false, max_content_length = 500, date_range, conversation_id, errors_only = false, } = typedArgs;
        // Helper function to truncate text with indicator.
        // Returns { value, truncated }; value stays undefined for null/empty input
        // so absent fields are omitted from the response entirely.
        const truncateText = (text, maxLength) => {
            if (!text) {
                return { value: undefined, truncated: false };
            }
            if (text.length <= maxLength) {
                return { value: text, truncated: false };
            }
            return {
                value: text.substring(0, maxLength) + '... (truncated)',
                truncated: true,
            };
        };
        // Build WHERE clause for filters. NOTE: params are pushed in the same
        // order the placeholders are appended to whereClause - keep in sync.
        let whereClause = "WHERE 1=1";
        const params = [];
        if (tool_name) {
            whereClause += " AND tu.tool_name = ?";
            params.push(tool_name);
        }
        if (file_path) {
            // tool_input is serialized JSON; a LIKE match on the sanitized path
            // finds uses that mention the file anywhere in their input.
            const sanitized = sanitizeForLike(file_path);
            whereClause += " AND tu.tool_input LIKE ? ESCAPE '\\'";
            params.push(`%${sanitized}%`);
        }
        if (date_range && date_range.length === 2) {
            whereClause += " AND tu.timestamp BETWEEN ? AND ?";
            params.push(date_range[0], date_range[1]);
        }
        if (conversation_id) {
            // tool_uses carries no conversation_id column; resolve it via messages.
            whereClause += " AND tu.message_id IN (SELECT id FROM messages WHERE conversation_id = ?)";
            params.push(conversation_id);
        }
        if (errors_only) {
            whereClause += " AND tr.is_error = 1";
        }
        // Get total count of matching records
        const countSql = `
      SELECT COUNT(*) as total
      FROM tool_uses tu
      LEFT JOIN tool_results tr ON tu.id = tr.tool_use_id
      ${whereClause}
    `;
        const countResult = this.db.prepare(countSql).get(...params);
        const totalInDatabase = countResult.total;
        // Get paginated results. LEFT JOIN keeps tool uses that never produced
        // a result row (their tr.* columns come back null).
        const sql = `
      SELECT tu.*, tr.content as result_content, tr.is_error, tr.stdout, tr.stderr
      FROM tool_uses tu
      LEFT JOIN tool_results tr ON tu.id = tr.tool_use_id
      ${whereClause}
      ORDER BY tu.timestamp DESC
      LIMIT ? OFFSET ?
    `;
        const queryParams = [...params, limit, offset];
        const toolUses = this.db.prepare(sql).all(...queryParams);
        // Calculate pagination metadata
        const hasMore = offset + toolUses.length < totalInDatabase;
        return {
            tool_name,
            file_path,
            tool_uses: toolUses.map((t) => {
                // Parse tool input
                const toolInput = safeJsonParse(t.tool_input, {});
                // Build result object based on include_content setting
                const result = {
                    // Boolean() maps a null is_error (no joined result row) to false.
                    is_error: Boolean(t.is_error),
                };
                if (include_content) {
                    // Truncate content fields if they exist
                    const contentTrunc = truncateText(t.result_content, max_content_length);
                    const stdoutTrunc = truncateText(t.stdout, max_content_length);
                    const stderrTrunc = truncateText(t.stderr, max_content_length);
                    if (contentTrunc.value !== undefined) {
                        result.content = contentTrunc.value;
                        if (contentTrunc.truncated) {
                            result.content_truncated = true;
                        }
                    }
                    if (stdoutTrunc.value !== undefined) {
                        result.stdout = stdoutTrunc.value;
                        if (stdoutTrunc.truncated) {
                            result.stdout_truncated = true;
                        }
                    }
                    if (stderrTrunc.value !== undefined) {
                        result.stderr = stderrTrunc.value;
                        if (stderrTrunc.truncated) {
                            result.stderr_truncated = true;
                        }
                    }
                }
                // If include_content=false, only return is_error (no content, stdout, stderr)
                return {
                    tool_use_id: t.id,
                    tool_name: t.tool_name,
                    tool_input: toolInput,
                    result,
                    timestamp: new Date(t.timestamp).toISOString(),
                };
            }),
            total_found: toolUses.length,
            total_in_database: totalInDatabase,
            has_more: hasMore,
            offset,
        };
    }
1289
+ /**
1290
+ * Find conversations that dealt with similar topics or problems.
1291
+ *
1292
+ * Searches across all conversations to find ones that discussed similar topics,
1293
+ * allowing you to learn from past work on similar problems.
1294
+ *
1295
+ * @param args - Similarity search arguments:
1296
+ * - `query`: Description of the topic or problem (required)
1297
+ * - `limit`: Maximum number of sessions (default: 5)
1298
+ *
1299
+ * @returns Similar sessions containing:
1300
+ * - `query`: Search query used
1301
+ * - `sessions`: Array of similar conversation sessions with:
1302
+ * - `conversation_id`: Session identifier
1303
+ * - `project_path`: Project path for this session
1304
+ * - `first_message_at`: When the conversation started
1305
+ * - `message_count`: Number of messages in the conversation
1306
+ * - `git_branch`: Git branch at the time
1307
+ * - `relevance_score`: Similarity score to the query
1308
+ * - `relevant_messages`: Sample of relevant messages from this session
1309
+ * - `total_found`: Number of sessions returned
1310
+ *
1311
+ * @example
1312
+ * ```typescript
1313
+ * const similar = await handlers.findSimilarSessions({
1314
+ * query: 'implementing user authentication with JWT',
1315
+ * limit: 3
1316
+ * });
1317
+ * similar.sessions.forEach(s => {
1318
+ * console.error(`Session ${s.conversation_id} (${s.message_count} messages)`);
1319
+ * console.error(`Relevance: ${s.relevance_score.toFixed(2)}`);
1320
+ * console.error(`Messages: ${s.relevant_messages.length} relevant`);
1321
+ * });
1322
+ * ```
1323
+ */
1324
+ async findSimilarSessions(args) {
1325
+ await this.maybeAutoIndex();
1326
+ const typedArgs = args;
1327
+ const { query, limit = 5, offset = 0, scope = 'all', conversation_id: _conversation_id } = typedArgs;
1328
+ // Note: scope='global' and scope='current' have limited usefulness for finding similar SESSIONS
1329
+ // but we implement them for API consistency
1330
+ if (scope === 'current') {
1331
+ throw new Error("scope='current' is not supported for findSimilarSessions (it finds sessions, not messages within a session)");
1332
+ }
1333
+ const results = await this.memory.search(query, (limit + offset) * 3); // Get more to group by conversation
1334
+ // Group by conversation
1335
+ const conversationMap = new Map();
1336
+ for (const result of results) {
1337
+ const convId = result.conversation.id;
1338
+ if (convId && !conversationMap.has(convId)) {
1339
+ conversationMap.set(convId, {
1340
+ conversation_id: convId,
1341
+ project_path: result.conversation.project_path,
1342
+ first_message_at: new Date(result.conversation.first_message_at).toISOString(),
1343
+ message_count: result.conversation.message_count,
1344
+ git_branch: result.conversation.git_branch,
1345
+ relevance_score: result.similarity,
1346
+ relevant_messages: [],
1347
+ });
1348
+ }
1349
+ const conversation = conversationMap.get(convId);
1350
+ if (conversation) {
1351
+ conversation.relevant_messages.push({
1352
+ message_id: result.message.id,
1353
+ snippet: result.snippet,
1354
+ similarity: result.similarity,
1355
+ });
1356
+ }
1357
+ }
1358
+ const allSessions = Array.from(conversationMap.values())
1359
+ .sort((a, b) => b.relevance_score - a.relevance_score);
1360
+ const sessions = allSessions.slice(offset, offset + limit);
1361
+ return {
1362
+ query,
1363
+ sessions,
1364
+ total_found: sessions.length,
1365
+ has_more: offset + limit < allSessions.length,
1366
+ offset,
1367
+ scope,
1368
+ };
1369
+ }
1370
/**
 * Recall relevant context and format for application to current work.
 *
 * This is a comprehensive context retrieval tool that searches across multiple
 * data sources (conversations, decisions, mistakes, file changes, commits) and
 * returns actionable suggestions for applying historical context to current work.
 *
 * @param args - Recall arguments:
 *   - `query`: What you're working on or need context for (required)
 *   - `context_types`: Types to recall (default: all types)
 *     - Options: "conversations", "decisions", "mistakes", "file_changes", "commits"
 *   - `file_path`: Optional filter for file-specific context
 *   - `date_range`: Optional [start_timestamp, end_timestamp] filter
 *   - `limit`: Maximum items per context type (default: 5)
 *
 * @returns Recalled context containing:
 *   - `query`: Search query used
 *   - `context_summary`: High-level summary of what was found
 *   - `recalled_context`: Structured context data:
 *     - `conversations`: Relevant past conversations
 *     - `decisions`: Related decisions with rationale
 *     - `mistakes`: Past mistakes to avoid
 *     - `file_changes`: File modification history
 *     - `commits`: Related git commits
 *   - `application_suggestions`: Actionable suggestions for applying this context
 *   - `total_items_found`: Total number of context items found
 *
 * @example
 * ```typescript
 * const context = await handlers.recallAndApply({
 *   query: 'refactoring database connection pooling',
 *   context_types: ['decisions', 'mistakes', 'commits'],
 *   file_path: 'src/database/pool.ts',
 *   limit: 5
 * });
 * console.error(context.context_summary);
 * context.application_suggestions.forEach(s => console.error(`- ${s}`));
 * ```
 */
async recallAndApply(args) {
    await this.maybeAutoIndex();
    const typedArgs = args;
    const { query, context_types = ["conversations", "decisions", "mistakes", "file_changes", "commits"], file_path, date_range, limit = 5, offset = 0, scope = 'all', conversation_id } = typedArgs;
    // Accumulators: one key per requested context type, a running item count,
    // and human-readable suggestions built as each source yields results.
    const recalled = {};
    let totalItems = 0;
    const suggestions = [];
    // 1. Recall conversations if requested
    if (context_types.includes("conversations")) {
        // Use searchConversations with scope support
        const convResponse = await this.searchConversations({
            query,
            limit,
            offset,
            date_range,
            scope,
            conversation_id,
        });
        recalled.conversations = convResponse.results.map(result => ({
            session_id: result.conversation_id,
            timestamp: result.timestamp,
            snippet: result.snippet,
            relevance_score: result.similarity,
        }));
        totalItems += recalled.conversations.length;
        if (recalled.conversations.length > 0) {
            suggestions.push(`Review ${recalled.conversations.length} past conversation(s) about similar topics`);
        }
    }
    // 2. Recall decisions if requested
    if (context_types.includes("decisions")) {
        // Use getDecisions with scope support
        const decisionsResponse = await this.getDecisions({
            query,
            file_path,
            limit,
            offset,
            scope,
            conversation_id,
        });
        recalled.decisions = decisionsResponse.decisions.map(d => ({
            decision_id: d.decision_id,
            // `context` doubles as the decision's type label here.
            type: d.context || 'unknown',
            description: d.decision_text,
            rationale: d.rationale || undefined,
            alternatives: d.alternatives_considered,
            // rejected_reasons is a map; callers only need the reason texts.
            rejected_approaches: Object.values(d.rejected_reasons ?? {}),
            affects_components: d.related_files,
            timestamp: d.timestamp,
        }));
        totalItems += recalled.decisions.length;
        if (recalled.decisions.length > 0) {
            suggestions.push(`Apply learnings from ${recalled.decisions.length} past decision(s) with documented rationale`);
        }
    }
    // 3. Recall mistakes if requested
    if (context_types.includes("mistakes")) {
        // Use searchMistakes with scope support
        const mistakesResponse = await this.searchMistakes({
            query,
            limit,
            offset,
            scope,
            conversation_id,
        });
        recalled.mistakes = mistakesResponse.mistakes.map(m => ({
            mistake_id: m.mistake_id,
            type: m.mistake_type,
            description: m.what_went_wrong,
            what_happened: m.what_went_wrong,
            how_fixed: m.correction || undefined,
            lesson_learned: m.user_correction_message || undefined,
            files_affected: m.files_affected,
            timestamp: m.timestamp,
        }));
        totalItems += recalled.mistakes.length;
        if (recalled.mistakes.length > 0) {
            suggestions.push(`Avoid repeating ${recalled.mistakes.length} documented mistake(s) from the past`);
        }
    }
    // 4. Recall file changes if requested
    if (context_types.includes("file_changes") && file_path) {
        // Query file_edits table (not messages) - file_path is stored in file_edits.
        // NOTE: the spread argument order below must mirror the conditionally
        // interpolated SQL fragments exactly (date_range clause, then LIMIT).
        const fileChanges = this.db.getDatabase()
            .prepare(`
        SELECT
          file_path,
          COUNT(DISTINCT conversation_id) as change_count,
          MAX(snapshot_timestamp) as last_modified,
          GROUP_CONCAT(DISTINCT conversation_id) as conversation_ids
        FROM file_edits
        WHERE file_path LIKE ? ESCAPE '\\'
        ${date_range ? 'AND snapshot_timestamp BETWEEN ? AND ?' : ''}
        GROUP BY file_path
        ORDER BY last_modified DESC
        LIMIT ?
      `)
            .all(`%${sanitizeForLike(file_path)}%`, ...(date_range ? [date_range[0], date_range[1]] : []), limit);
        recalled.file_changes = fileChanges.map(fc => ({
            file_path: fc.file_path,
            change_count: fc.change_count,
            last_modified: new Date(fc.last_modified).toISOString(),
            // GROUP_CONCAT yields a comma-joined string (or NULL for no rows).
            related_conversations: fc.conversation_ids ? fc.conversation_ids.split(',') : [],
        }));
        totalItems += recalled.file_changes.length;
        if (recalled.file_changes.length > 0) {
            suggestions.push(`Consider ${recalled.file_changes.length} file(s) with relevant history before making changes`);
        }
    }
    // 5. Recall commits if requested
    if (context_types.includes("commits")) {
        // NOTE(review): unlike the file_edits query above, these LIKE clauses have
        // no ESCAPE '\\' even though the patterns go through sanitizeForLike —
        // confirm sanitizeForLike's escaping matches SQLite's default LIKE rules.
        const commits = this.db.getDatabase()
            .prepare(`
        SELECT hash, message, timestamp, files_changed
        FROM git_commits
        WHERE message LIKE ? ${file_path ? 'AND files_changed LIKE ?' : ''}
        ${date_range ? 'AND timestamp BETWEEN ? AND ?' : ''}
        ORDER BY timestamp DESC
        LIMIT ?
      `)
            .all(`%${sanitizeForLike(query)}%`, ...(file_path ? [`%${sanitizeForLike(file_path)}%`] : []), ...(date_range ? [date_range[0], date_range[1]] : []), limit);
        recalled.commits = commits.map(c => ({
            commit_hash: c.hash,
            message: c.message,
            timestamp: new Date(c.timestamp).toISOString(),
            // files_changed is stored as a JSON array string; fall back to [].
            files_affected: safeJsonParse(c.files_changed, []),
        }));
        totalItems += recalled.commits.length;
        if (recalled.commits.length > 0) {
            suggestions.push(`Reference ${recalled.commits.length} related git commit(s) for implementation patterns`);
        }
    }
    // Generate context summary from whichever sources produced results.
    const summaryParts = [];
    if (recalled.conversations && recalled.conversations.length > 0) {
        summaryParts.push(`${recalled.conversations.length} relevant conversation(s)`);
    }
    if (recalled.decisions && recalled.decisions.length > 0) {
        summaryParts.push(`${recalled.decisions.length} decision(s)`);
    }
    if (recalled.mistakes && recalled.mistakes.length > 0) {
        summaryParts.push(`${recalled.mistakes.length} past mistake(s)`);
    }
    if (recalled.file_changes && recalled.file_changes.length > 0) {
        summaryParts.push(`${recalled.file_changes.length} file change(s)`);
    }
    if (recalled.commits && recalled.commits.length > 0) {
        summaryParts.push(`${recalled.commits.length} commit(s)`);
    }
    const contextSummary = summaryParts.length > 0
        ? `Recalled: ${summaryParts.join(', ')}`
        : 'No relevant context found';
    // Add general suggestion if we found context
    if (totalItems > 0) {
        suggestions.push(`Use this historical context to inform your current implementation`);
    }
    else {
        suggestions.push(`No historical context found - you may be working on something new`);
    }
    return {
        query,
        context_summary: contextSummary,
        recalled_context: recalled,
        application_suggestions: suggestions,
        total_items_found: totalItems,
    };
}
1576
+ /**
1577
+ * Generate comprehensive project documentation by combining codebase analysis
1578
+ * with conversation history.
1579
+ *
1580
+ * Creates documentation that shows WHAT exists in the code (via CODE-GRAPH-RAG-MCP)
1581
+ * and WHY it was built that way (via conversation history). Requires CODE-GRAPH-RAG-MCP
1582
+ * to be indexed first.
1583
+ *
1584
+ * @param args - Documentation generation arguments:
1585
+ * - `project_path`: Path to the project (defaults to cwd)
1586
+ * - `session_id`: Optional specific session to include
1587
+ * - `scope`: Documentation scope (default: 'full')
1588
+ * - 'full': Everything (architecture, decisions, quality)
1589
+ * - 'architecture': Module structure and dependencies
1590
+ * - 'decisions': Decision log with rationale
1591
+ * - 'quality': Code quality insights
1592
+ * - `module_filter`: Optional filter for specific module path (e.g., 'src/auth')
1593
+ *
1594
+ * @returns Documentation result containing:
1595
+ * - `success`: Whether generation succeeded
1596
+ * - `project_path`: Project that was documented
1597
+ * - `scope`: Scope of documentation generated
1598
+ * - `documentation`: Generated markdown documentation
1599
+ * - `statistics`: Summary statistics:
1600
+ * - `modules`: Number of modules documented
1601
+ * - `decisions`: Number of decisions included
1602
+ * - `mistakes`: Number of mistakes documented
1603
+ * - `commits`: Number of commits referenced
1604
+ *
1605
+ * @example
1606
+ * ```typescript
1607
+ * const doc = await handlers.generateDocumentation({
1608
+ * project_path: '/Users/me/my-project',
1609
+ * scope: 'full',
1610
+ * module_filter: 'src/auth'
1611
+ * });
1612
+ * console.error(doc.documentation); // Markdown documentation
1613
+ * console.error(`Documented ${doc.statistics.modules} modules`);
1614
+ * ```
1615
+ */
1616
+ async generateDocumentation(args) {
1617
+ const typedArgs = args;
1618
+ const projectPath = this.resolveProjectPath(typedArgs.project_path);
1619
+ const sessionId = typedArgs.session_id;
1620
+ const scope = typedArgs.scope || 'full';
1621
+ const moduleFilter = typedArgs.module_filter;
1622
+ console.error('\n📚 Starting documentation generation...');
1623
+ console.error(`Note: This tool requires CODE-GRAPH-RAG-MCP to be indexed first.`);
1624
+ console.error(`Please ensure you have run code-graph-rag index on this project.`);
1625
+ // Note: In a real implementation, we would call CODE-GRAPH-RAG-MCP tools here
1626
+ // For now, we'll create a placeholder that shows the structure
1627
+ const codeGraphData = {
1628
+ entities: [],
1629
+ hotspots: [],
1630
+ clones: [],
1631
+ graph: {}
1632
+ };
1633
+ const generator = new DocumentationGenerator(this.db);
1634
+ const documentation = await generator.generate({
1635
+ projectPath,
1636
+ sessionId,
1637
+ scope,
1638
+ moduleFilter
1639
+ }, codeGraphData);
1640
+ // Extract statistics from the generated documentation
1641
+ const lines = documentation.split('\n');
1642
+ const modulesLine = lines.find(l => l.includes('**Modules**:'));
1643
+ const decisionsLine = lines.find(l => l.includes('| Decisions |'));
1644
+ const mistakesLine = lines.find(l => l.includes('| Mistakes |'));
1645
+ const commitsLine = lines.find(l => l.includes('| Git Commits |'));
1646
+ const extractNumber = (line) => {
1647
+ if (!line) {
1648
+ return 0;
1649
+ }
1650
+ const match = line.match(/\d+/);
1651
+ return match ? parseInt(match[0], 10) : 0;
1652
+ };
1653
+ return {
1654
+ success: true,
1655
+ project_path: projectPath,
1656
+ scope,
1657
+ documentation,
1658
+ statistics: {
1659
+ modules: extractNumber(modulesLine),
1660
+ decisions: extractNumber(decisionsLine),
1661
+ mistakes: extractNumber(mistakesLine),
1662
+ commits: extractNumber(commitsLine)
1663
+ }
1664
+ };
1665
+ }
1666
+ /**
1667
+ * Discover old conversation folders that might contain conversation history
1668
+ * for the current project.
1669
+ *
1670
+ * Searches through stored conversation folders to find potential matches for
1671
+ * the current project path. Useful when project paths have changed (e.g., after
1672
+ * moving or renaming a project directory).
1673
+ *
1674
+ * @param args - Discovery arguments:
1675
+ * - `current_project_path`: Current project path (defaults to cwd)
1676
+ *
1677
+ * @returns Discovery results containing:
1678
+ * - `success`: Whether discovery succeeded
1679
+ * - `current_project_path`: Current project path searched for
1680
+ * - `candidates`: Array of potential matches sorted by score:
1681
+ * - `folder_name`: Name of the conversation folder
1682
+ * - `folder_path`: Full path to the folder
1683
+ * - `stored_project_path`: Original project path stored in conversations
1684
+ * - `score`: Match score (higher is better match)
1685
+ * - `stats`: Folder statistics:
1686
+ * - `conversations`: Number of conversations in folder
1687
+ * - `messages`: Number of messages in folder
1688
+ * - `files`: Number of .jsonl files
1689
+ * - `last_activity`: Timestamp of last activity
1690
+ * - `message`: Human-readable status message
1691
+ *
1692
+ * @example
1693
+ * ```typescript
1694
+ * const discovery = await handlers.discoverOldConversations({
1695
+ * current_project_path: '/Users/me/projects/my-app'
1696
+ * });
1697
+ * console.error(discovery.message);
1698
+ * discovery.candidates.forEach(c => {
1699
+ * console.error(`Score ${c.score}: ${c.folder_name}`);
1700
+ * console.error(` Original path: ${c.stored_project_path}`);
1701
+ * console.error(` Stats: ${c.stats.conversations} conversations, ${c.stats.files} files`);
1702
+ * });
1703
+ * ```
1704
+ */
1705
+ async discoverOldConversations(args) {
1706
+ const typedArgs = args;
1707
+ const currentProjectPath = this.resolveProjectPath(typedArgs.current_project_path);
1708
+ const candidates = await this.migration.discoverOldFolders(currentProjectPath);
1709
+ // Convert to response format with additional stats
1710
+ const formattedCandidates = candidates.map(c => ({
1711
+ folder_name: c.folderName,
1712
+ folder_path: c.folderPath,
1713
+ stored_project_path: c.storedProjectPath,
1714
+ score: Math.round(c.score * 10) / 10, // Round to 1 decimal
1715
+ stats: {
1716
+ conversations: c.stats.conversations,
1717
+ messages: c.stats.messages,
1718
+ files: 0, // Will be calculated below
1719
+ last_activity: c.stats.lastActivity
1720
+ }
1721
+ }));
1722
+ // Count JSONL files for each candidate
1723
+ for (const candidate of formattedCandidates) {
1724
+ try {
1725
+ const files = readdirSync(candidate.folder_path);
1726
+ candidate.stats.files = files.filter((f) => f.endsWith('.jsonl')).length;
1727
+ }
1728
+ catch (_error) {
1729
+ candidate.stats.files = 0;
1730
+ }
1731
+ }
1732
+ const message = candidates.length > 0
1733
+ ? `Found ${candidates.length} potential old conversation folder(s). Top match has ${formattedCandidates[0].stats.conversations} conversations and ${formattedCandidates[0].stats.files} files (score: ${formattedCandidates[0].score}).`
1734
+ : `No old conversation folders found for project path: ${currentProjectPath}`;
1735
+ return {
1736
+ success: true,
1737
+ current_project_path: currentProjectPath,
1738
+ candidates: formattedCandidates,
1739
+ message
1740
+ };
1741
+ }
1742
+ /**
1743
+ * Migrate or merge conversation history from an old project path to a new one.
1744
+ *
1745
+ * Use this when a project has been moved or renamed to bring the conversation
1746
+ * history along. Supports two modes: 'migrate' (move all files) or 'merge'
1747
+ * (combine with existing files).
1748
+ *
1749
+ * @param args - Migration arguments:
1750
+ * - `source_folder`: Source folder containing old conversations (required)
1751
+ * - `old_project_path`: Original project path in the conversations (required)
1752
+ * - `new_project_path`: New project path to update to (required)
1753
+ * - `dry_run`: Preview changes without applying them (default: false)
1754
+ * - `mode`: Migration mode (default: 'migrate')
1755
+ * - 'migrate': Move all files from source to target
1756
+ * - 'merge': Combine source files with existing target files
1757
+ *
1758
+ * @returns Migration result containing:
1759
+ * - `success`: Whether migration succeeded
1760
+ * - `source_folder`: Source folder path
1761
+ * - `target_folder`: Target folder path (where files were copied)
1762
+ * - `files_copied`: Number of files copied/migrated
1763
+ * - `database_updated`: Whether database was updated with new paths
1764
+ * - `backup_created`: Whether backup was created (always true for non-dry-run)
1765
+ * - `message`: Human-readable status message
1766
+ *
1767
+ * @example
1768
+ * ```typescript
1769
+ * // First, preview with dry run
1770
+ * const preview = await handlers.migrateProject({
1771
+ * source_folder: '/path/to/old/conversations',
1772
+ * old_project_path: '/old/path/to/project',
1773
+ * new_project_path: '/new/path/to/project',
1774
+ * dry_run: true
1775
+ * });
1776
+ * console.error(preview.message); // "Dry run: Would migrate X files..."
1777
+ *
1778
+ * // Then, execute the migration
1779
+ * const result = await handlers.migrateProject({
1780
+ * source_folder: '/path/to/old/conversations',
1781
+ * old_project_path: '/old/path/to/project',
1782
+ * new_project_path: '/new/path/to/project',
1783
+ * dry_run: false,
1784
+ * mode: 'migrate'
1785
+ * });
1786
+ * console.error(`Migrated ${result.files_copied} files`);
1787
+ * ```
1788
+ */
1789
+ async migrateProject(args) {
1790
+ const typedArgs = args;
1791
+ const sourceFolder = typedArgs.source_folder;
1792
+ // Validate all required parameters
1793
+ if (!sourceFolder || typeof sourceFolder !== 'string' || sourceFolder.trim() === '') {
1794
+ throw new Error("source_folder is required and must be a non-empty string");
1795
+ }
1796
+ if (!typedArgs.old_project_path || !typedArgs.new_project_path) {
1797
+ throw new Error("old_project_path and new_project_path are required");
1798
+ }
1799
+ const oldProjectPath = getCanonicalProjectPath(typedArgs.old_project_path).canonicalPath;
1800
+ const newProjectPath = getCanonicalProjectPath(typedArgs.new_project_path).canonicalPath;
1801
+ const dryRun = typedArgs.dry_run ?? false;
1802
+ const mode = typedArgs.mode ?? "migrate";
1803
+ // Validate paths are under expected directories using resolved paths
1804
+ // to prevent path traversal attacks (e.g., /projects/../../../etc/passwd)
1805
+ const projectsDir = resolve(this.migration.getProjectsDir());
1806
+ const resolvedSource = resolve(sourceFolder);
1807
+ if (!resolvedSource.startsWith(projectsDir + "/") && resolvedSource !== projectsDir) {
1808
+ throw new Error(`Source folder must be under ${projectsDir}`);
1809
+ }
1810
+ // Calculate target folder path
1811
+ const targetFolderName = pathToProjectFolderName(newProjectPath);
1812
+ const targetFolder = join(this.migration.getProjectsDir(), targetFolderName);
1813
+ // Execute migration or merge
1814
+ const result = await this.migration.executeMigration(sourceFolder, targetFolder, oldProjectPath, newProjectPath, dryRun, mode);
1815
+ let message;
1816
+ if (dryRun) {
1817
+ message =
1818
+ mode === "merge"
1819
+ ? `Dry run: Would merge ${result.filesCopied} new conversation files into ${targetFolder}`
1820
+ : `Dry run: Would migrate ${result.filesCopied} conversation files from ${sourceFolder} to ${targetFolder}`;
1821
+ }
1822
+ else {
1823
+ message =
1824
+ mode === "merge"
1825
+ ? `Successfully merged ${result.filesCopied} new conversation files into ${targetFolder}. Original files preserved in ${sourceFolder}.`
1826
+ : `Successfully migrated ${result.filesCopied} conversation files to ${targetFolder}. Original files preserved in ${sourceFolder}.`;
1827
+ }
1828
+ return {
1829
+ success: result.success,
1830
+ source_folder: sourceFolder,
1831
+ target_folder: targetFolder,
1832
+ files_copied: result.filesCopied,
1833
+ database_updated: result.databaseUpdated,
1834
+ backup_created: !dryRun && result.databaseUpdated,
1835
+ message
1836
+ };
1837
+ }
1838
+ /**
1839
+ * Forget conversations by topic/keywords.
1840
+ *
1841
+ * Searches for conversations matching the provided keywords and optionally deletes them.
1842
+ * Creates automatic backup before deletion.
1843
+ *
1844
+ * @param args - Arguments:
1845
+ * - `keywords`: Array of keywords/topics to search for
1846
+ * - `project_path`: Path to the project (defaults to cwd)
1847
+ * - `confirm`: Must be true to actually delete (default: false for preview)
1848
+ *
1849
+ * @returns Result containing:
1850
+ * - `success`: Whether operation succeeded
1851
+ * - `preview_mode`: Whether this was a preview (confirm=false)
1852
+ * - `conversations_found`: Number of conversations matching keywords
1853
+ * - `conversations_deleted`: Number of conversations actually deleted
1854
+ * - `messages_deleted`: Number of messages deleted
1855
+ * - `decisions_deleted`: Number of decisions deleted
1856
+ * - `mistakes_deleted`: Number of mistakes deleted
1857
+ * - `backup_path`: Path to backup file (if deletion occurred)
1858
+ * - `conversation_summaries`: List of conversations with basic info
1859
+ * - `message`: Human-readable status message
1860
+ *
1861
+ * @example
1862
+ * ```typescript
1863
+ * // Preview what would be deleted
1864
+ * const preview = await handlers.forgetByTopic({
1865
+ * keywords: ['authentication', 'redesign'],
1866
+ * confirm: false
1867
+ * });
1868
+ *
1869
+ * // Actually delete after reviewing preview
1870
+ * const result = await handlers.forgetByTopic({
1871
+ * keywords: ['authentication', 'redesign'],
1872
+ * confirm: true
1873
+ * });
1874
+ * ```
1875
+ */
1876
+ async forgetByTopic(args) {
1877
+ const typedArgs = args;
1878
+ // Filter out empty strings and trim whitespace
1879
+ const keywords = (typedArgs.keywords || [])
1880
+ .map(k => k.trim())
1881
+ .filter(k => k.length > 0);
1882
+ const projectPath = this.resolveProjectPath(typedArgs.project_path);
1883
+ // SECURITY: Require strict boolean true to prevent truthy string coercion
1884
+ const confirm = typedArgs.confirm === true;
1885
+ if (keywords.length === 0) {
1886
+ return {
1887
+ success: false,
1888
+ preview_mode: true,
1889
+ conversations_found: 0,
1890
+ conversations_deleted: 0,
1891
+ messages_deleted: 0,
1892
+ decisions_deleted: 0,
1893
+ mistakes_deleted: 0,
1894
+ backup_path: null,
1895
+ conversation_summaries: [],
1896
+ message: "No keywords provided. Please specify keywords/topics to search for."
1897
+ };
1898
+ }
1899
+ try {
1900
+ // Create deletion service
1901
+ const storage = this.memory.getStorage();
1902
+ const semanticSearch = this.memory.getSemanticSearch();
1903
+ const deletionService = new DeletionService(this.db.getDatabase(), storage, semanticSearch);
1904
+ // Preview what would be deleted
1905
+ const preview = await deletionService.previewDeletionByTopic(keywords, projectPath);
1906
+ if (preview.conversationIds.length === 0) {
1907
+ return {
1908
+ success: true,
1909
+ preview_mode: true,
1910
+ conversations_found: 0,
1911
+ conversations_deleted: 0,
1912
+ messages_deleted: 0,
1913
+ decisions_deleted: 0,
1914
+ mistakes_deleted: 0,
1915
+ backup_path: null,
1916
+ conversation_summaries: [],
1917
+ message: preview.summary
1918
+ };
1919
+ }
1920
+ // Format conversation summaries for response
1921
+ const conversationSummaries = preview.conversations.map(conv => ({
1922
+ id: conv.id,
1923
+ session_id: conv.session_id,
1924
+ created_at: new Date(conv.created_at).toISOString(),
1925
+ message_count: conv.message_count
1926
+ }));
1927
+ // If not confirmed, return preview
1928
+ if (!confirm) {
1929
+ return {
1930
+ success: true,
1931
+ preview_mode: true,
1932
+ conversations_found: preview.conversationIds.length,
1933
+ conversations_deleted: 0,
1934
+ messages_deleted: 0,
1935
+ decisions_deleted: 0,
1936
+ mistakes_deleted: 0,
1937
+ backup_path: null,
1938
+ conversation_summaries: conversationSummaries,
1939
+ message: `${preview.summary}\n\nSet confirm=true to delete these conversations.`
1940
+ };
1941
+ }
1942
+ // Actually delete with backup
1943
+ const result = await deletionService.forgetByTopic(keywords, projectPath);
1944
+ return {
1945
+ success: true,
1946
+ preview_mode: false,
1947
+ conversations_found: result.deleted.conversations,
1948
+ conversations_deleted: result.deleted.conversations,
1949
+ messages_deleted: result.deleted.messages,
1950
+ decisions_deleted: result.deleted.decisions,
1951
+ mistakes_deleted: result.deleted.mistakes,
1952
+ backup_path: result.backup.backupPath,
1953
+ conversation_summaries: conversationSummaries,
1954
+ message: result.summary
1955
+ };
1956
+ }
1957
+ catch (error) {
1958
+ return {
1959
+ success: false,
1960
+ preview_mode: !confirm,
1961
+ conversations_found: 0,
1962
+ conversations_deleted: 0,
1963
+ messages_deleted: 0,
1964
+ decisions_deleted: 0,
1965
+ mistakes_deleted: 0,
1966
+ backup_path: null,
1967
+ conversation_summaries: [],
1968
+ message: `Error: ${error.message}`
1969
+ };
1970
+ }
1971
+ }
1972
+ // ==================== High-Value Utility Tools ====================
1973
+ /**
1974
+ * Search for all context related to a specific file.
1975
+ *
1976
+ * Combines discussions, decisions, and mistakes related to a file
1977
+ * in one convenient query.
1978
+ *
1979
+ * @param args - Search arguments with file_path
1980
+ * @returns Combined file context from all sources
1981
+ */
1982
+ async searchByFile(args) {
1983
+ const typedArgs = args;
1984
+ const filePath = typedArgs.file_path;
1985
+ const limit = typedArgs.limit || 5;
1986
+ if (!filePath) {
1987
+ return {
1988
+ file_path: "",
1989
+ discussions: [],
1990
+ decisions: [],
1991
+ mistakes: [],
1992
+ total_mentions: 0,
1993
+ message: "Error: file_path is required",
1994
+ };
1995
+ }
1996
+ // Normalize the file path for searching (handle both relative and absolute)
1997
+ const normalizedPath = filePath.replace(/^\.\//, "");
1998
+ const escapedPath = sanitizeForLike(normalizedPath);
1999
+ try {
2000
+ const messagesQuery = `
2001
+ SELECT id, conversation_id, content, timestamp, role
2002
+ FROM messages
2003
+ WHERE content LIKE ? OR content LIKE ?
2004
+ ORDER BY timestamp DESC
2005
+ LIMIT ?
2006
+ `;
2007
+ const discussions = this.db
2008
+ .prepare(messagesQuery)
2009
+ .all(`%${escapedPath}%`, `%/${escapedPath}%`, limit);
2010
+ const decisionsQuery = `
2011
+ SELECT d.id, d.decision_text, d.rationale, d.context, d.timestamp
2012
+ FROM decisions d
2013
+ WHERE d.related_files LIKE ?
2014
+ OR d.related_files LIKE ?
2015
+ OR d.decision_text LIKE ?
2016
+ ORDER BY d.timestamp DESC
2017
+ LIMIT ?
2018
+ `;
2019
+ const decisions = this.db
2020
+ .prepare(decisionsQuery)
2021
+ .all(`%${escapedPath}%`, `%/${escapedPath}%`, `%${escapedPath}%`, limit);
2022
+ const mistakesQuery = `
2023
+ SELECT m.id, m.mistake_type, m.what_went_wrong, m.correction, m.timestamp
2024
+ FROM mistakes m
2025
+ WHERE m.files_affected LIKE ?
2026
+ OR m.files_affected LIKE ?
2027
+ OR m.what_went_wrong LIKE ?
2028
+ ORDER BY m.timestamp DESC
2029
+ LIMIT ?
2030
+ `;
2031
+ const mistakes = this.db
2032
+ .prepare(mistakesQuery)
2033
+ .all(`%${escapedPath}%`, `%/${escapedPath}%`, `%${escapedPath}%`, limit);
2034
+ const totalMentions = discussions.length + decisions.length + mistakes.length;
2035
+ return {
2036
+ file_path: filePath,
2037
+ discussions: discussions.map((d) => ({
2038
+ id: d.id,
2039
+ conversation_id: d.conversation_id,
2040
+ content: d.content.substring(0, 500),
2041
+ timestamp: d.timestamp,
2042
+ role: d.role,
2043
+ })),
2044
+ decisions: decisions.map((d) => ({
2045
+ id: d.id,
2046
+ decision_text: d.decision_text,
2047
+ rationale: d.rationale || undefined,
2048
+ context: d.context || undefined,
2049
+ timestamp: d.timestamp,
2050
+ })),
2051
+ mistakes: mistakes.map((m) => ({
2052
+ id: m.id,
2053
+ mistake_type: m.mistake_type,
2054
+ what_went_wrong: m.what_went_wrong,
2055
+ correction: m.correction || undefined,
2056
+ timestamp: m.timestamp,
2057
+ })),
2058
+ total_mentions: totalMentions,
2059
+ message: totalMentions > 0
2060
+ ? `Found ${totalMentions} mentions: ${discussions.length} discussions, ${decisions.length} decisions, ${mistakes.length} mistakes`
2061
+ : `No mentions found for file: ${filePath}`,
2062
+ };
2063
+ }
2064
+ catch (error) {
2065
+ return {
2066
+ file_path: filePath,
2067
+ discussions: [],
2068
+ decisions: [],
2069
+ mistakes: [],
2070
+ total_mentions: 0,
2071
+ message: `Error searching for file: ${error.message}`,
2072
+ };
2073
+ }
2074
+ }
2075
+ /**
2076
+ * List recent conversation sessions.
2077
+ *
2078
+ * Provides an overview of recent sessions with basic stats.
2079
+ *
2080
+ * @param args - Query arguments with limit/offset
2081
+ * @returns List of recent sessions with summaries
2082
+ */
2083
+ async listRecentSessions(args) {
2084
+ const typedArgs = args;
2085
+ const limit = typedArgs.limit || 10;
2086
+ const offset = typedArgs.offset || 0;
2087
+ const projectPath = this.resolveOptionalProjectPath(typedArgs.project_path);
2088
+ try {
2089
+ let query;
2090
+ let params;
2091
+ if (projectPath) {
2092
+ query = `
2093
+ SELECT
2094
+ c.id,
2095
+ c.id as session_id,
2096
+ c.project_path,
2097
+ c.created_at,
2098
+ (SELECT COUNT(*) FROM messages WHERE conversation_id = c.id) as message_count,
2099
+ (SELECT content FROM messages WHERE conversation_id = c.id ORDER BY timestamp ASC LIMIT 1) as first_message_preview
2100
+ FROM conversations c
2101
+ WHERE c.project_path = ?
2102
+ ORDER BY c.created_at DESC
2103
+ LIMIT ? OFFSET ?
2104
+ `;
2105
+ params = [projectPath, limit + 1, offset];
2106
+ }
2107
+ else {
2108
+ query = `
2109
+ SELECT
2110
+ c.id,
2111
+ c.id as session_id,
2112
+ c.project_path,
2113
+ c.created_at,
2114
+ (SELECT COUNT(*) FROM messages WHERE conversation_id = c.id) as message_count,
2115
+ (SELECT content FROM messages WHERE conversation_id = c.id ORDER BY timestamp ASC LIMIT 1) as first_message_preview
2116
+ FROM conversations c
2117
+ ORDER BY c.created_at DESC
2118
+ LIMIT ? OFFSET ?
2119
+ `;
2120
+ params = [limit + 1, offset];
2121
+ }
2122
+ const rows = this.db.prepare(query).all(...params);
2123
+ const hasMore = rows.length > limit;
2124
+ const sessions = hasMore ? rows.slice(0, limit) : rows;
2125
+ const countQuery = projectPath
2126
+ ? "SELECT COUNT(*) as total FROM conversations WHERE project_path = ?"
2127
+ : "SELECT COUNT(*) as total FROM conversations";
2128
+ const countParams = projectPath ? [projectPath] : [];
2129
+ const countRow = this.db.prepare(countQuery).get(...countParams);
2130
+ const totalSessions = countRow?.total || 0;
2131
+ return {
2132
+ sessions: sessions.map((s) => ({
2133
+ id: s.id,
2134
+ session_id: s.session_id,
2135
+ project_path: s.project_path,
2136
+ created_at: s.created_at,
2137
+ message_count: s.message_count,
2138
+ first_message_preview: s.first_message_preview
2139
+ ? s.first_message_preview.substring(0, 200)
2140
+ : undefined,
2141
+ })),
2142
+ total_sessions: totalSessions,
2143
+ has_more: hasMore,
2144
+ message: `Found ${totalSessions} sessions${projectPath ? ` for ${projectPath}` : ""}`,
2145
+ };
2146
+ }
2147
+ catch (error) {
2148
+ return {
2149
+ sessions: [],
2150
+ total_sessions: 0,
2151
+ has_more: false,
2152
+ message: `Error listing sessions: ${error.message}`,
2153
+ };
2154
+ }
2155
+ }
2156
    // ==================== Global Cross-Project Tools ====================
    /**
     * Index all projects (Claude Code + Codex).
     *
     * Discovers and indexes all projects from both Claude Code and Codex,
     * registering them in a global index for cross-project search. Each
     * source gets its own SQLite database; errors in one source or project
     * folder are collected and do not abort the rest of the run.
     *
     * @param args - Indexing arguments (include_codex, include_claude_code,
     *   codex_path, claude_projects_path, incremental)
     * @returns Summary of all indexed projects, including per-project counts
     *   and any per-source/per-folder errors
     */
    async indexAllProjects(args) {
        const { GlobalIndex } = await import("../storage/GlobalIndex.js");
        const { homedir } = await import("os");
        const { join } = await import("path");
        const { existsSync, readdirSync } = await import("fs");
        const typedArgs = args;
        const { include_codex = true, include_claude_code = true, codex_path = join(homedir(), ".codex"), claude_projects_path = join(homedir(), ".claude", "projects"), incremental = true, } = typedArgs;
        const globalIndex = new GlobalIndex();
        try {
            const projects = [];
            const errors = [];
            // Aggregates Claude Code folders by canonical project path; multiple
            // on-disk folders can map to the same logical project.
            const claudeProjectsByPath = new Map();
            let totalMessages = 0;
            let totalConversations = 0;
            let totalDecisions = 0;
            let totalMistakes = 0;
            // Index Codex if requested
            if (include_codex && existsSync(codex_path)) {
                try {
                    const { CodexConversationParser } = await import("../parsers/CodexConversationParser.js");
                    const { SQLiteManager } = await import("../storage/SQLiteManager.js");
                    const { ConversationStorage } = await import("../storage/ConversationStorage.js");
                    const { SemanticSearch } = await import("../search/SemanticSearch.js");
                    const { DecisionExtractor } = await import("../parsers/DecisionExtractor.js");
                    const { MistakeExtractor } = await import("../parsers/MistakeExtractor.js");
                    // Create dedicated database for Codex
                    const codexDbPath = join(codex_path, ".cccmemory.db");
                    const codexDb = new SQLiteManager({ dbPath: codexDbPath });
                    const resolvedCodexDbPath = codexDb.getDbPath();
                    try {
                        const codexStorage = new ConversationStorage(codexDb);
                        // Get last indexed time for incremental mode; undefined means
                        // a full (non-incremental) parse of this source.
                        let codexLastIndexedMs;
                        if (incremental) {
                            const existingProject = globalIndex.getProject(codex_path);
                            if (existingProject) {
                                codexLastIndexedMs = existingProject.last_indexed;
                            }
                        }
                        // Parse Codex sessions
                        const parser = new CodexConversationParser();
                        const parseResult = parser.parseSession(codex_path, undefined, codexLastIndexedMs);
                        // Store all parsed data (the `true` flags skip per-call FTS
                        // rebuilds for performance; FTS is rebuilt once below)
                        await codexStorage.storeConversations(parseResult.conversations);
                        await codexStorage.storeMessages(parseResult.messages, true);
                        await codexStorage.storeToolUses(parseResult.tool_uses);
                        await codexStorage.storeToolResults(parseResult.tool_results);
                        await codexStorage.storeFileEdits(parseResult.file_edits);
                        await codexStorage.storeThinkingBlocks(parseResult.thinking_blocks);
                        // Extract and store decisions
                        const decisionExtractor = new DecisionExtractor();
                        const decisions = decisionExtractor.extractDecisions(parseResult.messages, parseResult.thinking_blocks);
                        await codexStorage.storeDecisions(decisions, true);
                        // Rebuild FTS indexes once after all data is stored
                        codexStorage.rebuildAllFts();
                        // Extract and store mistakes
                        // NOTE(review): mistakes are stored AFTER rebuildAllFts(); this
                        // assumes storeMistakes maintains its own FTS entries (it is not
                        // called with a skip-FTS flag) — confirm against ConversationStorage.
                        const mistakeExtractor = new MistakeExtractor();
                        const mistakes = mistakeExtractor.extractMistakes(parseResult.messages, parseResult.tool_results);
                        await codexStorage.storeMistakes(mistakes);
                        // Generate embeddings for semantic search; failures are non-fatal
                        // because FTS remains available as a search fallback.
                        try {
                            const semanticSearch = new SemanticSearch(codexDb);
                            await semanticSearch.indexMessages(parseResult.messages, incremental);
                            await semanticSearch.indexDecisions(decisions, incremental);
                            console.error(`✓ Generated embeddings for Codex project`);
                        }
                        catch (embedError) {
                            console.error("⚠️ Embedding generation failed for Codex:", embedError.message);
                            console.error(" FTS fallback will be used for search");
                        }
                        // Get stats from the database (totals, not just this run's delta)
                        const stats = codexDb.getDatabase()
                            .prepare("SELECT COUNT(*) as count FROM conversations")
                            .get();
                        const messageStats = codexDb.getDatabase()
                            .prepare("SELECT COUNT(*) as count FROM messages")
                            .get();
                        const decisionStats = codexDb.getDatabase()
                            .prepare("SELECT COUNT(*) as count FROM decisions")
                            .get();
                        const mistakeStats = codexDb.getDatabase()
                            .prepare("SELECT COUNT(*) as count FROM mistakes")
                            .get();
                        // Register in global index
                        globalIndex.registerProject({
                            project_path: codex_path,
                            source_type: "codex",
                            db_path: resolvedCodexDbPath,
                            message_count: messageStats.count,
                            conversation_count: stats.count,
                            decision_count: decisionStats.count,
                            mistake_count: mistakeStats.count,
                            metadata: {
                                indexed_folders: parseResult.indexed_folders || [],
                            },
                        });
                        projects.push({
                            project_path: codex_path,
                            source_type: "codex",
                            message_count: messageStats.count,
                            conversation_count: stats.count,
                        });
                        totalMessages += messageStats.count;
                        totalConversations += stats.count;
                        totalDecisions += decisionStats.count;
                        totalMistakes += mistakeStats.count;
                    }
                    finally {
                        // Always close the Codex database to prevent handle leaks
                        codexDb.close();
                    }
                }
                catch (error) {
                    errors.push({
                        project_path: codex_path,
                        error: error.message,
                    });
                }
            }
            // Index Claude Code projects if requested
            if (include_claude_code && existsSync(claude_projects_path)) {
                try {
                    const { SQLiteManager } = await import("../storage/SQLiteManager.js");
                    const { ConversationStorage } = await import("../storage/ConversationStorage.js");
                    const { ConversationParser } = await import("../parsers/ConversationParser.js");
                    const { DecisionExtractor } = await import("../parsers/DecisionExtractor.js");
                    const { MistakeExtractor } = await import("../parsers/MistakeExtractor.js");
                    const { statSync } = await import("fs");
                    const projectFolders = readdirSync(claude_projects_path);
                    // Map of on-disk folder -> last_indexed timestamp, rebuilt from
                    // global-index metadata so folders merged into a canonical project
                    // still resolve their own incremental cutoff.
                    const indexedFolderLastIndexed = new Map();
                    if (incremental) {
                        const existingProjects = globalIndex.getAllProjects("claude-code");
                        for (const project of existingProjects) {
                            const folders = project.metadata?.indexed_folders;
                            if (!Array.isArray(folders)) {
                                continue;
                            }
                            for (const folder of folders) {
                                if (typeof folder === "string") {
                                    indexedFolderLastIndexed.set(folder, project.last_indexed);
                                }
                            }
                        }
                    }
                    for (const folder of projectFolders) {
                        const folderPath = join(claude_projects_path, folder);
                        try {
                            // Skip if not a directory
                            if (!statSync(folderPath).isDirectory()) {
                                continue;
                            }
                            // Get last indexed time for incremental mode: prefer the
                            // metadata map, fall back to a direct global-index lookup.
                            let lastIndexedMs;
                            if (incremental) {
                                const metadataIndexed = indexedFolderLastIndexed.get(folderPath);
                                if (metadataIndexed) {
                                    lastIndexedMs = metadataIndexed;
                                }
                                else {
                                    const existingProject = globalIndex.getProject(folderPath);
                                    if (existingProject) {
                                        lastIndexedMs = existingProject.last_indexed;
                                    }
                                }
                            }
                            // Parse Claude Code conversations directly from this folder
                            const parser = new ConversationParser();
                            const parseResult = parser.parseFromFolder(folderPath, undefined, lastIndexedMs);
                            // Skip empty projects
                            if (parseResult.messages.length === 0) {
                                continue;
                            }
                            // Map the on-disk folder to the real project path inferred
                            // from message contents, falling back to the folder itself.
                            const inferredPath = this.inferProjectPathFromMessages(parseResult.messages);
                            const canonicalProjectPath = inferredPath
                                ? getCanonicalProjectPath(inferredPath).canonicalPath
                                : folderPath;
                            if (canonicalProjectPath !== folderPath) {
                                for (const conversation of parseResult.conversations) {
                                    conversation.project_path = canonicalProjectPath;
                                }
                            }
                            const projectDb = new SQLiteManager({ projectPath: canonicalProjectPath });
                            const projectDbPath = projectDb.getDbPath();
                            try {
                                const projectStorage = new ConversationStorage(projectDb);
                                // Store all parsed data (the `true` flags skip per-call FTS
                                // rebuilds for performance; FTS is rebuilt once below)
                                await projectStorage.storeConversations(parseResult.conversations);
                                await projectStorage.storeMessages(parseResult.messages, true);
                                await projectStorage.storeToolUses(parseResult.tool_uses);
                                await projectStorage.storeToolResults(parseResult.tool_results);
                                await projectStorage.storeFileEdits(parseResult.file_edits);
                                await projectStorage.storeThinkingBlocks(parseResult.thinking_blocks);
                                // Extract and store decisions
                                const decisionExtractor = new DecisionExtractor();
                                const decisions = decisionExtractor.extractDecisions(parseResult.messages, parseResult.thinking_blocks);
                                await projectStorage.storeDecisions(decisions, true);
                                // Rebuild FTS indexes once after all data is stored
                                projectStorage.rebuildAllFts();
                                // Extract and store mistakes (after FTS rebuild — see the
                                // equivalent note in the Codex branch above)
                                const mistakeExtractor = new MistakeExtractor();
                                const mistakes = mistakeExtractor.extractMistakes(parseResult.messages, parseResult.tool_results);
                                await projectStorage.storeMistakes(mistakes);
                                // Generate embeddings for semantic search (non-fatal on failure)
                                try {
                                    const { SemanticSearch } = await import("../search/SemanticSearch.js");
                                    const semanticSearch = new SemanticSearch(projectDb);
                                    await semanticSearch.indexMessages(parseResult.messages, incremental);
                                    await semanticSearch.indexDecisions(decisions, incremental);
                                    console.error(`✓ Generated embeddings for project: ${canonicalProjectPath}`);
                                }
                                catch (embedError) {
                                    console.error(`⚠️ Embedding generation failed for ${canonicalProjectPath}:`, embedError.message);
                                    console.error(" FTS fallback will be used for search");
                                }
                                // Get stats from the database (totals across all folders that
                                // share this DB, not just this folder's contribution)
                                const stats = projectDb.getDatabase()
                                    .prepare("SELECT COUNT(*) as count FROM conversations")
                                    .get();
                                const messageStats = projectDb.getDatabase()
                                    .prepare("SELECT COUNT(*) as count FROM messages")
                                    .get();
                                const decisionStats = projectDb.getDatabase()
                                    .prepare("SELECT COUNT(*) as count FROM decisions")
                                    .get();
                                const mistakeStats = projectDb.getDatabase()
                                    .prepare("SELECT COUNT(*) as count FROM mistakes")
                                    .get();
                                // Accumulate the set of folders that fed this canonical project
                                const existingAggregate = claudeProjectsByPath.get(canonicalProjectPath);
                                const indexedFolders = existingAggregate
                                    ? existingAggregate.indexed_folders
                                    : new Set();
                                indexedFolders.add(folderPath);
                                // Register in global index with the canonical project path
                                globalIndex.registerProject({
                                    project_path: canonicalProjectPath,
                                    source_type: "claude-code",
                                    db_path: projectDbPath,
                                    message_count: messageStats.count,
                                    conversation_count: stats.count,
                                    decision_count: decisionStats.count,
                                    mistake_count: mistakeStats.count,
                                    metadata: {
                                        indexed_folders: Array.from(indexedFolders),
                                    },
                                });
                                // Overwriting the aggregate is safe: counts are DB totals,
                                // so the latest folder's snapshot already includes earlier ones.
                                claudeProjectsByPath.set(canonicalProjectPath, {
                                    project_path: canonicalProjectPath,
                                    source_type: "claude-code",
                                    message_count: messageStats.count,
                                    conversation_count: stats.count,
                                    decision_count: decisionStats.count,
                                    mistake_count: mistakeStats.count,
                                    db_path: projectDbPath,
                                    indexed_folders: indexedFolders,
                                });
                            }
                            finally {
                                // Always close the project database to prevent handle leaks
                                projectDb.close();
                            }
                        }
                        catch (error) {
                            errors.push({
                                project_path: folder,
                                error: error.message,
                            });
                        }
                    }
                }
                catch (error) {
                    errors.push({
                        project_path: claude_projects_path,
                        error: error.message,
                    });
                }
            }
            // Flatten the per-canonical-path aggregates into the result lists
            for (const project of claudeProjectsByPath.values()) {
                projects.push({
                    project_path: project.project_path,
                    source_type: "claude-code",
                    message_count: project.message_count,
                    conversation_count: project.conversation_count,
                });
                totalMessages += project.message_count;
                totalConversations += project.conversation_count;
                totalDecisions += project.decision_count;
                totalMistakes += project.mistake_count;
            }
            const stats = globalIndex.getGlobalStats();
            return {
                success: true,
                global_index_path: globalIndex.getDbPath(),
                projects_indexed: projects.length,
                claude_code_projects: stats.claude_code_projects,
                codex_projects: stats.codex_projects,
                total_messages: totalMessages,
                total_conversations: totalConversations,
                total_decisions: totalDecisions,
                total_mistakes: totalMistakes,
                projects,
                errors,
                message: `Indexed ${projects.length} project(s): ${stats.claude_code_projects} Claude Code + ${stats.codex_projects} Codex`,
            };
        }
        finally {
            // Ensure GlobalIndex is always closed
            globalIndex.close();
        }
    }
2475
    /**
     * Search across all indexed projects.
     *
     * Opens each registered project's database read-only, searches it with
     * a query embedding computed once up front (falling back to FTS when
     * embeddings are unavailable), merges all hits, and paginates by
     * similarity. Projects that fail to open or search are reported in
     * `failed_projects` rather than aborting the whole search.
     *
     * @param args - Search arguments (query, limit/offset, optional
     *   date_range [startMs, endMs], source_type filter)
     * @returns Search results from all projects
     */
    async searchAllConversations(args) {
        await this.maybeAutoIndex();
        const { GlobalIndex } = await import("../storage/GlobalIndex.js");
        const { SQLiteManager } = await import("../storage/SQLiteManager.js");
        const { SemanticSearch } = await import("../search/SemanticSearch.js");
        const { getEmbeddingGenerator } = await import("../embeddings/EmbeddingGenerator.js");
        const typedArgs = args;
        const { query, limit = 20, offset = 0, date_range, source_type = "all" } = typedArgs;
        const globalIndex = new GlobalIndex();
        try {
            const projects = globalIndex.getAllProjects(source_type === "all" ? undefined : source_type);
            // Pre-compute query embedding once for all projects (major optimization)
            let queryEmbedding;
            try {
                const embedder = await getEmbeddingGenerator();
                if (embedder.isAvailable()) {
                    queryEmbedding = await embedder.embed(query);
                }
            }
            catch (_embeddingError) {
                // Fall back to FTS in each project
            }
            const allResults = [];
            const failedProjects = [];
            let claudeCodeResults = 0;
            let codexResults = 0;
            for (const project of projects) {
                let projectDb = null;
                try {
                    // Open this project's database (read-only: search never writes)
                    projectDb = new SQLiteManager({ dbPath: project.db_path, readOnly: true });
                    const semanticSearch = new SemanticSearch(projectDb);
                    // Search using pre-computed embedding (avoids re-embedding per project);
                    // fetch limit + offset so global pagination has enough candidates.
                    const localResults = await semanticSearch.searchConversations(query, limit + offset, undefined, queryEmbedding);
                    // Filter by date range if specified.
                    // NOTE(review): filtering happens after the per-project limit, so a
                    // narrow date_range may return fewer results than actually exist.
                    const filteredResults = date_range
                        ? localResults.filter((r) => {
                            const timestamp = r.message.timestamp;
                            return timestamp >= date_range[0] && timestamp <= date_range[1];
                        })
                        : localResults;
                    // Enrich results with project info
                    for (const result of filteredResults) {
                        allResults.push({
                            conversation_id: result.conversation.id,
                            message_id: result.message.id,
                            timestamp: new Date(result.message.timestamp).toISOString(),
                            similarity: result.similarity,
                            snippet: result.snippet,
                            git_branch: result.conversation.git_branch,
                            message_type: result.message.message_type,
                            role: result.message.role,
                            project_path: project.project_path,
                            source_type: project.source_type,
                        });
                        if (project.source_type === "claude-code") {
                            claudeCodeResults++;
                        }
                        else {
                            codexResults++;
                        }
                    }
                }
                catch (error) {
                    // Track failed projects instead of silently ignoring
                    failedProjects.push(`${project.project_path}: ${error.message}`);
                    continue;
                }
                finally {
                    // Close project database even when the search threw
                    if (projectDb) {
                        projectDb.close();
                    }
                }
            }
            // Sort by similarity (descending) and paginate across all projects
            const sortedResults = allResults.sort((a, b) => b.similarity - a.similarity);
            const paginatedResults = sortedResults.slice(offset, offset + limit);
            const successfulProjects = projects.length - failedProjects.length;
            return {
                query,
                results: paginatedResults,
                total_found: paginatedResults.length,
                has_more: offset + limit < sortedResults.length,
                offset,
                projects_searched: projects.length,
                projects_succeeded: successfulProjects,
                failed_projects: failedProjects.length > 0 ? failedProjects : undefined,
                search_stats: {
                    claude_code_results: claudeCodeResults,
                    codex_results: codexResults,
                },
                message: failedProjects.length > 0
                    ? `Found ${paginatedResults.length} result(s) across ${successfulProjects}/${projects.length} project(s). ${failedProjects.length} project(s) failed.`
                    : `Found ${paginatedResults.length} result(s) across ${projects.length} project(s)`,
            };
        }
        finally {
            // Ensure GlobalIndex is always closed
            globalIndex.close();
        }
    }
2583
+ /**
2584
+ * Get decisions from all indexed projects.
2585
+ *
2586
+ * @param args - Query arguments
2587
+ * @returns Decisions from all projects
2588
+ */
2589
+ async getAllDecisions(args) {
2590
+ await this.maybeAutoIndex();
2591
+ const { GlobalIndex } = await import("../storage/GlobalIndex.js");
2592
+ const { SQLiteManager } = await import("../storage/SQLiteManager.js");
2593
+ const { SemanticSearch } = await import("../search/SemanticSearch.js");
2594
+ const typedArgs = args;
2595
+ const { query, file_path, limit = 20, offset = 0, source_type = 'all' } = typedArgs;
2596
+ const globalIndex = new GlobalIndex();
2597
+ try {
2598
+ const projects = globalIndex.getAllProjects(source_type === "all" ? undefined : source_type);
2599
+ const allDecisions = [];
2600
+ for (const project of projects) {
2601
+ let projectDb = null;
2602
+ try {
2603
+ projectDb = new SQLiteManager({ dbPath: project.db_path, readOnly: true });
2604
+ const semanticSearch = new SemanticSearch(projectDb);
2605
+ // Use semantic search for better results
2606
+ const searchResults = await semanticSearch.searchDecisions(query, limit + offset);
2607
+ // Filter by file_path if specified
2608
+ const filteredResults = file_path
2609
+ ? searchResults.filter(r => r.decision.related_files.includes(file_path))
2610
+ : searchResults;
2611
+ for (const r of filteredResults) {
2612
+ allDecisions.push({
2613
+ decision_id: r.decision.id,
2614
+ decision_text: r.decision.decision_text,
2615
+ rationale: r.decision.rationale,
2616
+ alternatives_considered: r.decision.alternatives_considered,
2617
+ rejected_reasons: r.decision.rejected_reasons,
2618
+ context: r.decision.context,
2619
+ related_files: r.decision.related_files,
2620
+ related_commits: r.decision.related_commits,
2621
+ timestamp: new Date(r.decision.timestamp).toISOString(),
2622
+ similarity: r.similarity,
2623
+ project_path: project.project_path,
2624
+ source_type: project.source_type,
2625
+ });
2626
+ }
2627
+ }
2628
+ catch (_error) {
2629
+ continue;
2630
+ }
2631
+ finally {
2632
+ if (projectDb) {
2633
+ projectDb.close();
2634
+ }
2635
+ }
2636
+ }
2637
+ // Sort by similarity (semantic relevance) and paginate
2638
+ const sortedDecisions = allDecisions.sort((a, b) => b.similarity - a.similarity);
2639
+ const paginatedDecisions = sortedDecisions.slice(offset, offset + limit);
2640
+ return {
2641
+ query,
2642
+ decisions: paginatedDecisions,
2643
+ total_found: paginatedDecisions.length,
2644
+ has_more: offset + limit < sortedDecisions.length,
2645
+ offset,
2646
+ projects_searched: projects.length,
2647
+ message: `Found ${paginatedDecisions.length} decision(s) across ${projects.length} project(s)`,
2648
+ };
2649
+ }
2650
+ finally {
2651
+ globalIndex.close();
2652
+ }
2653
+ }
2654
+ /**
2655
+ * Search mistakes across all indexed projects.
2656
+ *
2657
+ * @param args - Search arguments
2658
+ * @returns Mistakes from all projects
2659
+ */
2660
+ async searchAllMistakes(args) {
2661
+ await this.maybeAutoIndex();
2662
+ const { GlobalIndex } = await import("../storage/GlobalIndex.js");
2663
+ const { SQLiteManager } = await import("../storage/SQLiteManager.js");
2664
+ const { SemanticSearch } = await import("../search/SemanticSearch.js");
2665
+ const typedArgs = args;
2666
+ const { query, mistake_type, limit = 20, offset = 0, source_type = 'all' } = typedArgs;
2667
+ const globalIndex = new GlobalIndex();
2668
+ try {
2669
+ const projects = globalIndex.getAllProjects(source_type === "all" ? undefined : source_type);
2670
+ const allMistakes = [];
2671
+ for (const project of projects) {
2672
+ let projectDb = null;
2673
+ try {
2674
+ projectDb = new SQLiteManager({ dbPath: project.db_path, readOnly: true });
2675
+ const semanticSearch = new SemanticSearch(projectDb);
2676
+ // Use semantic search for better results
2677
+ const searchResults = await semanticSearch.searchMistakes(query, limit + offset);
2678
+ // Filter by mistake_type if specified
2679
+ const filteredResults = mistake_type
2680
+ ? searchResults.filter(r => r.mistake.mistake_type === mistake_type)
2681
+ : searchResults;
2682
+ for (const r of filteredResults) {
2683
+ allMistakes.push({
2684
+ mistake_id: r.mistake.id,
2685
+ mistake_type: r.mistake.mistake_type,
2686
+ what_went_wrong: r.mistake.what_went_wrong,
2687
+ correction: r.mistake.correction,
2688
+ user_correction_message: r.mistake.user_correction_message,
2689
+ files_affected: r.mistake.files_affected,
2690
+ timestamp: new Date(r.mistake.timestamp).toISOString(),
2691
+ project_path: project.project_path,
2692
+ source_type: project.source_type,
2693
+ similarity: r.similarity,
2694
+ });
2695
+ }
2696
+ }
2697
+ catch (_error) {
2698
+ continue;
2699
+ }
2700
+ finally {
2701
+ if (projectDb) {
2702
+ projectDb.close();
2703
+ }
2704
+ }
2705
+ }
2706
+ // Sort by similarity (semantic relevance) and paginate
2707
+ const sortedMistakes = allMistakes.sort((a, b) => b.similarity - a.similarity);
2708
+ const paginatedMistakes = sortedMistakes.slice(offset, offset + limit);
2709
+ // Remove similarity from results (not in GlobalMistake type)
2710
+ const results = paginatedMistakes.map(({ similarity: _similarity, ...rest }) => rest);
2711
+ return {
2712
+ query,
2713
+ mistakes: results,
2714
+ total_found: results.length,
2715
+ has_more: offset + limit < sortedMistakes.length,
2716
+ offset,
2717
+ projects_searched: projects.length,
2718
+ message: `Found ${results.length} mistake(s) across ${projects.length} project(s)`,
2719
+ };
2720
+ }
2721
+ finally {
2722
+ globalIndex.close();
2723
+ }
2724
+ }
2725
+ // ==================== Live Context Layer Tools ====================
2726
+ /**
2727
+ * Store a fact, decision, or context in working memory.
2728
+ *
2729
+ * @param args - Remember arguments with key, value, context, tags, ttl
2730
+ * @returns The stored memory item
2731
+ */
2732
+ async remember(args) {
2733
+ const { WorkingMemoryStore } = await import("../memory/WorkingMemoryStore.js");
2734
+ const typedArgs = args;
2735
+ const { key, value, context, tags, ttl, project_path, } = typedArgs;
2736
+ const projectPath = this.resolveProjectPath(project_path);
2737
+ if (!key || !value) {
2738
+ return {
2739
+ success: false,
2740
+ message: "key and value are required",
2741
+ };
2742
+ }
2743
+ try {
2744
+ const store = new WorkingMemoryStore(this.db.getDatabase());
2745
+ const item = store.remember({
2746
+ key,
2747
+ value,
2748
+ context,
2749
+ tags,
2750
+ ttl,
2751
+ projectPath,
2752
+ });
2753
+ return {
2754
+ success: true,
2755
+ item: {
2756
+ id: item.id,
2757
+ key: item.key,
2758
+ value: item.value,
2759
+ context: item.context,
2760
+ tags: item.tags,
2761
+ created_at: new Date(item.createdAt).toISOString(),
2762
+ updated_at: new Date(item.updatedAt).toISOString(),
2763
+ expires_at: item.expiresAt ? new Date(item.expiresAt).toISOString() : undefined,
2764
+ },
2765
+ message: `Remembered "${key}" successfully`,
2766
+ };
2767
+ }
2768
+ catch (error) {
2769
+ return {
2770
+ success: false,
2771
+ message: `Error storing memory: ${error.message}`,
2772
+ };
2773
+ }
2774
+ }
2775
+ /**
2776
+ * Recall a specific memory item by key.
2777
+ *
2778
+ * @param args - Recall arguments with key
2779
+ * @returns The recalled memory item or null
2780
+ */
2781
+ async recall(args) {
2782
+ const { WorkingMemoryStore } = await import("../memory/WorkingMemoryStore.js");
2783
+ const typedArgs = args;
2784
+ const { key, project_path } = typedArgs;
2785
+ const projectPath = this.resolveProjectPath(project_path);
2786
+ if (!key) {
2787
+ return {
2788
+ success: false,
2789
+ found: false,
2790
+ message: "key is required",
2791
+ };
2792
+ }
2793
+ try {
2794
+ const store = new WorkingMemoryStore(this.db.getDatabase());
2795
+ const item = store.recall(key, projectPath);
2796
+ if (!item) {
2797
+ return {
2798
+ success: true,
2799
+ found: false,
2800
+ message: `No memory found for key "${key}"`,
2801
+ };
2802
+ }
2803
+ return {
2804
+ success: true,
2805
+ found: true,
2806
+ item: {
2807
+ id: item.id,
2808
+ key: item.key,
2809
+ value: item.value,
2810
+ context: item.context,
2811
+ tags: item.tags,
2812
+ created_at: new Date(item.createdAt).toISOString(),
2813
+ updated_at: new Date(item.updatedAt).toISOString(),
2814
+ expires_at: item.expiresAt ? new Date(item.expiresAt).toISOString() : undefined,
2815
+ },
2816
+ message: `Found memory for "${key}"`,
2817
+ };
2818
+ }
2819
+ catch (error) {
2820
+ return {
2821
+ success: false,
2822
+ found: false,
2823
+ message: `Error recalling memory: ${error.message}`,
2824
+ };
2825
+ }
2826
+ }
2827
+ /**
2828
+ * Search working memory semantically.
2829
+ *
2830
+ * @param args - Search arguments with query
2831
+ * @returns Relevant memory items
2832
+ */
2833
+ async recallRelevant(args) {
2834
+ const { WorkingMemoryStore } = await import("../memory/WorkingMemoryStore.js");
2835
+ const typedArgs = args;
2836
+ const { query, limit = 10, project_path } = typedArgs;
2837
+ const projectPath = this.resolveProjectPath(project_path);
2838
+ if (!query) {
2839
+ return {
2840
+ success: false,
2841
+ items: [],
2842
+ message: "query is required",
2843
+ };
2844
+ }
2845
+ try {
2846
+ const store = new WorkingMemoryStore(this.db.getDatabase());
2847
+ const results = store.recallRelevant({
2848
+ query,
2849
+ projectPath,
2850
+ limit,
2851
+ });
2852
+ return {
2853
+ success: true,
2854
+ items: results.map(item => ({
2855
+ id: item.id,
2856
+ key: item.key,
2857
+ value: item.value,
2858
+ context: item.context,
2859
+ tags: item.tags,
2860
+ similarity: item.similarity,
2861
+ created_at: new Date(item.createdAt).toISOString(),
2862
+ updated_at: new Date(item.updatedAt).toISOString(),
2863
+ })),
2864
+ total_found: results.length,
2865
+ message: results.length > 0
2866
+ ? `Found ${results.length} relevant memory item(s)`
2867
+ : "No relevant memories found",
2868
+ };
2869
+ }
2870
+ catch (error) {
2871
+ return {
2872
+ success: false,
2873
+ items: [],
2874
+ message: `Error searching memory: ${error.message}`,
2875
+ };
2876
+ }
2877
+ }
2878
+ /**
2879
+ * List all items in working memory.
2880
+ *
2881
+ * @param args - List arguments with optional tags filter
2882
+ * @returns All memory items
2883
+ */
2884
+ async listMemory(args) {
2885
+ const { WorkingMemoryStore } = await import("../memory/WorkingMemoryStore.js");
2886
+ const typedArgs = args;
2887
+ const { tags, limit = 100, offset = 0, project_path, } = typedArgs;
2888
+ const projectPath = this.resolveProjectPath(project_path);
2889
+ try {
2890
+ const store = new WorkingMemoryStore(this.db.getDatabase());
2891
+ const items = store.list(projectPath, { tags, limit: limit + 1, offset });
2892
+ const hasMore = items.length > limit;
2893
+ const results = hasMore ? items.slice(0, limit) : items;
2894
+ const totalCount = store.count(projectPath);
2895
+ return {
2896
+ success: true,
2897
+ items: results.map(item => ({
2898
+ id: item.id,
2899
+ key: item.key,
2900
+ value: item.value,
2901
+ context: item.context,
2902
+ tags: item.tags,
2903
+ created_at: new Date(item.createdAt).toISOString(),
2904
+ updated_at: new Date(item.updatedAt).toISOString(),
2905
+ expires_at: item.expiresAt ? new Date(item.expiresAt).toISOString() : undefined,
2906
+ })),
2907
+ total_count: totalCount,
2908
+ has_more: hasMore,
2909
+ offset,
2910
+ message: `Listed ${results.length} of ${totalCount} memory item(s)`,
2911
+ };
2912
+ }
2913
+ catch (error) {
2914
+ return {
2915
+ success: false,
2916
+ items: [],
2917
+ total_count: 0,
2918
+ has_more: false,
2919
+ offset: 0,
2920
+ message: `Error listing memory: ${error.message}`,
2921
+ };
2922
+ }
2923
+ }
2924
+ /**
2925
+ * Remove a memory item by key.
2926
+ *
2927
+ * @param args - Forget arguments with key
2928
+ * @returns Success status
2929
+ */
2930
+ async forget(args) {
2931
+ const { WorkingMemoryStore } = await import("../memory/WorkingMemoryStore.js");
2932
+ const typedArgs = args;
2933
+ const { key, project_path } = typedArgs;
2934
+ const projectPath = this.resolveProjectPath(project_path);
2935
+ if (!key) {
2936
+ return {
2937
+ success: false,
2938
+ message: "key is required",
2939
+ };
2940
+ }
2941
+ try {
2942
+ const store = new WorkingMemoryStore(this.db.getDatabase());
2943
+ const deleted = store.forget(key, projectPath);
2944
+ return {
2945
+ success: deleted,
2946
+ message: deleted
2947
+ ? `Forgot memory for "${key}"`
2948
+ : `No memory found for key "${key}"`,
2949
+ };
2950
+ }
2951
+ catch (error) {
2952
+ return {
2953
+ success: false,
2954
+ message: `Error forgetting memory: ${error.message}`,
2955
+ };
2956
+ }
2957
+ }
2958
+ // ============================================================
2959
+ // SESSION HANDOFF TOOLS
2960
+ // ============================================================
2961
+ /**
2962
+ * Prepare a handoff document from the current session.
2963
+ * Captures decisions, active files, pending tasks, and working memory.
2964
+ *
2965
+ * @param args - Handoff preparation arguments
2966
+ * @returns The prepared handoff document
2967
+ */
2968
+ async prepareHandoff(args) {
2969
+ const { SessionHandoffStore } = await import("../handoff/SessionHandoffStore.js");
2970
+ const typedArgs = args;
2971
+ const { session_id, include = ["decisions", "files", "tasks", "memory"], project_path, } = typedArgs;
2972
+ const projectPath = this.resolveProjectPath(project_path);
2973
+ try {
2974
+ const store = new SessionHandoffStore(this.db.getDatabase());
2975
+ const handoff = store.prepareHandoff({
2976
+ sessionId: session_id,
2977
+ projectPath,
2978
+ include: include,
2979
+ });
2980
+ return {
2981
+ success: true,
2982
+ handoff: {
2983
+ id: handoff.id,
2984
+ from_session_id: handoff.fromSessionId,
2985
+ project_path: handoff.projectPath,
2986
+ created_at: new Date(handoff.createdAt).toISOString(),
2987
+ summary: handoff.contextSummary,
2988
+ decisions_count: handoff.decisions.length,
2989
+ files_count: handoff.activeFiles.length,
2990
+ tasks_count: handoff.pendingTasks.length,
2991
+ memory_count: handoff.workingMemory.length,
2992
+ },
2993
+ message: `Handoff prepared with ${handoff.decisions.length} decisions, ${handoff.activeFiles.length} files, ${handoff.pendingTasks.length} tasks, ${handoff.workingMemory.length} memory items.`,
2994
+ };
2995
+ }
2996
+ catch (error) {
2997
+ return {
2998
+ success: false,
2999
+ message: `Error preparing handoff: ${error.message}`,
3000
+ };
3001
+ }
3002
+ }
3003
+ /**
3004
+ * Resume from a handoff in a new session.
3005
+ * Loads context from a previous session for continuity.
3006
+ *
3007
+ * @param args - Resume arguments
3008
+ * @returns The resumed handoff context
3009
+ */
3010
+ async resumeFromHandoff(args) {
3011
+ const { SessionHandoffStore } = await import("../handoff/SessionHandoffStore.js");
3012
+ const typedArgs = args;
3013
+ const { handoff_id, new_session_id, inject_context = true, project_path, } = typedArgs;
3014
+ const projectPath = this.resolveProjectPath(project_path);
3015
+ try {
3016
+ const store = new SessionHandoffStore(this.db.getDatabase());
3017
+ const handoff = store.resumeFromHandoff({
3018
+ handoffId: handoff_id,
3019
+ projectPath,
3020
+ newSessionId: new_session_id,
3021
+ injectContext: inject_context,
3022
+ });
3023
+ if (!handoff) {
3024
+ return {
3025
+ success: true,
3026
+ found: false,
3027
+ message: "No unresumed handoff found for this project.",
3028
+ };
3029
+ }
3030
+ return {
3031
+ success: true,
3032
+ found: true,
3033
+ handoff: {
3034
+ id: handoff.id,
3035
+ from_session_id: handoff.fromSessionId,
3036
+ project_path: handoff.projectPath,
3037
+ created_at: new Date(handoff.createdAt).toISOString(),
3038
+ summary: handoff.contextSummary,
3039
+ decisions: handoff.decisions.map((d) => ({
3040
+ text: d.text,
3041
+ rationale: d.rationale,
3042
+ timestamp: new Date(d.timestamp).toISOString(),
3043
+ })),
3044
+ active_files: handoff.activeFiles.map((f) => ({
3045
+ path: f.path,
3046
+ last_action: f.lastAction,
3047
+ })),
3048
+ pending_tasks: handoff.pendingTasks.map((t) => ({
3049
+ description: t.description,
3050
+ status: t.status,
3051
+ })),
3052
+ memory_items: handoff.workingMemory.map((m) => ({
3053
+ key: m.key,
3054
+ value: m.value,
3055
+ })),
3056
+ },
3057
+ message: `Resumed from handoff: ${handoff.contextSummary}`,
3058
+ };
3059
+ }
3060
+ catch (error) {
3061
+ return {
3062
+ success: false,
3063
+ found: false,
3064
+ message: `Error resuming from handoff: ${error.message}`,
3065
+ };
3066
+ }
3067
+ }
3068
+ /**
3069
+ * List available handoffs for a project.
3070
+ *
3071
+ * @param args - List arguments
3072
+ * @returns List of available handoffs
3073
+ */
3074
+ async listHandoffs(args) {
3075
+ const { SessionHandoffStore } = await import("../handoff/SessionHandoffStore.js");
3076
+ const typedArgs = args;
3077
+ const { limit = 10, include_resumed = false, project_path, } = typedArgs;
3078
+ const projectPath = this.resolveProjectPath(project_path);
3079
+ try {
3080
+ const store = new SessionHandoffStore(this.db.getDatabase());
3081
+ const handoffs = store.listHandoffs(projectPath, {
3082
+ limit,
3083
+ includeResumed: include_resumed,
3084
+ });
3085
+ return {
3086
+ success: true,
3087
+ handoffs: handoffs.map((h) => ({
3088
+ id: h.id,
3089
+ from_session_id: h.fromSessionId,
3090
+ created_at: new Date(h.createdAt).toISOString(),
3091
+ resumed_by: h.resumedBy,
3092
+ resumed_at: h.resumedAt ? new Date(h.resumedAt).toISOString() : undefined,
3093
+ summary: h.summary,
3094
+ })),
3095
+ total_count: handoffs.length,
3096
+ message: `Found ${handoffs.length} handoff(s)`,
3097
+ };
3098
+ }
3099
+ catch (error) {
3100
+ return {
3101
+ success: false,
3102
+ handoffs: [],
3103
+ total_count: 0,
3104
+ message: `Error listing handoffs: ${error.message}`,
3105
+ };
3106
+ }
3107
+ }
3108
+ // ============================================================
3109
+ // CONTEXT INJECTION TOOLS
3110
+ // ============================================================
3111
+ /**
3112
+ * Get context to inject at the start of a new conversation.
3113
+ * Combines handoffs, decisions, working memory, and file history.
3114
+ *
3115
+ * @param args - Context injection arguments
3116
+ * @returns Structured context for injection
3117
+ */
3118
+ async getStartupContext(args) {
3119
+ const { ContextInjector } = await import("../context/ContextInjector.js");
3120
+ const typedArgs = args;
3121
+ const { query, max_tokens = 2000, sources = ["history", "decisions", "memory", "handoffs"], project_path, } = typedArgs;
3122
+ const projectPath = this.resolveProjectPath(project_path);
3123
+ try {
3124
+ const injector = new ContextInjector(this.db.getDatabase());
3125
+ const context = await injector.getRelevantContext({
3126
+ query,
3127
+ projectPath,
3128
+ maxTokens: max_tokens,
3129
+ sources: sources,
3130
+ });
3131
+ return {
3132
+ success: true,
3133
+ context: {
3134
+ handoff: context.handoff ? {
3135
+ id: context.handoff.id,
3136
+ from_session_id: context.handoff.fromSessionId,
3137
+ project_path: context.handoff.projectPath,
3138
+ created_at: new Date(context.handoff.createdAt).toISOString(),
3139
+ summary: context.handoff.contextSummary,
3140
+ decisions: context.handoff.decisions.map(d => ({
3141
+ text: d.text,
3142
+ rationale: d.rationale,
3143
+ timestamp: new Date(d.timestamp).toISOString(),
3144
+ })),
3145
+ active_files: context.handoff.activeFiles.map(f => ({
3146
+ path: f.path,
3147
+ last_action: f.lastAction,
3148
+ })),
3149
+ pending_tasks: context.handoff.pendingTasks.map(t => ({
3150
+ description: t.description,
3151
+ status: t.status,
3152
+ })),
3153
+ memory_items: context.handoff.workingMemory.map(m => ({
3154
+ key: m.key,
3155
+ value: m.value,
3156
+ })),
3157
+ } : undefined,
3158
+ decisions: context.decisions.map(d => ({
3159
+ id: d.id,
3160
+ text: d.text,
3161
+ rationale: d.rationale,
3162
+ timestamp: new Date(d.timestamp).toISOString(),
3163
+ })),
3164
+ memory: context.memory.map(m => ({
3165
+ id: m.id,
3166
+ key: m.key,
3167
+ value: m.value,
3168
+ context: m.context,
3169
+ tags: m.tags,
3170
+ created_at: new Date(m.createdAt).toISOString(),
3171
+ updated_at: new Date(m.updatedAt).toISOString(),
3172
+ })),
3173
+ recent_files: context.recentFiles.map(f => ({
3174
+ path: f.path,
3175
+ last_action: f.lastAction,
3176
+ timestamp: new Date(f.timestamp).toISOString(),
3177
+ })),
3178
+ summary: context.summary,
3179
+ },
3180
+ token_estimate: context.tokenEstimate,
3181
+ message: `Retrieved context: ${context.summary}`,
3182
+ };
3183
+ }
3184
+ catch (error) {
3185
+ return {
3186
+ success: false,
3187
+ context: {
3188
+ decisions: [],
3189
+ memory: [],
3190
+ recent_files: [],
3191
+ summary: "",
3192
+ },
3193
+ token_estimate: 0,
3194
+ message: `Error getting startup context: ${error.message}`,
3195
+ };
3196
+ }
3197
+ }
3198
+ /**
3199
+ * Inject relevant context based on the first message in a new conversation.
3200
+ * Returns formatted markdown context for direct use.
3201
+ *
3202
+ * @param args - Injection arguments
3203
+ * @returns Formatted context string
3204
+ */
3205
+ async injectRelevantContext(args) {
3206
+ const { ContextInjector } = await import("../context/ContextInjector.js");
3207
+ const typedArgs = args;
3208
+ const { message, max_tokens = 2000, sources = ["history", "decisions", "memory", "handoffs"], project_path, } = typedArgs;
3209
+ const projectPath = this.resolveProjectPath(project_path);
3210
+ if (!message) {
3211
+ return {
3212
+ success: false,
3213
+ injected_context: "",
3214
+ sources_used: [],
3215
+ token_count: 0,
3216
+ message: "message is required",
3217
+ };
3218
+ }
3219
+ try {
3220
+ const injector = new ContextInjector(this.db.getDatabase());
3221
+ const context = await injector.getRelevantContext({
3222
+ query: message,
3223
+ projectPath,
3224
+ maxTokens: max_tokens,
3225
+ sources: sources,
3226
+ });
3227
+ // Format for injection
3228
+ const formattedContext = injector.formatForInjection(context);
3229
+ // Track which sources were used
3230
+ const sourcesUsed = [];
3231
+ if (context.handoff) {
3232
+ sourcesUsed.push("handoffs");
3233
+ }
3234
+ if (context.decisions.length > 0) {
3235
+ sourcesUsed.push("decisions");
3236
+ }
3237
+ if (context.memory.length > 0) {
3238
+ sourcesUsed.push("memory");
3239
+ }
3240
+ if (context.recentFiles.length > 0) {
3241
+ sourcesUsed.push("history");
3242
+ }
3243
+ return {
3244
+ success: true,
3245
+ injected_context: formattedContext,
3246
+ sources_used: sourcesUsed,
3247
+ token_count: context.tokenEstimate,
3248
+ message: `Injected context from ${sourcesUsed.length} source(s)`,
3249
+ };
3250
+ }
3251
+ catch (error) {
3252
+ return {
3253
+ success: false,
3254
+ injected_context: "",
3255
+ sources_used: [],
3256
+ token_count: 0,
3257
+ message: `Error injecting context: ${error.message}`,
3258
+ };
3259
+ }
3260
+ }
3261
+ }
3262
+ //# sourceMappingURL=ToolHandlers.js.map