gnosys 4.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (188) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +1387 -0
  3. package/dist/cli.d.ts +7 -0
  4. package/dist/cli.d.ts.map +1 -0
  5. package/dist/cli.js +3753 -0
  6. package/dist/cli.js.map +1 -0
  7. package/dist/index.d.ts +8 -0
  8. package/dist/index.d.ts.map +1 -0
  9. package/dist/index.js +2267 -0
  10. package/dist/index.js.map +1 -0
  11. package/dist/lib/archive.d.ts +95 -0
  12. package/dist/lib/archive.d.ts.map +1 -0
  13. package/dist/lib/archive.js +311 -0
  14. package/dist/lib/archive.js.map +1 -0
  15. package/dist/lib/ask.d.ts +77 -0
  16. package/dist/lib/ask.d.ts.map +1 -0
  17. package/dist/lib/ask.js +316 -0
  18. package/dist/lib/ask.js.map +1 -0
  19. package/dist/lib/audit.d.ts +47 -0
  20. package/dist/lib/audit.d.ts.map +1 -0
  21. package/dist/lib/audit.js +136 -0
  22. package/dist/lib/audit.js.map +1 -0
  23. package/dist/lib/bootstrap.d.ts +56 -0
  24. package/dist/lib/bootstrap.d.ts.map +1 -0
  25. package/dist/lib/bootstrap.js +163 -0
  26. package/dist/lib/bootstrap.js.map +1 -0
  27. package/dist/lib/config.d.ts +239 -0
  28. package/dist/lib/config.d.ts.map +1 -0
  29. package/dist/lib/config.js +371 -0
  30. package/dist/lib/config.js.map +1 -0
  31. package/dist/lib/dashboard.d.ts +81 -0
  32. package/dist/lib/dashboard.d.ts.map +1 -0
  33. package/dist/lib/dashboard.js +314 -0
  34. package/dist/lib/dashboard.js.map +1 -0
  35. package/dist/lib/db.d.ts +182 -0
  36. package/dist/lib/db.d.ts.map +1 -0
  37. package/dist/lib/db.js +620 -0
  38. package/dist/lib/db.js.map +1 -0
  39. package/dist/lib/dbSearch.d.ts +65 -0
  40. package/dist/lib/dbSearch.d.ts.map +1 -0
  41. package/dist/lib/dbSearch.js +239 -0
  42. package/dist/lib/dbSearch.js.map +1 -0
  43. package/dist/lib/dbWrite.d.ts +56 -0
  44. package/dist/lib/dbWrite.d.ts.map +1 -0
  45. package/dist/lib/dbWrite.js +171 -0
  46. package/dist/lib/dbWrite.js.map +1 -0
  47. package/dist/lib/dream.d.ts +170 -0
  48. package/dist/lib/dream.d.ts.map +1 -0
  49. package/dist/lib/dream.js +706 -0
  50. package/dist/lib/dream.js.map +1 -0
  51. package/dist/lib/embeddings.d.ts +84 -0
  52. package/dist/lib/embeddings.d.ts.map +1 -0
  53. package/dist/lib/embeddings.js +226 -0
  54. package/dist/lib/embeddings.js.map +1 -0
  55. package/dist/lib/export.d.ts +92 -0
  56. package/dist/lib/export.d.ts.map +1 -0
  57. package/dist/lib/export.js +362 -0
  58. package/dist/lib/export.js.map +1 -0
  59. package/dist/lib/federated.d.ts +113 -0
  60. package/dist/lib/federated.d.ts.map +1 -0
  61. package/dist/lib/federated.js +346 -0
  62. package/dist/lib/federated.js.map +1 -0
  63. package/dist/lib/graph.d.ts +50 -0
  64. package/dist/lib/graph.d.ts.map +1 -0
  65. package/dist/lib/graph.js +118 -0
  66. package/dist/lib/graph.js.map +1 -0
  67. package/dist/lib/history.d.ts +39 -0
  68. package/dist/lib/history.d.ts.map +1 -0
  69. package/dist/lib/history.js +112 -0
  70. package/dist/lib/history.js.map +1 -0
  71. package/dist/lib/hybridSearch.d.ts +80 -0
  72. package/dist/lib/hybridSearch.d.ts.map +1 -0
  73. package/dist/lib/hybridSearch.js +296 -0
  74. package/dist/lib/hybridSearch.js.map +1 -0
  75. package/dist/lib/import.d.ts +52 -0
  76. package/dist/lib/import.d.ts.map +1 -0
  77. package/dist/lib/import.js +365 -0
  78. package/dist/lib/import.js.map +1 -0
  79. package/dist/lib/ingest.d.ts +51 -0
  80. package/dist/lib/ingest.d.ts.map +1 -0
  81. package/dist/lib/ingest.js +144 -0
  82. package/dist/lib/ingest.js.map +1 -0
  83. package/dist/lib/lensing.d.ts +35 -0
  84. package/dist/lib/lensing.d.ts.map +1 -0
  85. package/dist/lib/lensing.js +85 -0
  86. package/dist/lib/lensing.js.map +1 -0
  87. package/dist/lib/llm.d.ts +84 -0
  88. package/dist/lib/llm.d.ts.map +1 -0
  89. package/dist/lib/llm.js +386 -0
  90. package/dist/lib/llm.js.map +1 -0
  91. package/dist/lib/lock.d.ts +28 -0
  92. package/dist/lib/lock.d.ts.map +1 -0
  93. package/dist/lib/lock.js +145 -0
  94. package/dist/lib/lock.js.map +1 -0
  95. package/dist/lib/maintenance.d.ts +124 -0
  96. package/dist/lib/maintenance.d.ts.map +1 -0
  97. package/dist/lib/maintenance.js +587 -0
  98. package/dist/lib/maintenance.js.map +1 -0
  99. package/dist/lib/migrate.d.ts +19 -0
  100. package/dist/lib/migrate.d.ts.map +1 -0
  101. package/dist/lib/migrate.js +260 -0
  102. package/dist/lib/migrate.js.map +1 -0
  103. package/dist/lib/preferences.d.ts +49 -0
  104. package/dist/lib/preferences.d.ts.map +1 -0
  105. package/dist/lib/preferences.js +149 -0
  106. package/dist/lib/preferences.js.map +1 -0
  107. package/dist/lib/projectIdentity.d.ts +66 -0
  108. package/dist/lib/projectIdentity.d.ts.map +1 -0
  109. package/dist/lib/projectIdentity.js +148 -0
  110. package/dist/lib/projectIdentity.js.map +1 -0
  111. package/dist/lib/recall.d.ts +82 -0
  112. package/dist/lib/recall.d.ts.map +1 -0
  113. package/dist/lib/recall.js +289 -0
  114. package/dist/lib/recall.js.map +1 -0
  115. package/dist/lib/resolver.d.ts +116 -0
  116. package/dist/lib/resolver.d.ts.map +1 -0
  117. package/dist/lib/resolver.js +372 -0
  118. package/dist/lib/resolver.js.map +1 -0
  119. package/dist/lib/retry.d.ts +24 -0
  120. package/dist/lib/retry.d.ts.map +1 -0
  121. package/dist/lib/retry.js +60 -0
  122. package/dist/lib/retry.js.map +1 -0
  123. package/dist/lib/rulesGen.d.ts +51 -0
  124. package/dist/lib/rulesGen.d.ts.map +1 -0
  125. package/dist/lib/rulesGen.js +167 -0
  126. package/dist/lib/rulesGen.js.map +1 -0
  127. package/dist/lib/search.d.ts +51 -0
  128. package/dist/lib/search.d.ts.map +1 -0
  129. package/dist/lib/search.js +190 -0
  130. package/dist/lib/search.js.map +1 -0
  131. package/dist/lib/staticSearch.d.ts +70 -0
  132. package/dist/lib/staticSearch.d.ts.map +1 -0
  133. package/dist/lib/staticSearch.js +162 -0
  134. package/dist/lib/staticSearch.js.map +1 -0
  135. package/dist/lib/store.d.ts +79 -0
  136. package/dist/lib/store.d.ts.map +1 -0
  137. package/dist/lib/store.js +227 -0
  138. package/dist/lib/store.js.map +1 -0
  139. package/dist/lib/structuredIngest.d.ts +37 -0
  140. package/dist/lib/structuredIngest.d.ts.map +1 -0
  141. package/dist/lib/structuredIngest.js +208 -0
  142. package/dist/lib/structuredIngest.js.map +1 -0
  143. package/dist/lib/tags.d.ts +26 -0
  144. package/dist/lib/tags.d.ts.map +1 -0
  145. package/dist/lib/tags.js +109 -0
  146. package/dist/lib/tags.js.map +1 -0
  147. package/dist/lib/timeline.d.ts +34 -0
  148. package/dist/lib/timeline.d.ts.map +1 -0
  149. package/dist/lib/timeline.js +116 -0
  150. package/dist/lib/timeline.js.map +1 -0
  151. package/dist/lib/trace.d.ts +42 -0
  152. package/dist/lib/trace.d.ts.map +1 -0
  153. package/dist/lib/trace.js +338 -0
  154. package/dist/lib/trace.js.map +1 -0
  155. package/dist/lib/webIndex.d.ts +28 -0
  156. package/dist/lib/webIndex.d.ts.map +1 -0
  157. package/dist/lib/webIndex.js +208 -0
  158. package/dist/lib/webIndex.js.map +1 -0
  159. package/dist/lib/webIngest.d.ts +51 -0
  160. package/dist/lib/webIngest.d.ts.map +1 -0
  161. package/dist/lib/webIngest.js +533 -0
  162. package/dist/lib/webIngest.js.map +1 -0
  163. package/dist/lib/wikilinks.d.ts +63 -0
  164. package/dist/lib/wikilinks.d.ts.map +1 -0
  165. package/dist/lib/wikilinks.js +146 -0
  166. package/dist/lib/wikilinks.js.map +1 -0
  167. package/dist/sandbox/client.d.ts +82 -0
  168. package/dist/sandbox/client.d.ts.map +1 -0
  169. package/dist/sandbox/client.js +128 -0
  170. package/dist/sandbox/client.js.map +1 -0
  171. package/dist/sandbox/helper-template.d.ts +14 -0
  172. package/dist/sandbox/helper-template.d.ts.map +1 -0
  173. package/dist/sandbox/helper-template.js +285 -0
  174. package/dist/sandbox/helper-template.js.map +1 -0
  175. package/dist/sandbox/index.d.ts +10 -0
  176. package/dist/sandbox/index.d.ts.map +1 -0
  177. package/dist/sandbox/index.js +10 -0
  178. package/dist/sandbox/index.js.map +1 -0
  179. package/dist/sandbox/manager.d.ts +40 -0
  180. package/dist/sandbox/manager.d.ts.map +1 -0
  181. package/dist/sandbox/manager.js +220 -0
  182. package/dist/sandbox/manager.js.map +1 -0
  183. package/dist/sandbox/server.d.ts +44 -0
  184. package/dist/sandbox/server.d.ts.map +1 -0
  185. package/dist/sandbox/server.js +661 -0
  186. package/dist/sandbox/server.js.map +1 -0
  187. package/package.json +103 -0
  188. package/prompts/synthesize.md +21 -0
package/dist/index.js ADDED
@@ -0,0 +1,2267 @@
1
+ #!/usr/bin/env node
2
+ /**
3
+ * Gnosys MCP Server — The core of Gnosys.
4
+ * Exposes memory operations as MCP tools that any agent can call.
5
+ * Supports layered stores: project (auto-discovered), personal, global, optional.
6
+ */
7
+ // Load API keys from ~/.config/gnosys/.env before anything else.
8
+ // IMPORTANT: We use dotenv.parse() instead of dotenv.config() because
9
+ // dotenv v17+ writes injection notices to stdout, which corrupts the
10
+ // MCP stdio JSON protocol. parse() is a pure function with no side effects.
11
+ import dotenv from "dotenv";
12
+ import path from "path";
13
+ import { readFileSync } from "fs";
14
+ const home = process.env.HOME || process.env.USERPROFILE || "/tmp";
15
+ try {
16
+ const envFile = readFileSync(path.join(home, ".config", "gnosys", ".env"), "utf8");
17
+ const parsed = dotenv.parse(envFile);
18
+ for (const [key, val] of Object.entries(parsed)) {
19
+ if (!(key in process.env))
20
+ process.env[key] = val;
21
+ }
22
+ }
23
+ catch {
24
+ // .env file not found — that's fine, env vars may be set elsewhere
25
+ }
26
+ import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
27
+ import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
28
+ import { z } from "zod";
29
+ import fs from "fs/promises";
30
+ import { GnosysSearch } from "./lib/search.js";
31
+ import { GnosysTagRegistry } from "./lib/tags.js";
32
+ import { performImport, formatImportSummary, estimateDuration } from "./lib/import.js";
33
+ import { GnosysIngestion } from "./lib/ingest.js";
34
+ import { GnosysResolver } from "./lib/resolver.js";
35
+ import { applyLens } from "./lib/lensing.js";
36
+ import { getFileHistory, rollbackToCommit, hasGitHistory } from "./lib/history.js";
37
+ import { groupByPeriod, computeStats } from "./lib/timeline.js";
38
+ import { buildLinkGraph, getBacklinks, getOutgoingLinks, formatGraphSummary } from "./lib/wikilinks.js";
39
+ import { bootstrap } from "./lib/bootstrap.js";
40
+ import { loadConfig, DEFAULT_CONFIG } from "./lib/config.js";
41
+ import { GnosysEmbeddings } from "./lib/embeddings.js";
42
+ import { GnosysHybridSearch } from "./lib/hybridSearch.js";
43
+ import { GnosysAsk } from "./lib/ask.js";
44
+ import { getLLMProvider } from "./lib/llm.js";
45
+ import { GnosysMaintenanceEngine, formatMaintenanceReport } from "./lib/maintenance.js";
46
+ import { recall, formatRecall } from "./lib/recall.js";
47
+ import { initAudit, readAuditLog, formatAuditTimeline } from "./lib/audit.js";
48
+ import { GnosysDB } from "./lib/db.js";
49
+ import { syncMemoryToDb, syncUpdateToDb, syncDearchiveToDb, syncReinforcementToDb, auditToDb } from "./lib/dbWrite.js";
50
+ import { GnosysDreamEngine, DreamScheduler, formatDreamReport } from "./lib/dream.js";
51
+ import { GnosysExporter, formatExportReport } from "./lib/export.js";
52
+ import { createProjectIdentity, readProjectIdentity } from "./lib/projectIdentity.js";
53
+ import { setPreference, getPreference, getAllPreferences, deletePreference } from "./lib/preferences.js";
54
+ import { syncRules } from "./lib/rulesGen.js";
55
+ import { federatedSearch, detectAmbiguity, generateBriefing, generateAllBriefings, getWorkingSet, formatWorkingSet, detectCurrentProject } from "./lib/federated.js";
56
+ // Initialize resolver (discovers all layered stores)
57
+ const resolver = new GnosysResolver();
58
+ let config = DEFAULT_CONFIG;
59
+ // Create MCP server
60
+ const server = new McpServer({
61
+ name: "gnosys",
62
+ version: "2.0.0",
63
+ });
64
+ // These are initialized in main() after resolver runs
65
+ let search = null;
66
+ let tagRegistry = null;
67
+ let ingestion = null;
68
+ let hybridSearch = null;
69
+ let askEngine = null;
70
+ /** v2.0: Unified SQLite store (available after migration) */
71
+ let gnosysDb = null;
72
+ /** v3.0: Central DB at ~/.gnosys/gnosys.db */
73
+ let centralDb = null;
74
+ /** v2.0: Dream scheduler (idle-time consolidation) */
75
+ let dreamScheduler = null;
76
+ // ─── Multi-Project Support ───────────────────────────────────────────────
77
+ // Each tool call can optionally pass a `projectRoot` to target a specific
78
+ // project's .gnosys store. This is STATELESS — no race conditions when
79
+ // multiple agents call tools in parallel.
80
+ /** Common Zod schema fragment for projectRoot parameter */
81
+ const projectRootParam = z.string().optional().describe("Optional project root path for multi-project support. When provided, this tool operates on projectRoot/.gnosys instead of the default store. Use gnosys_stores to see all available stores.");
82
+ async function resolveToolContext(projectRoot) {
83
+ if (!projectRoot) {
84
+ // Default context — use module-level state
85
+ const writeTarget = resolver.getWriteTarget();
86
+ // v3.0: Try to read project identity from the write target's parent dir
87
+ let projectId = null;
88
+ if (writeTarget) {
89
+ const parentDir = path.dirname(writeTarget.store.getStorePath());
90
+ const identity = await readProjectIdentity(parentDir);
91
+ projectId = identity?.projectId || null;
92
+ }
93
+ return {
94
+ resolver,
95
+ store: writeTarget?.store || null,
96
+ storePath: writeTarget?.store.getStorePath() || "",
97
+ config,
98
+ search,
99
+ gnosysDb,
100
+ centralDb,
101
+ projectId,
102
+ };
103
+ }
104
+ // Scoped context — resolve for this specific project
105
+ const scopedResolver = await GnosysResolver.resolveForProject(projectRoot);
106
+ const scopedWriteTarget = scopedResolver.getWriteTarget();
107
+ const scopedStorePath = scopedWriteTarget?.store.getStorePath() || "";
108
+ let scopedConfig = DEFAULT_CONFIG;
109
+ let scopedDb = null;
110
+ let scopedSearch = null;
111
+ // v3.0: Read project identity
112
+ const identity = await readProjectIdentity(path.resolve(projectRoot));
113
+ const projectId = identity?.projectId || null;
114
+ if (scopedStorePath) {
115
+ try {
116
+ scopedConfig = await loadConfig(scopedStorePath);
117
+ }
118
+ catch {
119
+ // Use defaults
120
+ }
121
+ // Initialize search for the scoped store
122
+ scopedSearch = new GnosysSearch(scopedStorePath);
123
+ if (scopedWriteTarget) {
124
+ await scopedSearch.addStoreMemories(scopedWriteTarget.store);
125
+ }
126
+ // Initialize GnosysDB for the scoped store
127
+ try {
128
+ scopedDb = new GnosysDB(scopedStorePath);
129
+ if (!scopedDb.isAvailable() || !scopedDb.isMigrated()) {
130
+ scopedDb = null;
131
+ }
132
+ }
133
+ catch {
134
+ scopedDb = null;
135
+ }
136
+ }
137
+ return {
138
+ resolver: scopedResolver,
139
+ store: scopedWriteTarget?.store || null,
140
+ storePath: scopedStorePath,
141
+ config: scopedConfig,
142
+ search: scopedSearch,
143
+ gnosysDb: scopedDb,
144
+ centralDb,
145
+ projectId,
146
+ };
147
+ }
148
+ // ─── Tool: gnosys_discover ──────────────────────────────────────────────
149
+ server.tool("gnosys_discover", "Discover relevant memories by describing what you're working on. Searches relevance keyword clouds across all stores. Returns lightweight metadata (title, path, relevance keywords) — NO file contents. Use gnosys_read to load specific memories you need. Call this FIRST when starting a task to find what Gnosys knows.", {
150
+ query: z
151
+ .string()
152
+ .describe("Describe what you're working on or looking for. Use keywords, not sentences. Example: 'auth JWT session tokens' or 'deployment CI/CD pipeline'"),
153
+ limit: z.number().optional().describe("Max results (default 20)"),
154
+ projectRoot: projectRootParam,
155
+ }, async ({ query, limit, projectRoot }) => {
156
+ const ctx = await resolveToolContext(projectRoot);
157
+ // v2.0 DB-backed fast path
158
+ if (ctx.gnosysDb?.isAvailable() && ctx.gnosysDb?.isMigrated()) {
159
+ const results = ctx.gnosysDb.discoverFts(query, limit || 20);
160
+ if (results.length === 0) {
161
+ return {
162
+ content: [{ type: "text", text: `No memories found for "${query}". Try different keywords.` }],
163
+ };
164
+ }
165
+ const formatted = results
166
+ .map((r) => `**${r.title}**\n ID: ${r.id}${r.relevance ? `\n Relevance: ${r.relevance}` : ""}`)
167
+ .join("\n\n");
168
+ return {
169
+ content: [{ type: "text", text: `Found ${results.length} relevant memories for "${query}":\n\n${formatted}\n\nUse gnosys_read to load any of these.` }],
170
+ };
171
+ }
172
+ // v1.x legacy path
173
+ if (!ctx.search) {
174
+ return {
175
+ content: [{ type: "text", text: "Search index not initialized." }],
176
+ isError: true,
177
+ };
178
+ }
179
+ const results = ctx.search.discover(query, limit || 20);
180
+ if (results.length === 0) {
181
+ return {
182
+ content: [
183
+ {
184
+ type: "text",
185
+ text: `No memories found for "${query}". Try different keywords or use gnosys_search for full-text search.`,
186
+ },
187
+ ],
188
+ };
189
+ }
190
+ const formatted = results
191
+ .map((r) => `**${r.title}**\n Path: ${r.relative_path}${r.relevance ? `\n Relevance: ${r.relevance}` : ""}`)
192
+ .join("\n\n");
193
+ return {
194
+ content: [
195
+ {
196
+ type: "text",
197
+ text: `Found ${results.length} relevant memories for "${query}":\n\n${formatted}\n\nUse gnosys_read to load any of these.`,
198
+ },
199
+ ],
200
+ };
201
+ });
202
+ // ─── Tool: gnosys_read ───────────────────────────────────────────────────
203
+ server.tool("gnosys_read", "Read a specific memory. Accepts a memory ID (e.g., 'arch-012') or layer-prefixed path (e.g., 'project:decisions/why-not-rag.md'). Without a prefix, searches all stores in precedence order.", {
204
+ path: z.string().describe("Memory ID or path, optionally prefixed with store layer"),
205
+ projectRoot: projectRootParam,
206
+ }, async ({ path: memPath, projectRoot }) => {
207
+ const ctx = await resolveToolContext(projectRoot);
208
+ // v2.0 DB-backed fast path: try reading by memory ID from gnosys.db first
209
+ if (ctx.gnosysDb?.isAvailable() && ctx.gnosysDb?.isMigrated()) {
210
+ const dbMem = ctx.gnosysDb.getMemory(memPath);
211
+ if (dbMem) {
212
+ const tags = dbMem.tags || "[]";
213
+ const header = [
214
+ `---`,
215
+ `id: ${dbMem.id}`,
216
+ `title: '${dbMem.title}'`,
217
+ `category: ${dbMem.category}`,
218
+ `tags: ${tags}`,
219
+ `relevance: ${dbMem.relevance}`,
220
+ `author: ${dbMem.author}`,
221
+ `authority: ${dbMem.authority}`,
222
+ `confidence: ${dbMem.confidence}`,
223
+ `status: ${dbMem.status}`,
224
+ `tier: ${dbMem.tier}`,
225
+ `created: '${dbMem.created}'`,
226
+ `modified: '${dbMem.modified}'`,
227
+ `---`,
228
+ ].join("\n");
229
+ return {
230
+ content: [{ type: "text", text: `[Source: gnosys.db]\n\n${header}\n\n${dbMem.content}` }],
231
+ };
232
+ }
233
+ // Not found in db — fall through to legacy path
234
+ }
235
+ // v1.x legacy path
236
+ const memory = await ctx.resolver.readMemory(memPath);
237
+ if (!memory) {
238
+ return {
239
+ content: [{ type: "text", text: `Memory not found: ${memPath}` }],
240
+ isError: true,
241
+ };
242
+ }
243
+ const raw = await fs.readFile(memory.filePath, "utf-8");
244
+ return {
245
+ content: [
246
+ {
247
+ type: "text",
248
+ text: `[Source: ${memory.sourceLabel}]\n\n${raw}`,
249
+ },
250
+ ],
251
+ };
252
+ });
253
+ // ─── Tool: gnosys_search ─────────────────────────────────────────────────
254
+ server.tool("gnosys_search", "Search memories by keyword across all stores. Returns matching file paths with relevance snippets.", {
255
+ query: z.string().describe("Search query (keywords)"),
256
+ limit: z.number().optional().describe("Max results (default 20)"),
257
+ projectRoot: projectRootParam,
258
+ }, async ({ query, limit, projectRoot }) => {
259
+ const ctx = await resolveToolContext(projectRoot);
260
+ // v2.0 DB-backed fast path
261
+ if (ctx.gnosysDb?.isAvailable() && ctx.gnosysDb?.isMigrated()) {
262
+ const results = ctx.gnosysDb.searchFts(query, limit || 20);
263
+ if (results.length === 0) {
264
+ return {
265
+ content: [{ type: "text", text: `No results for "${query}". Try different keywords.` }],
266
+ };
267
+ }
268
+ const formatted = results
269
+ .map((r) => `**${r.title}** (${r.id})\n${r.snippet.replace(/>>>/g, "**").replace(/<<</g, "**")}`)
270
+ .join("\n\n");
271
+ return {
272
+ content: [{ type: "text", text: `Found ${results.length} results for "${query}":\n\n${formatted}` }],
273
+ };
274
+ }
275
+ // v1.x legacy path
276
+ if (!ctx.search) {
277
+ return {
278
+ content: [{ type: "text", text: "Search index not initialized." }],
279
+ isError: true,
280
+ };
281
+ }
282
+ const results = ctx.search.search(query, limit || 20);
283
+ if (results.length === 0) {
284
+ return {
285
+ content: [
286
+ {
287
+ type: "text",
288
+ text: `No results for "${query}". Try different keywords or use gnosys_discover.`,
289
+ },
290
+ ],
291
+ };
292
+ }
293
+ const formatted = results
294
+ .map((r) => `**${r.title}** (${r.relative_path})\n${r.snippet.replace(/>>>/g, "**").replace(/<<</g, "**")}`)
295
+ .join("\n\n");
296
+ return {
297
+ content: [
298
+ {
299
+ type: "text",
300
+ text: `Found ${results.length} results for "${query}":\n\n${formatted}`,
301
+ },
302
+ ],
303
+ };
304
+ });
305
+ // ─── Tool: gnosys_list ───────────────────────────────────────────────────
306
+ server.tool("gnosys_list", "List memories across all stores, optionally filtered by category, tag, or store layer.", {
307
+ category: z.string().optional().describe("Filter by category"),
308
+ tag: z.string().optional().describe("Filter by tag"),
309
+ store: z.string().optional().describe("Filter by store layer (project/personal/global/optional)"),
310
+ status: z.string().optional().describe("Filter by status (active/archived/superseded)"),
311
+ projectRoot: projectRootParam,
312
+ }, async ({ category, tag, store: storeFilter, status, projectRoot }) => {
313
+ const ctx = await resolveToolContext(projectRoot);
314
+ let memories = await ctx.resolver.getAllMemories();
315
+ if (storeFilter) {
316
+ memories = memories.filter((m) => m.sourceLayer === storeFilter || m.sourceLabel === storeFilter);
317
+ }
318
+ if (category) {
319
+ memories = memories.filter((m) => m.frontmatter.category === category);
320
+ }
321
+ if (tag) {
322
+ memories = memories.filter((m) => {
323
+ const tags = Array.isArray(m.frontmatter.tags)
324
+ ? m.frontmatter.tags
325
+ : Object.values(m.frontmatter.tags).flat();
326
+ return tags.includes(tag);
327
+ });
328
+ }
329
+ if (status) {
330
+ memories = memories.filter((m) => m.frontmatter.status === status);
331
+ }
332
+ const lines = memories.map((m) => `- [${m.sourceLabel}] **${m.frontmatter.title}** (${m.relativePath}) [${m.frontmatter.status}]`);
333
+ return {
334
+ content: [
335
+ {
336
+ type: "text",
337
+ text: lines.length > 0
338
+ ? `${lines.length} memories:\n\n${lines.join("\n")}`
339
+ : "No memories match the filter.",
340
+ },
341
+ ],
342
+ };
343
+ });
344
+ // ─── Tool: gnosys_add ────────────────────────────────────────────────────
345
+ server.tool("gnosys_add", "Add a new memory. Accepts raw text — an LLM structures it into an atomic memory. Writes to the project store by default. Use store='personal' for cross-project knowledge, or store='global' to explicitly write to shared org knowledge.", {
346
+ input: z
347
+ .string()
348
+ .describe("Raw text input. Can be a decision, concept, fact, observation, or any knowledge."),
349
+ store: z
350
+ .enum(["project", "personal", "global"])
351
+ .optional()
352
+ .describe("Which store to write to (default: project). Global requires explicit intent."),
353
+ author: z
354
+ .enum(["human", "ai", "human+ai"])
355
+ .optional()
356
+ .describe("Who is adding this memory"),
357
+ authority: z
358
+ .enum(["declared", "observed", "imported", "inferred"])
359
+ .optional()
360
+ .describe("Epistemic trust level"),
361
+ projectRoot: projectRootParam,
362
+ }, async ({ input, store: targetStore, author, authority, projectRoot }) => {
363
+ const ctx = await resolveToolContext(projectRoot);
364
+ const writeTarget = ctx.resolver.getWriteTarget(targetStore || undefined);
365
+ if (!writeTarget) {
366
+ return {
367
+ content: [
368
+ {
369
+ type: "text",
370
+ text: "No writable store found. Create a .gnosys/ directory in your project root or set GNOSYS_PERSONAL.",
371
+ },
372
+ ],
373
+ isError: true,
374
+ };
375
+ }
376
+ // Note: ingestion remains module-level since it's heavy and project-agnostic
377
+ if (!ingestion) {
378
+ return {
379
+ content: [
380
+ { type: "text", text: "Ingestion module not initialized." },
381
+ ],
382
+ isError: true,
383
+ };
384
+ }
385
+ try {
386
+ const result = await ingestion.ingest(input);
387
+ const id = await writeTarget.store.generateId(result.category);
388
+ const today = new Date().toISOString().split("T")[0];
389
+ const frontmatter = {
390
+ id,
391
+ title: result.title,
392
+ category: result.category,
393
+ tags: result.tags,
394
+ relevance: result.relevance,
395
+ author: author || "ai",
396
+ authority: authority || "observed",
397
+ confidence: result.confidence,
398
+ created: today,
399
+ modified: today,
400
+ last_reviewed: today,
401
+ status: "active",
402
+ supersedes: null,
403
+ };
404
+ const filename = `${result.filename}.md`;
405
+ const content = `# ${result.title}\n\n${result.content}`;
406
+ const relativePath = await writeTarget.store.writeMemory(result.category, filename, frontmatter, content);
407
+ // v2.0: Dual-write to gnosys.db
408
+ if (ctx.gnosysDb?.isAvailable()) {
409
+ syncMemoryToDb(ctx.gnosysDb, frontmatter, content, relativePath);
410
+ auditToDb(ctx.gnosysDb, "write", id, { tool: "gnosys_add", category: result.category });
411
+ }
412
+ // Rebuild search index across all stores
413
+ if (ctx.search) {
414
+ await reindexAllStores();
415
+ }
416
+ let response = `Memory added to [${writeTarget.label}]: **${result.title}**\nPath: ${writeTarget.label}:${relativePath}\nCategory: ${result.category}\nConfidence: ${result.confidence}`;
417
+ if (result.proposedNewTags && result.proposedNewTags.length > 0) {
418
+ const proposed = result.proposedNewTags
419
+ .map((t) => `${t.category}:${t.tag}`)
420
+ .join(", ");
421
+ response += `\n\nProposed new tags (not yet in registry): ${proposed}\nUse gnosys_tags_add to approve them.`;
422
+ }
423
+ // Contradiction / overlap detection: search for closely related memories
424
+ if (ctx.search && result.relevance) {
425
+ const related = ctx.search.discover(result.relevance.split(" ").slice(0, 5).join(" "), 5);
426
+ // Filter out the memory we just added
427
+ const overlaps = related.filter((r) => !r.relative_path.endsWith(filename));
428
+ if (overlaps.length > 0) {
429
+ response += `\n\n⚠️ Potential overlaps detected — review these for contradictions:`;
430
+ for (const o of overlaps.slice(0, 3)) {
431
+ response += `\n - ${o.title} (${o.relative_path})`;
432
+ }
433
+ response += `\nUse gnosys_read to compare, then gnosys_update with supersedes/superseded_by if needed.`;
434
+ }
435
+ }
436
+ return { content: [{ type: "text", text: response }] };
437
+ }
438
+ catch (err) {
439
+ return {
440
+ content: [
441
+ {
442
+ type: "text",
443
+ text: `Error adding memory: ${err instanceof Error ? err.message : String(err)}`,
444
+ },
445
+ ],
446
+ isError: true,
447
+ };
448
+ }
449
+ });
450
+ // ─── Tool: gnosys_add_structured ─────────────────────────────────────────
451
+ server.tool("gnosys_add_structured", "Add a memory with structured input (no LLM needed). Writes to the project store by default. Use store='global' to explicitly write to shared org knowledge.", {
452
+ title: z.string().describe("Memory title"),
453
+ category: z.string().describe("Category directory name"),
454
+ tags: z
455
+ .record(z.string(), z.array(z.string()))
456
+ .describe("Tags object, e.g. { domain: ['auth'], type: ['decision'] }"),
457
+ relevance: z
458
+ .string()
459
+ .optional()
460
+ .describe("Keyword cloud for discovery search. Space-separated terms describing contexts where this memory is useful."),
461
+ content: z.string().describe("Memory content as markdown"),
462
+ store: z.enum(["project", "personal", "global"]).optional().describe("Target store (default: project). Global requires explicit intent."),
463
+ author: z.enum(["human", "ai", "human+ai"]).optional(),
464
+ authority: z
465
+ .enum(["declared", "observed", "imported", "inferred"])
466
+ .optional(),
467
+ confidence: z.number().min(0).max(1).optional(),
468
+ projectRoot: projectRootParam,
469
+ }, async ({ title, category, tags, relevance, content, store: targetStore, author, authority, confidence, projectRoot }) => {
470
+ const ctx = await resolveToolContext(projectRoot);
471
+ const writeTarget = ctx.resolver.getWriteTarget(targetStore || undefined);
472
+ if (!writeTarget) {
473
+ return {
474
+ content: [{ type: "text", text: "No writable store found." }],
475
+ isError: true,
476
+ };
477
+ }
478
+ const id = await writeTarget.store.generateId(category);
479
+ const slug = title
480
+ .toLowerCase()
481
+ .replace(/[^a-z0-9]+/g, "-")
482
+ .replace(/^-|-$/g, "")
483
+ .substring(0, 60);
484
+ const today = new Date().toISOString().split("T")[0];
485
+ const frontmatter = {
486
+ id,
487
+ title,
488
+ category,
489
+ tags: tags,
490
+ relevance: relevance || "",
491
+ author: author || "ai",
492
+ authority: authority || "observed",
493
+ confidence: confidence || 0.8,
494
+ created: today,
495
+ modified: today,
496
+ last_reviewed: today,
497
+ status: "active",
498
+ supersedes: null,
499
+ };
500
+ const fullContent = `# ${title}\n\n${content}`;
501
+ const relativePath = await writeTarget.store.writeMemory(category, `${slug}.md`, frontmatter, fullContent);
502
+ // v2.0: Dual-write to gnosys.db
503
+ if (ctx.gnosysDb?.isAvailable()) {
504
+ syncMemoryToDb(ctx.gnosysDb, frontmatter, fullContent, relativePath);
505
+ auditToDb(ctx.gnosysDb, "write", id, { tool: "gnosys_add_structured", category });
506
+ }
507
+ if (ctx.search)
508
+ await reindexAllStores();
509
+ return {
510
+ content: [
511
+ {
512
+ type: "text",
513
+ text: `Memory added to [${writeTarget.label}]: **${title}**\nPath: ${writeTarget.label}:${relativePath}`,
514
+ },
515
+ ],
516
+ };
517
+ });
518
+ // ─── Tool: gnosys_tags ───────────────────────────────────────────────────
519
+ server.tool("gnosys_tags", "List all tags in the registry, grouped by category.", { projectRoot: projectRootParam }, async ({ projectRoot }) => {
520
+ // Tag registry is module-level and shared across projects, projectRoot is for API consistency
521
+ if (!tagRegistry) {
522
+ return { content: [{ type: "text", text: "Tag registry not loaded." }], isError: true };
523
+ }
524
+ const registry = tagRegistry.getRegistry();
525
+ const lines = ["# Gnosys Tag Registry\n"];
526
+ for (const [category, tags] of Object.entries(registry)) {
527
+ lines.push(`## ${category}`);
528
+ lines.push(tags.sort().join(", "));
529
+ lines.push("");
530
+ }
531
+ return { content: [{ type: "text", text: lines.join("\n") }] };
532
+ });
533
// ─── Tool: gnosys_tags_add ───────────────────────────────────────────────
server.tool("gnosys_tags_add", "Add a new tag to the registry.", {
    category: z.string().describe("Tag category (domain, type, concern, status_tag)"),
    tag: z.string().describe("The new tag to add"),
    projectRoot: projectRootParam,
}, async ({ category, tag, projectRoot }) => {
    // Tag registry is module-level and shared across projects, projectRoot is for API consistency
    if (!tagRegistry) {
        return { content: [{ type: "text", text: "Tag registry not loaded." }], isError: true };
    }
    // addTag reports whether the tag was new; the reply text differs accordingly.
    const added = await tagRegistry.addTag(category, tag);
    const text = added
        ? `Tag '${tag}' added to category '${category}'.`
        : `Tag '${tag}' already exists in '${category}'.`;
    return { content: [{ type: "text", text }] };
});
553
// ─── Tool: gnosys_reinforce ──────────────────────────────────────────────
server.tool("gnosys_reinforce", "Signal whether a memory was useful. 'useful' reinforces it (resets decay). 'not_relevant' means routing was wrong, not the memory (memory unchanged). 'outdated' flags for review.", {
    memory_id: z.string().describe("The memory ID (from frontmatter)"),
    signal: z
        .enum(["useful", "not_relevant", "outdated"])
        .describe("The reinforcement signal"),
    context: z.string().optional().describe("Why this signal was given"),
    projectRoot: projectRootParam,
}, async ({ memory_id, signal, context, projectRoot }) => {
    const ctx = await resolveToolContext(projectRoot);
    // Every signal is appended to the first writable store's reinforcement log,
    // regardless of which store actually holds the memory.
    // Log to the first writable store's .config directory
    const writeTarget = ctx.resolver.getWriteTarget();
    if (writeTarget) {
        const logPath = path.join(writeTarget.store.getStorePath(), ".config", "reinforcement.log");
        const entry = JSON.stringify({
            memory_id,
            signal,
            context,
            timestamp: new Date().toISOString(),
        });
        await fs.appendFile(logPath, entry + "\n", "utf-8");
    }
    // Only 'useful' mutates the memory: bump reinforcement_count and refresh the
    // decay clock. 'not_relevant' and 'outdated' are log-only signals.
    // If 'useful', find the memory across all stores and update if writable
    if (signal === "useful") {
        const allMemories = await ctx.resolver.getAllMemories();
        const memory = allMemories.find((m) => m.frontmatter.id === memory_id);
        if (memory) {
            const sourceStore = ctx.resolver
                .getStores()
                .find((s) => s.label === memory.sourceLabel);
            if (sourceStore?.writable) {
                const count = (memory.frontmatter.reinforcement_count || 0) + 1;
                // Fix: compute the date once so `modified` and `last_reinforced`
                // can never disagree — the original called new Date() twice,
                // which could straddle a midnight boundary.
                const today = new Date().toISOString().split("T")[0];
                await sourceStore.store.updateMemory(memory.relativePath, {
                    modified: today,
                    reinforcement_count: count,
                    last_reinforced: today,
                });
                // v2.0: Sync reinforcement to gnosys.db
                if (ctx.gnosysDb?.isAvailable()) {
                    syncReinforcementToDb(ctx.gnosysDb, memory_id, count);
                    auditToDb(ctx.gnosysDb, "reinforce", memory_id, { signal, context });
                }
            }
        }
    }
    const messages = {
        useful: `Memory ${memory_id} reinforced. Decay clock reset.`,
        not_relevant: `Routing feedback logged for ${memory_id}. Memory unchanged — consider reviewing its relevance keywords or tags.`,
        outdated: `Memory ${memory_id} flagged for review as outdated.`,
    };
    return { content: [{ type: "text", text: messages[signal] }] };
});
605
// ─── Tool: gnosys_init ───────────────────────────────────────────────────
// Initializes (or re-syncs) a .gnosys store in a target project directory.
// Sequence: filesystem scaffolding → project identity → resolver registration
// → one-time bootstrap of the module-level search/tag/ingestion singletons.
server.tool("gnosys_init", "Initialize Gnosys in a project directory. Creates .gnosys/ with project identity (gnosys.json), registers the project in the central DB (~/.gnosys/gnosys.db), and sets up tag registry + git. You MUST run this before any other Gnosys tool in a new project. Pass the full absolute path to the project root.", {
    directory: z
        .string()
        .describe("Absolute path to the project directory to initialize. Required."),
    projectName: z.string().optional().describe("Human-readable project name. Defaults to directory basename."),
    projectRoot: projectRootParam,
}, async ({ directory, projectName, projectRoot }) => {
    // Note: For gnosys_init, directory is the target, projectRoot is ignored since we're creating new
    const targetDir = path.resolve(directory);
    const storePath = path.join(targetDir, ".gnosys");
    // Check if already exists — if so, re-sync identity instead of failing.
    // fs.stat throws when the path is absent, which is the fresh-init case.
    let isResync = false;
    try {
        await fs.stat(storePath);
        isResync = true;
    }
    catch {
        // Good — doesn't exist yet
    }
    if (!isResync) {
        // Create directory structure
        await fs.mkdir(storePath, { recursive: true });
        await fs.mkdir(path.join(storePath, ".config"), { recursive: true });
        // Seed default tag registry (same four categories gnosys_tags groups by)
        const defaultRegistry = {
            domain: [
                "architecture", "api", "auth", "database", "devops",
                "frontend", "backend", "testing", "security", "performance",
            ],
            type: [
                "decision", "concept", "convention", "requirement",
                "observation", "fact", "question",
            ],
            concern: ["dx", "scalability", "maintainability", "reliability"],
            status_tag: ["draft", "stable", "deprecated", "experimental"],
        };
        await fs.writeFile(path.join(storePath, ".config", "tags.json"), JSON.stringify(defaultRegistry, null, 2), "utf-8");
        // Seed changelog
        const changelog = `# Gnosys Changelog\n\n## ${new Date().toISOString().split("T")[0]}\n\n- Store initialized\n`;
        await fs.writeFile(path.join(storePath, "CHANGELOG.md"), changelog, "utf-8");
        // Init git — best-effort: any execSync failure (e.g. git not on PATH)
        // is swallowed so initialization still succeeds without history.
        try {
            const { execSync } = await import("child_process");
            execSync("git init", { cwd: storePath, stdio: "pipe" });
            execSync("git add -A", { cwd: storePath, stdio: "pipe" });
            execSync('git commit -m "Initialize Gnosys store"', {
                cwd: storePath,
                stdio: "pipe",
            });
        }
        catch {
            // Git not available — that's fine
        }
    }
    // v3.0: Create/update project identity and register in central DB
    // (runs on both fresh init and re-sync).
    const identity = await createProjectIdentity(targetDir, {
        projectName,
        centralDb: centralDb || undefined,
    });
    // Register this project so the resolver finds it on future restarts
    await resolver.registerProject(targetDir);
    // Directly add the new store to the resolver (no re-resolve from cwd needed)
    await resolver.addProjectStore(storePath);
    // Initialize search, tags, and ingestion if this is the first store.
    // These are module-level singletons — bootstrapped at most once per process.
    const writeTarget = resolver.getWriteTarget();
    if (writeTarget && !search) {
        search = new GnosysSearch(writeTarget.store.getStorePath());
        tagRegistry = new GnosysTagRegistry(writeTarget.store.getStorePath());
        await tagRegistry.load();
        ingestion = new GnosysIngestion(writeTarget.store, tagRegistry);
        await reindexAllStores();
    }
    const action = isResync ? "re-synced" : "initialized";
    return {
        content: [
            {
                type: "text",
                text: `Gnosys store ${action} at ${storePath}\n\nProject Identity:\n- ID: ${identity.projectId}\n- Name: ${identity.projectName}\n- Directory: ${identity.workingDirectory}\n- Agent rules target: ${identity.agentRulesTarget || "none detected"}\n- Central DB: ${centralDb?.isAvailable() ? "registered ✓" : "not available"}\n\n${isResync ? "Identity re-synced." : "Created:\n- gnosys.json (project identity)\n- .config/ (internal config)\n- tags.json (tag registry)\n- CHANGELOG.md\n- git repo"}\n\nThe store is ready. Use gnosys_discover to find existing memories or gnosys_add to create new ones.`,
            },
        ],
    };
});
688
// ─── Tool: gnosys_update ─────────────────────────────────────────────────
// Partial update of one memory: patches only the supplied frontmatter fields,
// optionally replaces the body, cross-links supersession, mirrors the change
// into gnosys.db, then reindexes search.
server.tool("gnosys_update", "Update an existing memory's frontmatter and/or content. Specify the memory path and the fields to change.", {
    path: z
        .string()
        .describe("Path to memory, optionally prefixed with store layer (e.g., 'project:decisions/auth.md')"),
    title: z.string().optional().describe("New title"),
    tags: z
        .record(z.string(), z.array(z.string()))
        .optional()
        .describe("New tags object"),
    status: z
        .enum(["active", "archived", "superseded"])
        .optional()
        .describe("New status"),
    confidence: z.number().min(0).max(1).optional().describe("New confidence"),
    supersedes: z
        .string()
        .optional()
        .describe("ID of memory this supersedes"),
    relevance: z
        .string()
        .optional()
        .describe("Updated relevance keyword cloud for discovery"),
    superseded_by: z
        .string()
        .optional()
        .describe("ID of memory that supersedes this one"),
    content: z
        .string()
        .optional()
        .describe("New markdown content (replaces existing body)"),
    projectRoot: projectRootParam,
}, async ({ path: memPath, title, tags, status, confidence, relevance, supersedes, superseded_by, content: newContent, projectRoot, }) => {
    const ctx = await resolveToolContext(projectRoot);
    const memory = await ctx.resolver.readMemory(memPath);
    if (!memory) {
        return {
            content: [{ type: "text", text: `Memory not found: ${memPath}` }],
            isError: true,
        };
    }
    // Find the source store and check if writable
    const sourceStore = ctx.resolver
        .getStores()
        .find((s) => s.label === memory.sourceLabel);
    if (!sourceStore?.writable) {
        return {
            content: [
                {
                    type: "text",
                    text: `Cannot update: store [${memory.sourceLabel}] is read-only.`,
                },
            ],
            isError: true,
        };
    }
    // Build updates object — only include defined fields
    // (undefined params were simply omitted by the caller and must not clobber).
    const updates = {};
    if (title !== undefined)
        updates.title = title;
    if (tags !== undefined)
        updates.tags = tags;
    if (status !== undefined)
        updates.status = status;
    if (confidence !== undefined)
        updates.confidence = confidence;
    if (relevance !== undefined)
        updates.relevance = relevance;
    if (supersedes !== undefined)
        updates.supersedes = supersedes;
    if (superseded_by !== undefined)
        updates.superseded_by = superseded_by;
    // NOTE(review): the markdown H1 is only rebuilt when newContent is supplied;
    // a title-only update leaves the old heading in the body unless updateMemory
    // reconciles it — confirm against the store implementation.
    const fullContent = newContent ? `# ${title || memory.frontmatter.title}\n\n${newContent}` : undefined;
    const updated = await sourceStore.store.updateMemory(memory.relativePath, updates, fullContent);
    if (!updated) {
        return {
            content: [{ type: "text", text: `Failed to update: ${memPath}` }],
            isError: true,
        };
    }
    // Supersession cross-linking: if A supersedes B, mark B as superseded_by A
    // (only when B lives in a writable store).
    if (supersedes && updated.frontmatter.id) {
        const allMemories = await ctx.resolver.getAllMemories();
        const supersededMemory = allMemories.find((m) => m.frontmatter.id === supersedes);
        if (supersededMemory) {
            const supersededStore = ctx.resolver
                .getStores()
                .find((s) => s.label === supersededMemory.sourceLabel);
            if (supersededStore?.writable) {
                await supersededStore.store.updateMemory(supersededMemory.relativePath, {
                    superseded_by: updated.frontmatter.id,
                    status: "superseded",
                });
            }
        }
    }
    // v2.0: Dual-write update to gnosys.db
    if (ctx.gnosysDb?.isAvailable() && updated.frontmatter.id) {
        syncUpdateToDb(ctx.gnosysDb, updated.frontmatter.id, updates, fullContent);
        auditToDb(ctx.gnosysDb, "write", updated.frontmatter.id, { tool: "gnosys_update", changed: Object.keys(updates) });
        // Cross-link supersession in db too
        if (supersedes) {
            syncUpdateToDb(ctx.gnosysDb, supersedes, { superseded_by: updated.frontmatter.id, status: "superseded" });
        }
    }
    // Rebuild search index
    if (ctx.search)
        await reindexAllStores();
    // Report which fields changed: frontmatter keys plus body content if replaced.
    const changedFields = Object.keys(updates);
    if (newContent)
        changedFields.push("content");
    return {
        content: [
            {
                type: "text",
                text: `Memory updated: **${updated.frontmatter.title}**\nPath: ${memory.sourceLabel}:${memory.relativePath}\nChanged: ${changedFields.join(", ")}`,
            },
        ],
    };
});
808
// ─── Tool: gnosys_stale ─────────────────────────────────────────────────
server.tool("gnosys_stale", "Find memories that haven't been modified or reviewed within a given number of days. Useful for identifying knowledge that may be outdated.", {
    days: z
        .number()
        .optional()
        .describe("Number of days since last modification to consider stale (default: 90)"),
    limit: z.number().optional().describe("Max results (default 20)"),
    projectRoot: projectRootParam,
}, async ({ days, limit, projectRoot }) => {
    const ctx = await resolveToolContext(projectRoot);
    // Fix: use ?? instead of || — an explicit 0 is a meaningful value
    // ("stale as of today" / "no results"), not a request for the default.
    const threshold = days ?? 90;
    const maxResults = limit ?? 20;
    const cutoff = new Date();
    cutoff.setDate(cutoff.getDate() - threshold);
    const cutoffStr = cutoff.toISOString().split("T")[0];
    // A memory's freshness date is its last review if present, else its last
    // modification. ISO yyyy-mm-dd strings compare correctly lexicographically.
    const lastTouch = (m) => m.frontmatter.last_reviewed || m.frontmatter.modified;
    const allMemories = await ctx.resolver.getAllMemories();
    const stale = allMemories
        .filter((m) => {
            const touched = lastTouch(m);
            return touched && touched < cutoffStr;
        })
        .sort((a, b) => (lastTouch(a) || "").localeCompare(lastTouch(b) || ""))
        .slice(0, maxResults);
    if (stale.length === 0) {
        return {
            content: [
                {
                    type: "text",
                    text: `No memories older than ${threshold} days found. Everything is fresh.`,
                },
            ],
        };
    }
    const lines = stale.map((m) => `- **${m.frontmatter.title}** (${m.sourceLabel}:${m.relativePath})\n Last modified: ${m.frontmatter.modified}${m.frontmatter.last_reviewed ? `, Last reviewed: ${m.frontmatter.last_reviewed}` : ""}`);
    return {
        content: [
            {
                type: "text",
                text: `Found ${stale.length} memories not touched in ${threshold}+ days:\n\n${lines.join("\n\n")}\n\nUse gnosys_read to review, then gnosys_update or gnosys_reinforce as needed.`,
            },
        ],
    };
});
855
// ─── Tool: gnosys_commit_context ────────────────────────────────────────
// Pre-compaction sweep: LLM extracts candidate knowledge items from the given
// context, each candidate is deduplicated against the search index, and novel
// ones are written to the store (unless dry_run).
server.tool("gnosys_commit_context", "Pre-compaction memory sweep. Call this before context is lost (e.g., before a long conversation compacts). Extracts important decisions, facts, and insights from the conversation and commits novel ones to memory. Checks existing memories to avoid duplicates — only adds what's genuinely new or augments what's changed.", {
    context: z
        .string()
        .describe("Summary of the conversation or context to extract memories from. Include key decisions, facts, insights, and observations."),
    dry_run: z
        .boolean()
        .optional()
        .describe("If true, show what would be committed without actually writing. Default: false."),
    projectRoot: projectRootParam,
}, async ({ context, dry_run, projectRoot }) => {
    const ctx = await resolveToolContext(projectRoot);
    // Note: ingestion is module-level since it's heavy
    if (!ingestion || !ingestion.isLLMAvailable) {
        return {
            content: [
                {
                    type: "text",
                    text: "Commit context requires an LLM. Configure a provider in gnosys.json or set ANTHROPIC_API_KEY.",
                },
            ],
            isError: true,
        };
    }
    const writeTarget = ctx.resolver.getWriteTarget();
    if (!writeTarget) {
        return {
            content: [{ type: "text", text: "No writable store found." }],
            isError: true,
        };
    }
    // Step 1: Use LLM to extract candidate memories from the context
    let extractProvider;
    try {
        extractProvider = getLLMProvider(ctx.config, "structuring");
    }
    catch (err) {
        return {
            content: [{ type: "text", text: `LLM not available: ${err instanceof Error ? err.message : String(err)}` }],
            isError: true,
        };
    }
    const extractText = await extractProvider.generate(`Extract atomic knowledge items from this context:\n\n${context}`, {
        system: `You extract atomic knowledge items from conversations. Each item should be ONE decision, fact, insight, or observation — not compound.

Output a JSON array of objects, each with:
- summary: One-sentence description of the knowledge
- type: "decision" | "insight" | "fact" | "observation" | "requirement"
- search_terms: 3-5 keywords someone would search for to find if this already exists

Be selective. Only extract things worth remembering long-term. Skip small talk, debugging steps, and transient details. Focus on decisions made, architecture choices, requirements established, and insights gained.

Output ONLY the JSON array, no markdown fences.`,
        maxTokens: 4000,
    });
    let candidates;
    try {
        // Accept fenced or bare JSON output from the model.
        const jsonMatch = extractText.match(/```json\s*([\s\S]*?)```/) ||
            extractText.match(/```\s*([\s\S]*?)```/) || [null, extractText];
        candidates = JSON.parse(jsonMatch[1] || extractText);
    }
    catch {
        return {
            content: [
                {
                    type: "text",
                    text: `Failed to extract candidates from context. LLM output was not valid JSON.`,
                },
            ],
            isError: true,
        };
    }
    // Robustness fix: the parsed JSON is untrusted model output. The original
    // dereferenced candidate.search_terms.join(...) unconditionally and threw a
    // TypeError whenever the model omitted the field. Drop entries without a
    // usable summary up front; fall back to the summary for search terms below.
    if (Array.isArray(candidates)) {
        candidates = candidates.filter((c) => c && typeof c === "object" && typeof c.summary === "string");
    }
    if (!Array.isArray(candidates) || candidates.length === 0) {
        return {
            content: [
                {
                    type: "text",
                    text: "No extractable knowledge found in the provided context.",
                },
            ],
        };
    }
    // Step 2: For each candidate, check if it's novel by searching existing memories
    const results = [];
    let added = 0;
    let skipped = 0;
    for (const candidate of candidates) {
        const searchTerms = Array.isArray(candidate.search_terms)
            ? candidate.search_terms.join(" ")
            : candidate.summary; // fallback when the model omitted search_terms
        // Check existing memories via discover
        const existing = ctx.search
            ? ctx.search.discover(searchTerms, 3)
            : [];
        if (existing.length > 0) {
            // Any hit counts as overlap — report the top match and skip.
            const topMatch = existing[0];
            results.push(`⏭ SKIP: "${candidate.summary}"\n Overlaps with: ${topMatch.title} (${topMatch.relative_path})`);
            skipped++;
        }
        else if (dry_run) {
            results.push(`➕ WOULD ADD: "${candidate.summary}" [${candidate.type}]`);
            added++;
        }
        else {
            // Actually add via ingestion
            try {
                const result = await ingestion.ingest(candidate.summary);
                const id = await writeTarget.store.generateId(result.category);
                const today = new Date().toISOString().split("T")[0];
                const frontmatter = {
                    id,
                    title: result.title,
                    category: result.category,
                    tags: result.tags,
                    relevance: result.relevance,
                    author: "ai",
                    authority: "observed",
                    confidence: result.confidence,
                    created: today,
                    modified: today,
                    last_reviewed: today,
                    status: "active",
                    supersedes: null,
                };
                const filename = `${result.filename}.md`;
                const content = `# ${result.title}\n\n${result.content}`;
                const relPath = await writeTarget.store.writeMemory(result.category, filename, frontmatter, content);
                results.push(`➕ ADDED: "${result.title}"\n Path: ${writeTarget.label}:${relPath}`);
                added++;
            }
            catch (err) {
                // One failed candidate must not abort the sweep; record and continue.
                results.push(`❌ FAILED: "${candidate.summary}": ${err instanceof Error ? err.message : String(err)}`);
            }
        }
    }
    // Rebuild search index after all writes
    if (!dry_run && ctx.search && added > 0) {
        await reindexAllStores();
    }
    const header = dry_run
        ? `DRY RUN — ${candidates.length} candidates extracted, ${added} would be added, ${skipped} duplicates skipped:`
        : `Context committed — ${candidates.length} candidates extracted, ${added} added, ${skipped} duplicates skipped:`;
    return {
        content: [
            {
                type: "text",
                text: `${header}\n\n${results.join("\n\n")}`,
            },
        ],
    };
});
1007
// ─── Tool: gnosys_history ────────────────────────────────────────────────
server.tool("gnosys_history", "View version history for a memory. Shows what changed and when. Every memory write/update creates a git commit, so the full evolution is available.", {
    path: z.string().describe("Path to memory, optionally layer-prefixed"),
    limit: z.number().optional().describe("Max history entries (default 20)"),
    projectRoot: projectRootParam,
}, async ({ path: memPath, limit, projectRoot }) => {
    const ctx = await resolveToolContext(projectRoot);
    const memory = await ctx.resolver.readMemory(memPath);
    if (!memory) {
        return { content: [{ type: "text", text: `Memory not found: ${memPath}` }], isError: true };
    }
    const store = ctx.resolver.getStores().find((s) => s.label === memory.sourceLabel);
    if (!store || !hasGitHistory(store.path)) {
        return { content: [{ type: "text", text: "No git history available for this store." }], isError: true };
    }
    const entries = getFileHistory(store.path, memory.relativePath, limit || 20);
    if (entries.length === 0) {
        return { content: [{ type: "text", text: "No history found for this memory." }] };
    }
    // One bullet per commit: abbreviated hash, date, message.
    const bullets = entries.map(({ commitHash, date, message }) => `- \`${commitHash.substring(0, 7)}\` ${date} — ${message}`);
    return {
        content: [{
                type: "text",
                text: `History for **${memory.frontmatter.title}** (${entries.length} entries):\n\n${bullets.join("\n")}\n\nUse gnosys_rollback with a commit hash to revert to a prior version.`,
            }],
    };
});
1034
// ─── Tool: gnosys_rollback ──────────────────────────────────────────────
server.tool("gnosys_rollback", "Rollback a memory to its state at a specific commit. Non-destructive: creates a new commit with the reverted content. Use gnosys_history first to find the target commit hash.", {
    path: z.string().describe("Path to memory, optionally layer-prefixed"),
    commitHash: z.string().describe("Git commit hash to revert to (full or abbreviated)"),
    projectRoot: projectRootParam,
}, async ({ path: memPath, commitHash, projectRoot }) => {
    const ctx = await resolveToolContext(projectRoot);
    const memory = await ctx.resolver.readMemory(memPath);
    if (!memory) {
        return { content: [{ type: "text", text: `Memory not found: ${memPath}` }], isError: true };
    }
    const store = ctx.resolver.getStores().find((s) => s.label === memory.sourceLabel);
    if (!store?.writable) {
        return { content: [{ type: "text", text: "Cannot rollback: store is read-only." }], isError: true };
    }
    if (!rollbackToCommit(store.path, memory.relativePath, commitHash)) {
        return { content: [{ type: "text", text: `Rollback failed. Verify the commit hash with gnosys_history.` }], isError: true };
    }
    // Reindex after rollback
    if (ctx.search)
        await reindexAllStores();
    // Re-read the memory so the reply reports its reverted state.
    const reverted = await ctx.resolver.readMemory(memPath);
    const summary = `Rolled back **${memory.frontmatter.title}** to commit ${commitHash.substring(0, 7)}.\n\nCurrent state: ${reverted?.frontmatter.title} [${reverted?.frontmatter.status}] (confidence: ${reverted?.frontmatter.confidence})`;
    return {
        content: [{
                type: "text",
                text: summary,
            }],
    };
});
1065
// ─── Tool: gnosys_lens ──────────────────────────────────────────────────
server.tool("gnosys_lens", "Filtered view of memories. Combine criteria to focus on specific subsets — e.g., 'active decisions about auth with confidence > 0.8'. Use AND (default) to require all criteria, or OR to match any.", {
    category: z.string().optional().describe("Filter by category"),
    tags: z.array(z.string()).optional().describe("Filter by tags"),
    tagMatchMode: z.enum(["any", "all"]).optional().describe("'any' = has any listed tag (default), 'all' = must have every listed tag"),
    status: z.array(z.enum(["active", "archived", "superseded"])).optional().describe("Filter by status"),
    author: z.array(z.enum(["human", "ai", "human+ai"])).optional().describe("Filter by author"),
    authority: z.array(z.enum(["declared", "observed", "imported", "inferred"])).optional().describe("Filter by authority"),
    minConfidence: z.number().min(0).max(1).optional().describe("Minimum confidence"),
    maxConfidence: z.number().min(0).max(1).optional().describe("Maximum confidence"),
    createdAfter: z.string().optional().describe("Created after ISO date"),
    createdBefore: z.string().optional().describe("Created before ISO date"),
    modifiedAfter: z.string().optional().describe("Modified after ISO date"),
    modifiedBefore: z.string().optional().describe("Modified before ISO date"),
    operator: z.enum(["AND", "OR"]).optional().describe("Compound operator when multiple filter groups are provided (default: AND)"),
    projectRoot: projectRootParam,
}, async ({ category, tags, tagMatchMode, status, author, authority, minConfidence, maxConfidence, createdAfter, createdBefore, modifiedAfter, modifiedBefore, operator, projectRoot }) => {
    const ctx = await resolveToolContext(projectRoot);
    const allMemories = await ctx.resolver.getAllMemories();
    // Assemble the lens filter, including only the criteria the caller supplied.
    const lens = {};
    if (category)
        lens.category = category;
    if (tags) {
        lens.tags = tags;
        lens.tagMatchMode = tagMatchMode || "any";
    }
    if (status)
        lens.status = status;
    if (author)
        lens.author = author;
    if (authority)
        lens.authority = authority;
    if (minConfidence !== undefined)
        lens.minConfidence = minConfidence;
    if (maxConfidence !== undefined)
        lens.maxConfidence = maxConfidence;
    if (createdAfter)
        lens.createdAfter = createdAfter;
    if (createdBefore)
        lens.createdBefore = createdBefore;
    if (modifiedAfter)
        lens.modifiedAfter = modifiedAfter;
    if (modifiedBefore)
        lens.modifiedBefore = modifiedBefore;
    // Bug fix: 'operator' was declared in the schema (and advertised in the tool
    // description) but was never destructured by the handler, so OR requests
    // silently behaved as AND. Forward it to applyLens — presumably applyLens
    // defaults to AND when the key is absent; TODO confirm in the lens module.
    if (operator)
        lens.operator = operator;
    const result = applyLens(allMemories, lens);
    if (result.length === 0) {
        return { content: [{ type: "text", text: "No memories match the lens filter." }] };
    }
    const lines = result.map((m) => `- **${m.frontmatter.title}** [${m.frontmatter.status}] (${m.frontmatter.confidence})\n ${m.sourceLabel ? m.sourceLabel + ":" : ""}${m.relativePath}`);
    return {
        content: [{ type: "text", text: `${result.length} memories match:\n\n${lines.join("\n\n")}` }],
    };
});
1118
// ─── Tool: gnosys_timeline ───────────────────────────────────────────────
server.tool("gnosys_timeline", "View memory creation and modification activity over time. Shows how knowledge evolves by grouping memories into time periods.", {
    period: z.enum(["day", "week", "month", "year"]).optional().describe("Grouping period (default: month)"),
    projectRoot: projectRootParam,
}, async ({ period, projectRoot }) => {
    const ctx = await resolveToolContext(projectRoot);
    // Resolve the grouping granularity once; it is used in two places below.
    const granularity = period || "month";
    const allMemories = await ctx.resolver.getAllMemories();
    const entries = groupByPeriod(allMemories, granularity);
    if (entries.length === 0) {
        return { content: [{ type: "text", text: "No memories found for timeline." }] };
    }
    // Each period shows counts plus up to five example titles.
    const lines = entries.map((e) => {
        const preview = e.titles.slice(0, 5).join(", ");
        const overflow = e.titles.length > 5 ? ` (+${e.titles.length - 5} more)` : "";
        return `**${e.period}** — ${e.created} created, ${e.modified} modified\n ${preview}${overflow}`;
    });
    return {
        content: [{ type: "text", text: `Knowledge Timeline (by ${granularity}):\n\n${lines.join("\n\n")}` }],
    };
});
1134
// ─── Tool: gnosys_stats ─────────────────────────────────────────────────
server.tool("gnosys_stats", "Summary statistics across all memories — totals by category, status, author, authority, average confidence, and date ranges.", { projectRoot: projectRootParam }, async ({ projectRoot }) => {
    const ctx = await resolveToolContext(projectRoot);
    const allMemories = await ctx.resolver.getAllMemories();
    const stats = computeStats(allMemories);
    if (stats.totalCount === 0) {
        return { content: [{ type: "text", text: "No memories found." }] };
    }
    // Render a { key: count } record as one indented line per key.
    const breakdown = (counts) => Object.entries(counts).map(([k, v]) => ` ${k}: ${v}`).join("\n");
    const text = `Gnosys Memory Statistics
Total: ${stats.totalCount} memories

By Category:
${breakdown(stats.byCategory)}

By Status:
${breakdown(stats.byStatus)}

By Author:
${breakdown(stats.byAuthor)}

By Authority:
${breakdown(stats.byAuthority)}

Average Confidence: ${stats.averageConfidence.toFixed(2)}
Oldest: ${stats.oldestCreated || "—"}
Newest: ${stats.newestCreated || "—"}
Last Modified: ${stats.lastModified || "—"}`;
    return { content: [{ type: "text", text }] };
});
1167
// ─── Tool: gnosys_links ─────────────────────────────────────────────────
server.tool("gnosys_links", "Show wikilinks for a specific memory — outgoing [[links]] and backlinks from other memories. Obsidian-compatible [[Title]] and [[path|display]] syntax.", {
    path: z.string().describe("Path to memory, optionally layer-prefixed"),
    projectRoot: projectRootParam,
}, async ({ path: memPath, projectRoot }) => {
    const ctx = await resolveToolContext(projectRoot);
    const memory = await ctx.resolver.readMemory(memPath);
    if (!memory) {
        return { content: [{ type: "text", text: `Memory not found: ${memPath}` }], isError: true };
    }
    const allMemories = await ctx.resolver.getAllMemories();
    const outgoing = getOutgoingLinks(allMemories, memory.relativePath);
    const backlinks = getBacklinks(allMemories, memory.relativePath);
    const parts = [`Links for **${memory.frontmatter.title}**:\n`];
    // Outgoing [[links]] from this memory's body.
    if (outgoing.length === 0) {
        parts.push("No outgoing links.");
    }
    else {
        parts.push(`Outgoing (${outgoing.length}):`);
        outgoing.forEach((link) => {
            const display = link.displayText ? ` (${link.displayText})` : "";
            parts.push(` → [[${link.target}]]${display}`);
        });
    }
    parts.push("");
    // Backlinks: other memories that reference this one.
    if (backlinks.length === 0) {
        parts.push("No backlinks.");
    }
    else {
        parts.push(`Backlinks (${backlinks.length}):`);
        backlinks.forEach((link) => {
            parts.push(` ← ${link.sourceTitle} (${link.sourcePath})`);
        });
    }
    return { content: [{ type: "text", text: parts.join("\n") }] };
});
1203
// ─── Tool: gnosys_graph ─────────────────────────────────────────────────
server.tool("gnosys_graph", "Show the full cross-reference graph across all memories. Reveals clusters, orphaned links, and the most-connected memories.", { projectRoot: projectRootParam }, async ({ projectRoot }) => {
    const ctx = await resolveToolContext(projectRoot);
    const allMemories = await ctx.resolver.getAllMemories();
    if (allMemories.length === 0) {
        return { content: [{ type: "text", text: "No memories found." }] };
    }
    // Build the wikilink graph and return its formatted summary directly.
    const summary = formatGraphSummary(buildLinkGraph(allMemories));
    return { content: [{ type: "text", text: summary }] };
});
1213
// ─── Tool: gnosys_bootstrap ─────────────────────────────────────────────
// Batch-imports markdown documents from a directory into the chosen store,
// then refreshes the search index so new memories are immediately findable.
server.tool("gnosys_bootstrap", "Batch-import existing documents from a directory into the memory store. Scans for markdown files and creates memories. Use dry_run=true to preview.", {
    sourceDir: z.string().describe("Absolute path to directory containing documents to import"),
    patterns: z.array(z.string()).optional().describe("File glob patterns (default: ['**/*.md'])"),
    skipExisting: z.boolean().optional().describe("Skip files whose titles already exist (default: false)"),
    defaultCategory: z.string().optional().describe("Default category for imported files (default: imported)"),
    preserveFrontmatter: z.boolean().optional().describe("Preserve existing YAML frontmatter if present (default: false)"),
    dryRun: z.boolean().optional().describe("Preview what would be imported without writing (default: false)"),
    store: z.enum(["project", "personal", "global"]).optional().describe("Target store"),
    projectRoot: projectRootParam,
}, async ({ sourceDir, patterns, skipExisting, defaultCategory, preserveFrontmatter, dryRun, store: targetStore, projectRoot }) => {
    const ctx = await resolveToolContext(projectRoot);
    const writeTarget = ctx.resolver.getWriteTarget(targetStore || undefined);
    if (!writeTarget) {
        return { content: [{ type: "text", text: "No writable store found." }], isError: true };
    }
    try {
        const result = await bootstrap(writeTarget.store, {
            sourceDir,
            patterns,
            skipExisting,
            defaultCategory,
            preserveFrontmatter,
            dryRun,
        });
        const mode = dryRun ? "DRY RUN" : "COMPLETE";
        // FIX: the conditional fragment now carries its own trailing space so a
        // real run reads "5 imported" instead of the previous "5  imported"
        // (`${dryRun ? "would be" : ""} imported` left a double space).
        const parts = [
            `Bootstrap ${mode}: ${result.totalScanned} scanned, ${result.imported.length} ${dryRun ? "would be " : ""}imported, ${result.skipped.length} skipped, ${result.failed.length} failed`,
        ];
        if (result.imported.length > 0) {
            parts.push(`\n${dryRun ? "Would import" : "Imported"}:`);
            // Cap the listing at 20 entries to keep tool output readable.
            for (const f of result.imported.slice(0, 20)) {
                parts.push(`  + ${f}`);
            }
            if (result.imported.length > 20) {
                parts.push(`  ... and ${result.imported.length - 20} more`);
            }
        }
        if (result.failed.length > 0) {
            parts.push("\nFailed:");
            for (const f of result.failed.slice(0, 10)) {
                parts.push(`  ✗ ${f.path}: ${f.error}`);
            }
        }
        // Reindex after import
        if (!dryRun && result.imported.length > 0 && ctx.search) {
            await reindexAllStores();
        }
        return { content: [{ type: "text", text: parts.join("\n") }] };
    }
    catch (err) {
        return {
            content: [{ type: "text", text: `Bootstrap failed: ${err instanceof Error ? err.message : String(err)}` }],
            isError: true,
        };
    }
});
1270
// ─── Tool: gnosys_import ─────────────────────────────────────────────────
// Bulk structured-data import (CSV/JSON/JSONL) into the target store, with
// optional LLM enrichment, then best-effort reindex and gnosys.db sync.
server.tool("gnosys_import", "Bulk import structured data (CSV, JSON, JSONL) into Gnosys memories. Map source fields to title/category/content/tags/relevance. Use mode='llm' for smart ingestion with keyword clouds, or 'structured' for fast direct mapping. For large datasets (>100 records with LLM), the CLI is recommended: gnosys import <file>", {
    format: z.enum(["csv", "json", "jsonl"]).describe("Data format"),
    data: z.string().describe("File path, URL, or inline data"),
    mapping: z
        .record(z.string(), z.string())
        .describe("Map source fields to Gnosys fields. Keys are source field names, values are: title, category, content, tags, relevance. Example: {\"name\":\"title\", \"group\":\"category\", \"description\":\"content\"}"),
    mode: z
        .enum(["llm", "structured"])
        .optional()
        .describe("Processing mode. 'llm' uses AI for keyword clouds and smart tagging (slower). 'structured' maps directly (fast). Default: structured"),
    dryRun: z.boolean().optional().describe("Preview without writing"),
    skipExisting: z.boolean().optional().describe("Skip records whose titles already exist"),
    limit: z.number().optional().describe("Max records to import"),
    offset: z.number().optional().describe("Skip first N records"),
    concurrency: z.number().optional().describe("Parallel LLM calls (default: 5)"),
    store: z
        .enum(["project", "personal", "global"])
        .optional()
        .describe("Target store (default: project)"),
    projectRoot: projectRootParam,
}, async ({ format, data, mapping, mode, dryRun, skipExisting, limit, offset, concurrency, store: targetStore, projectRoot, }) => {
    const ctx = await resolveToolContext(projectRoot);
    const writeTarget = ctx.resolver.getWriteTarget(targetStore || undefined);
    if (!writeTarget) {
        return {
            content: [{ type: "text", text: "No writable store found." }],
            isError: true,
        };
    }
    if (!ingestion) {
        return {
            content: [{ type: "text", text: "Ingestion module not initialized." }],
            isError: true,
        };
    }
    const effectiveMode = mode || "structured";
    try {
        const result = await performImport(writeTarget.store, ingestion, {
            format: format,
            data,
            mapping: mapping,
            mode: effectiveMode,
            dryRun,
            skipExisting,
            limit,
            offset,
            concurrency,
            batchCommit: true,
        });
        // Reindex after import.
        // NOTE(review): this guards on the module-level `search`/`gnosysDb`
        // rather than `ctx.search`/`ctx.gnosysDb` like sibling tools —
        // presumably fine for the default project; verify for multi-project use.
        if (!dryRun && result.imported.length > 0 && search) {
            await reindexAllStores();
        }
        // v2.0: Sync imported memories to gnosys.db
        if (!dryRun && result.imported.length > 0 && gnosysDb?.isAvailable()) {
            try {
                const { migrate: migrateDb } = await import("./lib/migrate.js");
                await migrateDb(writeTarget.store.getStorePath());
            }
            catch {
                // Migration sync is best-effort
            }
            auditToDb(gnosysDb, "ingest", undefined, { format, count: result.imported.length, mode: effectiveMode });
        }
        let response = formatImportSummary(result);
        // Smart threshold guidance for large LLM runs.
        // FIX: removed an unused `estimateDuration(...)` result that was computed
        // here but never included in the message.
        if (effectiveMode === "llm" &&
            result.totalProcessed > 100) {
            response += `\n\n💡 Tip: For large LLM imports, the CLI offers progress tracking and resume:\n  gnosys import ${data.length < 100 ? data : "<file>"} --format ${format} --mode llm --skip-existing`;
        }
        return { content: [{ type: "text", text: response }] };
    }
    catch (err) {
        return {
            content: [
                {
                    type: "text",
                    text: `Import failed: ${err instanceof Error ? err.message : String(err)}`,
                },
            ],
            isError: true,
        };
    }
});
1356
// ─── Tool: gnosys_hybrid_search ──────────────────────────────────────────
// Keyword + semantic retrieval fused with RRF over the module-level
// hybridSearch index (spans all stores); reinforces any memories returned.
server.tool("gnosys_hybrid_search", "Search memories using hybrid keyword + semantic search with Reciprocal Rank Fusion. Combines FTS5 keyword matching with embedding-based semantic similarity for best results. Run gnosys_reindex first if embeddings don't exist yet.", {
    query: z.string().describe("Natural language search query"),
    limit: z.number().optional().describe("Max results (default 15)"),
    mode: z.enum(["keyword", "semantic", "hybrid"]).optional().describe("Search mode (default: hybrid)"),
    projectRoot: projectRootParam,
}, async ({ query, limit, mode, projectRoot }) => {
    // hybridSearch is module-level (heavy) and not scoped per project, so the
    // projectRoot argument is accepted but deliberately unused here.
    void projectRoot;
    if (!hybridSearch) {
        return {
            content: [{ type: "text", text: "Hybrid search not initialized. No stores found." }],
            isError: true,
        };
    }
    try {
        const hits = await hybridSearch.hybridSearch(query, limit || 15, mode || "hybrid");
        if (hits.length === 0) {
            return {
                content: [{ type: "text", text: `No results for "${query}". Try gnosys_reindex to build embeddings, or different keywords.` }],
            };
        }
        const rendered = [];
        for (const hit of hits) {
            rendered.push(`**${hit.title}** (score: ${hit.score.toFixed(4)}, via: ${hit.sources.join("+")})\n  Path: ${hit.relativePath}\n  ${hit.snippet.substring(0, 150)}...`);
        }
        const formatted = rendered.join("\n\n");
        // Reinforce used memories (best-effort, non-blocking). Uses the default
        // resolver since hybridSearch operates across all stores.
        const writeTarget = resolver.getWriteTarget();
        if (writeTarget) {
            const usedPaths = hits.map((hit) => hit.relativePath);
            GnosysMaintenanceEngine.reinforceBatch(writeTarget.store, usedPaths).catch(() => { }); // Fire-and-forget
        }
        const embCount = hybridSearch.embeddingCount();
        return {
            content: [
                {
                    type: "text",
                    text: `Found ${hits.length} results for "${query}" (${embCount} embeddings indexed):\n\n${formatted}`,
                },
            ],
        };
    }
    catch (err) {
        return {
            content: [{ type: "text", text: `Search failed: ${err instanceof Error ? err.message : String(err)}` }],
            isError: true,
        };
    }
});
1404
// ─── Tool: gnosys_semantic_search ────────────────────────────────────────
// Embedding-only retrieval: delegates to hybridSearch in "semantic" mode.
server.tool("gnosys_semantic_search", "Search memories using semantic similarity only (no keyword matching). Finds conceptually related memories even without exact keyword matches. Requires embeddings — run gnosys_reindex first.", {
    query: z.string().describe("Natural language search query"),
    limit: z.number().optional().describe("Max results (default 15)"),
    projectRoot: projectRootParam,
}, async ({ query, limit, projectRoot }) => {
    // hybridSearch is module-level (heavy) and not scoped per project, so the
    // projectRoot argument is accepted but deliberately unused here.
    void projectRoot;
    if (!hybridSearch) {
        return {
            content: [{ type: "text", text: "Search not initialized. No stores found." }],
            isError: true,
        };
    }
    try {
        const hits = await hybridSearch.hybridSearch(query, limit || 15, "semantic");
        if (hits.length === 0) {
            return {
                content: [{ type: "text", text: `No semantic results for "${query}". Run gnosys_reindex first to build embeddings.` }],
            };
        }
        const lines = hits.map((hit) => `**${hit.title}** (similarity: ${hit.score.toFixed(4)})\n  Path: ${hit.relativePath}\n  ${hit.snippet.substring(0, 150)}...`);
        return {
            content: [{ type: "text", text: `Found ${hits.length} semantic results for "${query}":\n\n${lines.join("\n\n")}` }],
        };
    }
    catch (err) {
        return {
            content: [{ type: "text", text: `Semantic search failed: ${err instanceof Error ? err.message : String(err)}` }],
            isError: true,
        };
    }
});
1439
// ─── Tool: gnosys_reindex ────────────────────────────────────────────────
// Rebuilds the FTS5 keyword index and regenerates every embedding.
server.tool(
    "gnosys_reindex",
    "Rebuild all semantic embeddings from every memory file. Downloads the embedding model (~80 MB) on first run. Required before hybrid/semantic search can be used. Safe to re-run — fully regenerates the index.",
    { projectRoot: projectRootParam },
    async ({ projectRoot }) => {
        // reindex operates on all stores; projectRoot exists for API consistency.
        void projectRoot;
        if (!hybridSearch) {
            return {
                content: [{ type: "text", text: "No stores found. Initialize a store with gnosys_init first." }],
                isError: true,
            };
        }
        try {
            // Rebuild the FTS5 index first, then the embeddings.
            await reindexAllStores();
            const embedded = await hybridSearch.reindex();
            return {
                content: [
                    {
                        type: "text",
                        text: `Reindex complete: ${embedded} memories embedded. Hybrid search is now available.`,
                    },
                ],
            };
        }
        catch (err) {
            return {
                content: [{ type: "text", text: `Reindex failed: ${err instanceof Error ? err.message : String(err)}` }],
                isError: true,
            };
        }
    },
);
1469
// ─── Tool: gnosys_ask ────────────────────────────────────────────────────
// Retrieval-augmented Q&A: hybrid search supplies context, the LLM writes a
// cited answer; cited memories are reinforced best-effort afterwards.
server.tool("gnosys_ask", "Ask a natural-language question and get a synthesized answer with citations from the entire vault. Uses hybrid search to find relevant memories, then LLM to synthesize a cited response. Citations are Obsidian wikilinks [[filename.md]]. Requires an LLM provider (Anthropic or Ollama) and embeddings (run gnosys_reindex first).", {
    question: z.string().describe("Natural language question to answer from the vault"),
    limit: z.number().optional().describe("Max memories to retrieve (default 15)"),
    mode: z.enum(["keyword", "semantic", "hybrid"]).optional().describe("Search mode (default: hybrid)"),
    projectRoot: projectRootParam,
}, async ({ question, limit, mode, projectRoot }) => {
    // askEngine is module-level (heavy) and not scoped per project, so the
    // projectRoot argument is accepted but deliberately unused here.
    void projectRoot;
    if (!askEngine) {
        return {
            content: [{ type: "text", text: "Ask engine not initialized. Ensure stores exist and an LLM provider is configured." }],
            isError: true,
        };
    }
    try {
        const answerResult = await askEngine.ask(question, {
            limit: limit || 15,
            mode: mode || "hybrid",
        });
        // Reinforce the cited memories (best-effort, non-blocking).
        const writeTarget = resolver.getWriteTarget();
        if (writeTarget && answerResult.sources.length > 0) {
            const citedPaths = answerResult.sources.map((s) => s.relativePath);
            GnosysMaintenanceEngine.reinforceBatch(writeTarget.store, citedPaths).catch(() => { }); // Fire-and-forget
        }
        // Render a "Sources" footer of wikilinks, if any were cited.
        let sourcesText = "";
        if (answerResult.sources.length > 0) {
            const sourceLines = answerResult.sources.map((s) => `- [[${s.relativePath.split("/").pop()}]] — ${s.title}`);
            sourcesText = "\n\n---\n**Sources:**\n" + sourceLines.join("\n");
        }
        const metaParts = [
            `Search mode: ${answerResult.searchMode}`,
            answerResult.deepQueryUsed ? "Deep query: yes (follow-up search performed)" : null,
            `Sources: ${answerResult.sources.length}`,
        ];
        const meta = metaParts.filter(Boolean).join(" | ");
        return {
            content: [
                {
                    type: "text",
                    text: `${answerResult.answer}${sourcesText}\n\n_${meta}_`,
                },
            ],
        };
    }
    catch (err) {
        return {
            content: [{ type: "text", text: `Ask failed: ${err instanceof Error ? err.message : String(err)}` }],
            isError: true,
        };
    }
});
1523
// ─── Tool: gnosys_maintain ────────────────────────────────────────────────
// Vault hygiene: duplicate detection, confidence decay, consolidation.
// Defaults to a dry run; writes an audit row to gnosys.db when available.
server.tool("gnosys_maintain", "Run vault maintenance: detect duplicate memories, apply confidence decay, consolidate similar memories. Use --dry-run mode first to see what would change. Requires embeddings (run gnosys_reindex first).", {
    dryRun: z.boolean().optional().describe("Show what would change without modifying anything (default: true)"),
    autoApply: z.boolean().optional().describe("Automatically apply all changes (default: false)"),
    projectRoot: projectRootParam,
}, async ({ dryRun, autoApply, projectRoot }) => {
    const toolCtx = await resolveToolContext(projectRoot);
    // Dry run unless explicitly disabled; computed once, used for both the
    // maintenance call and the audit record.
    const effectiveDryRun = dryRun ?? true;
    try {
        const engine = new GnosysMaintenanceEngine(toolCtx.resolver, toolCtx.config);
        const report = await engine.maintain({
            dryRun: effectiveDryRun,
            autoApply: autoApply ?? false,
        });
        // v2.0: Log maintenance run to gnosys.db
        if (toolCtx.gnosysDb?.isAvailable()) {
            auditToDb(toolCtx.gnosysDb, "maintain", undefined, {
                dryRun: effectiveDryRun,
                duplicatesFound: report.duplicates?.length || 0,
                consolidated: report.consolidated || 0,
            });
        }
        return {
            content: [{ type: "text", text: formatMaintenanceReport(report) }],
        };
    }
    catch (err) {
        return {
            content: [{ type: "text", text: `Maintenance failed: ${err instanceof Error ? err.message : String(err)}` }],
            isError: true,
        };
    }
});
1555
// ─── Tool: gnosys_dearchive ──────────────────────────────────────────────
// Searches archive.db for matches and restores them to the active layer.
server.tool("gnosys_dearchive", "Force-dearchive memories from archive.db back to active. Search the archive for memories matching a query, then restore them to the active layer. Used when you need specific archived knowledge that wasn't auto-dearchived by search/ask.", {
    query: z.string().describe("Search query to find archived memories to restore"),
    limit: z.number().optional().describe("Max memories to dearchive (default 5)"),
    projectRoot: projectRootParam,
}, async ({ query, limit, projectRoot }) => {
    const toolCtx = await resolveToolContext(projectRoot);
    try {
        const { GnosysArchive } = await import("./lib/archive.js");
        const writeTarget = toolCtx.resolver.getWriteTarget();
        if (!writeTarget) {
            return {
                content: [{ type: "text", text: "No writable store found. Run gnosys_init first." }],
                isError: true,
            };
        }
        const archive = new GnosysArchive(writeTarget.path);
        if (!archive.isAvailable()) {
            return {
                content: [{ type: "text", text: "Archive not available. Is better-sqlite3 installed?" }],
                isError: true,
            };
        }
        const matches = archive.searchArchive(query, limit || 5);
        if (matches.length === 0) {
            archive.close();
            return {
                content: [{ type: "text", text: `No archived memories found matching "${query}".` }],
            };
        }
        const ids = matches.map((m) => m.id);
        const restored = await archive.dearchiveBatch(ids, writeTarget.store);
        archive.close();
        // v2.0: Sync dearchive to gnosys.db.
        // NOTE(review): every matched id is synced even if dearchiveBatch
        // restored fewer entries — confirm dearchiveBatch is all-or-nothing.
        if (toolCtx.gnosysDb?.isAvailable()) {
            for (const memId of ids) {
                syncDearchiveToDb(toolCtx.gnosysDb, memId);
            }
            auditToDb(toolCtx.gnosysDb, "dearchive", undefined, { query, count: restored.length });
        }
        const lines = [`Dearchived ${restored.length} memories back to active:`];
        for (const rp of restored) {
            lines.push(`  → ${rp}`);
        }
        return {
            content: [{ type: "text", text: lines.join("\n") }],
        };
    }
    catch (err) {
        return {
            content: [{ type: "text", text: `Dearchive failed: ${err instanceof Error ? err.message : String(err)}` }],
            isError: true,
        };
    }
});
1610
// ─── Tool: gnosys_reindex_graph ──────────────────────────────────────────
// Regenerates .gnosys/graph.json from all [[wikilinks]] in the vault.
server.tool(
    "gnosys_reindex_graph",
    "Build or rebuild the wikilink graph (.gnosys/graph.json). Parses all [[wikilinks]] across memories and generates a persistent JSON graph with nodes, edges, and stats.",
    { projectRoot: projectRootParam },
    async ({ projectRoot }) => {
        const toolCtx = await resolveToolContext(projectRoot);
        try {
            const { reindexGraph, formatGraphStats } = await import("./lib/graph.js");
            const stats = await reindexGraph(toolCtx.resolver);
            return {
                content: [{ type: "text", text: formatGraphStats(stats) }],
            };
        }
        catch (err) {
            return {
                content: [{ type: "text", text: `Graph reindex failed: ${err instanceof Error ? err.message : String(err)}` }],
                isError: true,
            };
        }
    },
);
1627
// ─── Tool: gnosys_dream ──────────────────────────────────────────────────
// Manually triggers an idle-time consolidation cycle. Requires a migrated
// gnosys.db; resets the scheduler's idle timer before running.
server.tool("gnosys_dream", "Run a Dream Mode cycle — idle-time consolidation that decays confidence, generates category summaries, discovers relationships, and creates review suggestions. NEVER deletes memories. Safe to run anytime.", {
    maxRuntimeMinutes: z.number().int().min(1).max(120).default(30).optional().describe("Max runtime in minutes"),
    selfCritique: z.boolean().default(true).optional().describe("Enable self-critique scoring"),
    generateSummaries: z.boolean().default(true).optional().describe("Generate category summaries"),
    discoverRelationships: z.boolean().default(true).optional().describe("Discover relationships between memories"),
    projectRoot: projectRootParam,
}, async (params) => {
    const toolCtx = await resolveToolContext(params.projectRoot);
    const db = toolCtx.gnosysDb;
    if (!db || !db.isAvailable() || !db.isMigrated()) {
        return {
            content: [
                {
                    type: "text",
                    text: "Dream Mode requires gnosys.db (v2.0). Run `gnosys migrate` first.",
                },
            ],
        };
    }
    // Record activity to reset idle timer (if scheduler is running)
    dreamScheduler?.recordActivity();
    const dreamConfig = {
        enabled: true,
        idleMinutes: 0, // Run immediately (manual trigger)
        maxRuntimeMinutes: params.maxRuntimeMinutes ?? 30,
        selfCritique: params.selfCritique ?? true,
        generateSummaries: params.generateSummaries ?? true,
        discoverRelationships: params.discoverRelationships ?? true,
        minMemories: 1, // No minimum for manual trigger
        provider: toolCtx.config?.dream?.provider || "ollama",
        model: toolCtx.config?.dream?.model,
    };
    const engine = new GnosysDreamEngine(db, toolCtx.config || DEFAULT_CONFIG, dreamConfig);
    // Progress callback logs to stderr so stdout stays clean for MCP traffic.
    const report = await engine.dream((phase, detail) => {
        console.error(`[dream:${phase}] ${detail}`);
    });
    return {
        content: [
            {
                type: "text",
                text: formatDreamReport(report),
            },
        ],
    };
});
1672
// ─── Tool: gnosys_export ─────────────────────────────────────────────────
// One-way export of gnosys.db to an Obsidian-style vault on disk.
server.tool("gnosys_export", "Export gnosys.db to Obsidian-compatible vault — atomic Markdown files with YAML frontmatter, [[wikilinks]], category summaries, and relationship graph. One-way export, never modifies gnosys.db.", {
    targetDir: z.string().describe("Target directory path for export"),
    activeOnly: z.boolean().default(true).optional().describe("Only export active memories (default: true)"),
    overwrite: z.boolean().default(false).optional().describe("Overwrite existing files"),
    includeSummaries: z.boolean().default(true).optional().describe("Include category summaries"),
    includeReviews: z.boolean().default(true).optional().describe("Include review suggestions from dream mode"),
    includeGraph: z.boolean().default(true).optional().describe("Include relationship graph"),
    projectRoot: projectRootParam,
}, async (params) => {
    const toolCtx = await resolveToolContext(params.projectRoot);
    const db = toolCtx.gnosysDb;
    if (!db || !db.isAvailable() || !db.isMigrated()) {
        return {
            content: [
                {
                    type: "text",
                    text: "Export requires gnosys.db (v2.0). Run `gnosys migrate` first.",
                },
            ],
        };
    }
    // Defaults mirror the schema: active memories only, no overwrite, all extras.
    const exporter = new GnosysExporter(db);
    const report = await exporter.export({
        targetDir: params.targetDir,
        activeOnly: params.activeOnly ?? true,
        overwrite: params.overwrite ?? false,
        includeSummaries: params.includeSummaries ?? true,
        includeReviews: params.includeReviews ?? true,
        includeGraph: params.includeGraph ?? true,
    });
    return {
        content: [
            {
                type: "text",
                text: formatExportReport(report),
            },
        ],
    };
});
1711
// ─── Tool: gnosys_dashboard ──────────────────────────────────────────────
// Aggregates system health (counts, graph, providers) as structured JSON.
server.tool(
    "gnosys_dashboard",
    "Show the Gnosys system dashboard: memory counts, maintenance health, graph stats, LLM provider status. Returns structured JSON.",
    { projectRoot: projectRootParam },
    async ({ projectRoot }) => {
        const toolCtx = await resolveToolContext(projectRoot);
        try {
            const { collectDashboardData, formatDashboardJSON } = await import("./lib/dashboard.js");
            // NOTE(review): the "1.1.0" version literal looks stale relative to
            // the published package version — confirm whether it should track it.
            const data = await collectDashboardData(toolCtx.resolver, toolCtx.config, "1.1.0", toolCtx.gnosysDb || undefined);
            return {
                content: [{ type: "text", text: formatDashboardJSON(data) }],
            };
        }
        catch (err) {
            return {
                content: [{ type: "text", text: `Dashboard failed: ${err instanceof Error ? err.message : String(err)}` }],
                isError: true,
            };
        }
    },
);
1728
// ─── Tool: gnosys_stores ─────────────────────────────────────────────────
// Diagnostic listing of every detected store and how it was discovered.
server.tool("gnosys_stores", "Debug tool — lists all detected Gnosys stores across registered projects, MCP workspace roots, cwd, and environment variables. Shows which store is active and helps diagnose multi-project routing.", {}, async () => {
    const lines = [];
    lines.push("GNOSYS STORES — Multi-Project Overview", "=".repeat(45), "");
    // Active stores
    lines.push("ACTIVE STORES:", resolver.getSummary(), "");
    // MCP roots
    const mcpRoots = GnosysResolver.getMcpRoots();
    lines.push(`MCP WORKSPACE ROOTS (${mcpRoots.length}):`);
    if (mcpRoots.length === 0) {
        lines.push("  (none — host may not support roots/list)");
    }
    else {
        for (const root of mcpRoots) {
            lines.push(`  ${root}`);
        }
    }
    lines.push("");
    // All detected stores
    const detected = await resolver.detectAllStores();
    lines.push(`ALL DETECTED STORES (${detected.length}):`);
    for (const d of detected) {
        const status = d.isActive ? "✓ ACTIVE" : d.hasGnosys ? "available" : "no .gnosys";
        lines.push(`  [${d.source}] ${d.path} — ${status}`);
    }
    lines.push("");
    // Usage hint
    lines.push("USAGE:");
    lines.push("  Pass projectRoot to any tool to target a specific project:");
    lines.push('  e.g. gnosys_add({ projectRoot: "/path/to/my-project", ... })');
    return { content: [{ type: "text", text: lines.join("\n") }] };
});
1764
// ─── Helper: reindex search across all stores ────────────────────────────
/**
 * Rebuild the module-level FTS index from every registered store.
 * No-op when the search index was never initialized.
 */
async function reindexAllStores() {
    if (!search) {
        return;
    }
    search.clearIndex();
    // Re-add each store's memories sequentially under its own label.
    for (const entry of resolver.getStores()) {
        await search.addStoreMemories(entry.store, entry.label);
    }
}
1774
// ─── Resource: gnosys://recall (AUTOMATIC MEMORY INJECTION) ────────────
// This is the primary recall mechanism. MCP hosts (Cursor, Claude Desktop,
// Claude Code, Cowork) read this resource on every turn, automatically
// injecting relevant memories into the model context — no tool call needed.
//
// Priority 1 + audience: assistant = hosts inject this before every message.
server.resource("gnosys_recall", "gnosys://recall", {
    description: "Automatic memory injection. Hosts read this resource on every turn to inject the most relevant memories as context. Returns a <gnosys-recall> block with [[wikilinks]] and relevance scores. Priority 1 (highest) — designed for always-on context injection without any tool call. Configure aggressiveness in gnosys.json: recall.aggressive (default: true).",
    mimeType: "text/markdown",
    annotations: {
        audience: ["assistant"],
        priority: 1, // Highest priority — always inject
    },
}, async () => {
    // Every resource read counts as activity for the dream scheduler, since
    // hosts poll this on each turn.
    dreamScheduler?.recordActivity();
    if (!search) {
        // No index yet — return the sentinel the host treats as "nothing to add".
        return {
            contents: [
                {
                    uri: "gnosys://recall",
                    mimeType: "text/markdown",
                    text: "<gnosys: no-strong-recall-needed>",
                },
            ],
        };
    }
    const storePath = resolver.getWriteTarget()?.store.getStorePath() || "";
    // Wildcard query: let the recall engine pick the top memories by relevance.
    const recalled = await recall("*", {
        limit: config.recall?.maxMemories || 8,
        search,
        resolver,
        storePath,
        recallConfig: config.recall,
        gnosysDb: gnosysDb || undefined,
    });
    return {
        contents: [
            {
                uri: "gnosys://recall",
                mimeType: "text/markdown",
                text: formatRecall(recalled),
            },
        ],
    };
});
1820
// ─── Tool: gnosys_recall (query-specific fallback) ──────────────────────
// For hosts that don't support MCP Resources, or when the agent wants to
// recall memories for a specific query. The resource above is preferred.
server.tool("gnosys_recall", "Fast memory recall — inject relevant memories as context. Returns <gnosys-recall> block. In aggressive mode (default), always returns top memories even at medium relevance. Prefer the gnosys://recall MCP Resource for automatic injection (no tool call needed).", {
    query: z
        .string()
        .describe("What the agent is currently working on. Use keywords. Example: 'auth JWT middleware' or 'database migration schema'"),
    limit: z.number().optional().describe("Max memories to return (default from config, max 15)"),
    traceId: z.string().optional().describe("Optional trace ID from the outer orchestrator for audit correlation"),
    aggressive: z.boolean().optional().describe("Override aggressive mode for this call. Default: from gnosys.json (true)"),
    projectRoot: projectRootParam,
}, async ({ query, limit, traceId, aggressive, projectRoot }) => {
    const ctx = await resolveToolContext(projectRoot);
    if (!ctx.search) {
        return {
            content: [{ type: "text", text: "<gnosys: no-strong-recall-needed>" }],
        };
    }
    const storePath = ctx.resolver.getWriteTarget()?.store.getStorePath() || "";
    // Per-call aggressive override layered over the configured recall settings.
    const recallConfig = {
        ...ctx.config.recall,
        ...(aggressive !== undefined ? { aggressive } : {}),
    };
    const result = await recall(query, {
        // FIX: fall back to 8 (the same default the gnosys://recall resource
        // uses) when neither `limit` nor config.recall.maxMemories is set —
        // previously Math.min(undefined, 15) yielded NaN.
        limit: Math.min(limit || recallConfig.maxMemories || 8, 15),
        search: ctx.search,
        resolver: ctx.resolver,
        storePath,
        traceId,
        recallConfig,
        gnosysDb: ctx.gnosysDb || undefined,
    });
    return {
        content: [{ type: "text", text: formatRecall(result) }],
    };
});
1856
// ─── Tool: gnosys_audit ──────────────────────────────────────────────────
// Read-only timeline over the write target's audit log.
server.tool("gnosys_audit", "View the audit trail of all memory operations (reads, writes, reinforcements, dearchives, maintenance). Shows a timeline of what happened and when. Useful for debugging 'why did the agent forget X?'", {
    days: z.number().optional().describe("Number of days to look back (default 7)"),
    operation: z.string().optional().describe("Filter by operation type: read, write, reinforce, dearchive, archive, maintain, search, ask, recall"),
    limit: z.number().optional().describe("Max entries to return (default 100)"),
    projectRoot: projectRootParam,
}, async ({ days, operation, limit, projectRoot }) => {
    const toolCtx = await resolveToolContext(projectRoot);
    const storePath = toolCtx.resolver.getWriteTarget()?.store.getStorePath();
    if (!storePath) {
        return {
            content: [{ type: "text", text: "No store found." }],
            isError: true,
        };
    }
    // Apply the documented defaults before reading the log.
    const filters = {
        days: days || 7,
        operation: operation,
        limit: limit || 100,
    };
    const entries = readAuditLog(storePath, filters);
    return {
        content: [{ type: "text", text: formatAuditTimeline(entries) }],
    };
});
1880
// ─── Tool: gnosys_preference_set ─────────────────────────────────────────
// Writes a user-scoped preference to the central DB. projectRoot is accepted
// in the schema but unused by the handler: preferences span all projects.
server.tool("gnosys_preference_set", "Set a user preference. Preferences are stored in the central DB as user-scoped memories. They persist across all projects and are injected into agent rules files on `gnosys sync`. Use this to record workflow conventions, coding standards, tool preferences, etc.", {
    key: z.string().describe("Preference key, kebab-case. Examples: 'commit-convention', 'code-style', 'llm-provider', 'testing-approach', 'naming-convention'"),
    value: z.string().describe("The preference value. Can be a sentence or paragraph describing the convention."),
    title: z.string().optional().describe("Human-readable title. Auto-generated from key if omitted."),
    tags: z.array(z.string()).optional().describe("Optional tags for discovery."),
    projectRoot: projectRootParam,
}, async ({ key, value, title, tags }) => {
    if (!centralDb?.isAvailable()) {
        return {
            content: [{ type: "text", text: "Central DB not available. Cannot store preferences." }],
            isError: true,
        };
    }
    try {
        const pref = setPreference(centralDb, key, value, { title, tags });
        const summary = `Preference set: **${pref.title}**\n  Key: ${pref.key}\n  Value: ${pref.value}\n\nRun \`gnosys_sync\` to regenerate agent rules files with this preference.`;
        return {
            content: [{
                    type: "text",
                    text: summary,
                }],
        };
    }
    catch (err) {
        return {
            content: [{ type: "text", text: `Error setting preference: ${err instanceof Error ? err.message : String(err)}` }],
            isError: true,
        };
    }
});
1910
// ─── Tool: gnosys_preference_get ─────────────────────────────────────────
server.tool("gnosys_preference_get", "Get a user preference by key, or list all preferences.", {
    key: z.string().optional().describe("Preference key to retrieve. Omit to list all preferences."),
    projectRoot: projectRootParam,
}, async ({ key }) => {
    if (!centralDb?.isAvailable()) {
        return {
            content: [{ type: "text", text: "Central DB not available." }],
            isError: true,
        };
    }
    // Small helper: wrap plain text in the MCP content envelope.
    const reply = (text) => ({ content: [{ type: "text", text }] });
    // Single-key lookup mode.
    if (key) {
        const pref = getPreference(centralDb, key);
        if (!pref) {
            return reply(`No preference found for key "${key}".`);
        }
        return reply(`**${pref.title}** (${pref.key})\n\n${pref.value}\n\nConfidence: ${pref.confidence}\nModified: ${pref.modified}`);
    }
    // List-all mode: one bullet per preference, first line of the value only.
    const prefs = getAllPreferences(centralDb);
    if (prefs.length === 0) {
        return reply("No preferences set. Use gnosys_preference_set to add some.");
    }
    const bullets = [];
    for (const p of prefs) {
        bullets.push(`- **${p.title}** (\`${p.key}\`): ${p.value.split("\n")[0]}`);
    }
    return reply(`${prefs.length} user preference(s):\n\n${bullets.join("\n")}`);
});
// ─── Tool: gnosys_preference_delete ──────────────────────────────────────
server.tool("gnosys_preference_delete", "Delete a user preference by key.", {
    key: z.string().describe("Preference key to delete."),
    projectRoot: projectRootParam,
}, async ({ key }) => {
    if (!centralDb?.isAvailable()) {
        return {
            content: [{ type: "text", text: "Central DB not available." }],
            isError: true,
        };
    }
    // deletePreference reports whether a matching row was actually removed.
    const text = deletePreference(centralDb, key)
        ? `Preference "${key}" deleted. Run \`gnosys_sync\` to update agent rules files.`
        : `No preference found for key "${key}".`;
    return {
        content: [{ type: "text", text }],
    };
});
// ─── Tool: gnosys_sync ──────────────────────────────────────────────────
server.tool("gnosys_sync", "Regenerate agent rules file from current user preferences and project conventions. Injects a GNOSYS:START/GNOSYS:END block into the detected agent rules file (CLAUDE.md, .cursor/rules/gnosys.mdc). User content outside the block is preserved.", {
    projectRoot: projectRootParam,
}, async ({ projectRoot }) => {
    // Helper for the error-return shape shared by every guard below.
    const fail = (text) => ({ content: [{ type: "text", text }], isError: true });
    if (!centralDb?.isAvailable()) {
        return fail("Central DB not available. Cannot sync rules.");
    }
    const ctx = await resolveToolContext(projectRoot);
    const target = ctx.resolver.getWriteTarget();
    if (!target) {
        return fail("No writable store found. Run gnosys_init first.");
    }
    // The project identity file lives in the directory containing the store.
    const projectDir = path.dirname(target.store.getStorePath());
    const identity = await readProjectIdentity(projectDir);
    if (!identity) {
        return fail("No project identity found. Run gnosys_init first.");
    }
    if (!identity.agentRulesTarget) {
        // Not an error: the project simply has no rules file to sync into.
        return {
            content: [{
                    type: "text",
                    text: "No agent rules target detected (no .cursor/ or CLAUDE.md found). Create one of these first, then re-run gnosys_init to detect it.",
                }],
        };
    }
    const result = await syncRules(centralDb, projectDir, identity.agentRulesTarget, identity.projectId);
    if (!result) {
        return fail("Sync failed — no agent rules target.");
    }
    const action = result.created ? "Created" : "Updated";
    return {
        content: [{
                type: "text",
                text: `${action} rules file: ${result.filePath}\n\n Preferences injected: ${result.prefCount}\n Project conventions: ${result.conventionCount}\n\nContent is inside <!-- GNOSYS:START --> / <!-- GNOSYS:END --> markers.\nUser content outside these markers is preserved.`,
            }],
    };
});
// ─── Tool: gnosys_federated_search ───────────────────────────────────────
server.tool("gnosys_federated_search", "Search across all scopes (project → user → global) with tier boosting. Results from the current project rank highest. Returns score breakdown showing which boosts were applied.", {
    query: z.string().describe("Search query"),
    limit: z.number().optional().describe("Max results (default: 20)"),
    projectRoot: z.string().optional().describe("Project root directory for context detection"),
    includeGlobal: z.boolean().optional().describe("Include global-scope memories (default: true)"),
}, async ({ query, limit, projectRoot, includeGlobal }) => {
    if (!centralDb?.isAvailable()) {
        return { content: [{ type: "text", text: "Central DB not available. Run gnosys_init first." }], isError: true };
    }
    // Tier boosting is relative to the caller's project, so detect it first.
    const projectId = await detectCurrentProject(centralDb, projectRoot || undefined);
    const options = {
        limit: limit || 20,
        projectId,
        includeGlobal: includeGlobal !== false,
    };
    const results = federatedSearch(centralDb, query, options);
    if (results.length === 0) {
        return { content: [{ type: "text", text: `No results for "${query}" across any scope.` }] };
    }
    // One numbered entry per result, annotated with scope and applied boosts.
    const lines = [];
    results.forEach((r, i) => {
        const projectLabel = r.projectName ? ` [${r.projectName}]` : "";
        const boostLabel = r.boosts.length > 0 ? ` (${r.boosts.join(", ")})` : "";
        lines.push(`${i + 1}. **${r.title}** (${r.category})${projectLabel}\n scope: ${r.scope} | score: ${r.score.toFixed(4)}${boostLabel}\n ${r.snippet}`);
    });
    const contextNote = projectId ? `Context: project ${projectId}` : "Context: no project detected";
    return {
        content: [{ type: "text", text: `${contextNote}\n\n${lines.join("\n\n")}` }],
    };
});
// ─── Tool: gnosys_detect_ambiguity ──────────────────────────────────────
server.tool("gnosys_detect_ambiguity", "Check if a query matches memories in multiple projects. Use before write operations to confirm the target project when ambiguity exists.", {
    query: z.string().describe("Query to check for cross-project ambiguity"),
}, async ({ query }) => {
    if (!centralDb?.isAvailable()) {
        return { content: [{ type: "text", text: "Central DB not available." }], isError: true };
    }
    const ambiguity = detectAmbiguity(centralDb, query);
    // A falsy result means the query resolves to at most one project.
    if (!ambiguity) {
        return { content: [{ type: "text", text: `No ambiguity detected for "${query}" — matches at most one project.` }] };
    }
    const candidateLines = [];
    for (const c of ambiguity.candidates) {
        candidateLines.push(`- **${c.projectName}** (${c.projectId})\n Dir: ${c.workingDirectory}\n Matching memories: ${c.memoryCount}`);
    }
    const text = `⚠️ ${ambiguity.message}\n\nMatching projects:\n${candidateLines.join("\n\n")}`;
    return {
        content: [{ type: "text", text }],
    };
});
// ─── Tool: gnosys_briefing ──────────────────────────────────────────────
server.tool("gnosys_briefing", "Generate a project briefing — a summary of memory state, categories, recent activity, and top tags. Use for dream mode pre-computation or quick project status.", {
    projectId: z.string().optional().describe("Project ID (auto-detects from cwd if omitted)"),
    all: z.boolean().optional().describe("Generate briefings for ALL projects"),
    projectRoot: z.string().optional().describe("Project root for auto-detection"),
}, async ({ projectId, all, projectRoot }) => {
    if (!centralDb?.isAvailable()) {
        return { content: [{ type: "text", text: "Central DB not available." }], isError: true };
    }
    // "all" mode: one combined document covering every registered project.
    if (all) {
        const briefings = generateAllBriefings(centralDb);
        if (briefings.length === 0) {
            return { content: [{ type: "text", text: "No projects registered." }] };
        }
        const summaries = briefings.map((b) => `## ${b.projectName}\n${b.summary}`);
        return {
            content: [{ type: "text", text: `# All Project Briefings\n\n${summaries.join("\n\n")}` }],
        };
    }
    // Single-project mode: fall back to cwd detection when no ID was given.
    const pid = projectId || (await detectCurrentProject(centralDb, projectRoot || undefined));
    if (!pid) {
        return { content: [{ type: "text", text: "No project specified and none detected from current directory." }], isError: true };
    }
    const briefing = generateBriefing(centralDb, pid);
    if (!briefing) {
        return { content: [{ type: "text", text: `Project not found: ${pid}` }], isError: true };
    }
    // Categories sorted by descending count; tags capped at the top 10.
    const catLines = Object.entries(briefing.categories)
        .sort(([, countA], [, countB]) => countB - countA)
        .map(([cat, count]) => ` ${cat}: ${count}`);
    const recentLines = briefing.recentActivity.map((r) => ` - ${r.title} (${r.modified})`);
    const tagLine = briefing.topTags.slice(0, 10).map((t) => `${t.tag}(${t.count})`).join(", ");
    const sections = [
        `# Briefing: ${briefing.projectName}`,
        `Directory: ${briefing.workingDirectory}`,
        `Active memories: ${briefing.activeMemories} / ${briefing.totalMemories} total`,
        "",
        `## Categories\n${catLines.join("\n")}`,
        "",
        `## Recent Activity (7d)\n${recentLines.length > 0 ? recentLines.join("\n") : " None"}`,
        "",
        `## Top Tags\n ${tagLine || "None"}`,
        "",
        `## Summary\n${briefing.summary}`,
    ];
    return { content: [{ type: "text", text: sections.join("\n") }] };
});
// ─── Tool: gnosys_working_set ───────────────────────────────────────────
server.tool("gnosys_working_set", "Get the implicit working set — recently modified memories for the current project. These represent the active context and get boosted in federated search.", {
    projectRoot: z.string().optional().describe("Project root for auto-detection"),
    windowHours: z.number().optional().describe("Lookback window in hours (default: 24)"),
}, async ({ projectRoot, windowHours }) => {
    if (!centralDb?.isAvailable()) {
        return { content: [{ type: "text", text: "Central DB not available." }], isError: true };
    }
    const pid = await detectCurrentProject(centralDb, projectRoot || undefined);
    if (!pid) {
        return { content: [{ type: "text", text: "No project detected from current directory." }], isError: true };
    }
    // Default to a 24-hour lookback window when none was supplied.
    const hours = windowHours || 24;
    const workingSet = getWorkingSet(centralDb, pid, { windowHours: hours });
    return { content: [{ type: "text", text: formatWorkingSet(workingSet) }] };
});
// ─── Start the server ────────────────────────────────────────────────────
/**
 * Boot sequence for the MCP server. Statement order is load-bearing:
 * central DB first, then layered stores, then search/ingestion/ask engines
 * (which all read the write target and `config`), then the stdio transport,
 * then MCP roots discovery (which requires an established connection).
 * Mutates module-level state: centralDb, search, tagRegistry, config,
 * ingestion, gnosysDb, hybridSearch, askEngine, dreamScheduler.
 */
async function main() {
    // v3.0: Initialize central DB at ~/.gnosys/gnosys.db
    try {
        centralDb = GnosysDB.openCentral();
        if (centralDb.isAvailable()) {
            const projects = centralDb.getAllProjects();
            console.error(`Central DB: ready ✓ (${projects.length} projects registered, schema v${centralDb.getSchemaVersion()})`);
        }
        else {
            // Native driver missing — reset to null so tools degrade with a
            // clear "not available" message instead of throwing later.
            centralDb = null;
            console.error("Central DB: not available (better-sqlite3 missing)");
        }
    }
    catch (err) {
        centralDb = null;
        console.error(`Central DB: initialization failed — ${err instanceof Error ? err.message : err}`);
    }
    // Discover and initialize all layered stores
    const stores = await resolver.resolve();
    if (stores.length === 0) {
        console.error("Warning: No Gnosys stores found. Create a .gnosys/ directory or set GNOSYS_PERSONAL / GNOSYS_GLOBAL.");
    }
    // NOTE: all status output goes to stderr — stdout is reserved for the
    // MCP stdio transport protocol.
    console.error("Gnosys MCP server starting.");
    console.error("Active stores:");
    console.error(resolver.getSummary());
    // Initialize search from the first writable store
    const writeTarget = resolver.getWriteTarget();
    if (writeTarget) {
        search = new GnosysSearch(writeTarget.store.getStorePath());
        tagRegistry = new GnosysTagRegistry(writeTarget.store.getStorePath());
        await tagRegistry.load();
        // Load config from the primary store
        try {
            config = await loadConfig(writeTarget.store.getStorePath());
        }
        catch (err) {
            // Non-fatal: `config` keeps whatever value it already had.
            console.error(`Warning: Failed to load gnosys.json: ${err instanceof Error ? err.message : err}`);
        }
        ingestion = new GnosysIngestion(writeTarget.store, tagRegistry, config);
        // Initialize audit logging
        initAudit(writeTarget.store.getStorePath());
        // Build search index across all stores
        await reindexAllStores();
        // v2.0: Initialize GnosysDB (unified SQLite store)
        try {
            gnosysDb = new GnosysDB(writeTarget.store.getStorePath());
            if (gnosysDb.isAvailable() && gnosysDb.isMigrated()) {
                const counts = gnosysDb.getMemoryCount();
                console.error(`GnosysDB: migrated ✓ (${counts.active} active, ${counts.archived} archived, schema v${gnosysDb.getSchemaVersion()})`);
            }
            else if (gnosysDb.isAvailable()) {
                console.error("GnosysDB: available but not migrated. Run `gnosys migrate` to populate.");
            }
            else {
                gnosysDb = null;
                console.error("GnosysDB: not available (better-sqlite3 missing)");
            }
        }
        catch {
            // Deliberate best-effort: downstream code checks gnosysDb for null.
            gnosysDb = null;
            console.error("GnosysDB: initialization failed — using legacy paths");
        }
        // Initialize hybrid search + ask engine (embeddings loaded lazily)
        const embeddings = new GnosysEmbeddings(writeTarget.store.getStorePath());
        hybridSearch = new GnosysHybridSearch(search, embeddings, resolver, writeTarget.store.getStorePath(), gnosysDb || undefined);
        askEngine = new GnosysAsk(hybridSearch, config, resolver, writeTarget.store.getStorePath());
        const embCount = embeddings.hasEmbeddings() ? embeddings.count() : 0;
        console.error(`LLM ingestion: ${ingestion.isLLMAvailable ? `enabled (${ingestion.providerName})` : "disabled (configure LLM provider)"}`);
        console.error(`Hybrid search: ${embCount > 0 ? `ready (${embCount} embeddings)` : "available (run gnosys_reindex to build embeddings)"}`);
        console.error(`Ask engine: ${askEngine.isLLMAvailable ? `ready (${askEngine.providerName}/${askEngine.modelName})` : "disabled (configure LLM provider)"}`);
        // v2.0: Initialize Dream Mode (idle-time consolidation)
        // Requires both a migrated DB handle and an explicit opt-in in config.
        if (gnosysDb && config.dream?.enabled) {
            const dreamEngine = new GnosysDreamEngine(gnosysDb, config, config.dream);
            dreamScheduler = new DreamScheduler(dreamEngine, config.dream);
            dreamScheduler.start();
            console.error(`Dream Mode: enabled (idle ${config.dream.idleMinutes}min, max ${config.dream.maxRuntimeMinutes}min)`);
        }
        else {
            console.error(`Dream Mode: disabled (enable in gnosys.json: dream.enabled = true)`);
        }
    }
    const transport = new StdioServerTransport();
    await server.connect(transport);
    // ─── MCP Roots Support (multi-project awareness) ───────────────────────
    // After connecting, request workspace roots from the host. This lets us
    // discover .gnosys stores in all open projects, not just the cwd.
    try {
        const rootsResult = await server.server.listRoots();
        if (rootsResult.roots && rootsResult.roots.length > 0) {
            GnosysResolver.setMcpRoots(rootsResult.roots);
            console.error(`MCP roots: ${rootsResult.roots.map((r) => r.name || r.uri).join(", ")}`);
        }
    }
    catch {
        // Host doesn't support roots/list — that's fine, fall back to cwd
        console.error("MCP roots: not supported by host (using cwd fallback)");
    }
    // Listen for roots changes (e.g. user opens/closes folders)
    try {
        // Dynamic import so the schema is only loaded when the handler is set.
        const { RootsListChangedNotificationSchema } = await import("@modelcontextprotocol/sdk/types.js");
        server.server.setNotificationHandler(RootsListChangedNotificationSchema, async () => {
            try {
                const updated = await server.server.listRoots();
                if (updated.roots) {
                    GnosysResolver.setMcpRoots(updated.roots);
                    console.error(`MCP roots updated: ${updated.roots.map((r) => r.name || r.uri).join(", ")}`);
                }
            }
            catch {
                // Ignore errors during roots refresh
            }
        });
    }
    catch {
        // Notification handler setup failed — non-critical
    }
}
// Entry point: any unhandled failure during startup is fatal for the
// MCP server process.
main().catch((error) => {
    console.error("Fatal error:", error);
    process.exit(1);
});
2267
+ //# sourceMappingURL=index.js.map