@goondocks/myco 0.2.7 → 0.2.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (290)
  1. package/.claude-plugin/plugin.json +1 -1
  2. package/CONTRIBUTING.md +1 -1
  3. package/commands/init.md +10 -26
  4. package/dist/chunk-4JML636J.js +52 -0
  5. package/dist/chunk-4JML636J.js.map +1 -0
  6. package/dist/chunk-AOMX45LH.js +8974 -0
  7. package/dist/chunk-AOMX45LH.js.map +1 -0
  8. package/dist/chunk-I7PMGO6S.js +58 -0
  9. package/dist/chunk-I7PMGO6S.js.map +1 -0
  10. package/dist/chunk-N33KUCFP.js +33 -0
  11. package/dist/chunk-N33KUCFP.js.map +1 -0
  12. package/dist/chunk-NYNEJ5QY.js +71 -0
  13. package/dist/chunk-NYNEJ5QY.js.map +1 -0
  14. package/dist/chunk-PA3VMINE.js +111 -0
  15. package/dist/chunk-PA3VMINE.js.map +1 -0
  16. package/dist/chunk-PZUWP5VK.js +44 -0
  17. package/dist/chunk-PZUWP5VK.js.map +1 -0
  18. package/dist/chunk-SVUINMDD.js +104 -0
  19. package/dist/chunk-SVUINMDD.js.map +1 -0
  20. package/dist/chunk-TH6GIBXG.js +91 -0
  21. package/dist/chunk-TH6GIBXG.js.map +1 -0
  22. package/dist/chunk-TWDS6MSU.js +354 -0
  23. package/dist/chunk-TWDS6MSU.js.map +1 -0
  24. package/dist/chunk-UIIZRTJU.js +21172 -0
  25. package/dist/chunk-UIIZRTJU.js.map +1 -0
  26. package/dist/chunk-YMYJ7FNH.js +19 -0
  27. package/dist/chunk-YMYJ7FNH.js.map +1 -0
  28. package/dist/chunk-ZJQ5G637.js +21 -0
  29. package/dist/chunk-ZJQ5G637.js.map +1 -0
  30. package/dist/chunk-ZTZVX5E6.js +421 -0
  31. package/dist/chunk-ZTZVX5E6.js.map +1 -0
  32. package/dist/cli-K5FSKLQC.js +625 -0
  33. package/dist/cli-K5FSKLQC.js.map +1 -0
  34. package/dist/client-4JMOYNKK.js +11 -0
  35. package/dist/client-4JMOYNKK.js.map +1 -0
  36. package/dist/main-5W4ADOBG.js +3224 -0
  37. package/dist/main-5W4ADOBG.js.map +1 -0
  38. package/dist/server-PIEPVUUH.js +14725 -0
  39. package/dist/server-PIEPVUUH.js.map +1 -0
  40. package/dist/session-start-2NNQHT5S.js +189 -0
  41. package/dist/session-start-2NNQHT5S.js.map +1 -0
  42. package/dist/src/cli.js +9 -582
  43. package/dist/src/cli.js.map +1 -1
  44. package/dist/src/daemon/main.js +9 -737
  45. package/dist/src/daemon/main.js.map +1 -1
  46. package/dist/src/hooks/post-tool-use.js +47 -35
  47. package/dist/src/hooks/post-tool-use.js.map +1 -1
  48. package/dist/src/hooks/session-end.js +29 -18
  49. package/dist/src/hooks/session-end.js.map +1 -1
  50. package/dist/src/hooks/session-start.js +9 -48
  51. package/dist/src/hooks/session-start.js.map +1 -1
  52. package/dist/src/hooks/stop.js +39 -30
  53. package/dist/src/hooks/stop.js.map +1 -1
  54. package/dist/src/hooks/user-prompt-submit.js +48 -40
  55. package/dist/src/hooks/user-prompt-submit.js.map +1 -1
  56. package/dist/src/mcp/server.js +9 -304
  57. package/dist/src/mcp/server.js.map +1 -1
  58. package/package.json +3 -2
  59. package/dist/src/agents/adapter.d.ts +0 -76
  60. package/dist/src/agents/adapter.d.ts.map +0 -1
  61. package/dist/src/agents/adapter.js +0 -124
  62. package/dist/src/agents/adapter.js.map +0 -1
  63. package/dist/src/agents/claude-code.d.ts +0 -3
  64. package/dist/src/agents/claude-code.d.ts.map +0 -1
  65. package/dist/src/agents/claude-code.js +0 -22
  66. package/dist/src/agents/claude-code.js.map +0 -1
  67. package/dist/src/agents/cursor.d.ts +0 -3
  68. package/dist/src/agents/cursor.d.ts.map +0 -1
  69. package/dist/src/agents/cursor.js +0 -154
  70. package/dist/src/agents/cursor.js.map +0 -1
  71. package/dist/src/agents/index.d.ts +0 -6
  72. package/dist/src/agents/index.d.ts.map +0 -1
  73. package/dist/src/agents/index.js +0 -5
  74. package/dist/src/agents/index.js.map +0 -1
  75. package/dist/src/agents/registry.d.ts +0 -34
  76. package/dist/src/agents/registry.d.ts.map +0 -1
  77. package/dist/src/agents/registry.js +0 -95
  78. package/dist/src/agents/registry.js.map +0 -1
  79. package/dist/src/artifacts/candidates.d.ts +0 -20
  80. package/dist/src/artifacts/candidates.d.ts.map +0 -1
  81. package/dist/src/artifacts/candidates.js +0 -84
  82. package/dist/src/artifacts/candidates.js.map +0 -1
  83. package/dist/src/artifacts/slugify.d.ts +0 -2
  84. package/dist/src/artifacts/slugify.d.ts.map +0 -1
  85. package/dist/src/artifacts/slugify.js +0 -22
  86. package/dist/src/artifacts/slugify.js.map +0 -1
  87. package/dist/src/capture/buffer.d.ts +0 -20
  88. package/dist/src/capture/buffer.d.ts.map +0 -1
  89. package/dist/src/capture/buffer.js +0 -55
  90. package/dist/src/capture/buffer.js.map +0 -1
  91. package/dist/src/capture/transcript-miner.d.ts +0 -31
  92. package/dist/src/capture/transcript-miner.d.ts.map +0 -1
  93. package/dist/src/capture/transcript-miner.js +0 -61
  94. package/dist/src/capture/transcript-miner.js.map +0 -1
  95. package/dist/src/cli.d.ts +0 -3
  96. package/dist/src/cli.d.ts.map +0 -1
  97. package/dist/src/config/loader.d.ts +0 -4
  98. package/dist/src/config/loader.d.ts.map +0 -1
  99. package/dist/src/config/loader.js +0 -32
  100. package/dist/src/config/loader.js.map +0 -1
  101. package/dist/src/config/schema.d.ts +0 -83
  102. package/dist/src/config/schema.d.ts.map +0 -1
  103. package/dist/src/config/schema.js +0 -55
  104. package/dist/src/config/schema.js.map +0 -1
  105. package/dist/src/constants.d.ts +0 -73
  106. package/dist/src/constants.d.ts.map +0 -1
  107. package/dist/src/constants.js +0 -86
  108. package/dist/src/constants.js.map +0 -1
  109. package/dist/src/context/injector.d.ts +0 -18
  110. package/dist/src/context/injector.d.ts.map +0 -1
  111. package/dist/src/context/injector.js +0 -71
  112. package/dist/src/context/injector.js.map +0 -1
  113. package/dist/src/context/relevance.d.ts +0 -13
  114. package/dist/src/context/relevance.d.ts.map +0 -1
  115. package/dist/src/context/relevance.js +0 -44
  116. package/dist/src/context/relevance.js.map +0 -1
  117. package/dist/src/daemon/batch.d.ts +0 -22
  118. package/dist/src/daemon/batch.d.ts.map +0 -1
  119. package/dist/src/daemon/batch.js +0 -38
  120. package/dist/src/daemon/batch.js.map +0 -1
  121. package/dist/src/daemon/lifecycle.d.ts +0 -27
  122. package/dist/src/daemon/lifecycle.d.ts.map +0 -1
  123. package/dist/src/daemon/lifecycle.js +0 -50
  124. package/dist/src/daemon/lifecycle.js.map +0 -1
  125. package/dist/src/daemon/lineage.d.ts +0 -42
  126. package/dist/src/daemon/lineage.d.ts.map +0 -1
  127. package/dist/src/daemon/lineage.js +0 -116
  128. package/dist/src/daemon/lineage.js.map +0 -1
  129. package/dist/src/daemon/logger.d.ts +0 -33
  130. package/dist/src/daemon/logger.d.ts.map +0 -1
  131. package/dist/src/daemon/logger.js +0 -88
  132. package/dist/src/daemon/logger.js.map +0 -1
  133. package/dist/src/daemon/main.d.ts +0 -2
  134. package/dist/src/daemon/main.d.ts.map +0 -1
  135. package/dist/src/daemon/processor.d.ts +0 -44
  136. package/dist/src/daemon/processor.d.ts.map +0 -1
  137. package/dist/src/daemon/processor.js +0 -142
  138. package/dist/src/daemon/processor.js.map +0 -1
  139. package/dist/src/daemon/server.d.ts +0 -24
  140. package/dist/src/daemon/server.d.ts.map +0 -1
  141. package/dist/src/daemon/server.js +0 -117
  142. package/dist/src/daemon/server.js.map +0 -1
  143. package/dist/src/daemon/watcher.d.ts +0 -29
  144. package/dist/src/daemon/watcher.d.ts.map +0 -1
  145. package/dist/src/daemon/watcher.js +0 -67
  146. package/dist/src/daemon/watcher.js.map +0 -1
  147. package/dist/src/hooks/client.d.ts +0 -20
  148. package/dist/src/hooks/client.d.ts.map +0 -1
  149. package/dist/src/hooks/client.js +0 -111
  150. package/dist/src/hooks/client.js.map +0 -1
  151. package/dist/src/hooks/post-tool-use.d.ts +0 -2
  152. package/dist/src/hooks/post-tool-use.d.ts.map +0 -1
  153. package/dist/src/hooks/read-stdin.d.ts +0 -2
  154. package/dist/src/hooks/read-stdin.d.ts.map +0 -1
  155. package/dist/src/hooks/read-stdin.js +0 -10
  156. package/dist/src/hooks/read-stdin.js.map +0 -1
  157. package/dist/src/hooks/session-end.d.ts +0 -2
  158. package/dist/src/hooks/session-end.d.ts.map +0 -1
  159. package/dist/src/hooks/session-start.d.ts +0 -2
  160. package/dist/src/hooks/session-start.d.ts.map +0 -1
  161. package/dist/src/hooks/stop.d.ts +0 -2
  162. package/dist/src/hooks/stop.d.ts.map +0 -1
  163. package/dist/src/hooks/user-prompt-submit.d.ts +0 -2
  164. package/dist/src/hooks/user-prompt-submit.d.ts.map +0 -1
  165. package/dist/src/index/fts.d.ts +0 -16
  166. package/dist/src/index/fts.d.ts.map +0 -1
  167. package/dist/src/index/fts.js +0 -53
  168. package/dist/src/index/fts.js.map +0 -1
  169. package/dist/src/index/rebuild.d.ts +0 -4
  170. package/dist/src/index/rebuild.d.ts.map +0 -1
  171. package/dist/src/index/rebuild.js +0 -40
  172. package/dist/src/index/rebuild.js.map +0 -1
  173. package/dist/src/index/sqlite.d.ts +0 -33
  174. package/dist/src/index/sqlite.d.ts.map +0 -1
  175. package/dist/src/index/sqlite.js +0 -99
  176. package/dist/src/index/sqlite.js.map +0 -1
  177. package/dist/src/index/vectors.d.ts +0 -24
  178. package/dist/src/index/vectors.d.ts.map +0 -1
  179. package/dist/src/index/vectors.js +0 -97
  180. package/dist/src/index/vectors.js.map +0 -1
  181. package/dist/src/intelligence/anthropic.d.ts +0 -17
  182. package/dist/src/intelligence/anthropic.d.ts.map +0 -1
  183. package/dist/src/intelligence/anthropic.js +0 -36
  184. package/dist/src/intelligence/anthropic.js.map +0 -1
  185. package/dist/src/intelligence/embeddings.d.ts +0 -3
  186. package/dist/src/intelligence/embeddings.d.ts.map +0 -1
  187. package/dist/src/intelligence/embeddings.js +0 -15
  188. package/dist/src/intelligence/embeddings.js.map +0 -1
  189. package/dist/src/intelligence/llm.d.ts +0 -33
  190. package/dist/src/intelligence/llm.d.ts.map +0 -1
  191. package/dist/src/intelligence/llm.js +0 -26
  192. package/dist/src/intelligence/llm.js.map +0 -1
  193. package/dist/src/intelligence/lm-studio.d.ts +0 -20
  194. package/dist/src/intelligence/lm-studio.d.ts.map +0 -1
  195. package/dist/src/intelligence/lm-studio.js +0 -59
  196. package/dist/src/intelligence/lm-studio.js.map +0 -1
  197. package/dist/src/intelligence/ollama.d.ts +0 -22
  198. package/dist/src/intelligence/ollama.d.ts.map +0 -1
  199. package/dist/src/intelligence/ollama.js +0 -64
  200. package/dist/src/intelligence/ollama.js.map +0 -1
  201. package/dist/src/intelligence/response.d.ts +0 -29
  202. package/dist/src/intelligence/response.d.ts.map +0 -1
  203. package/dist/src/intelligence/response.js +0 -71
  204. package/dist/src/intelligence/response.js.map +0 -1
  205. package/dist/src/logs/format.d.ts +0 -6
  206. package/dist/src/logs/format.d.ts.map +0 -1
  207. package/dist/src/logs/format.js +0 -46
  208. package/dist/src/logs/format.js.map +0 -1
  209. package/dist/src/logs/reader.d.ts +0 -28
  210. package/dist/src/logs/reader.d.ts.map +0 -1
  211. package/dist/src/logs/reader.js +0 -106
  212. package/dist/src/logs/reader.js.map +0 -1
  213. package/dist/src/mcp/server.d.ts +0 -16
  214. package/dist/src/mcp/server.d.ts.map +0 -1
  215. package/dist/src/mcp/tools/consolidate.d.ts +0 -15
  216. package/dist/src/mcp/tools/consolidate.d.ts.map +0 -1
  217. package/dist/src/mcp/tools/consolidate.js +0 -49
  218. package/dist/src/mcp/tools/consolidate.js.map +0 -1
  219. package/dist/src/mcp/tools/graph.d.ts +0 -30
  220. package/dist/src/mcp/tools/graph.d.ts.map +0 -1
  221. package/dist/src/mcp/tools/graph.js +0 -106
  222. package/dist/src/mcp/tools/graph.js.map +0 -1
  223. package/dist/src/mcp/tools/logs.d.ts +0 -3
  224. package/dist/src/mcp/tools/logs.d.ts.map +0 -1
  225. package/dist/src/mcp/tools/logs.js +0 -7
  226. package/dist/src/mcp/tools/logs.js.map +0 -1
  227. package/dist/src/mcp/tools/plans.d.ts +0 -23
  228. package/dist/src/mcp/tools/plans.d.ts.map +0 -1
  229. package/dist/src/mcp/tools/plans.js +0 -63
  230. package/dist/src/mcp/tools/plans.js.map +0 -1
  231. package/dist/src/mcp/tools/recall.d.ts +0 -30
  232. package/dist/src/mcp/tools/recall.d.ts.map +0 -1
  233. package/dist/src/mcp/tools/recall.js +0 -34
  234. package/dist/src/mcp/tools/recall.js.map +0 -1
  235. package/dist/src/mcp/tools/remember.d.ts +0 -15
  236. package/dist/src/mcp/tools/remember.d.ts.map +0 -1
  237. package/dist/src/mcp/tools/remember.js +0 -18
  238. package/dist/src/mcp/tools/remember.js.map +0 -1
  239. package/dist/src/mcp/tools/search.d.ts +0 -19
  240. package/dist/src/mcp/tools/search.d.ts.map +0 -1
  241. package/dist/src/mcp/tools/search.js +0 -59
  242. package/dist/src/mcp/tools/search.js.map +0 -1
  243. package/dist/src/mcp/tools/sessions.d.ts +0 -21
  244. package/dist/src/mcp/tools/sessions.d.ts.map +0 -1
  245. package/dist/src/mcp/tools/sessions.js +0 -36
  246. package/dist/src/mcp/tools/sessions.js.map +0 -1
  247. package/dist/src/mcp/tools/supersede.d.ts +0 -14
  248. package/dist/src/mcp/tools/supersede.d.ts.map +0 -1
  249. package/dist/src/mcp/tools/supersede.js +0 -30
  250. package/dist/src/mcp/tools/supersede.js.map +0 -1
  251. package/dist/src/mcp/tools/team.d.ts +0 -16
  252. package/dist/src/mcp/tools/team.d.ts.map +0 -1
  253. package/dist/src/mcp/tools/team.js +0 -32
  254. package/dist/src/mcp/tools/team.js.map +0 -1
  255. package/dist/src/obsidian/formatter.d.ts +0 -80
  256. package/dist/src/obsidian/formatter.d.ts.map +0 -1
  257. package/dist/src/obsidian/formatter.js +0 -227
  258. package/dist/src/obsidian/formatter.js.map +0 -1
  259. package/dist/src/prompts/index.d.ts +0 -13
  260. package/dist/src/prompts/index.d.ts.map +0 -1
  261. package/dist/src/prompts/index.js +0 -75
  262. package/dist/src/prompts/index.js.map +0 -1
  263. package/dist/src/vault/frontmatter.d.ts +0 -6
  264. package/dist/src/vault/frontmatter.d.ts.map +0 -1
  265. package/dist/src/vault/frontmatter.js +0 -10
  266. package/dist/src/vault/frontmatter.js.map +0 -1
  267. package/dist/src/vault/observations.d.ts +0 -10
  268. package/dist/src/vault/observations.d.ts.map +0 -1
  269. package/dist/src/vault/observations.js +0 -33
  270. package/dist/src/vault/observations.js.map +0 -1
  271. package/dist/src/vault/reader.d.ts +0 -10
  272. package/dist/src/vault/reader.d.ts.map +0 -1
  273. package/dist/src/vault/reader.js +0 -48
  274. package/dist/src/vault/reader.js.map +0 -1
  275. package/dist/src/vault/resolve.d.ts +0 -18
  276. package/dist/src/vault/resolve.d.ts.map +0 -1
  277. package/dist/src/vault/resolve.js +0 -51
  278. package/dist/src/vault/resolve.js.map +0 -1
  279. package/dist/src/vault/session-id.d.ts +0 -16
  280. package/dist/src/vault/session-id.d.ts.map +0 -1
  281. package/dist/src/vault/session-id.js +0 -29
  282. package/dist/src/vault/session-id.js.map +0 -1
  283. package/dist/src/vault/types.d.ts +0 -88
  284. package/dist/src/vault/types.d.ts.map +0 -1
  285. package/dist/src/vault/types.js +0 -94
  286. package/dist/src/vault/types.js.map +0 -1
  287. package/dist/src/vault/writer.d.ts +0 -66
  288. package/dist/src/vault/writer.d.ts.map +0 -1
  289. package/dist/src/vault/writer.js +0 -217
  290. package/dist/src/vault/writer.js.map +0 -1
@@ -0,0 +1,58 @@
import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);

// src/capture/buffer.ts
import fs from "fs";
import path from "path";

/**
 * Append-only JSONL buffer of session events: one `<sessionId>.jsonl` file
 * per session under `bufferDir`, one JSON object per line.
 */
var EventBuffer = class {
  filePath;
  maxEvents;
  eventCount = 0;
  /**
   * @param {string} bufferDir - directory that holds all session buffer files
   * @param {string} sessionId - base name of this session's buffer file
   * @param {{maxEvents?: number}} [options] - overflow threshold (default 500)
   */
  constructor(bufferDir, sessionId, options = {}) {
    this.bufferDir = bufferDir;
    this.sessionId = sessionId;
    this.filePath = path.join(bufferDir, `${sessionId}.jsonl`);
    this.maxEvents = options.maxEvents ?? 500;
    // Resume the running count when a buffer file already exists for this session.
    if (fs.existsSync(this.filePath)) {
      const content = fs.readFileSync(this.filePath, "utf-8").trim();
      this.eventCount = content ? content.split("\n").length : 0;
    }
  }
  /** Append one event, stamping `timestamp` if the caller did not provide one. */
  append(event) {
    fs.mkdirSync(this.bufferDir, { recursive: true });
    const line = JSON.stringify({
      ...event,
      timestamp: event.timestamp ?? new Date().toISOString()
    });
    fs.appendFileSync(this.filePath, line + "\n");
    this.eventCount++;
  }
  /** Parse and return every buffered event; [] when the file is missing or empty. */
  readAll() {
    if (!fs.existsSync(this.filePath)) return [];
    const content = fs.readFileSync(this.filePath, "utf-8").trim();
    if (!content) return [];
    return content.split("\n").map((line) => JSON.parse(line));
  }
  /** Number of buffered events (pre-existing lines plus appends). */
  count() {
    return this.eventCount;
  }
  /** Whether the buffer file currently exists on disk. */
  exists() {
    return fs.existsSync(this.filePath);
  }
  /** Remove the buffer file (if any) and reset the counter. */
  delete() {
    if (fs.existsSync(this.filePath)) {
      fs.unlinkSync(this.filePath);
    }
    this.eventCount = 0;
  }
  /** True once the buffer holds strictly more than `maxEvents` events. */
  isOverflow() {
    return this.eventCount > this.maxEvents;
  }
  getFilePath() {
    return this.filePath;
  }
};

export {
  EventBuffer
};
//# sourceMappingURL=chunk-I7PMGO6S.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/capture/buffer.ts"],"sourcesContent":["import fs from 'node:fs';\nimport path from 'node:path';\n\ninterface BufferOptions {\n maxEvents?: number;\n}\n\nexport class EventBuffer {\n private filePath: string;\n private maxEvents: number;\n private eventCount = 0;\n\n constructor(\n private bufferDir: string,\n private sessionId: string,\n options: BufferOptions = {},\n ) {\n this.filePath = path.join(bufferDir, `${sessionId}.jsonl`);\n this.maxEvents = options.maxEvents ?? 500;\n\n if (fs.existsSync(this.filePath)) {\n const content = fs.readFileSync(this.filePath, 'utf-8').trim();\n this.eventCount = content ? content.split('\\n').length : 0;\n }\n }\n\n append(event: Record<string, unknown>): void {\n fs.mkdirSync(this.bufferDir, { recursive: true });\n\n const line = JSON.stringify({\n ...event,\n timestamp: event.timestamp ?? new Date().toISOString(),\n });\n\n fs.appendFileSync(this.filePath, line + '\\n');\n this.eventCount++;\n }\n\n readAll(): Array<Record<string, unknown>> {\n if (!fs.existsSync(this.filePath)) return [];\n const content = fs.readFileSync(this.filePath, 'utf-8').trim();\n if (!content) return [];\n return content.split('\\n').map((line) => JSON.parse(line));\n }\n\n count(): number {\n return this.eventCount;\n }\n\n exists(): boolean {\n return fs.existsSync(this.filePath);\n }\n\n delete(): void {\n if (fs.existsSync(this.filePath)) {\n fs.unlinkSync(this.filePath);\n }\n this.eventCount = 0;\n }\n\n isOverflow(): boolean {\n return this.eventCount > this.maxEvents;\n }\n\n getFilePath(): string {\n return this.filePath;\n 
}\n}\n"],"mappings":";;;AAAA,OAAO,QAAQ;AACf,OAAO,UAAU;AAMV,IAAM,cAAN,MAAkB;AAAA,EAKvB,YACU,WACA,WACR,UAAyB,CAAC,GAC1B;AAHQ;AACA;AAGR,SAAK,WAAW,KAAK,KAAK,WAAW,GAAG,SAAS,QAAQ;AACzD,SAAK,YAAY,QAAQ,aAAa;AAEtC,QAAI,GAAG,WAAW,KAAK,QAAQ,GAAG;AAChC,YAAM,UAAU,GAAG,aAAa,KAAK,UAAU,OAAO,EAAE,KAAK;AAC7D,WAAK,aAAa,UAAU,QAAQ,MAAM,IAAI,EAAE,SAAS;AAAA,IAC3D;AAAA,EACF;AAAA,EAhBQ;AAAA,EACA;AAAA,EACA,aAAa;AAAA,EAgBrB,OAAO,OAAsC;AAC3C,OAAG,UAAU,KAAK,WAAW,EAAE,WAAW,KAAK,CAAC;AAEhD,UAAM,OAAO,KAAK,UAAU;AAAA,MAC1B,GAAG;AAAA,MACH,WAAW,MAAM,cAAa,oBAAI,KAAK,GAAE,YAAY;AAAA,IACvD,CAAC;AAED,OAAG,eAAe,KAAK,UAAU,OAAO,IAAI;AAC5C,SAAK;AAAA,EACP;AAAA,EAEA,UAA0C;AACxC,QAAI,CAAC,GAAG,WAAW,KAAK,QAAQ,EAAG,QAAO,CAAC;AAC3C,UAAM,UAAU,GAAG,aAAa,KAAK,UAAU,OAAO,EAAE,KAAK;AAC7D,QAAI,CAAC,QAAS,QAAO,CAAC;AACtB,WAAO,QAAQ,MAAM,IAAI,EAAE,IAAI,CAAC,SAAS,KAAK,MAAM,IAAI,CAAC;AAAA,EAC3D;AAAA,EAEA,QAAgB;AACd,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,SAAkB;AAChB,WAAO,GAAG,WAAW,KAAK,QAAQ;AAAA,EACpC;AAAA,EAEA,SAAe;AACb,QAAI,GAAG,WAAW,KAAK,QAAQ,GAAG;AAChC,SAAG,WAAW,KAAK,QAAQ;AAAA,IAC7B;AACA,SAAK,aAAa;AAAA,EACpB;AAAA,EAEA,aAAsB;AACpB,WAAO,KAAK,aAAa,KAAK;AAAA,EAChC;AAAA,EAEA,cAAsB;AACpB,WAAO,KAAK;AAAA,EACd;AACF;","names":[]}
@@ -0,0 +1,33 @@
import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);

// src/vault/resolve.ts
import path from "path";
import os from "os";
import { execFileSync } from "child_process";

/**
 * Resolve the vault directory.
 * Priority: the MYCO_VAULT_DIR env var (a leading `~/` is expanded to the
 * home directory), otherwise `.myco/` under the repo root for `cwd`.
 * @param {string} [cwd] - directory to resolve from (default: process.cwd())
 * @returns {string} the vault directory path
 */
function resolveVaultDir(cwd = process.cwd()) {
  const override = process.env.MYCO_VAULT_DIR;
  if (override) {
    if (override.startsWith("~/")) {
      return path.join(os.homedir(), override.slice(2));
    }
    return override;
  }
  return path.join(resolveRepoRoot(cwd), ".myco");
}

/**
 * Find the main repo root, even from a git worktree:
 * `git rev-parse --git-common-dir` returns the shared .git directory
 * (relative ".git" in a normal repo, absolute in a worktree), and the repo
 * root is its parent. Falls back to `cwd` when git fails (not a repo,
 * git missing, or `cwd` does not exist).
 */
function resolveRepoRoot(cwd) {
  try {
    const gitCommon = execFileSync(
      "git",
      ["rev-parse", "--git-common-dir"],
      { cwd, encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] }
    ).trim();
    return path.resolve(cwd, gitCommon, "..");
  } catch {
    return cwd;
  }
}

export {
  resolveVaultDir
};
//# sourceMappingURL=chunk-N33KUCFP.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/vault/resolve.ts"],"sourcesContent":["import fs from 'node:fs';\nimport path from 'node:path';\nimport os from 'node:os';\nimport { execFileSync } from 'node:child_process';\n\n/**\n * Resolve the vault directory.\n *\n * Priority:\n * 1. MYCO_VAULT_DIR env var (override for public repos or shared vaults)\n * 2. .myco/ in the repo root (default — vault lives with the project)\n *\n * The default is project-local: the vault is committed to git alongside\n * the code, so the team's institutional memory travels with the repo.\n * For public repos or cases where the vault should be separate, set\n * MYCO_VAULT_DIR to an external path.\n *\n * Uses git to find the repo root so this works correctly in\n * git worktrees — worktree agents resolve to the same vault\n * as the main working tree.\n */\nexport function resolveVaultDir(cwd = process.cwd()): string {\n // Override: external vault location\n if (process.env.MYCO_VAULT_DIR) {\n const dir = process.env.MYCO_VAULT_DIR;\n if (dir.startsWith('~/')) {\n return path.join(os.homedir(), dir.slice(2));\n }\n return dir;\n }\n\n // Default: .myco/ in the project root\n return path.join(resolveRepoRoot(cwd), '.myco');\n}\n\n/**\n * Find the main repo root, even from a git worktree.\n *\n * `git rev-parse --git-common-dir` returns the shared .git directory:\n * - In a normal repo: \".git\" (relative)\n * - In a worktree: \"/abs/path/to/main-repo/.git\" (absolute)\n *\n * The repo root is the parent of that path.\n * Falls back to cwd if not in a git repo.\n */\nfunction resolveRepoRoot(cwd: string): string {\n try {\n const gitCommon = execFileSync(\n 'git', ['rev-parse', '--git-common-dir'],\n { cwd, encoding: 'utf-8', stdio: ['pipe', 'pipe', 'pipe'] },\n ).trim();\n return path.resolve(cwd, gitCommon, '..');\n } catch {\n return cwd;\n 
}\n}\n"],"mappings":";;;AACA,OAAO,UAAU;AACjB,OAAO,QAAQ;AACf,SAAS,oBAAoB;AAkBtB,SAAS,gBAAgB,MAAM,QAAQ,IAAI,GAAW;AAE3D,MAAI,QAAQ,IAAI,gBAAgB;AAC9B,UAAM,MAAM,QAAQ,IAAI;AACxB,QAAI,IAAI,WAAW,IAAI,GAAG;AACxB,aAAO,KAAK,KAAK,GAAG,QAAQ,GAAG,IAAI,MAAM,CAAC,CAAC;AAAA,IAC7C;AACA,WAAO;AAAA,EACT;AAGA,SAAO,KAAK,KAAK,gBAAgB,GAAG,GAAG,OAAO;AAChD;AAYA,SAAS,gBAAgB,KAAqB;AAC5C,MAAI;AACF,UAAM,YAAY;AAAA,MAChB;AAAA,MAAO,CAAC,aAAa,kBAAkB;AAAA,MACvC,EAAE,KAAK,UAAU,SAAS,OAAO,CAAC,QAAQ,QAAQ,MAAM,EAAE;AAAA,IAC5D,EAAE,KAAK;AACP,WAAO,KAAK,QAAQ,KAAK,WAAW,IAAI;AAAA,EAC1C,QAAQ;AACN,WAAO;AAAA,EACT;AACF;","names":[]}
@@ -0,0 +1,71 @@
import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);

// src/constants.ts

// --- Token estimation ---
// Approximate characters per token for the chars/4 heuristic.
const CHARS_PER_TOKEN = 4;

// --- Embedding ---
// Max characters of text sent to the embedding model.
const EMBEDDING_INPUT_LIMIT = 8000;

// --- Truncation limits (display/preview, chars) ---
const PROMPT_PREVIEW_CHARS = 300;
const AI_RESPONSE_PREVIEW_CHARS = 500;
const COMMAND_PREVIEW_CHARS = 80;
const CONTENT_SNIPPET_CHARS = 120;
const TOOL_OUTPUT_PREVIEW_CHARS = 200;
const SESSION_SUMMARY_PREVIEW_CHARS = 300;
const RECALL_SUMMARY_PREVIEW_CHARS = 200;

// --- Context injection layer budgets (chars, used with .slice()) ---
const CONTEXT_PLAN_PREVIEW_CHARS = 100;
const CONTEXT_SESSION_PREVIEW_CHARS = 80;
const CONTEXT_MEMORY_PREVIEW_CHARS = 80;

// --- Response token budgets ---
const EXTRACTION_MAX_TOKENS = 2048;
const SUMMARY_MAX_TOKENS = 512;
const TITLE_MAX_TOKENS = 32;
const CLASSIFICATION_MAX_TOKENS = 1024;

// --- Timeouts (ms) ---
const DAEMON_CLIENT_TIMEOUT_MS = 2000;
const DAEMON_HEALTH_CHECK_TIMEOUT_MS = 500;
const LLM_REQUEST_TIMEOUT_MS = 30000;
const EMBEDDING_REQUEST_TIMEOUT_MS = 10000;
const STDIN_TIMEOUT_MS = 100;
const FILE_WATCH_STABILITY_MS = 1000;

// --- Buffer cleanup ---
// Max age for stale buffer files before cleanup: 24 hours in ms.
const STALE_BUFFER_MAX_AGE_MS = 24 * 60 * 60 * 1000;

// --- Retry backoff ---
// Retry delays for the daemon health check (ms).
const DAEMON_HEALTH_RETRY_DELAYS = [100, 200, 400, 800, 1500];

// --- Slug limits ---
const MAX_SLUG_LENGTH = 100;

// --- Content preview for the classification prompt ---
const CANDIDATE_CONTENT_PREVIEW = 2000;

// --- Query limits ---
const LINEAGE_RECENT_SESSIONS_LIMIT = 5;
const RELATED_MEMORIES_LIMIT = 50;

// --- Context injection ---
const SESSION_CONTEXT_MAX_PLANS = 3;
const PROMPT_CONTEXT_MAX_MEMORIES = 3;
// Minimum similarity score for prompt context injection (0-1).
const PROMPT_CONTEXT_MIN_SIMILARITY = 0.3;
// Minimum prompt length (chars) to trigger context search.
const PROMPT_CONTEXT_MIN_LENGTH = 10;

export {
  CHARS_PER_TOKEN,
  EMBEDDING_INPUT_LIMIT,
  PROMPT_PREVIEW_CHARS,
  AI_RESPONSE_PREVIEW_CHARS,
  COMMAND_PREVIEW_CHARS,
  CONTENT_SNIPPET_CHARS,
  TOOL_OUTPUT_PREVIEW_CHARS,
  SESSION_SUMMARY_PREVIEW_CHARS,
  RECALL_SUMMARY_PREVIEW_CHARS,
  CONTEXT_PLAN_PREVIEW_CHARS,
  CONTEXT_SESSION_PREVIEW_CHARS,
  CONTEXT_MEMORY_PREVIEW_CHARS,
  EXTRACTION_MAX_TOKENS,
  SUMMARY_MAX_TOKENS,
  TITLE_MAX_TOKENS,
  CLASSIFICATION_MAX_TOKENS,
  DAEMON_CLIENT_TIMEOUT_MS,
  DAEMON_HEALTH_CHECK_TIMEOUT_MS,
  LLM_REQUEST_TIMEOUT_MS,
  EMBEDDING_REQUEST_TIMEOUT_MS,
  STDIN_TIMEOUT_MS,
  FILE_WATCH_STABILITY_MS,
  STALE_BUFFER_MAX_AGE_MS,
  DAEMON_HEALTH_RETRY_DELAYS,
  MAX_SLUG_LENGTH,
  CANDIDATE_CONTENT_PREVIEW,
  LINEAGE_RECENT_SESSIONS_LIMIT,
  RELATED_MEMORIES_LIMIT,
  SESSION_CONTEXT_MAX_PLANS,
  PROMPT_CONTEXT_MAX_MEMORIES,
  PROMPT_CONTEXT_MIN_SIMILARITY,
  PROMPT_CONTEXT_MIN_LENGTH
};
//# sourceMappingURL=chunk-NYNEJ5QY.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/constants.ts"],"sourcesContent":["/**\n * Shared constants for the Myco codebase.\n * Per CLAUDE.md: \"No Magic Literals — Numeric and string constants\n * MUST NOT appear inline in logic.\"\n */\n\n// --- Token estimation ---\n/** Approximate characters per token for the chars/4 heuristic. */\nexport const CHARS_PER_TOKEN = 4;\n\n// --- Embedding ---\n/** Max characters of text sent to the embedding model. */\nexport const EMBEDDING_INPUT_LIMIT = 8000;\n\n// --- Truncation limits (display/preview) ---\n/** Max chars for a user prompt preview in event summaries. */\nexport const PROMPT_PREVIEW_CHARS = 300;\n/** Max chars for an AI response preview in event summaries. */\nexport const AI_RESPONSE_PREVIEW_CHARS = 500;\n/** Max chars for a command string preview. */\nexport const COMMAND_PREVIEW_CHARS = 80;\n/** Max chars for a content snippet in search results. */\nexport const CONTENT_SNIPPET_CHARS = 120;\n/** Max chars for a tool output preview in hooks. */\nexport const TOOL_OUTPUT_PREVIEW_CHARS = 200;\n/** Max chars for a session summary preview in MCP tools. */\nexport const SESSION_SUMMARY_PREVIEW_CHARS = 300;\n/** Max chars for a recall summary preview. */\nexport const RECALL_SUMMARY_PREVIEW_CHARS = 200;\n\n// --- Context injection layer budgets (chars, not tokens — used with .slice()) ---\nexport const CONTEXT_PLAN_PREVIEW_CHARS = 100;\nexport const CONTEXT_SESSION_PREVIEW_CHARS = 80;\nexport const CONTEXT_MEMORY_PREVIEW_CHARS = 80;\n\n// --- Processor maxTokens budgets ---\n/** Response token budget for observation extraction. */\nexport const EXTRACTION_MAX_TOKENS = 2048;\n/** Response token budget for session summary. */\nexport const SUMMARY_MAX_TOKENS = 512;\n/** Response token budget for session title generation. */\nexport const TITLE_MAX_TOKENS = 32;\n/** Response token budget for artifact classification. 
*/\nexport const CLASSIFICATION_MAX_TOKENS = 1024;\n\n// --- Timeouts ---\n/** Daemon client HTTP request timeout (ms). */\nexport const DAEMON_CLIENT_TIMEOUT_MS = 2000;\n/** Health check timeout (ms) — fail fast if daemon isn't responding. */\nexport const DAEMON_HEALTH_CHECK_TIMEOUT_MS = 500;\n/** LLM request timeout (ms) — prevents hung requests from blocking stop processing. */\nexport const LLM_REQUEST_TIMEOUT_MS = 30_000;\n/** Embedding request timeout (ms). */\nexport const EMBEDDING_REQUEST_TIMEOUT_MS = 10_000;\n/** Stdin read timeout for hooks (ms). */\nexport const STDIN_TIMEOUT_MS = 100;\n/** Chokidar write stability threshold (ms). */\nexport const FILE_WATCH_STABILITY_MS = 1000;\n\n// --- Buffer cleanup ---\n/** Max age for stale buffer files before cleanup (ms). */\nexport const STALE_BUFFER_MAX_AGE_MS = 24 * 60 * 60 * 1000;\n\n// --- Retry backoff ---\n/** Retry delays for daemon health check (ms). */\nexport const DAEMON_HEALTH_RETRY_DELAYS = [100, 200, 400, 800, 1500];\n\n// --- Slug limits ---\n/** Max length for slugified artifact IDs. */\nexport const MAX_SLUG_LENGTH = 100;\n\n// --- Content preview for classification prompt ---\n/** Max chars of file content per candidate in classification prompt. */\nexport const CANDIDATE_CONTENT_PREVIEW = 2000;\n\n// --- Transcript mining ---\n/** Minimum content length to consider a transcript entry meaningful. */\nexport const MIN_TRANSCRIPT_CONTENT_LENGTH = 10;\n\n// --- Query limits ---\n/** Max recent sessions to check for lineage heuristics. */\nexport const LINEAGE_RECENT_SESSIONS_LIMIT = 5;\n/** Max related memories to query for session notes. */\nexport const RELATED_MEMORIES_LIMIT = 50;\n\n// --- Context injection ---\n/** Max active plans to inject at session start. */\nexport const SESSION_CONTEXT_MAX_PLANS = 3;\n/** Max memories to inject per prompt. */\nexport const PROMPT_CONTEXT_MAX_MEMORIES = 3;\n/** Minimum similarity score for prompt context injection (0-1). 
*/\nexport const PROMPT_CONTEXT_MIN_SIMILARITY = 0.3;\n/** Max token budget for session-start context injection. */\nexport const SESSION_CONTEXT_MAX_TOKENS = 500;\n/** Max token budget for per-prompt context injection. */\nexport const PROMPT_CONTEXT_MAX_TOKENS = 300;\n/** Minimum prompt length to trigger context search. */\nexport const PROMPT_CONTEXT_MIN_LENGTH = 10;\n"],"mappings":";;;AAQO,IAAM,kBAAkB;AAIxB,IAAM,wBAAwB;AAI9B,IAAM,uBAAuB;AAE7B,IAAM,4BAA4B;AAElC,IAAM,wBAAwB;AAE9B,IAAM,wBAAwB;AAE9B,IAAM,4BAA4B;AAElC,IAAM,gCAAgC;AAEtC,IAAM,+BAA+B;AAGrC,IAAM,6BAA6B;AACnC,IAAM,gCAAgC;AACtC,IAAM,+BAA+B;AAIrC,IAAM,wBAAwB;AAE9B,IAAM,qBAAqB;AAE3B,IAAM,mBAAmB;AAEzB,IAAM,4BAA4B;AAIlC,IAAM,2BAA2B;AAEjC,IAAM,iCAAiC;AAEvC,IAAM,yBAAyB;AAE/B,IAAM,+BAA+B;AAErC,IAAM,mBAAmB;AAEzB,IAAM,0BAA0B;AAIhC,IAAM,0BAA0B,KAAK,KAAK,KAAK;AAI/C,IAAM,6BAA6B,CAAC,KAAK,KAAK,KAAK,KAAK,IAAI;AAI5D,IAAM,kBAAkB;AAIxB,IAAM,4BAA4B;AAQlC,IAAM,gCAAgC;AAEtC,IAAM,yBAAyB;AAI/B,IAAM,4BAA4B;AAElC,IAAM,8BAA8B;AAEpC,IAAM,gCAAgC;AAMtC,IAAM,4BAA4B;","names":[]}
@@ -0,0 +1,111 @@
1
+ import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
2
+
3
+ // src/index/sqlite.ts
4
+ import Database from "better-sqlite3";
5
// src/index/sqlite.ts
// SQLite-backed note index: one row per note, with frontmatter stored as a
// JSON string so it can be filtered via json_extract().
var MycoIndex = class {
  db;

  /**
   * Open (or create) the index database at `dbPath` and ensure the schema
   * exists. WAL mode improves concurrent reader behavior; foreign-key
   * enforcement is off by default in SQLite, so it is enabled explicitly.
   */
  constructor(dbPath) {
    this.db = new Database(dbPath);
    this.db.pragma("journal_mode = WAL");
    this.db.pragma("foreign_keys = ON");
    this.init();
  }

  /** Create the notes table and its lookup indexes if they do not exist. */
  init() {
    this.db.exec(`
      CREATE TABLE IF NOT EXISTS notes (
        path TEXT PRIMARY KEY,
        type TEXT NOT NULL,
        id TEXT NOT NULL,
        title TEXT NOT NULL DEFAULT '',
        content TEXT NOT NULL DEFAULT '',
        frontmatter TEXT NOT NULL DEFAULT '{}',
        created TEXT NOT NULL,
        updated_at TEXT NOT NULL DEFAULT (datetime('now'))
      );

      CREATE INDEX IF NOT EXISTS idx_notes_type ON notes(type);
      CREATE INDEX IF NOT EXISTS idx_notes_id ON notes(id);
      CREATE INDEX IF NOT EXISTS idx_notes_created ON notes(created);
    `);
  }

  /** Read a single pragma value from the underlying database. */
  getPragma(name) {
    return this.db.pragma(`${name}`, { simple: true });
  }

  /** Expose the underlying better-sqlite3 handle. */
  getDb() {
    return this.db;
  }

  /** Insert a note row, or overwrite every column when the path exists. */
  upsertNote(note) {
    const sql = `
      INSERT INTO notes (path, type, id, title, content, frontmatter, created, updated_at)
      VALUES (?, ?, ?, ?, ?, ?, ?, datetime('now'))
      ON CONFLICT(path) DO UPDATE SET
        type = excluded.type,
        id = excluded.id,
        title = excluded.title,
        content = excluded.content,
        frontmatter = excluded.frontmatter,
        created = excluded.created,
        updated_at = datetime('now')
    `;
    this.db
      .prepare(sql)
      .run(
        note.path,
        note.type,
        note.id,
        note.title,
        note.content,
        JSON.stringify(note.frontmatter),
        note.created
      );
  }

  /** Fetch one note by path, or null. Frontmatter comes back parsed. */
  getNoteByPath(notePath) {
    const row = this.db.prepare("SELECT * FROM notes WHERE path = ?").get(notePath);
    return row ? this.#hydrate(row) : null;
  }

  /** Remove the note at `notePath` (no-op when absent). */
  deleteNote(notePath) {
    this.db.prepare("DELETE FROM notes WHERE path = ?").run(notePath);
  }

  /**
   * List notes newest-first, optionally filtered by type, id, creation time,
   * and frontmatter fields (via json_extract). `limit`, when set, is applied
   * after filtering.
   */
  query(options) {
    const clauses = [];
    const params = [];
    if (options.type) {
      clauses.push("type = ?");
      params.push(options.type);
    }
    if (options.id) {
      clauses.push("id = ?");
      params.push(options.id);
    }
    if (options.since) {
      clauses.push("created >= ?");
      params.push(options.since);
    }
    if (options.frontmatter) {
      for (const [key, value] of Object.entries(options.frontmatter)) {
        clauses.push(`json_extract(frontmatter, '$.' || ?) = ?`);
        params.push(key, value);
      }
    }
    const where = clauses.length > 0 ? `WHERE ${clauses.join(" AND ")}` : "";
    const limitClause = options.limit ? "LIMIT ?" : "";
    if (options.limit) params.push(options.limit);
    const rows = this.db
      .prepare(`SELECT * FROM notes ${where} ORDER BY created DESC ${limitClause}`)
      .all(...params);
    return rows.map((row) => this.#hydrate(row));
  }

  /** Fetch all notes whose id is in `ids`. Row order is unspecified. */
  queryByIds(ids) {
    if (ids.length === 0) return [];
    const placeholders = ids.map(() => "?").join(",");
    const rows = this.db
      .prepare(`SELECT * FROM notes WHERE id IN (${placeholders})`)
      .all(...ids);
    return rows.map((row) => this.#hydrate(row));
  }

  /** Close the database handle. */
  close() {
    this.db.close();
  }

  /** Deserialize the frontmatter JSON column on a raw row. */
  #hydrate(row) {
    return { ...row, frontmatter: JSON.parse(row.frontmatter) };
  }
};
107
+
108
+ export {
109
+ MycoIndex
110
+ };
111
+ //# sourceMappingURL=chunk-PA3VMINE.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/index/sqlite.ts"],"sourcesContent":["import Database from 'better-sqlite3';\n\nexport interface IndexedNote {\n path: string;\n type: string;\n id: string;\n title: string;\n content: string;\n frontmatter: Record<string, unknown>;\n created: string;\n updated_at?: string;\n}\n\nexport interface QueryOptions {\n type?: string;\n id?: string;\n limit?: number;\n since?: string;\n /** Filter by frontmatter fields using json_extract. Applied before LIMIT. */\n frontmatter?: Record<string, string>;\n}\n\nexport class MycoIndex {\n private db: Database.Database;\n\n constructor(dbPath: string) {\n this.db = new Database(dbPath);\n this.db.pragma('journal_mode = WAL');\n this.db.pragma('foreign_keys = ON');\n this.init();\n }\n\n private init(): void {\n this.db.exec(`\n CREATE TABLE IF NOT EXISTS notes (\n path TEXT PRIMARY KEY,\n type TEXT NOT NULL,\n id TEXT NOT NULL,\n title TEXT NOT NULL DEFAULT '',\n content TEXT NOT NULL DEFAULT '',\n frontmatter TEXT NOT NULL DEFAULT '{}',\n created TEXT NOT NULL,\n updated_at TEXT NOT NULL DEFAULT (datetime('now'))\n );\n\n CREATE INDEX IF NOT EXISTS idx_notes_type ON notes(type);\n CREATE INDEX IF NOT EXISTS idx_notes_id ON notes(id);\n CREATE INDEX IF NOT EXISTS idx_notes_created ON notes(created);\n `);\n }\n\n getPragma(name: string): unknown {\n return this.db.pragma(`${name}`, { simple: true });\n }\n\n getDb(): Database.Database {\n return this.db;\n }\n\n upsertNote(note: Omit<IndexedNote, 'updated_at'>): void {\n const stmt = this.db.prepare(`\n INSERT INTO notes (path, type, id, title, content, frontmatter, created, updated_at)\n VALUES (?, ?, ?, ?, ?, ?, ?, datetime('now'))\n ON CONFLICT(path) DO UPDATE SET\n type = excluded.type,\n id = excluded.id,\n title = excluded.title,\n content = excluded.content,\n frontmatter = excluded.frontmatter,\n created = excluded.created,\n updated_at = datetime('now')\n `);\n stmt.run(\n note.path,\n note.type,\n note.id,\n note.title,\n 
note.content,\n JSON.stringify(note.frontmatter),\n note.created,\n );\n }\n\n getNoteByPath(notePath: string): IndexedNote | null {\n const row = this.db.prepare('SELECT * FROM notes WHERE path = ?').get(notePath) as any;\n if (!row) return null;\n return { ...row, frontmatter: JSON.parse(row.frontmatter) };\n }\n\n deleteNote(notePath: string): void {\n this.db.prepare('DELETE FROM notes WHERE path = ?').run(notePath);\n }\n\n query(options: QueryOptions): IndexedNote[] {\n const conditions: string[] = [];\n const params: unknown[] = [];\n\n if (options.type) {\n conditions.push('type = ?');\n params.push(options.type);\n }\n if (options.id) {\n conditions.push('id = ?');\n params.push(options.id);\n }\n if (options.since) {\n conditions.push('created >= ?');\n params.push(options.since);\n }\n if (options.frontmatter) {\n for (const [key, value] of Object.entries(options.frontmatter)) {\n conditions.push(`json_extract(frontmatter, '$.' || ?) = ?`);\n params.push(key, value);\n }\n }\n\n const where = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';\n const limitClause = options.limit ? 'LIMIT ?' 
: '';\n if (options.limit) params.push(options.limit);\n const sql = `SELECT * FROM notes ${where} ORDER BY created DESC ${limitClause}`;\n\n const rows = this.db.prepare(sql).all(...params) as any[];\n return rows.map((row) => ({ ...row, frontmatter: JSON.parse(row.frontmatter) }));\n }\n\n queryByIds(ids: string[]): IndexedNote[] {\n if (ids.length === 0) return [];\n const placeholders = ids.map(() => '?').join(',');\n const sql = `SELECT * FROM notes WHERE id IN (${placeholders})`;\n const rows = this.db.prepare(sql).all(...ids) as any[];\n return rows.map((row) => ({ ...row, frontmatter: JSON.parse(row.frontmatter) }));\n }\n\n close(): void {\n this.db.close();\n }\n}\n"],"mappings":";;;AAAA,OAAO,cAAc;AAsBd,IAAM,YAAN,MAAgB;AAAA,EACb;AAAA,EAER,YAAY,QAAgB;AAC1B,SAAK,KAAK,IAAI,SAAS,MAAM;AAC7B,SAAK,GAAG,OAAO,oBAAoB;AACnC,SAAK,GAAG,OAAO,mBAAmB;AAClC,SAAK,KAAK;AAAA,EACZ;AAAA,EAEQ,OAAa;AACnB,SAAK,GAAG,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAeZ;AAAA,EACH;AAAA,EAEA,UAAU,MAAuB;AAC/B,WAAO,KAAK,GAAG,OAAO,GAAG,IAAI,IAAI,EAAE,QAAQ,KAAK,CAAC;AAAA,EACnD;AAAA,EAEA,QAA2B;AACzB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,WAAW,MAA6C;AACtD,UAAM,OAAO,KAAK,GAAG,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAW5B;AACD,SAAK;AAAA,MACH,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK,UAAU,KAAK,WAAW;AAAA,MAC/B,KAAK;AAAA,IACP;AAAA,EACF;AAAA,EAEA,cAAc,UAAsC;AAClD,UAAM,MAAM,KAAK,GAAG,QAAQ,oCAAoC,EAAE,IAAI,QAAQ;AAC9E,QAAI,CAAC,IAAK,QAAO;AACjB,WAAO,EAAE,GAAG,KAAK,aAAa,KAAK,MAAM,IAAI,WAAW,EAAE;AAAA,EAC5D;AAAA,EAEA,WAAW,UAAwB;AACjC,SAAK,GAAG,QAAQ,kCAAkC,EAAE,IAAI,QAAQ;AAAA,EAClE;AAAA,EAEA,MAAM,SAAsC;AAC1C,UAAM,aAAuB,CAAC;AAC9B,UAAM,SAAoB,CAAC;AAE3B,QAAI,QAAQ,MAAM;AAChB,iBAAW,KAAK,UAAU;AAC1B,aAAO,KAAK,QAAQ,IAAI;AAAA,IAC1B;AACA,QAAI,QAAQ,IAAI;AACd,iBAAW,KAAK,QAAQ;AACxB,aAAO,KAAK,QAAQ,EAAE;AAAA,IACxB;AACA,QAAI,QAAQ,OAAO;AACjB,iBAAW,KAAK,cAAc;AAC9B,aAAO,KAAK,QAAQ,KAAK;AAAA,IAC3B;AACA,QAAI,QAAQ,aAAa;AACvB,iBAAW,CAAC,KAAK,KAAK,K
AAK,OAAO,QAAQ,QAAQ,WAAW,GAAG;AAC9D,mBAAW,KAAK,0CAA0C;AAC1D,eAAO,KAAK,KAAK,KAAK;AAAA,MACxB;AAAA,IACF;AAEA,UAAM,QAAQ,WAAW,SAAS,IAAI,SAAS,WAAW,KAAK,OAAO,CAAC,KAAK;AAC5E,UAAM,cAAc,QAAQ,QAAQ,YAAY;AAChD,QAAI,QAAQ,MAAO,QAAO,KAAK,QAAQ,KAAK;AAC5C,UAAM,MAAM,uBAAuB,KAAK,0BAA0B,WAAW;AAE7E,UAAM,OAAO,KAAK,GAAG,QAAQ,GAAG,EAAE,IAAI,GAAG,MAAM;AAC/C,WAAO,KAAK,IAAI,CAAC,SAAS,EAAE,GAAG,KAAK,aAAa,KAAK,MAAM,IAAI,WAAW,EAAE,EAAE;AAAA,EACjF;AAAA,EAEA,WAAW,KAA8B;AACvC,QAAI,IAAI,WAAW,EAAG,QAAO,CAAC;AAC9B,UAAM,eAAe,IAAI,IAAI,MAAM,GAAG,EAAE,KAAK,GAAG;AAChD,UAAM,MAAM,oCAAoC,YAAY;AAC5D,UAAM,OAAO,KAAK,GAAG,QAAQ,GAAG,EAAE,IAAI,GAAG,GAAG;AAC5C,WAAO,KAAK,IAAI,CAAC,SAAS,EAAE,GAAG,KAAK,aAAa,KAAK,MAAM,IAAI,WAAW,EAAE,EAAE;AAAA,EACjF;AAAA,EAEA,QAAc;AACZ,SAAK,GAAG,MAAM;AAAA,EAChB;AACF;","names":[]}
@@ -0,0 +1,44 @@
1
+ import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
2
// esbuild runtime helpers: CommonJS <-> ESM interop shims for bundled chunks.
var {
  create: __create,
  defineProperty: __defProp,
  getOwnPropertyDescriptor: __getOwnPropDesc,
  getOwnPropertyNames: __getOwnPropNames,
  getPrototypeOf: __getProtoOf
} = Object;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Dynamic-require shim: delegates to a real `require` when one exists,
// otherwise throws (via Proxy when available) on any dynamic require.
var __require = (() => {
  const fallback = function (id) {
    if (typeof require !== "undefined") return require.apply(this, arguments);
    throw Error('Dynamic require of "' + id + '" is not supported');
  };
  if (typeof require !== "undefined") return require;
  if (typeof Proxy === "undefined") return fallback;
  return new Proxy(fallback, {
    get: (target, prop) => (typeof require !== "undefined" ? require : target)[prop]
  });
})();
// Wrap a CommonJS module body; evaluation is deferred until first call and
// the resulting exports object is cached for subsequent calls.
var __commonJS = (cb, mod) => function __require2() {
  if (!mod) {
    mod = { exports: {} };
    (0, cb[__getOwnPropNames(cb)[0]])(mod.exports, mod);
  }
  return mod.exports;
};
// Define lazy, enumerable getters for each named export on `target`.
var __export = (target, all) => {
  for (var name in all) {
    __defProp(target, name, { get: all[name], enumerable: true });
  }
};
// Copy own properties from `from` onto `to` as live getters, skipping keys
// already present on `to` and the optional `except` key.
var __copyProps = (to, from, except, desc) => {
  if (from && (typeof from === "object" || typeof from === "function")) {
    for (const key of __getOwnPropNames(from)) {
      if (__hasOwnProp.call(to, key) || key === except) continue;
      desc = __getOwnPropDesc(from, key);
      __defProp(to, key, { get: () => from[key], enumerable: !desc || desc.enumerable });
    }
  }
  return to;
};
// Convert a (possibly CommonJS) module object into an ESM-shaped namespace.
var __toESM = (mod, isNodeMode, target) => {
  target = mod != null ? __create(__getProtoOf(mod)) : {};
  // If the importer is in node compatibility mode, or the module is plain
  // CommonJS (no "__esModule" marker), expose module.exports as `default`.
  if (isNodeMode || !mod || !mod.__esModule) {
    __defProp(target, "default", { value: mod, enumerable: true });
  }
  return __copyProps(target, mod);
};
37
+
38
+ export {
39
+ __require,
40
+ __commonJS,
41
+ __export,
42
+ __toESM
43
+ };
44
+ //# sourceMappingURL=chunk-PZUWP5VK.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
@@ -0,0 +1,104 @@
1
+ import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
2
+ import {
3
+ AgentRegistry
4
+ } from "./chunk-TWDS6MSU.js";
5
+ import {
6
+ DAEMON_CLIENT_TIMEOUT_MS,
7
+ DAEMON_HEALTH_CHECK_TIMEOUT_MS,
8
+ DAEMON_HEALTH_RETRY_DELAYS
9
+ } from "./chunk-NYNEJ5QY.js";
10
+
11
+ // src/hooks/client.ts
12
+ import fs from "fs";
13
+ import path from "path";
14
+ import { spawn } from "child_process";
15
// src/hooks/client.ts
// Thin HTTP client for the local myco daemon. Connection details are
// discovered from `<vault>/daemon.json`; every failure collapses to
// `{ ok: false }` / `false` so hook processing stays best-effort.
var DaemonClient = class {
  vaultDir;

  constructor(vaultDir) {
    this.vaultDir = vaultDir;
  }

  /** POST a JSON body to the daemon. Never throws. */
  async post(endpoint, body) {
    try {
      const info = this.readDaemonJson();
      if (!info) return { ok: false };
      const res = await fetch(`http://127.0.0.1:${info.port}${endpoint}`, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify(body),
        signal: AbortSignal.timeout(DAEMON_CLIENT_TIMEOUT_MS)
      });
      if (!res.ok) return { ok: false };
      return { ok: true, data: await res.json() };
    } catch {
      return { ok: false };
    }
  }

  /** GET a JSON payload from the daemon. Never throws. */
  async get(endpoint) {
    try {
      const info = this.readDaemonJson();
      if (!info) return { ok: false };
      const res = await fetch(`http://127.0.0.1:${info.port}${endpoint}`, {
        signal: AbortSignal.timeout(DAEMON_CLIENT_TIMEOUT_MS)
      });
      if (!res.ok) return { ok: false };
      return { ok: true, data: await res.json() };
    } catch {
      return { ok: false };
    }
  }

  /**
   * Probe /health with a short timeout — if the daemon cannot answer
   * quickly it is treated as down.
   */
  async isHealthy() {
    try {
      const info = this.readDaemonJson();
      if (!info) return false;
      const res = await fetch(`http://127.0.0.1:${info.port}/health`, {
        signal: AbortSignal.timeout(DAEMON_HEALTH_CHECK_TIMEOUT_MS)
      });
      if (!res.ok) return false;
      const data = await res.json();
      return data.myco === true;
    } catch {
      return false;
    }
  }

  /**
   * Ensure the daemon is running. Spawns it if unhealthy and waits for it
   * to become ready. Returns true if the daemon is healthy after this call.
   */
  async ensureRunning() {
    if (await this.isHealthy()) return true;
    this.spawnDaemon();
    // Back off progressively while the freshly spawned daemon boots.
    for (const delay of DAEMON_HEALTH_RETRY_DELAYS) {
      await new Promise((resolve) => setTimeout(resolve, delay));
      if (await this.isHealthy()) return true;
    }
    return false;
  }

  /** Launch the daemon as a detached background process (fire-and-forget). */
  spawnDaemon() {
    // Resolve the daemon entry point via the agent registry, falling back
    // to a path relative to this module.
    const pluginRoot = new AgentRegistry().resolvePluginRoot();
    const daemonScript = pluginRoot
      ? path.join(pluginRoot, "dist", "src", "daemon", "main.js")
      : path.resolve(import.meta.dirname, "..", "daemon", "main.js");
    if (!fs.existsSync(daemonScript)) return;
    const child = spawn("node", [daemonScript, "--vault", this.vaultDir], {
      detached: true,
      stdio: "ignore"
    });
    child.unref();
  }

  /** Parse `<vault>/daemon.json`; null when missing, malformed, or portless. */
  readDaemonJson() {
    try {
      const raw = fs.readFileSync(path.join(this.vaultDir, "daemon.json"), "utf-8");
      const info = JSON.parse(raw);
      return typeof info.port === "number" ? info : null;
    } catch {
      return null;
    }
  }
};
100
+
101
+ export {
102
+ DaemonClient
103
+ };
104
+ //# sourceMappingURL=chunk-SVUINMDD.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/hooks/client.ts"],"sourcesContent":["import fs from 'node:fs';\nimport path from 'node:path';\nimport { spawn } from 'node:child_process';\nimport { DAEMON_CLIENT_TIMEOUT_MS, DAEMON_HEALTH_CHECK_TIMEOUT_MS, DAEMON_HEALTH_RETRY_DELAYS } from '../constants.js';\nimport { AgentRegistry } from '../agents/registry.js';\n\ninterface DaemonInfo {\n pid: number;\n port: number;\n}\n\ninterface ClientResult {\n ok: boolean;\n data?: any;\n}\n\nexport class DaemonClient {\n private vaultDir: string;\n\n constructor(vaultDir: string) {\n this.vaultDir = vaultDir;\n }\n\n async post(endpoint: string, body: unknown): Promise<ClientResult> {\n try {\n const info = this.readDaemonJson();\n if (!info) return { ok: false };\n\n const res = await fetch(`http://127.0.0.1:${info.port}${endpoint}`, {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify(body),\n signal: AbortSignal.timeout(DAEMON_CLIENT_TIMEOUT_MS),\n });\n\n if (!res.ok) return { ok: false };\n const data = await res.json();\n return { ok: true, data };\n } catch {\n return { ok: false };\n }\n }\n\n async get(endpoint: string): Promise<ClientResult> {\n try {\n const info = this.readDaemonJson();\n if (!info) return { ok: false };\n\n const res = await fetch(`http://127.0.0.1:${info.port}${endpoint}`, {\n signal: AbortSignal.timeout(DAEMON_CLIENT_TIMEOUT_MS),\n });\n\n if (!res.ok) return { ok: false };\n const data = await res.json();\n return { ok: true, data };\n } catch {\n return { ok: false };\n }\n }\n\n async isHealthy(): Promise<boolean> {\n try {\n const info = this.readDaemonJson();\n if (!info) return false;\n\n // Health checks use a shorter timeout than regular requests —\n // if the daemon doesn't respond in 500ms it's effectively down.\n const res = await fetch(`http://127.0.0.1:${info.port}/health`, {\n signal: AbortSignal.timeout(DAEMON_HEALTH_CHECK_TIMEOUT_MS),\n });\n if (!res.ok) return false;\n const data = await res.json() 
as Record<string, unknown>;\n return data.myco === true;\n } catch {\n return false;\n }\n }\n\n /**\n * Ensure the daemon is running. Spawns it if unhealthy and waits for it\n * to become ready. Returns true if the daemon is healthy after this call.\n */\n async ensureRunning(): Promise<boolean> {\n if (await this.isHealthy()) return true;\n\n this.spawnDaemon();\n\n for (const delay of DAEMON_HEALTH_RETRY_DELAYS) {\n await new Promise((r) => setTimeout(r, delay));\n if (await this.isHealthy()) return true;\n }\n return false;\n }\n\n spawnDaemon(): void {\n // Resolve daemon script via agent registry (checks all known agent env vars)\n // or fall back to relative path from this module.\n const pluginRoot = new AgentRegistry().resolvePluginRoot();\n const daemonScript = pluginRoot\n ? path.join(pluginRoot, 'dist', 'src', 'daemon', 'main.js')\n : path.resolve(import.meta.dirname, '..', 'daemon', 'main.js');\n if (!fs.existsSync(daemonScript)) return;\n\n const child = spawn('node', [daemonScript, '--vault', this.vaultDir], {\n detached: true,\n stdio: 'ignore',\n });\n child.unref();\n }\n\n private readDaemonJson(): DaemonInfo | null {\n try {\n const jsonPath = path.join(this.vaultDir, 'daemon.json');\n const content = fs.readFileSync(jsonPath, 'utf-8');\n const info = JSON.parse(content);\n if (typeof info.port !== 'number') return null;\n return info as DaemonInfo;\n } catch {\n return null;\n }\n 
}\n}\n"],"mappings":";;;;;;;;;;;AAAA,OAAO,QAAQ;AACf,OAAO,UAAU;AACjB,SAAS,aAAa;AAcf,IAAM,eAAN,MAAmB;AAAA,EAChB;AAAA,EAER,YAAY,UAAkB;AAC5B,SAAK,WAAW;AAAA,EAClB;AAAA,EAEA,MAAM,KAAK,UAAkB,MAAsC;AACjE,QAAI;AACF,YAAM,OAAO,KAAK,eAAe;AACjC,UAAI,CAAC,KAAM,QAAO,EAAE,IAAI,MAAM;AAE9B,YAAM,MAAM,MAAM,MAAM,oBAAoB,KAAK,IAAI,GAAG,QAAQ,IAAI;AAAA,QAClE,QAAQ;AAAA,QACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,QAC9C,MAAM,KAAK,UAAU,IAAI;AAAA,QACzB,QAAQ,YAAY,QAAQ,wBAAwB;AAAA,MACtD,CAAC;AAED,UAAI,CAAC,IAAI,GAAI,QAAO,EAAE,IAAI,MAAM;AAChC,YAAM,OAAO,MAAM,IAAI,KAAK;AAC5B,aAAO,EAAE,IAAI,MAAM,KAAK;AAAA,IAC1B,QAAQ;AACN,aAAO,EAAE,IAAI,MAAM;AAAA,IACrB;AAAA,EACF;AAAA,EAEA,MAAM,IAAI,UAAyC;AACjD,QAAI;AACF,YAAM,OAAO,KAAK,eAAe;AACjC,UAAI,CAAC,KAAM,QAAO,EAAE,IAAI,MAAM;AAE9B,YAAM,MAAM,MAAM,MAAM,oBAAoB,KAAK,IAAI,GAAG,QAAQ,IAAI;AAAA,QAClE,QAAQ,YAAY,QAAQ,wBAAwB;AAAA,MACtD,CAAC;AAED,UAAI,CAAC,IAAI,GAAI,QAAO,EAAE,IAAI,MAAM;AAChC,YAAM,OAAO,MAAM,IAAI,KAAK;AAC5B,aAAO,EAAE,IAAI,MAAM,KAAK;AAAA,IAC1B,QAAQ;AACN,aAAO,EAAE,IAAI,MAAM;AAAA,IACrB;AAAA,EACF;AAAA,EAEA,MAAM,YAA8B;AAClC,QAAI;AACF,YAAM,OAAO,KAAK,eAAe;AACjC,UAAI,CAAC,KAAM,QAAO;AAIlB,YAAM,MAAM,MAAM,MAAM,oBAAoB,KAAK,IAAI,WAAW;AAAA,QAC9D,QAAQ,YAAY,QAAQ,8BAA8B;AAAA,MAC5D,CAAC;AACD,UAAI,CAAC,IAAI,GAAI,QAAO;AACpB,YAAM,OAAO,MAAM,IAAI,KAAK;AAC5B,aAAO,KAAK,SAAS;AAAA,IACvB,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,gBAAkC;AACtC,QAAI,MAAM,KAAK,UAAU,EAAG,QAAO;AAEnC,SAAK,YAAY;AAEjB,eAAW,SAAS,4BAA4B;AAC9C,YAAM,IAAI,QAAQ,CAAC,MAAM,WAAW,GAAG,KAAK,CAAC;AAC7C,UAAI,MAAM,KAAK,UAAU,EAAG,QAAO;AAAA,IACrC;AACA,WAAO;AAAA,EACT;AAAA,EAEA,cAAoB;AAGlB,UAAM,aAAa,IAAI,cAAc,EAAE,kBAAkB;AACzD,UAAM,eAAe,aACjB,KAAK,KAAK,YAAY,QAAQ,OAAO,UAAU,SAAS,IACxD,KAAK,QAAQ,YAAY,SAAS,MAAM,UAAU,SAAS;AAC/D,QAAI,CAAC,GAAG,WAAW,YAAY,EAAG;AAElC,UAAM,QAAQ,MAAM,QAAQ,CAAC,cAAc,WAAW,KAAK,QAAQ,GAAG;AAAA,MACpE,UAAU;AAAA,MACV,OAAO;AAAA,IACT,CAAC;AACD,UAAM,MAAM;AAAA,EACd;AAAA,EAEQ,iBAAoC;AAC1C,QAAI;AACF,YAAM,WAAW,KAAK,KAAK,KAAK,UAAU,aAAa;AACvD,YAAM,UAAU,GAAG,aAAa,UAAU,OAAO;AACjD,YAAM,OAAO,KAAK,MAAM,OAAO;AAC/
B,UAAI,OAAO,KAAK,SAAS,SAAU,QAAO;AAC1C,aAAO;AAAA,IACT,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AACF;","names":[]}
@@ -0,0 +1,91 @@
1
+ import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
2
+ import {
3
+ LEVEL_ORDER
4
+ } from "./chunk-AOMX45LH.js";
5
+
6
+ // src/logs/reader.ts
7
+ import fs from "fs";
8
+ import path from "path";
9
// src/logs/reader.ts
/** Default number of entries returned when no limit is given. */
var DEFAULT_LOG_TAIL = 50;
/** Hard ceiling on returned entries, to bound memory use. */
var MAX_LOG_QUERY_LIMIT = 1e4;
/** Matches daemon.log and rotated daemon.N.log files. */
var DAEMON_LOG_PATTERN = /^daemon(?:\.(\d+))?\.log$/;
var MCP_LOG_FILE = "mcp.jsonl";
/**
 * Query parsed log entries from the JSONL log files in `logDir`.
 * Entries from all files are merged, sorted by timestamp, filtered by the
 * query, and the last `limit` matches are returned (tail behavior).
 */
function queryLogs(logDir, query = {}) {
  const limit = Math.min(query.limit ?? DEFAULT_LOG_TAIL, MAX_LOG_QUERY_LIMIT);
  const files = discoverLogFiles(logDir);
  if (files.length === 0) {
    return { entries: [], total: 0, truncated: false };
  }
  const merged = readAndParse(files);
  // Sort globally by timestamp so daemon and MCP logs interleave correctly.
  merged.sort((x, y) => (x.timestamp ?? "").localeCompare(y.timestamp ?? ""));
  const matching = applyFilters(merged, query);
  const total = matching.length;
  const truncated = total > limit;
  return {
    entries: truncated ? matching.slice(total - limit) : matching,
    total,
    truncated
  };
}
/** Enumerate log files in `logDir`; the current daemon.log sorts last. */
function discoverLogFiles(logDir) {
  let names;
  try {
    names = fs.readdirSync(logDir);
  } catch {
    return [];
  }
  const found = [];
  for (const name of names) {
    if (name === MCP_LOG_FILE) {
      found.push({ path: path.join(logDir, name), order: -1 });
    } else {
      const match = DAEMON_LOG_PATTERN.exec(name);
      if (match) {
        found.push({
          path: path.join(logDir, name),
          order: match[1] ? parseInt(match[1], 10) : 0
        });
      }
    }
  }
  // order 0 is the current daemon.log and always sorts last; other files
  // sort by descending rotation number.
  found.sort((a, b) => {
    if (a.order === 0) return 1;
    if (b.order === 0) return -1;
    return b.order - a.order;
  });
  return found.map((f) => f.path);
}
/** Read each file and JSON-parse every non-blank line; bad lines are skipped. */
function readAndParse(filePaths) {
  const parsed = [];
  for (const file of filePaths) {
    let text;
    try {
      text = fs.readFileSync(file, "utf-8");
    } catch {
      continue;
    }
    for (const line of text.split("\n")) {
      if (!line.trim()) continue;
      try {
        parsed.push(JSON.parse(line));
      } catch {
        // Malformed line — skip.
      }
    }
  }
  return parsed;
}
/** True when `entry` passes every filter present on `query`. */
function matchesFilter(entry, query) {
  if (query.level) {
    // Entries at or above the requested minimum level pass; unknown
    // levels rank lowest.
    const entryOrder = LEVEL_ORDER[entry.level] ?? 0;
    if (entryOrder < LEVEL_ORDER[query.level]) return false;
  }
  if (query.component && entry.component !== query.component) return false;
  if (query.since && entry.timestamp < query.since) return false;
  if (query.until && entry.timestamp > query.until) return false;
  return true;
}
/** Apply level, component, and time-range filters to `entries`. */
function applyFilters(entries, query) {
  return entries.filter((entry) => matchesFilter(entry, query));
}
85
+
86
+ export {
87
+ DEFAULT_LOG_TAIL,
88
+ queryLogs,
89
+ matchesFilter
90
+ };
91
+ //# sourceMappingURL=chunk-TH6GIBXG.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/logs/reader.ts"],"sourcesContent":["import fs from 'node:fs';\nimport path from 'node:path';\nimport type { LogEntry } from '../daemon/logger.js';\nimport { LEVEL_ORDER } from '../daemon/logger.js';\nimport type { LogLevel } from '../daemon/logger.js';\n\nexport { LEVEL_ORDER };\nexport type { LogEntry, LogLevel };\n\nexport interface LogQuery {\n limit?: number;\n level?: LogLevel;\n component?: string;\n since?: string;\n until?: string;\n}\n\nexport interface LogQueryResult {\n entries: LogEntry[];\n total: number;\n truncated: boolean;\n}\n\n/** Default number of entries returned when no limit is specified. */\nexport const DEFAULT_LOG_TAIL = 50;\n\n/** Hard ceiling on entries returned to prevent memory issues. */\nconst MAX_LOG_QUERY_LIMIT = 10_000;\n\n/** Matches daemon.log, rotated daemon.N.log, and mcp.jsonl. */\nconst DAEMON_LOG_PATTERN = /^daemon(?:\\.(\\d+))?\\.log$/;\nconst MCP_LOG_FILE = 'mcp.jsonl';\n\n/**\n * Query parsed log entries from all JSONL log files on disk.\n * Reads both daemon logs and MCP activity logs.\n * Returns the last N matching entries (tail behavior).\n */\nexport function queryLogs(logDir: string, query: LogQuery = {}): LogQueryResult {\n const limit = Math.min(query.limit ?? DEFAULT_LOG_TAIL, MAX_LOG_QUERY_LIMIT);\n\n const logFiles = discoverLogFiles(logDir);\n if (logFiles.length === 0) {\n return { entries: [], total: 0, truncated: false };\n }\n const allEntries = readAndParse(logFiles);\n\n // Sort all entries by timestamp so daemon + MCP logs interleave correctly\n allEntries.sort((a, b) => (a.timestamp ?? '').localeCompare(b.timestamp ?? ''));\n\n const filtered = applyFilters(allEntries, query);\n\n const total = filtered.length;\n const truncated = total > limit;\n const entries = truncated ? filtered.slice(total - limit) : filtered;\n\n return { entries, total, truncated };\n}\n\n/** Discover and sort log files: MCP first, then rotated daemon logs oldest-first, current daemon last. 
*/\nfunction discoverLogFiles(logDir: string): string[] {\n let files: string[];\n try {\n files = fs.readdirSync(logDir);\n } catch {\n return [];\n }\n\n const matched: Array<{ path: string; order: number }> = [];\n for (const file of files) {\n if (file === MCP_LOG_FILE) {\n matched.push({ path: path.join(logDir, file), order: -1 });\n continue;\n }\n const m = DAEMON_LOG_PATTERN.exec(file);\n if (!m) continue;\n const rotationNum = m[1] ? parseInt(m[1], 10) : 0;\n matched.push({ path: path.join(logDir, file), order: rotationNum });\n }\n\n matched.sort((a, b) => {\n if (a.order === 0) return 1;\n if (b.order === 0) return -1;\n return b.order - a.order;\n });\n\n return matched.map((m) => m.path);\n}\n\n/** Read all log files and parse each line as JSON. Malformed lines are skipped. */\nfunction readAndParse(filePaths: string[]): LogEntry[] {\n const entries: LogEntry[] = [];\n for (const filePath of filePaths) {\n let content: string;\n try {\n content = fs.readFileSync(filePath, 'utf-8');\n } catch {\n continue;\n }\n for (const line of content.split('\\n')) {\n if (!line.trim()) continue;\n try {\n entries.push(JSON.parse(line) as LogEntry);\n } catch {\n // Malformed line — skip\n }\n }\n }\n return entries;\n}\n\n/** Test whether a single entry matches the query filters. */\nexport function matchesFilter(entry: LogEntry, query: LogQuery): boolean {\n if (query.level) {\n const entryOrder = LEVEL_ORDER[entry.level as LogLevel] ?? 0;\n const minOrder = LEVEL_ORDER[query.level];\n if (entryOrder < minOrder) return false;\n }\n if (query.component && entry.component !== query.component) return false;\n if (query.since && entry.timestamp < query.since) return false;\n if (query.until && entry.timestamp > query.until) return false;\n return true;\n}\n\n/** Apply level, component, and time range filters. 
*/\nfunction applyFilters(entries: LogEntry[], query: LogQuery): LogEntry[] {\n return entries.filter((entry) => matchesFilter(entry, query));\n}\n"],"mappings":";;;;;;AAAA,OAAO,QAAQ;AACf,OAAO,UAAU;AAuBV,IAAM,mBAAmB;AAGhC,IAAM,sBAAsB;AAG5B,IAAM,qBAAqB;AAC3B,IAAM,eAAe;AAOd,SAAS,UAAU,QAAgB,QAAkB,CAAC,GAAmB;AAC9E,QAAM,QAAQ,KAAK,IAAI,MAAM,SAAS,kBAAkB,mBAAmB;AAE3E,QAAM,WAAW,iBAAiB,MAAM;AACxC,MAAI,SAAS,WAAW,GAAG;AACzB,WAAO,EAAE,SAAS,CAAC,GAAG,OAAO,GAAG,WAAW,MAAM;AAAA,EACnD;AACA,QAAM,aAAa,aAAa,QAAQ;AAGxC,aAAW,KAAK,CAAC,GAAG,OAAO,EAAE,aAAa,IAAI,cAAc,EAAE,aAAa,EAAE,CAAC;AAE9E,QAAM,WAAW,aAAa,YAAY,KAAK;AAE/C,QAAM,QAAQ,SAAS;AACvB,QAAM,YAAY,QAAQ;AAC1B,QAAM,UAAU,YAAY,SAAS,MAAM,QAAQ,KAAK,IAAI;AAE5D,SAAO,EAAE,SAAS,OAAO,UAAU;AACrC;AAGA,SAAS,iBAAiB,QAA0B;AAClD,MAAI;AACJ,MAAI;AACF,YAAQ,GAAG,YAAY,MAAM;AAAA,EAC/B,QAAQ;AACN,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,UAAkD,CAAC;AACzD,aAAW,QAAQ,OAAO;AACxB,QAAI,SAAS,cAAc;AACzB,cAAQ,KAAK,EAAE,MAAM,KAAK,KAAK,QAAQ,IAAI,GAAG,OAAO,GAAG,CAAC;AACzD;AAAA,IACF;AACA,UAAM,IAAI,mBAAmB,KAAK,IAAI;AACtC,QAAI,CAAC,EAAG;AACR,UAAM,cAAc,EAAE,CAAC,IAAI,SAAS,EAAE,CAAC,GAAG,EAAE,IAAI;AAChD,YAAQ,KAAK,EAAE,MAAM,KAAK,KAAK,QAAQ,IAAI,GAAG,OAAO,YAAY,CAAC;AAAA,EACpE;AAEA,UAAQ,KAAK,CAAC,GAAG,MAAM;AACrB,QAAI,EAAE,UAAU,EAAG,QAAO;AAC1B,QAAI,EAAE,UAAU,EAAG,QAAO;AAC1B,WAAO,EAAE,QAAQ,EAAE;AAAA,EACrB,CAAC;AAED,SAAO,QAAQ,IAAI,CAAC,MAAM,EAAE,IAAI;AAClC;AAGA,SAAS,aAAa,WAAiC;AACrD,QAAM,UAAsB,CAAC;AAC7B,aAAW,YAAY,WAAW;AAChC,QAAI;AACJ,QAAI;AACF,gBAAU,GAAG,aAAa,UAAU,OAAO;AAAA,IAC7C,QAAQ;AACN;AAAA,IACF;AACA,eAAW,QAAQ,QAAQ,MAAM,IAAI,GAAG;AACtC,UAAI,CAAC,KAAK,KAAK,EAAG;AAClB,UAAI;AACF,gBAAQ,KAAK,KAAK,MAAM,IAAI,CAAa;AAAA,MAC3C,QAAQ;AAAA,MAER;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;AAGO,SAAS,cAAc,OAAiB,OAA0B;AACvE,MAAI,MAAM,OAAO;AACf,UAAM,aAAa,YAAY,MAAM,KAAiB,KAAK;AAC3D,UAAM,WAAW,YAAY,MAAM,KAAK;AACxC,QAAI,aAAa,SAAU,QAAO;AAAA,EACpC;AACA,MAAI,MAAM,aAAa,MAAM,cAAc,MAAM,UAAW,QAAO;AACnE,MAAI,MAAM,SAAS,MAAM,YAAY,MAAM,MAAO,QAAO;AACzD,MAAI,MAAM,SAAS,MAAM,YAAY,MAAM,MAAO,QAAO;AACzD,SAAO;AACT;AAGA,SAAS,aAAa,SAAqB,OAA6
B;AACtE,SAAO,QAAQ,OAAO,CAAC,UAAU,cAAc,OAAO,KAAK,CAAC;AAC9D;","names":[]}