@goondocks/myco 0.3.0 → 0.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,199 @@
+ import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
+ import {
+   EMBEDDING_BATCH_CONCURRENCY,
+   LLM_BATCH_CONCURRENCY,
+   batchExecute
+ } from "./chunk-3JCXYLHD.js";
+ import {
+   BufferProcessor,
+   TranscriptMiner,
+   writeObservationNotes
+ } from "./chunk-JIQISBPI.js";
+ import {
+   VaultWriter,
+   bareSessionId,
+   sessionNoteId
+ } from "./chunk-P2Q77C5F.js";
+ import {
+   indexNote,
+   require_gray_matter
+ } from "./chunk-72OAG4SF.js";
+ import {
+   generateEmbedding
+ } from "./chunk-RGVBGTD6.js";
+ import {
+   initFts
+ } from "./chunk-6FQISQNA.js";
+ import {
+   createEmbeddingProvider,
+   createLlmProvider
+ } from "./chunk-N6IAW33G.js";
+ import {
+   VectorIndex
+ } from "./chunk-XQXXF6MU.js";
+ import "./chunk-PAUPHPOC.js";
+ import {
+   parseStringFlag
+ } from "./chunk-SAKJMNSR.js";
+ import {
+   MycoIndex
+ } from "./chunk-PA3VMINE.js";
+ import "./chunk-XW3OL55U.js";
+ import {
+   loadConfig
+ } from "./chunk-ISCT2SI6.js";
+ import "./chunk-EF4JVH24.js";
+ import {
+   claudeCodeAdapter,
+   createPerProjectAdapter
+ } from "./chunk-2QEJKG7R.js";
+ import {
+   EMBEDDING_INPUT_LIMIT
+ } from "./chunk-Q7BEFSOV.js";
+ import {
+   __toESM
+ } from "./chunk-PZUWP5VK.js";
+
+ // src/cli/reprocess.ts
+ import fs from "fs";
+ import path from "path";
+ var import_gray_matter = __toESM(require_gray_matter(), 1);
+ async function run(args, vaultDir) {
+   const sessionFilter = parseStringFlag(args, "--session");
+   const skipLlm = args.includes("--index-only");
+   const config = loadConfig(vaultDir);
+   const index = new MycoIndex(path.join(vaultDir, "index.db"));
+   initFts(index);
+   const llmProvider = skipLlm ? null : createLlmProvider(config.intelligence.llm);
+   const embeddingProvider = createEmbeddingProvider(config.intelligence.embedding);
+   let vectorIndex = null;
+   try {
+     const testEmbed = await embeddingProvider.embed("test");
+     vectorIndex = new VectorIndex(path.join(vaultDir, "vectors.db"), testEmbed.dimensions);
+   } catch (e) {
+     console.log(`Vector index unavailable: ${e.message}`);
+   }
+   const processor = llmProvider ? new BufferProcessor(llmProvider, config.intelligence.llm.context_window) : null;
+   const writer = new VaultWriter(vaultDir);
+   const miner = new TranscriptMiner({
+     additionalAdapters: config.capture.transcript_paths.map(
+       (p) => createPerProjectAdapter(p, claudeCodeAdapter.parseTurns)
+     )
+   });
+   const sessionsDir = path.join(vaultDir, "sessions");
+   if (!fs.existsSync(sessionsDir)) {
+     console.log("No sessions directory found.");
+     index.close();
+     vectorIndex?.close();
+     return;
+   }
+   const sessionFiles = [];
+   for (const dateDir of fs.readdirSync(sessionsDir)) {
+     const datePath = path.join(sessionsDir, dateDir);
+     if (!fs.statSync(datePath).isDirectory()) continue;
+     for (const file of fs.readdirSync(datePath)) {
+       if (!file.startsWith("session-") || !file.endsWith(".md")) continue;
+       const sessionId = file.replace("session-", "").replace(".md", "");
+       if (sessionFilter && !sessionId.includes(sessionFilter)) continue;
+       sessionFiles.push({
+         relativePath: path.join("sessions", dateDir, file),
+         sessionId
+       });
+     }
+   }
+   if (sessionFiles.length === 0) {
+     console.log(sessionFilter ? `No sessions matching "${sessionFilter}" found.` : "No sessions found.");
+     index.close();
+     vectorIndex?.close();
+     return;
+   }
+   const tasks = sessionFiles.map(({ relativePath, sessionId }) => {
+     const raw = fs.readFileSync(path.join(vaultDir, relativePath), "utf-8");
+     const { data: frontmatter } = (0, import_gray_matter.default)(raw);
+     const bare = bareSessionId(sessionId);
+     const turnsResult = miner.getAllTurnsWithSource(bare);
+     const batchEvents = turnsResult && turnsResult.turns.length > 0 ? turnsResult.turns.map((t) => ({
+       type: "turn",
+       prompt: t.prompt,
+       tool_count: t.toolCount,
+       response: t.aiResponse ?? "",
+       timestamp: t.timestamp
+     })) : null;
+     return { relativePath, sessionId, bare, frontmatter, batchEvents, turnCount: turnsResult?.turns.length ?? 0 };
+   });
+   console.log(`Reprocessing ${tasks.length} session(s)...
+ `);
+   const embedJobs = [];
+   let totalObservations = 0;
+   const extractionResult = await batchExecute(
+     tasks,
+     async (task) => {
+       let obs = 0;
+       process.stdout.write(` ${task.sessionId.slice(0, 12)}... ${task.turnCount} turns`);
+       if (processor && task.batchEvents) {
+         const result = await processor.process(task.batchEvents, task.bare);
+         if (result.observations.length > 0) {
+           writeObservationNotes(result.observations, task.bare, writer, index, vaultDir);
+           obs = result.observations.length;
+           process.stdout.write(` \u2192 ${obs} observations`);
+           for (const o of result.observations) {
+             embedJobs.push({
+               id: `${o.type}-${task.bare.slice(-6)}-${Date.now()}`,
+               text: `${o.title}
+ ${o.content}`.slice(0, EMBEDDING_INPUT_LIMIT),
+               metadata: { type: "memory", session_id: task.bare }
+             });
+           }
+         }
+       }
+       indexNote(index, vaultDir, task.relativePath);
+       const embText = `${task.frontmatter.title ?? ""}
+ ${task.frontmatter.summary ?? ""}`.slice(0, EMBEDDING_INPUT_LIMIT);
+       if (embText.trim()) {
+         embedJobs.push({
+           id: sessionNoteId(task.bare),
+           text: embText,
+           metadata: { type: "session", session_id: task.bare }
+         });
+       }
+       process.stdout.write("\n");
+       return obs;
+     },
+     {
+       concurrency: LLM_BATCH_CONCURRENCY,
+       onProgress: (done, total) => {
+         if (done === total) console.log(`
+ Extraction complete: ${done} sessions processed.`);
+       }
+     }
+   );
+   for (const r of extractionResult.results) {
+     if (r.status === "fulfilled") totalObservations += r.value;
+   }
+   if (vectorIndex && embedJobs.length > 0) {
+     console.log(`Embedding ${embedJobs.length} notes (concurrency: ${EMBEDDING_BATCH_CONCURRENCY})...`);
+     const embResult = await batchExecute(
+       embedJobs,
+       async (job) => {
+         const emb = await generateEmbedding(embeddingProvider, job.text);
+         vectorIndex.upsert(job.id, emb.embedding, job.metadata);
+       },
+       {
+         concurrency: EMBEDDING_BATCH_CONCURRENCY,
+         onProgress: (done, total) => process.stdout.write(`\r Embedded ${done}/${total}`)
+       }
+     );
+     process.stdout.write("\n");
+     if (embResult.failed > 0) {
+       console.log(` ${embResult.failed} embedding(s) failed.`);
+     }
+   }
+   console.log(`
+ Done: ${tasks.length} sessions reprocessed, ${totalObservations} observations extracted.`);
+   index.close();
+   vectorIndex?.close();
+ }
+ export {
+   run
+ };
+ //# sourceMappingURL=reprocess-EM5RIRH4.js.map
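
Note: the new chunk above (reprocess-EM5RIRH4.js, per its sourceMappingURL) is the bundled form of src/cli/reprocess.ts: phase 1 re-runs LLM observation extraction per session under LLM_BATCH_CONCURRENCY, phase 2 embeds the queued notes under EMBEDDING_BATCH_CONCURRENCY, both through the shared batchExecute helper from chunk-3JCXYLHD.js. That helper's implementation is not in this diff, so the sketch below is an assumption reconstructed only from its call sites (an item array, an async worker, { concurrency, onProgress }) and from the result fields the code reads ({ results, failed }).

// Minimal sketch of a concurrency-limited batch executor matching the call
// shape used above; every internal detail here is assumed, not taken from
// the package.
type BatchOptions = {
  concurrency: number;
  onProgress?: (done: number, total: number) => void;
};

type BatchResult<R> = {
  results: PromiseSettledResult<R>[]; // read via r.status === "fulfilled" above
  failed: number;                     // read via embResult.failed above
};

async function batchExecute<T, R>(
  items: T[],
  worker: (item: T) => Promise<R>,
  opts: BatchOptions
): Promise<BatchResult<R>> {
  const results: PromiseSettledResult<R>[] = new Array(items.length);
  const total = items.length;
  let next = 0;
  let done = 0;

  // Each lane claims the next unprocessed index until the queue drains,
  // so at most `opts.concurrency` workers are in flight at once.
  const lane = async () => {
    while (next < total) {
      const i = next++;
      try {
        results[i] = { status: "fulfilled", value: await worker(items[i]) };
      } catch (reason) {
        results[i] = { status: "rejected", reason };
      }
      opts.onProgress?.(++done, total);
    }
  };

  await Promise.all(Array.from({ length: Math.min(opts.concurrency, total) }, lane));
  return { results, failed: results.filter((r) => r.status === "rejected").length };
}

From the flag parsing at the top of run, the command accepts --session <substring> to filter which sessions are reprocessed and --index-only to skip LLM extraction while still rebuilding the FTS index and embeddings; the surrounding CLI wiring is outside this diff.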
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/cli/reprocess.ts"],"sourcesContent":["/**\n * myco reprocess — re-run the observation extraction and summarization pipeline\n * for existing sessions. Useful after bugs or when the LLM backend changes.\n *\n * Reads transcripts (the source of truth), re-extracts observations, regenerates\n * summaries, and re-indexes everything. Existing memory files from those sessions\n * are preserved — new observations are additive.\n */\nimport fs from 'node:fs';\nimport path from 'node:path';\nimport { MycoIndex } from '../index/sqlite.js';\nimport { VectorIndex } from '../index/vectors.js';\nimport { initFts } from '../index/fts.js';\nimport { indexNote } from '../index/rebuild.js';\nimport { loadConfig } from '../config/loader.js';\nimport { createLlmProvider, createEmbeddingProvider } from '../intelligence/llm.js';\nimport { generateEmbedding } from '../intelligence/embeddings.js';\nimport { batchExecute, LLM_BATCH_CONCURRENCY, EMBEDDING_BATCH_CONCURRENCY } from '../intelligence/batch.js';\nimport { BufferProcessor } from '../daemon/processor.js';\nimport { TranscriptMiner } from '../capture/transcript-miner.js';\nimport { VaultWriter } from '../vault/writer.js';\nimport { writeObservationNotes } from '../vault/observations.js';\nimport { createPerProjectAdapter } from '../agents/adapter.js';\nimport { claudeCodeAdapter } from '../agents/claude-code.js';\nimport { sessionNoteId, bareSessionId } from '../vault/session-id.js';\nimport { EMBEDDING_INPUT_LIMIT } from '../constants.js';\nimport { parseStringFlag } from './shared.js';\nimport matter from 'gray-matter';\n\ninterface EmbedJob {\n id: string;\n text: string;\n metadata: Record<string, string>;\n}\n\ninterface SessionTask {\n relativePath: string;\n sessionId: string;\n bare: string;\n frontmatter: Record<string, unknown>;\n batchEvents: Array<Record<string, unknown>> | null;\n turnCount: number;\n}\n\nexport async function run(args: string[], vaultDir: string): Promise<void> {\n const sessionFilter = parseStringFlag(args, '--session');\n const skipLlm = args.includes('--index-only');\n\n const config = loadConfig(vaultDir);\n const index = new MycoIndex(path.join(vaultDir, 'index.db'));\n initFts(index);\n\n const llmProvider = skipLlm ? null : createLlmProvider(config.intelligence.llm);\n const embeddingProvider = createEmbeddingProvider(config.intelligence.embedding);\n\n let vectorIndex: VectorIndex | null = null;\n try {\n const testEmbed = await embeddingProvider.embed('test');\n vectorIndex = new VectorIndex(path.join(vaultDir, 'vectors.db'), testEmbed.dimensions);\n } catch (e) {\n console.log(`Vector index unavailable: ${(e as Error).message}`);\n }\n\n const processor = llmProvider\n ? 
new BufferProcessor(llmProvider, config.intelligence.llm.context_window)\n : null;\n const writer = new VaultWriter(vaultDir);\n const miner = new TranscriptMiner({\n additionalAdapters: config.capture.transcript_paths.map((p: string) =>\n createPerProjectAdapter(p, claudeCodeAdapter.parseTurns),\n ),\n });\n\n // Find sessions to reprocess\n const sessionsDir = path.join(vaultDir, 'sessions');\n if (!fs.existsSync(sessionsDir)) {\n console.log('No sessions directory found.');\n index.close();\n vectorIndex?.close();\n return;\n }\n\n const sessionFiles: Array<{ relativePath: string; sessionId: string }> = [];\n for (const dateDir of fs.readdirSync(sessionsDir)) {\n const datePath = path.join(sessionsDir, dateDir);\n if (!fs.statSync(datePath).isDirectory()) continue;\n for (const file of fs.readdirSync(datePath)) {\n if (!file.startsWith('session-') || !file.endsWith('.md')) continue;\n const sessionId = file.replace('session-', '').replace('.md', '');\n if (sessionFilter && !sessionId.includes(sessionFilter)) continue;\n sessionFiles.push({\n relativePath: path.join('sessions', dateDir, file),\n sessionId,\n });\n }\n }\n\n if (sessionFiles.length === 0) {\n console.log(sessionFilter ? `No sessions matching \"${sessionFilter}\" found.` : 'No sessions found.');\n index.close();\n vectorIndex?.close();\n return;\n }\n\n // Prepare tasks: read transcripts, build extraction inputs\n const tasks: SessionTask[] = sessionFiles.map(({ relativePath, sessionId }) => {\n const raw = fs.readFileSync(path.join(vaultDir, relativePath), 'utf-8');\n const { data: frontmatter } = matter(raw);\n const bare = bareSessionId(sessionId);\n const turnsResult = miner.getAllTurnsWithSource(bare);\n\n const batchEvents = turnsResult && turnsResult.turns.length > 0\n ? turnsResult.turns.map((t) => ({\n type: 'turn' as const,\n prompt: t.prompt,\n tool_count: t.toolCount,\n response: t.aiResponse ?? '',\n timestamp: t.timestamp,\n }))\n : null;\n\n return { relativePath, sessionId, bare, frontmatter, batchEvents, turnCount: turnsResult?.turns.length ?? 0 };\n });\n\n console.log(`Reprocessing ${tasks.length} session(s)...\\n`);\n\n // Phase 1: LLM extraction (concurrency-limited) + FTS re-indexing\n const embedJobs: EmbedJob[] = [];\n let totalObservations = 0;\n\n const extractionResult = await batchExecute(\n tasks,\n async (task) => {\n let obs = 0;\n process.stdout.write(` ${task.sessionId.slice(0, 12)}... ${task.turnCount} turns`);\n\n if (processor && task.batchEvents) {\n const result = await processor.process(task.batchEvents, task.bare);\n if (result.observations.length > 0) {\n writeObservationNotes(result.observations, task.bare, writer, index, vaultDir);\n obs = result.observations.length;\n process.stdout.write(` → ${obs} observations`);\n\n for (const o of result.observations) {\n embedJobs.push({\n id: `${o.type}-${task.bare.slice(-6)}-${Date.now()}`,\n text: `${o.title}\\n${o.content}`.slice(0, EMBEDDING_INPUT_LIMIT),\n metadata: { type: 'memory', session_id: task.bare },\n });\n }\n }\n }\n\n // FTS re-index (fast, no LLM)\n indexNote(index, vaultDir, task.relativePath);\n\n // Queue session embedding\n const embText = `${task.frontmatter.title ?? ''}\\n${task.frontmatter.summary ?? 
''}`.slice(0, EMBEDDING_INPUT_LIMIT);\n if (embText.trim()) {\n embedJobs.push({\n id: sessionNoteId(task.bare),\n text: embText,\n metadata: { type: 'session', session_id: task.bare },\n });\n }\n\n process.stdout.write('\\n');\n return obs;\n },\n {\n concurrency: LLM_BATCH_CONCURRENCY,\n onProgress: (done, total) => {\n if (done === total) console.log(`\\nExtraction complete: ${done} sessions processed.`);\n },\n },\n );\n\n for (const r of extractionResult.results) {\n if (r.status === 'fulfilled') totalObservations += r.value;\n }\n\n // Phase 2: Parallel embeddings\n if (vectorIndex && embedJobs.length > 0) {\n console.log(`Embedding ${embedJobs.length} notes (concurrency: ${EMBEDDING_BATCH_CONCURRENCY})...`);\n\n const embResult = await batchExecute(\n embedJobs,\n async (job) => {\n const emb = await generateEmbedding(embeddingProvider, job.text);\n vectorIndex!.upsert(job.id, emb.embedding, job.metadata);\n },\n {\n concurrency: EMBEDDING_BATCH_CONCURRENCY,\n onProgress: (done, total) => process.stdout.write(`\\r Embedded ${done}/${total}`),\n },\n );\n\n process.stdout.write('\\n');\n if (embResult.failed > 0) {\n console.log(` ${embResult.failed} embedding(s) failed.`);\n }\n }\n\n console.log(`\\nDone: ${tasks.length} sessions reprocessed, ${totalObservations} observations extracted.`);\n\n index.close();\n vectorIndex?.close();\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAQA,OAAO,QAAQ;AACf,OAAO,UAAU;AAkBjB,yBAAmB;AAiBnB,eAAsB,IAAI,MAAgB,UAAiC;AACzE,QAAM,gBAAgB,gBAAgB,MAAM,WAAW;AACvD,QAAM,UAAU,KAAK,SAAS,cAAc;AAE5C,QAAM,SAAS,WAAW,QAAQ;AAClC,QAAM,QAAQ,IAAI,UAAU,KAAK,KAAK,UAAU,UAAU,CAAC;AAC3D,UAAQ,KAAK;AAEb,QAAM,cAAc,UAAU,OAAO,kBAAkB,OAAO,aAAa,GAAG;AAC9E,QAAM,oBAAoB,wBAAwB,OAAO,aAAa,SAAS;AAE/E,MAAI,cAAkC;AACtC,MAAI;AACF,UAAM,YAAY,MAAM,kBAAkB,MAAM,MAAM;AACtD,kBAAc,IAAI,YAAY,KAAK,KAAK,UAAU,YAAY,GAAG,UAAU,UAAU;AAAA,EACvF,SAAS,GAAG;AACV,YAAQ,IAAI,6BAA8B,EAAY,OAAO,EAAE;AAAA,EACjE;AAEA,QAAM,YAAY,cACd,IAAI,gBAAgB,aAAa,OAAO,aAAa,IAAI,cAAc,IACvE;AACJ,QAAM,SAAS,IAAI,YAAY,QAAQ;AACvC,QAAM,QAAQ,IAAI,gBAAgB;AAAA,IAChC,oBAAoB,OAAO,QAAQ,iBAAiB;AAAA,MAAI,CAAC,MACvD,wBAAwB,GAAG,kBAAkB,UAAU;AAAA,IACzD;AAAA,EACF,CAAC;AAGD,QAAM,cAAc,KAAK,KAAK,UAAU,UAAU;AAClD,MAAI,CAAC,GAAG,WAAW,WAAW,GAAG;AAC/B,YAAQ,IAAI,8BAA8B;AAC1C,UAAM,MAAM;AACZ,iBAAa,MAAM;AACnB;AAAA,EACF;AAEA,QAAM,eAAmE,CAAC;AAC1E,aAAW,WAAW,GAAG,YAAY,WAAW,GAAG;AACjD,UAAM,WAAW,KAAK,KAAK,aAAa,OAAO;AAC/C,QAAI,CAAC,GAAG,SAAS,QAAQ,EAAE,YAAY,EAAG;AAC1C,eAAW,QAAQ,GAAG,YAAY,QAAQ,GAAG;AAC3C,UAAI,CAAC,KAAK,WAAW,UAAU,KAAK,CAAC,KAAK,SAAS,KAAK,EAAG;AAC3D,YAAM,YAAY,KAAK,QAAQ,YAAY,EAAE,EAAE,QAAQ,OAAO,EAAE;AAChE,UAAI,iBAAiB,CAAC,UAAU,SAAS,aAAa,EAAG;AACzD,mBAAa,KAAK;AAAA,QAChB,cAAc,KAAK,KAAK,YAAY,SAAS,IAAI;AAAA,QACjD;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,aAAa,WAAW,GAAG;AAC7B,YAAQ,IAAI,gBAAgB,yBAAyB,aAAa,aAAa,oBAAoB;AACnG,UAAM,MAAM;AACZ,iBAAa,MAAM;AACnB;AAAA,EACF;AAGA,QAAM,QAAuB,aAAa,IAAI,CAAC,EAAE,cAAc,UAAU,MAAM;AAC7E,UAAM,MAAM,GAAG,aAAa,KAAK,KAAK,UAAU,YAAY,GAAG,OAAO;AACtE,UAAM,EAAE,MAAM,YAAY,QAAI,mBAAAA,SAAO,GAAG;AACxC,UAAM,OAAO,cAAc,SAAS;AACpC,UAAM,cAAc,MAAM,sBAAsB,IAAI;AAEpD,UAAM,cAAc,eAAe,YAAY,MAAM,SAAS,IAC1D,YAAY,MAAM,IAAI,CAAC,OAAO;AAAA,MAC5B,MAAM;AAAA,MACN,QAAQ,EAAE;AAAA,MACV,YAAY,EAAE;AAAA,MACd,UAAU,EAAE,cAAc;AAAA,MAC1B,WAAW,EAAE;AAAA,IACf,EAAE,IACF;AAEJ,WAAO,EAAE,cAAc,WAAW,MAAM,aAAa,aAAa,WAAW,aAAa,MAAM,UAAU,EAAE;AAAA,EAC9G,CAAC;AAED,UAAQ,IAAI,gBAAgB,MAAM,MAAM;AAAA,CAAkB;AAG1D,QAAM,YAAwB,CAAC;AAC/B,MAAI,oBAAoB;AAExB,QAAM,mBAAmB,MAAM;AAAA,IAC7B;AAAA,IACA,OAAO,SAAS;AACd,UAAI,MAAM;AACV,cAAQ,OAAO,MAAM,KAAK,KAAK,UAAU,MAAM,G
AAG,EAAE,CAAC,OAAO,KAAK,SAAS,QAAQ;AAElF,UAAI,aAAa,KAAK,aAAa;AACjC,cAAM,SAAS,MAAM,UAAU,QAAQ,KAAK,aAAa,KAAK,IAAI;AAClE,YAAI,OAAO,aAAa,SAAS,GAAG;AAClC,gCAAsB,OAAO,cAAc,KAAK,MAAM,QAAQ,OAAO,QAAQ;AAC7E,gBAAM,OAAO,aAAa;AAC1B,kBAAQ,OAAO,MAAM,WAAM,GAAG,eAAe;AAE7C,qBAAW,KAAK,OAAO,cAAc;AACnC,sBAAU,KAAK;AAAA,cACb,IAAI,GAAG,EAAE,IAAI,IAAI,KAAK,KAAK,MAAM,EAAE,CAAC,IAAI,KAAK,IAAI,CAAC;AAAA,cAClD,MAAM,GAAG,EAAE,KAAK;AAAA,EAAK,EAAE,OAAO,GAAG,MAAM,GAAG,qBAAqB;AAAA,cAC/D,UAAU,EAAE,MAAM,UAAU,YAAY,KAAK,KAAK;AAAA,YACpD,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAGA,gBAAU,OAAO,UAAU,KAAK,YAAY;AAG5C,YAAM,UAAU,GAAG,KAAK,YAAY,SAAS,EAAE;AAAA,EAAK,KAAK,YAAY,WAAW,EAAE,GAAG,MAAM,GAAG,qBAAqB;AACnH,UAAI,QAAQ,KAAK,GAAG;AAClB,kBAAU,KAAK;AAAA,UACb,IAAI,cAAc,KAAK,IAAI;AAAA,UAC3B,MAAM;AAAA,UACN,UAAU,EAAE,MAAM,WAAW,YAAY,KAAK,KAAK;AAAA,QACrD,CAAC;AAAA,MACH;AAEA,cAAQ,OAAO,MAAM,IAAI;AACzB,aAAO;AAAA,IACT;AAAA,IACA;AAAA,MACE,aAAa;AAAA,MACb,YAAY,CAAC,MAAM,UAAU;AAC3B,YAAI,SAAS,MAAO,SAAQ,IAAI;AAAA,uBAA0B,IAAI,sBAAsB;AAAA,MACtF;AAAA,IACF;AAAA,EACF;AAEA,aAAW,KAAK,iBAAiB,SAAS;AACxC,QAAI,EAAE,WAAW,YAAa,sBAAqB,EAAE;AAAA,EACvD;AAGA,MAAI,eAAe,UAAU,SAAS,GAAG;AACvC,YAAQ,IAAI,aAAa,UAAU,MAAM,wBAAwB,2BAA2B,MAAM;AAElG,UAAM,YAAY,MAAM;AAAA,MACtB;AAAA,MACA,OAAO,QAAQ;AACb,cAAM,MAAM,MAAM,kBAAkB,mBAAmB,IAAI,IAAI;AAC/D,oBAAa,OAAO,IAAI,IAAI,IAAI,WAAW,IAAI,QAAQ;AAAA,MACzD;AAAA,MACA;AAAA,QACE,aAAa;AAAA,QACb,YAAY,CAAC,MAAM,UAAU,QAAQ,OAAO,MAAM,gBAAgB,IAAI,IAAI,KAAK,EAAE;AAAA,MACnF;AAAA,IACF;AAEA,YAAQ,OAAO,MAAM,IAAI;AACzB,QAAI,UAAU,SAAS,GAAG;AACxB,cAAQ,IAAI,KAAK,UAAU,MAAM,uBAAuB;AAAA,IAC1D;AAAA,EACF;AAEA,UAAQ,IAAI;AAAA,QAAW,MAAM,MAAM,0BAA0B,iBAAiB,0BAA0B;AAExG,QAAM,MAAM;AACZ,eAAa,MAAM;AACrB;","names":["matter"]}
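
The map's embedded sourcesContent shows that src/cli/reprocess.ts imports gray-matter with a plain ES import; the require_gray_matter/__toESM indirection and the createRequire banner on line 1 of the chunk are bundler-generated CommonJS interop (the chunk naming and __toESM helper look like esbuild output, though the bundler is an inference). A minimal sketch of that shim pattern, assuming a CJS dependency:

import { createRequire } from "node:module";

// Recreate a file-scoped `require` inside an ES module so that bundled
// CommonJS dependencies can still be resolved at runtime.
const require = createRequire(import.meta.url);
const matter = require("gray-matter"); // gray-matter ships as CommonJS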
@@ -8,7 +8,7 @@ import {
  import {
    PlanFrontmatterSchema,
    indexNote
- } from "./chunk-QQWUV3TC.js";
+ } from "./chunk-72OAG4SF.js";
  import {
    generateEmbedding
  } from "./chunk-RGVBGTD6.js";
@@ -14049,7 +14049,9 @@ var StdioServerTransport = class {
  };

  // src/mcp/server.ts
- import { createRequire } from "module";
+ import fs3 from "fs";
+ import path4 from "path";
+ import { fileURLToPath } from "url";

  // src/mcp/tools/search.ts
  async function handleMycoSearch(index, input, vectorIndex, backend) {
@@ -14457,12 +14459,17 @@ Superseded by:: [[${wisdomId}]]`;
  }

  // src/mcp/server.ts
- import path4 from "path";
- import fs3 from "fs";
  function getPackageVersion() {
    try {
-     const require2 = createRequire(import.meta.url);
-     return require2("../../../package.json").version;
+     let dir = path4.dirname(fileURLToPath(import.meta.url));
+     for (let i = 0; i < 5; i++) {
+       const pkgPath = path4.join(dir, "package.json");
+       if (fs3.existsSync(pkgPath)) {
+         return JSON.parse(fs3.readFileSync(pkgPath, "utf-8")).version;
+       }
+       dir = path4.dirname(dir);
+     }
+     return "0.0.0";
    } catch {
      return "0.0.0";
    }
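
This hunk changes how the MCP server reports its version: 0.3.0 resolved package.json through createRequire(import.meta.url) with a hard-coded "../../../package.json", which assumes the compiled server chunk sits exactly three directories below the package root; 0.3.1 instead walks up from the compiled file's own directory, checking up to five levels for a package.json and falling back to "0.0.0". A standalone restatement of the new strategy; findNearestPackageJson is an illustrative name, not part of the package's API:

import fs from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";

// Walk upward from this module's directory until a package.json is found,
// or give up after maxDepth levels (mirrors the 5-iteration loop above).
function findNearestPackageJson(fromUrl: string, maxDepth = 5): string | null {
  let dir = path.dirname(fileURLToPath(fromUrl));
  for (let i = 0; i < maxDepth; i++) {
    const candidate = path.join(dir, "package.json");
    if (fs.existsSync(candidate)) return candidate;
    dir = path.dirname(dir);
  }
  return null;
}

const pkgPath = findNearestPackageJson(import.meta.url);
const version = pkgPath
  ? JSON.parse(fs.readFileSync(pkgPath, "utf-8")).version
  : "0.0.0";

Trading module resolution for a plain upward filesystem search keeps the lookup working no matter how deep the bundler emits the chunk.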
@@ -14725,4 +14732,4 @@ export {
    createMycoServer,
    main
  };
- //# sourceMappingURL=server-DLBATUNG.js.map
+ //# sourceMappingURL=server-I7MRMIOP.js.map