amalfa 0.0.0-reserved → 1.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (143)
  1. package/.biomeignore +19 -0
  2. package/:memory: +0 -0
  3. package/:memory:-shm +0 -0
  4. package/:memory:-wal +0 -0
  5. package/CHANGELOG.md.old +43 -0
  6. package/LICENSE +21 -0
  7. package/README.md +359 -13
  8. package/README.old.md +112 -0
  9. package/ROADMAP.md +316 -0
  10. package/TEST_PLAN.md +561 -0
  11. package/agents.config.json +11 -0
  12. package/amalfa.config.example.ts +102 -0
  13. package/biome.json +49 -0
  14. package/bun.lock +371 -0
  15. package/docs/AGENT_PROTOCOLS.md +28 -0
  16. package/docs/ARCHITECTURAL_OVERVIEW.md +123 -0
  17. package/docs/BENTO_BOXING_DEPRECATION.md +281 -0
  18. package/docs/Bun-SQLite.html +464 -0
  19. package/docs/COMMIT_GUIDELINES.md +367 -0
  20. package/docs/DEVELOPER_ONBOARDING.md +36 -0
  21. package/docs/Graph and Vector Database Best Practices.md +214 -0
  22. package/docs/PERFORMANCE_BASELINES.md +88 -0
  23. package/docs/REPOSITORY_CLEANUP_SUMMARY.md +261 -0
  24. package/docs/edge-generation-methods.md +57 -0
  25. package/docs/elevator-pitch.md +118 -0
  26. package/docs/graph-and-vector-database-playbook.html +480 -0
  27. package/docs/hardened-sqlite.md +85 -0
  28. package/docs/headless-knowledge-management.md +79 -0
  29. package/docs/john-kaye-flux-prompt.md +46 -0
  30. package/docs/keyboard-shortcuts.md +80 -0
  31. package/docs/opinion-proceed-pattern.md +29 -0
  32. package/docs/polyvis-nodes-edges-schema.md +77 -0
  33. package/docs/protocols/lab-protocol.md +30 -0
  34. package/docs/reaction-iquest-loop-coder.md +46 -0
  35. package/docs/services.md +60 -0
  36. package/docs/sqlite-wal-readonly-trap.md +228 -0
  37. package/docs/strategy/css-architecture.md +40 -0
  38. package/docs/test-document-cycle.md +83 -0
  39. package/docs/test_lifecycle_E2E.md +4 -0
  40. package/docs/the-bicameral-graph.md +83 -0
  41. package/docs/user-guide.md +70 -0
  42. package/docs/vision-helper.md +53 -0
  43. package/drizzle/0000_minor_iron_fist.sql +19 -0
  44. package/drizzle/meta/0000_snapshot.json +139 -0
  45. package/drizzle/meta/_journal.json +13 -0
  46. package/example_usage.ts +39 -0
  47. package/experiment.sh +35 -0
  48. package/hello +2 -0
  49. package/index.html +52 -0
  50. package/knowledge/excalibur.md +12 -0
  51. package/package.json +60 -15
  52. package/plans/experience-graph-integration.md +60 -0
  53. package/prompts/gemini-king-mode-prompt.md +46 -0
  54. package/public/docs/MCP_TOOLS.md +372 -0
  55. package/schemas/README.md +20 -0
  56. package/schemas/cda.schema.json +84 -0
  57. package/schemas/conceptual-lexicon.schema.json +75 -0
  58. package/scratchpads/dummy-debrief-boxed.md +39 -0
  59. package/scratchpads/dummy-debrief.md +27 -0
  60. package/scratchpads/scratchpad-design.md +50 -0
  61. package/scratchpads/scratchpad-scrolling.md +20 -0
  62. package/scratchpads/scratchpad-toc-disappearance.md +23 -0
  63. package/scratchpads/scratchpad-toc.md +28 -0
  64. package/scratchpads/test_gardener.md +7 -0
  65. package/src/EnlightenedTriad.ts +146 -0
  66. package/src/JIT_Triad.ts +137 -0
  67. package/src/cli.ts +364 -0
  68. package/src/config/constants.ts +7 -0
  69. package/src/config/defaults.ts +99 -0
  70. package/src/core/BentoNormalizer.ts +113 -0
  71. package/src/core/EdgeWeaver.ts +145 -0
  72. package/src/core/FractureLogic.ts +22 -0
  73. package/src/core/Harvester.ts +73 -0
  74. package/src/core/LLMClient.ts +93 -0
  75. package/src/core/LouvainGate.ts +67 -0
  76. package/src/core/MarkdownMasker.ts +49 -0
  77. package/src/core/README.md +11 -0
  78. package/src/core/SemanticMatcher.ts +89 -0
  79. package/src/core/SemanticWeaver.ts +96 -0
  80. package/src/core/TagEngine.ts +56 -0
  81. package/src/core/TimelineWeaver.ts +61 -0
  82. package/src/core/VectorEngine.ts +232 -0
  83. package/src/daemon/index.ts +225 -0
  84. package/src/data/experience/test_doc_1.md +2 -0
  85. package/src/data/experience/test_doc_2.md +2 -0
  86. package/src/db/schema.ts +46 -0
  87. package/src/demo-triad.ts +45 -0
  88. package/src/gardeners/AutoTagger.ts +116 -0
  89. package/src/gardeners/BaseGardener.ts +55 -0
  90. package/src/llm/EnlightenedProvider.ts +95 -0
  91. package/src/mcp/README.md +6 -0
  92. package/src/mcp/index.ts +341 -0
  93. package/src/pipeline/AmalfaIngestor.ts +272 -0
  94. package/src/pipeline/HarvesterPipeline.ts +101 -0
  95. package/src/pipeline/Ingestor.ts +555 -0
  96. package/src/pipeline/PreFlightAnalyzer.ts +434 -0
  97. package/src/pipeline/README.md +7 -0
  98. package/src/pipeline/SemanticHarvester.ts +222 -0
  99. package/src/resonance/DatabaseFactory.ts +100 -0
  100. package/src/resonance/README.md +148 -0
  101. package/src/resonance/cli/README.md +7 -0
  102. package/src/resonance/cli/ingest.ts +41 -0
  103. package/src/resonance/cli/migrate.ts +54 -0
  104. package/src/resonance/config.ts +40 -0
  105. package/src/resonance/daemon.ts +236 -0
  106. package/src/resonance/db.ts +424 -0
  107. package/src/resonance/pipeline/README.md +7 -0
  108. package/src/resonance/pipeline/extract.ts +89 -0
  109. package/src/resonance/pipeline/transform_docs.ts +60 -0
  110. package/src/resonance/schema.ts +156 -0
  111. package/src/resonance/services/embedder.ts +131 -0
  112. package/src/resonance/services/simpleTokenizer.ts +119 -0
  113. package/src/resonance/services/stats.ts +327 -0
  114. package/src/resonance/services/tokenizer.ts +159 -0
  115. package/src/resonance/transform/cda.ts +393 -0
  116. package/src/resonance/types/enriched-cda.ts +112 -0
  117. package/src/services/README.md +56 -0
  118. package/src/services/llama.ts +59 -0
  119. package/src/services/llamauv.ts +56 -0
  120. package/src/services/olmo3.ts +58 -0
  121. package/src/services/phi.ts +52 -0
  122. package/src/types/artifact.ts +12 -0
  123. package/src/utils/EnvironmentVerifier.ts +67 -0
  124. package/src/utils/Logger.ts +21 -0
  125. package/src/utils/ServiceLifecycle.ts +207 -0
  126. package/src/utils/ZombieDefense.ts +244 -0
  127. package/src/utils/validator.ts +264 -0
  128. package/substack/substack-playbook-1.md +95 -0
  129. package/substack/substack-playbook-2.md +78 -0
  130. package/tasks/ui-investigation.md +26 -0
  131. package/test-db +0 -0
  132. package/test-db-shm +0 -0
  133. package/test-db-wal +0 -0
  134. package/tests/canary/verify_pinch_check.ts +44 -0
  135. package/tests/fixtures/ingest_test.md +12 -0
  136. package/tests/fixtures/ingest_test_boxed.md +13 -0
  137. package/tests/fixtures/safety_test.md +45 -0
  138. package/tests/fixtures/safety_test_boxed.md +49 -0
  139. package/tests/fixtures/tagged_output.md +49 -0
  140. package/tests/fixtures/tagged_test.md +49 -0
  141. package/tests/mcp-server-settings.json +8 -0
  142. package/tsconfig.json +46 -0
  143. package/verify-embedder.ts +54 -0
@@ -0,0 +1,116 @@
1
+ import { TagEngine } from "../core/TagEngine";
2
+ import { BaseGardener, type Candidate } from "./BaseGardener";
3
+
4
+ export class AutoTagger extends BaseGardener {
5
+ name = "Auto-Tagger";
6
+ private tagEngine: TagEngine | null = null;
7
+
8
+ async scan(limit: number): Promise<Candidate[]> {
9
+ const types = ["note", "debrief", "section", "document"];
10
+ const candidates: Candidate[] = [];
11
+
12
+ for (const type of types) {
13
+ if (candidates.length >= limit) break;
14
+
15
+ // FAFCAS Optimization: Fetch only what we need for this batch
16
+ // We fetch 'limit' nodes of this type.
17
+ // Warning: If we have 1000 tagged notes and 0 untagged, we fetch 1000, filter all out, and get 0 candidates.
18
+ // Then we move to next type.
19
+ // Ideally we'd filter in SQL: `content NOT LIKE '%<!-- tags:%'`.
20
+ // But strict SQL for that is messy with JSON/Text mix.
21
+ // Let's stick to memory filter but at least limit the fetch to a reasonable batch size (e.g. 5x limit) to avoid dumping whole DB.
22
+
23
+ const batchSize = Math.max(limit * 5, 200);
24
+
25
+ const nodes = this.db.getNodes({
26
+ type,
27
+ limit: batchSize,
28
+ // We need content to check for existing tags
29
+ excludeContent: false,
30
+ });
31
+
32
+ for (const node of nodes) {
33
+ if (candidates.length >= limit) break;
34
+
35
+ // Check if source file exists
36
+ if (!node.meta?.source) continue;
37
+
38
+ // Heuristic: If it already has tags in raw content, skip
39
+ if (node.content?.includes("<!-- tags:")) continue;
40
+
41
+ candidates.push({
42
+ nodeId: node.id,
43
+ filePath: String(node.meta.source),
44
+ content: node.content || "", // Fallback
45
+ type: node.type,
46
+ });
47
+ }
48
+ }
49
+
50
+ return candidates;
51
+ }
52
+
53
+ async cultivate(candidate: Candidate): Promise<void> {
54
+ if (!this.tagEngine) {
55
+ try {
56
+ this.tagEngine = await TagEngine.getInstance();
57
+ } catch (_e) {
58
+ this.log.warn("⚠️ TagEngine failed to safe load, continuing...");
59
+ }
60
+ }
61
+
62
+ this.log.debug(
63
+ { gardener: this.name, candidate: candidate.nodeId },
64
+ "🏷️ Tagging candidate",
65
+ );
66
+
67
+ // MOCK MODE: If LLM is slow/down, we use deterministic tags for testing
68
+ const tags = [
69
+ `[concept: auto-generated-tag]`,
70
+ `[concept: ${candidate.type}]`,
71
+ ];
72
+
73
+ if (tags.length === 0) {
74
+ this.log.warn({ candidate: candidate.nodeId }, "⚠️ No tags generated.");
75
+ return;
76
+ }
77
+
78
+ const tagBlock = `\n<!-- tags: ${tags.join(", ")} -->\n`;
79
+
80
+ // Injection Strategy
81
+ const fileContent = await Bun.file(candidate.filePath).text();
82
+
83
+ if (candidate.type === "section") {
84
+ // Locus-Aware Injection
85
+ // We need to find the specific locus block for this section
86
+ // The node.meta.box_id should ideally be stored, but if not, we rely on the locus ID from the node ID?
87
+ // Usually ID is `filename#slug-locusId`.
88
+
89
+ // Let's assume we search for the content or just append to end of file if it's a "whole file" node.
90
+ // But for sections... let's try to match the content? Risky.
91
+ // Better: Scan for `<!-- locus:BOX_ID -->` if we have BOX_ID.
92
+ // Current DB schema might not strictly store original box_id in a queryable way unless specific meta is set.
93
+ // Let's check candidate.node.meta.box_id?
94
+
95
+ // For V1 Safety: We will only process "Atomic" files (Debriefs, Notes) where one file = one node.
96
+ // We will implementation Section injection later to avoid regex corruption risk without more robust testing.
97
+ this.log.warn(
98
+ { candidate: candidate.nodeId },
99
+ "⚠️ Section injection postponed for safety.",
100
+ );
101
+ return;
102
+ } else {
103
+ // Atomic File Injection (Append)
104
+ // Check if file already ends with newline
105
+ const newContent = fileContent.endsWith("\n")
106
+ ? fileContent + tagBlock
107
+ : `${fileContent}\n${tagBlock}`;
108
+
109
+ await Bun.write(candidate.filePath, newContent);
110
+ this.log.info(
111
+ { candidate: candidate.nodeId, tagCount: tags.length },
112
+ "✅ Injected tags",
113
+ );
114
+ }
115
+ }
116
+ }
@@ -0,0 +1,55 @@
1
+ import type { ResonanceDB } from "@src/resonance/db";
2
+ import { getLogger, type Logger } from "@src/utils/Logger";
3
+
4
+ export interface Candidate {
5
+ nodeId: string;
6
+ filePath: string;
7
+ content: string;
8
+ type: string;
9
+ }
10
+
11
+ export abstract class BaseGardener {
12
+ protected db: ResonanceDB;
13
+ protected log: Logger;
14
+
15
+ constructor(db: ResonanceDB) {
16
+ this.db = db;
17
+ // We use a generic name initially, subclasses can override or we rely on 'name' property later
18
+ // Actually, we can't access 'this.name' safely in constructor if it's a property.
19
+ // Let's use "Gardener" as the component.
20
+ this.log = getLogger("Gardener");
21
+ }
22
+
23
+ abstract name: string;
24
+
25
+ /**
26
+ * Finds candidates that need attention.
27
+ */
28
+ abstract scan(limit: number): Promise<Candidate[]>;
29
+
30
+ /**
31
+ * Applies changes to a single candidate.
32
+ */
33
+ abstract cultivate(candidate: Candidate): Promise<void>;
34
+
35
+ /**
36
+ * Main loop.
37
+ */
38
+ public async run(limit: number = 10) {
39
+ this.log.info({ gardener: this.name }, "🌿 Gardener starting...");
40
+ const candidates = await this.scan(limit);
41
+ this.log.info(
42
+ { gardener: this.name, count: candidates.length },
43
+ "Found candidates",
44
+ );
45
+
46
+ for (const candidate of candidates) {
47
+ this.log.debug(
48
+ { gardener: this.name, nodeId: candidate.nodeId },
49
+ "Processing candidate",
50
+ );
51
+ await this.cultivate(candidate);
52
+ }
53
+ this.log.info({ gardener: this.name }, "✅ Gardener finished");
54
+ }
55
+ }
@@ -0,0 +1,95 @@
1
+ // src/llm/EnlightenedProvider.ts
2
+
3
+ export interface Message {
4
+ role: "system" | "user" | "assistant";
5
+ content: string;
6
+ }
7
+
8
+ export interface EnlightenmentConfig {
9
+ port?: number;
10
+ modelAlias?: string;
11
+ temperature?: number;
12
+ maxTokens?: number;
13
+ }
14
+
15
+ /**
16
+ * THE ENLIGHTENED PROVIDER
17
+ * A specialized adapter for the "Kirkcaldy Accountant" vector-steered model.
18
+ * Default Port: 8083
19
+ */
20
+ export class EnlightenedProvider {
21
+ private baseUrl: string;
22
+ private modelAlias: string;
23
+ private defaultTemp: number;
24
+
25
+ constructor(config: EnlightenmentConfig = {}) {
26
+ this.baseUrl = `http://127.0.0.1:${config.port || 8083}/v1`;
27
+ this.modelAlias = config.modelAlias || "enlightened-llama";
28
+ this.defaultTemp = config.temperature || 0.1; // Keep it cold for logic
29
+ }
30
+
31
+ /**
32
+ * THE RATIONALITY CHECK
33
+ * Pings the server to ensure the Enlightenment engine is online.
34
+ */
35
+ async isOnline(): Promise<boolean> {
36
+ try {
37
+ const response = await fetch(`${this.baseUrl}/models`);
38
+ return response.ok;
39
+ } catch {
40
+ return false;
41
+ }
42
+ }
43
+
44
+ /**
45
+ * THE THINK METHOD
46
+ * Sends the prompt to the vector-clamped model.
47
+ */
48
+ async think(messages: Message[]): Promise<string> {
49
+ try {
50
+ const response = await fetch(`${this.baseUrl}/chat/completions`, {
51
+ method: "POST",
52
+ headers: {
53
+ "Content-Type": "application/json",
54
+ Authorization: "Bearer sk-dummy-key", // Required by protocol, ignored by server
55
+ },
56
+ body: JSON.stringify({
57
+ model: this.modelAlias,
58
+ messages: messages,
59
+ temperature: this.defaultTemp,
60
+ max_tokens: 1024,
61
+ stream: false,
62
+ }),
63
+ });
64
+
65
+ if (!response.ok) {
66
+ throw new Error(
67
+ `Enlightenment Error: ${response.status} ${response.statusText}`,
68
+ );
69
+ }
70
+
71
+ const data = (await response.json()) as {
72
+ choices: { message: { content: string } }[];
73
+ };
74
+ return data?.choices?.[0]?.message?.content?.trim() || "";
75
+ } catch (error) {
76
+ console.error("🏴󠁧󠁢󠁳󠁣󠁴󠁿 The Philosopher is silent (Connection Error).", error);
77
+ throw error;
78
+ }
79
+ }
80
+
81
+ /**
82
+ * SPECIALIST METHOD: DE-FLUFF
83
+ * A pre-configured routine to strip buzzwords from text.
84
+ */
85
+ async defluff(inputText: string): Promise<string> {
86
+ return this.think([
87
+ {
88
+ role: "system",
89
+ content:
90
+ "You are a ruthless editor. Rewrite the following text to be concise, factual, and free of corporate buzzwords. Return ONLY the rewritten text.",
91
+ },
92
+ { role: "user", content: inputText },
93
+ ]);
94
+ }
95
+ }
@@ -0,0 +1,6 @@
1
+ # 🔌 MCP Server
2
+
3
+ The Model Context Protocol (MCP) server implementation for Polyvis.
4
+
5
+ ## Contents
6
+ - **`index.ts`**: Entry point for the MCP server. Exposes tools (`search_documents`, `read_node_content`, etc.) and resources.
@@ -0,0 +1,341 @@
1
import { appendFileSync } from "node:fs";
import { join } from "node:path";
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
import {
  CallToolRequestSchema,
  ListResourcesRequestSchema,
  ListToolsRequestSchema,
  ReadResourceRequestSchema,
} from "@modelcontextprotocol/sdk/types.js";
import { VectorEngine } from "@src/core/VectorEngine";
import { ResonanceDB } from "@src/resonance/db";
// NOTE(review): EnvironmentVerifier is only referenced by a commented-out
// call inside runServer(); kept until the AMALFA-path update lands.
import { EnvironmentVerifier } from "../utils/EnvironmentVerifier";
import { getLogger } from "../utils/Logger";
import { ServiceLifecycle } from "../utils/ServiceLifecycle";

// CLI arguments: the first positional argument selects the lifecycle
// command; with no argument we default to running the server ("serve").
const args = process.argv.slice(2);
const command = args[0] || "serve";
const log = getLogger("MCP");

// --- Service Lifecycle ---

// Daemon management (start/stop/status) for this server process. PID and
// log files are created relative to the working directory.
const lifecycle = new ServiceLifecycle({
  name: "MCP",
  pidFile: ".mcp.pid",
  logFile: ".mcp.log",
  entryPoint: "src/mcp/index.ts",
});
29
+
30
+ // --- Server Logic ---
31
+
32
+ // Helper function to create fresh database connection per request
33
+ function createConnection() {
34
+ const dbPath = join(import.meta.dir, "../../.amalfa/resonance.db");
35
+ const db = new ResonanceDB(dbPath);
36
+ const vectorEngine = new VectorEngine(db.getRawDb());
37
+ return { db, vectorEngine };
38
+ }
39
+
40
+ async function runServer() {
41
+ // 0. Verify Environment
42
+ // TODO: Update EnvironmentVerifier for AMALFA paths (not PolyVis)
43
+ // await EnvironmentVerifier.verifyOrExit();
44
+
45
+ log.info("🚀 AMALFA MCP Server Initializing...");
46
+
47
+ // 1. Setup Server
48
+ const server = new Server(
49
+ { name: "amalfa-mcp", version: "1.0.0" },
50
+ { capabilities: { tools: {}, resources: {} } },
51
+ );
52
+
53
+ // 2. Define Constants
54
+ const TOOLS = {
55
+ SEARCH: "search_documents",
56
+ READ: "read_node_content",
57
+ EXPLORE: "explore_links",
58
+ LIST: "list_directory_structure",
59
+ GARDEN: "inject_tags",
60
+ };
61
+
62
+ // 3. Register Handlers
63
+ server.setRequestHandler(ListToolsRequestSchema, async () => {
64
+ return {
65
+ tools: [
66
+ {
67
+ name: TOOLS.SEARCH,
68
+ description:
69
+ "Search the Knowledge Graph using Vector (semantic) search.",
70
+ inputSchema: {
71
+ type: "object",
72
+ properties: {
73
+ query: { type: "string" },
74
+ limit: { type: "number", default: 20 },
75
+ },
76
+ required: ["query"],
77
+ },
78
+ },
79
+ {
80
+ name: TOOLS.READ,
81
+ description: "Read the full markdown content of a specific node.",
82
+ inputSchema: {
83
+ type: "object",
84
+ properties: { id: { type: "string" } },
85
+ required: ["id"],
86
+ },
87
+ },
88
+ {
89
+ name: TOOLS.EXPLORE,
90
+ description: "Find related nodes (Graph Traversal).",
91
+ inputSchema: {
92
+ type: "object",
93
+ properties: {
94
+ id: { type: "string" },
95
+ relation: { type: "string" },
96
+ },
97
+ required: ["id"],
98
+ },
99
+ },
100
+ {
101
+ name: TOOLS.LIST,
102
+ description: "List the directory structure of the document set.",
103
+ inputSchema: { type: "object", properties: {} },
104
+ },
105
+ {
106
+ name: TOOLS.GARDEN,
107
+ description:
108
+ "Inject semantic tags into a source file (Gardener Agent).",
109
+ inputSchema: {
110
+ type: "object",
111
+ properties: {
112
+ file_path: { type: "string" },
113
+ tags: { type: "array", items: { type: "string" } },
114
+ },
115
+ required: ["file_path", "tags"],
116
+ },
117
+ },
118
+ ],
119
+ };
120
+ });
121
+
122
+ server.setRequestHandler(CallToolRequestSchema, async (request) => {
123
+ const { name, arguments: args } = request.params;
124
+ try {
125
+ if (name === TOOLS.SEARCH) {
126
+ // Create fresh connection for this request
127
+ const { db, vectorEngine } = createConnection();
128
+ try {
129
+ const query = String(args?.query);
130
+ const limit = Number(args?.limit || 20);
131
+ const candidates = new Map<
132
+ string,
133
+ { id: string; score: number; preview: string; source: string }
134
+ >();
135
+ const errors: string[] = [];
136
+
137
+ // Vector Search only (FTS removed in Hollow Node migration)
138
+ try {
139
+ const vectorResults = await vectorEngine.search(query, limit);
140
+ for (const r of vectorResults) {
141
+ candidates.set(r.id, {
142
+ id: r.id,
143
+ score: r.score,
144
+ preview: r.content.slice(0, 200).replace(/\n/g, " "),
145
+ source: "vector",
146
+ });
147
+ }
148
+ } catch (e: unknown) {
149
+ const msg = e instanceof Error ? e.message : String(e);
150
+ log.error({ err: e }, "Vector Search Error");
151
+ errors.push(msg);
152
+ }
153
+
154
+ const results = Array.from(candidates.values())
155
+ .sort((a, b) => b.score - a.score)
156
+ .slice(0, limit)
157
+ .map((r) => ({ ...r, score: r.score.toFixed(3) }));
158
+
159
+ if (results.length === 0 && errors.length > 0) {
160
+ return {
161
+ content: [
162
+ { type: "text", text: `Search Error: ${errors.join(", ")}` },
163
+ ],
164
+ isError: true,
165
+ };
166
+ }
167
+ return {
168
+ content: [{ type: "text", text: JSON.stringify(results, null, 2) }],
169
+ };
170
+ } finally {
171
+ // Cleanup connection
172
+ db.close();
173
+ }
174
+ }
175
+
176
+ if (name === TOOLS.READ) {
177
+ // Hollow Node: Read content from filesystem via meta.source
178
+ const { db } = createConnection();
179
+ try {
180
+ const id = String(args?.id);
181
+ const row = db
182
+ .getRawDb()
183
+ .query("SELECT meta FROM nodes WHERE id = ?")
184
+ .get(id) as { meta: string | null } | null;
185
+
186
+ if (!row) {
187
+ return { content: [{ type: "text", text: "Node not found." }] };
188
+ }
189
+
190
+ const meta = row.meta ? JSON.parse(row.meta) : {};
191
+ const sourcePath = meta.source;
192
+
193
+ if (!sourcePath) {
194
+ return {
195
+ content: [
196
+ { type: "text", text: `No source file for node: ${id}` },
197
+ ],
198
+ };
199
+ }
200
+
201
+ // Read content from filesystem
202
+ try {
203
+ const content = await Bun.file(sourcePath).text();
204
+ return { content: [{ type: "text", text: content }] };
205
+ } catch {
206
+ return {
207
+ content: [
208
+ { type: "text", text: `File not found: ${sourcePath}` },
209
+ ],
210
+ };
211
+ }
212
+ } finally {
213
+ db.close();
214
+ }
215
+ }
216
+
217
+ if (name === TOOLS.EXPLORE) {
218
+ // Create fresh connection for this request
219
+ const { db } = createConnection();
220
+ try {
221
+ const id = String(args?.id);
222
+ const relation = args?.relation ? String(args.relation) : undefined;
223
+ let sql = "SELECT target, type FROM edges WHERE source = ?";
224
+ const params = [id];
225
+ if (relation) {
226
+ sql += " AND type = ?";
227
+ params.push(relation);
228
+ }
229
+ const rows = db
230
+ .getRawDb()
231
+ .query(sql)
232
+ .all(...params) as Record<string, unknown>[];
233
+ return {
234
+ content: [{ type: "text", text: JSON.stringify(rows, null, 2) }],
235
+ };
236
+ } finally {
237
+ db.close();
238
+ }
239
+ }
240
+
241
+ if (name === TOOLS.LIST) {
242
+ // TODO: Make this configurable via amalfa.config.ts
243
+ const structure = [
244
+ "docs/",
245
+ "notes/",
246
+ ];
247
+ return {
248
+ content: [{ type: "text", text: JSON.stringify(structure, null, 2) }],
249
+ };
250
+ }
251
+
252
+ if (name === TOOLS.GARDEN) {
253
+ const filePath = String(args?.file_path);
254
+ const tags = args?.tags as string[];
255
+ const content = await Bun.file(filePath).text();
256
+ const tagBlock = `\n<!-- tags: ${tags.join(", ")} -->\n`;
257
+ const newContent = content.endsWith("\n")
258
+ ? content + tagBlock
259
+ : `${content}\n${tagBlock}`;
260
+ await Bun.write(filePath, newContent);
261
+ return {
262
+ content: [
263
+ {
264
+ type: "text",
265
+ text: `Injected ${tags.length} tags into ${filePath}`,
266
+ },
267
+ ],
268
+ };
269
+ }
270
+
271
+ return {
272
+ content: [{ type: "text", text: `Tool ${name} not found.` }],
273
+ isError: true,
274
+ };
275
+ } catch (error) {
276
+ log.error({ err: error, tool: name }, "Tool execution failed");
277
+ return {
278
+ content: [{ type: "text", text: `Error: ${error}` }],
279
+ isError: true,
280
+ };
281
+ }
282
+ });
283
+
284
+ server.setRequestHandler(ListResourcesRequestSchema, async () => {
285
+ return {
286
+ resources: [
287
+ {
288
+ uri: "amalfa://stats/summary",
289
+ name: "System Stats",
290
+ mimeType: "text/plain",
291
+ },
292
+ ],
293
+ };
294
+ });
295
+
296
+ server.setRequestHandler(ReadResourceRequestSchema, async (request) => {
297
+ if (request.params.uri === "amalfa://stats/summary") {
298
+ // Create fresh connection for this request
299
+ const { db } = createConnection();
300
+ try {
301
+ const stats = db.getStats();
302
+ const text = `Nodes: ${stats.nodes}\nEdges: ${stats.edges}\nVectors: ${stats.vectors}\nSize: ${(stats.db_size_bytes / 1024 / 1024).toFixed(2)} MB`;
303
+ return {
304
+ contents: [{ uri: request.params.uri, mimeType: "text/plain", text }],
305
+ };
306
+ } finally {
307
+ db.close();
308
+ }
309
+ }
310
+ throw new Error("Resource not found");
311
+ });
312
+
313
+ // 4. Connect Transport
314
+ const transport = new StdioServerTransport();
315
+ await server.connect(transport);
316
+ log.info("✅ AMALFA MCP Server Running (Per-Request Connections)");
317
+ }
318
+
319
// --- Global Error Handling ---

// Last-resort handlers: log via pino AND append to a plain-text crash log,
// since the structured logger itself may be the thing that is broken by the
// time we get here. Neither handler exits the process; the server keeps
// running if it can.

process.on("uncaughtException", (error) => {
  log.fatal({ err: error }, "UNKNOWN MCP ERROR");
  // Plain-text crash log kept as a backup alongside the pino log file.
  const msg = `[${new Date().toISOString()}] UNKNOWN MCP ERROR: ${error instanceof Error ? error.stack : error}\n`;
  try {
    appendFileSync(".mcp.crash.log", msg);
  } catch {}
});

process.on("unhandledRejection", (reason) => {
  log.fatal({ err: reason }, "UNHANDLED REJECTION");
  const msg = `[${new Date().toISOString()}] UNHANDLED REJECTION: ${reason}\n`;
  try {
    appendFileSync(".mcp.crash.log", msg);
  } catch {}
});

// --- Dispatch ---

// Top-level await: hand control to the lifecycle manager, which either runs
// a management command (start/stop/etc.) or invokes runServer() directly.
await lifecycle.run(command, runServer, false);