amalfa 1.0.35 → 1.0.37
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +1 -1
- package/README.md +1 -1
- package/package.json +10 -7
- package/src/README.md +51 -0
- package/src/cli/README.md +29 -0
- package/src/config/README.md +43 -0
- package/src/config/scripts-registry.json +0 -7
- package/src/core/README.md +11 -1
- package/src/daemon/README.md +25 -0
- package/src/daemon/sonar-agent.ts +6 -124
- package/src/daemon/sonar-server.ts +133 -0
- package/src/mcp/README.md +10 -1
- package/src/resonance/DatabaseFactory.ts +3 -3
- package/src/resonance/README.md +12 -3
- package/src/resonance/drizzle/README.md +25 -0
- package/src/resonance/drizzle/migrations/0000_happy_thaddeus_ross.sql +30 -0
- package/src/resonance/drizzle/migrations/meta/0000_snapshot.json +199 -0
- package/src/resonance/drizzle/migrations/meta/_journal.json +13 -0
- package/src/resonance/drizzle/schema.ts +60 -0
- package/src/resonance/services/README.md +42 -0
- package/src/resonance/types/README.md +24 -0
- package/src/types/README.md +33 -0
- package/src/utils/README.md +30 -0
- package/src/pipeline/SemanticHarvester.ts +0 -222
- package/src/resonance/cli/README.md +0 -7
- package/src/resonance/pipeline/README.md +0 -7
package/LICENSE
CHANGED
@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c)
+Copyright (c) 2026 Virtual Information Systems
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
package/README.md
CHANGED
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "amalfa",
-  "version": "1.0.35",
+  "version": "1.0.37",
   "description": "Local-first knowledge graph engine for AI agents. Transforms markdown into searchable memory with MCP protocol.",
   "license": "MIT",
   "homepage": "https://github.com/pjsvis/amalfa#readme",
@@ -45,9 +45,9 @@
   "devDependencies": {
     "@biomejs/biome": "2.3.8",
     "@types/bun": "1.3.4",
-    "only-allow": "
-    "pino-pretty": "
-    "typescript": "
+    "only-allow": "1.2.2",
+    "pino-pretty": "13.1.3",
+    "typescript": "5.9.3"
   },
   "scripts": {
     "precommit": "bun run scripts/maintenance/pre-commit.ts",
@@ -64,9 +64,12 @@
   },
   "dependencies": {
     "@modelcontextprotocol/sdk": "1.25.2",
+    "drizzle-kit": "0.31.8",
+    "drizzle-orm": "0.45.1",
     "fastembed": "2.0.0",
-    "graphology": "
-    "graphology-library": "
-    "
+    "graphology": "0.26.0",
+    "graphology-library": "0.8.0",
+    "hono": "4.11.3",
+    "pino": "10.1.0"
   }
 }
package/src/README.md
ADDED
@@ -0,0 +1,51 @@
+amalfa/src/README.md
+```
+
+# Source Directory
+
+## Purpose
+
+This directory contains the core source code for the Amalfa project. Amalfa is an AI-powered documentation and knowledge management system that evolved from patterns discovered in the PolyVis project. The system enables agents and users to maintain living documentation through brief-debrief-playbook workflows.
+
+## Key Files
+
+- `cli.ts` - Main CLI entry point for the application
+- `cli/` - CLI command implementations
+- `config/` - Configuration management and loading
+- `core/` - Core application logic and services
+- `daemon/` - Background services (Vector Daemon, Sonar Agent)
+- `mcp/` - Model Context Protocol server implementation
+- `pipeline/` - Data processing pipelines
+- `resonance/` - Knowledge graph and semantic services
+- `types/` - TypeScript type definitions
+- `utils/` - Utility functions and helpers
+
+## Patterns
+
+### Module Organization
+Each major feature area has its own directory with a colocated README documenting its purpose, key exports, and stability status.
+
+### Configuration
+- Uses `amalfa.config.ts` for user configuration
+- Supports JSON fallback (`amalfa.config.json`)
+- Configuration is loaded via `config/` module
+
+### CLI Architecture
+- Main entry: `cli.ts`
+- Commands are implemented as subdirectories in `cli/`
+- Uses a command pattern for extensibility
+
+### Service Architecture
+- Daemon processes run in `daemon/` for long-running services
+- MCP server in `mcp/` provides external API access
+- Resonance services in `resonance/` handle knowledge graph operations
+
+## ⚠️ Stability
+
+This module is stable and intentionally designed.
+Do NOT refactor, rewrite, or change the architecture without:
+1. Consulting the user first
+2. Having a documented, compelling reason
+3. Understanding WHY the current design exists
+
+If something looks "wrong," it may be intentional. Ask before you chop.

package/src/cli/README.md
ADDED
@@ -0,0 +1,29 @@
+amalfa/src/cli/README.md
+
+
+# CLI Directory
+
+## Purpose
+Command-line interface implementation for Amalfa, providing the primary user-facing interface for interacting with the system.
+
+## Key Files
+
+| File | Purpose |
+|------|---------|
+| `index.ts` | CLI entry point and command routing |
+| `commands/` | Individual command implementations |
+
+## Patterns
+
+- Uses a command pattern for extensibility
+- Supports subcommands for different operations
+- Consistent help and argument parsing
+
+## ⚠️ Stability
+This module is stable and intentionally designed.
+Do NOT refactor, rewrite, or change the architecture without:
+1. Consulting the user first
+2. Having a documented, compelling reason
+3. Understanding WHY the current design exists
+
+If something looks "wrong," it may be intentional. Ask before you chop.

package/src/config/README.md
ADDED
@@ -0,0 +1,43 @@
+amalfa/src/config/README.md
+```
+
+# Configuration Directory
+
+## Purpose
+The `config/` directory handles configuration loading, validation, and management for the Amalfa application. It provides a unified interface for accessing configuration values from TypeScript and JSON configuration files.
+
+## Key Files
+
+| File | Purpose |
+|------|---------|
+| `index.ts` | Main export barrel and configuration interface |
+| `loader.ts` | Configuration file loading logic |
+| `validator.ts` | Schema validation for configuration values |
+| `defaults.ts` | Default configuration values |
+
+## Patterns
+
+### Configuration Loading
+- Primary: `amalfa.config.ts` (TypeScript module)
+- Fallback: `amalfa.config.json` (JSON format)
+- Environment variables can override config values
+
+### Validation
+- Uses schema validation to ensure configuration integrity
+- Provides helpful error messages for missing or invalid values
+
+### Access Pattern
+```typescript
+import { config } from './config';
+
+const apiKey = config.get('api.key');
+```
+
+## ⚠️ Stability
+This module is stable and intentionally designed.
+Do NOT refactor, rewrite, or change the architecture without:
+1. Consulting the user first
+2. Having a documented, compelling reason
+3. Understanding WHY the current design exists
+
+If something looks "wrong," it may be intentional. Ask before you chop.

package/src/config/scripts-registry.json
CHANGED
@@ -13,13 +13,6 @@
     "category": "core",
     "type": "dev"
   },
-  {
-    "path": "scripts/setup_mcp.ts",
-    "command": "amalfa setup-mcp",
-    "description": "Generates the Model Context Protocol configuration JSON for Claude Desktop.",
-    "category": "setup",
-    "type": "user"
-  },
   {
     "path": "scripts/maintenance/pre-commit.ts",
     "command": "bun run precommit",
package/src/core/README.md
CHANGED
@@ -1,6 +1,7 @@
+
 # 🧠 Core Logic
 
-The foundational business logic and processing engines for
+The foundational business logic and processing engines for Amalfa.
 
 ## Contents
 - **`BentoNormalizer`**: Ensures document structure (H1/H2 hierarchy).
@@ -9,3 +10,12 @@ The foundational business logic and processing engines for Polyvis.
 - **`VectorEngine`**: Interface for vector operations (search/embed).
 - **`TagEngine`**: Auto-tagging logic (LLM based).
 - **`SemanticWeaver`**: Logic for "rescuing" orphaned nodes using embeddings.
+
+## ⚠️ Stability
+This module is stable and intentionally designed.
+Do NOT refactor, rewrite, or change the architecture without:
+1. Consulting the user first
+2. Having a documented, compelling reason
+3. Understanding WHY the current design exists
+
+If something looks "wrong," it may be intentional. Ask before you chop.

package/src/daemon/README.md
ADDED
@@ -0,0 +1,25 @@
+# Daemon Directory
+
+## Purpose
+Background services and long-running processes for Amalfa, including the Vector Daemon and Sonar Agent.
+
+## Key Files
+
+- `index.ts` - Daemon entry point
+- `vector-daemon.ts` - Vector storage service
+- `sonar-agent.ts` - Semantic analysis agent
+
+## Patterns
+
+- Services run as background processes
+- Use event-driven architecture
+- Support graceful shutdown
+
+## ⚠️ Stability
+This module is stable and intentionally designed.
+Do NOT refactor, rewrite, or change the architecture without:
+1. Consulting the user first
+2. Having a documented, compelling reason
+3. Understanding WHY the current design exists
+
+If something looks "wrong," it may be intentional. Ask before you chop.

package/src/daemon/sonar-agent.ts
CHANGED
@@ -24,27 +24,14 @@ import { ServiceLifecycle } from "@src/utils/ServiceLifecycle";
 import { inferenceState } from "./sonar-inference";
 import {
   handleBatchEnhancement,
-  handleChat,
-  handleContextExtraction,
   handleGardenTask,
-  handleMetadataEnhancement,
   handleResearchTask,
-  handleResultReranking,
-  handleSearchAnalysis,
   handleSynthesisTask,
   handleTimelineTask,
   type SonarContext,
 } from "./sonar-logic";
 import { getTaskModel } from "./sonar-strategies";
-import type {
-  ChatRequest,
-  ChatSession,
-  MetadataEnhanceRequest,
-  SearchAnalyzeRequest,
-  SearchContextRequest,
-  SearchRerankRequest,
-  SonarTask,
-} from "./sonar-types";
+import type { ChatSession, SonarTask } from "./sonar-types";
 
 const args = process.argv.slice(2);
 const command = args[0] || "serve";
@@ -128,123 +115,18 @@ async function main() {
   }
 }
 
+import { createSonarApp } from "./sonar-server";
+
 /**
- * Start Bun HTTP Server
+ * Start Bun HTTP Server via Hono
  */
 function startServer(port: number) {
-  const corsHeaders = {
-    "Access-Control-Allow-Origin": "*",
-    "Access-Control-Allow-Methods": "GET, POST, OPTIONS",
-    "Access-Control-Allow-Headers": "Content-Type",
-  };
-
   const context: SonarContext = { db, graphEngine, gardener, chatSessions };
+  const app = createSonarApp(context);
 
   Bun.serve({
     port,
-
-      if (req.method === "OPTIONS")
-        return new Response(null, { headers: corsHeaders });
-      const url = new URL(req.url);
-
-      // Health check
-      if (url.pathname === "/health") {
-        const cfg = await loadConfig();
-        const provider = cfg.sonar.cloud?.enabled ? "cloud" : "local";
-        const model = cfg.sonar.cloud?.enabled
-          ? cfg.sonar.cloud.model
-          : inferenceState.ollamaModel || cfg.sonar.model;
-        return Response.json(
-          {
-            status: "ok",
-            ollama: inferenceState.ollamaAvailable,
-            provider,
-            model,
-          },
-          { headers: corsHeaders },
-        );
-      }
-
-      // Chat endpoint
-      if (url.pathname === "/chat" && req.method === "POST") {
-        try {
-          const body = (await req.json()) as ChatRequest;
-          const { sessionId, message, model } = body;
-          const result = await handleChat(sessionId, message, context, model);
-          return Response.json(result, { headers: corsHeaders });
-        } catch (error) {
-          return Response.json(
-            { error: String(error) },
-            { status: 500, headers: corsHeaders },
-          );
-        }
-      }
-
-      // Metadata enhancement endpoint
-      if (url.pathname === "/metadata/enhance" && req.method === "POST") {
-        try {
-          const body = (await req.json()) as MetadataEnhanceRequest;
-          const { docId } = body;
-          await handleMetadataEnhancement(docId, context);
-          return Response.json({ status: "success" }, { headers: corsHeaders });
-        } catch (error) {
-          return Response.json(
-            { error: String(error) },
-            { status: 500, headers: corsHeaders },
-          );
-        }
-      }
-
-      // Graph Stats endpoint
-      if (url.pathname === "/graph/stats" && req.method === "GET") {
-        return Response.json(graphEngine.getStats(), { headers: corsHeaders });
-      }
-
-      // Search endpoints (analysis, rerank, context)
-      if (url.pathname === "/search/analyze" && req.method === "POST") {
-        try {
-          const body = (await req.json()) as SearchAnalyzeRequest;
-          const { query } = body;
-          const result = await handleSearchAnalysis(query, context);
-          return Response.json(result, { headers: corsHeaders });
-        } catch (error) {
-          return Response.json(
-            { error: String(error) },
-            { status: 500, headers: corsHeaders },
-          );
-        }
-      }
-
-      if (url.pathname === "/search/rerank" && req.method === "POST") {
-        try {
-          const body = (await req.json()) as SearchRerankRequest;
-          const { results, query, intent } = body;
-          const result = await handleResultReranking(results, query, intent);
-          return Response.json(result, { headers: corsHeaders });
-        } catch (error) {
-          return Response.json(
-            { error: String(error) },
-            { status: 500, headers: corsHeaders },
-          );
-        }
-      }
-
-      if (url.pathname === "/search/context" && req.method === "POST") {
-        try {
-          const body = (await req.json()) as SearchContextRequest;
-          const { result, query } = body;
-          const contextResult = await handleContextExtraction(result, query);
-          return Response.json(contextResult, { headers: corsHeaders });
-        } catch (error) {
-          return Response.json(
-            { error: String(error) },
-            { status: 500, headers: corsHeaders },
-          );
-        }
-      }
-
-      return new Response("Not Found", { status: 404, headers: corsHeaders });
-    },
+    fetch: app.fetch,
   });
 
   log.info(`Server started on port ${port}`);
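For readers skimming this refactor: the pattern is to build the router as a standalone Hono app and hand its `fetch` method to `Bun.serve`. Below is a minimal, self-contained sketch of that pattern, not code from the package; the port and routes here are illustrative only.

```typescript
import { Hono } from "hono";
import { cors } from "hono/cors";

// Build the app separately so routes can be defined (and tested)
// without opening a socket -- the same role createSonarApp plays above.
const app = new Hono();
app.use("*", cors({ origin: "*" }));
app.get("/health", (c) => c.json({ status: "ok" }));

// Hono's app.fetch is a (Request) => Response handler, which is the
// exact shape Bun.serve expects, so no adapter layer is needed.
Bun.serve({ port: 3030, fetch: app.fetch });
```

This is why the diff can delete the hand-rolled URL dispatch and CORS headers wholesale: routing, middleware, and JSON responses move into the Hono app, and the daemon keeps only the lifecycle code.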
package/src/daemon/sonar-server.ts
ADDED
@@ -0,0 +1,133 @@
+import { loadConfig } from "@src/config/defaults";
+import { Hono } from "hono";
+import { cors } from "hono/cors";
+import { inferenceState } from "./sonar-inference";
+import {
+  handleChat,
+  handleContextExtraction,
+  handleMetadataEnhancement,
+  handleResultReranking,
+  handleSearchAnalysis,
+  type SonarContext,
+} from "./sonar-logic";
+import type {
+  ChatRequest,
+  MetadataEnhanceRequest,
+  SearchAnalyzeRequest,
+  SearchContextRequest,
+  SearchRerankRequest,
+} from "./sonar-types";
+
+/**
+ * Creates the Hono application for the Sonar Agent
+ */
+export function createSonarApp(context: SonarContext) {
+  const app = new Hono();
+
+  // Global Middleware
+  app.use(
+    "*",
+    cors({
+      origin: "*",
+      allowMethods: ["GET", "POST", "OPTIONS"],
+      allowHeaders: ["Content-Type"],
+    }),
+  );
+
+  /**
+   * Health Check
+   */
+  app.get("/health", async (c) => {
+    const cfg = await loadConfig();
+    const provider = cfg.sonar.cloud?.enabled ? "cloud" : "local";
+    const model = cfg.sonar.cloud?.enabled
+      ? cfg.sonar.cloud.model
+      : inferenceState.ollamaModel || cfg.sonar.model;
+
+    return c.json({
+      status: "ok",
+      ollama: inferenceState.ollamaAvailable,
+      provider,
+      model,
+    });
+  });
+
+  /**
+   * Chat Interface
+   */
+  app.post("/chat", async (c) => {
+    try {
+      const body = await c.req.json<ChatRequest>();
+      const { sessionId, message, model } = body;
+      const result = await handleChat(sessionId, message, context, model);
+      return c.json(result);
+    } catch (error) {
+      return c.json({ error: String(error) }, 500);
+    }
+  });
+
+  /**
+   * Metadata Enhancement
+   */
+  app.post("/metadata/enhance", async (c) => {
+    try {
+      const body = await c.req.json<MetadataEnhanceRequest>();
+      const { docId } = body;
+      await handleMetadataEnhancement(docId, context);
+      return c.json({ status: "success" });
+    } catch (error) {
+      return c.json({ error: String(error) }, 500);
+    }
+  });
+
+  /**
+   * Graph Stats
+   */
+  app.get("/graph/stats", (c) => {
+    return c.json(context.graphEngine.getStats());
+  });
+
+  /**
+   * Search: Query Analysis
+   */
+  app.post("/search/analyze", async (c) => {
+    try {
+      const body = await c.req.json<SearchAnalyzeRequest>();
+      const { query } = body;
+      const result = await handleSearchAnalysis(query, context);
+      return c.json(result);
+    } catch (error) {
+      return c.json({ error: String(error) }, 500);
+    }
+  });
+
+  /**
+   * Search: Reranking
+   */
+  app.post("/search/rerank", async (c) => {
+    try {
+      const body = await c.req.json<SearchRerankRequest>();
+      const { results, query, intent } = body;
+      const result = await handleResultReranking(results, query, intent);
+      return c.json(result);
+    } catch (error) {
+      return c.json({ error: String(error) }, 500);
+    }
+  });
+
+  /**
+   * Search: Context Extraction
+   */
+  app.post("/search/context", async (c) => {
+    try {
+      const body = await c.req.json<SearchContextRequest>();
+      const { result, query } = body;
+      const contextResult = await handleContextExtraction(result, query);
+      return c.json(contextResult);
+    } catch (error) {
+      return c.json({ error: String(error) }, 500);
+    }
+  });
+
+  return app;
+}
package/src/mcp/README.md
CHANGED
@@ -1,6 +1,15 @@
 # 🔌 MCP Server
 
-The Model Context Protocol (MCP) server implementation for
+The Model Context Protocol (MCP) server implementation for Amalfa.
 
 ## Contents
 - **`index.ts`**: Entry point for the MCP server. Exposes tools (`search_documents`, `read_node_content`, etc.) and resources.
+
+## ⚠️ Stability
+This module is stable and intentionally designed.
+Do NOT refactor, rewrite, or change the architecture without:
+1. Consulting the user first
+2. Having a documented, compelling reason
+3. Understanding WHY the current design exists
+
+If something looks "wrong," it may be intentional. Ask before you chop.

package/src/resonance/DatabaseFactory.ts
CHANGED
@@ -3,8 +3,8 @@ import { Database } from "bun:sqlite";
 /**
  * 🏭 DATABASE FACTORY (The Enforcer)
  *
- * Single Source of Truth for instantiating SQLite connections in
- *
+ * Single Source of Truth for instantiating SQLite connections in Amalfa.
+ * Strictly enforces the configuration defined in `playbooks/sqlite-standards.md`.
  *
  * USAGE:
  * import { DatabaseFactory } from "@src/resonance/DatabaseFactory";
@@ -13,7 +13,7 @@ import { Database } from "bun:sqlite";
 export const DatabaseFactory = {
   /**
    * Connects specifically to the main Resonance Graph database.
-   *
+   * Kept for convenience and backward compatibility.
    */
   connectToResonance(
     dbPath: string = ".amalfa/resonance.db",
package/src/resonance/README.md
CHANGED
@@ -1,6 +1,6 @@
 # 🔮 Resonance Engine
 
-The vector database and semantic core of
+The vector database and semantic core of Amalfa.
 
 ## Contents
 - **`daemon.ts`**: The Vector Service (HTTP) and Lifecycle Manager.
@@ -23,7 +23,7 @@ The vector database and semantic core of Polyvis.
 - **Accuracy:** High (51-52% on MTEB retrieval benchmarks)
 - **Training:** Optimized for retrieval tasks on 1B+ text pairs
 
-**Performance on
+**Performance on Amalfa corpus:**
 - 85.2% average best match (excellent semantic understanding)
 - 21.1% average spread (clear differentiation)
 - 76.3% average corpus score (cohesive knowledge base)
@@ -97,7 +97,7 @@ bun run inspect-db public/resonance.db
 
 ### Current: Two-Tier Search (Post-Migration v5)
 
-
+Amalfa uses a **hybrid search strategy** optimized for semantic understanding and exact matches:
 
 **1. Vector Search (Primary)**
 - **Purpose:** Semantic similarity, concept discovery
@@ -146,3 +146,12 @@ Query type?
 | ~~FTS~~ | ~~5-20ms~~ | ~~70%~~ | ~~(Removed)~~ |
 
 **Conclusion:** Two-tier search is simpler, faster, and more accurate than FTS middle ground.
+
+## ⚠️ Stability
+This module is stable and intentionally designed.
+Do NOT refactor, rewrite, or change the architecture without:
+1. Consulting the user first
+2. Having a documented, compelling reason
+3. Understanding WHY the current design exists
+
+If something looks "wrong," it may be intentional. Ask before you chop.

package/src/resonance/drizzle/README.md
ADDED
@@ -0,0 +1,25 @@
+# Drizzle Schema Management
+
+## ⚠️ CRITICAL ARCHITECTURE RULE
+
+**Drizzle ORM is used HERE strictly for:**
+1. **Schema Definition:** Single source of truth for table structures (`schema.ts`).
+2. **Migrations:** Generating and running SQL migrations (`drizzle-kit`).
+
+**Drizzle ORM is NOT used for:**
+- **Runtime Queries:** Do NOT use Drizzle `db.select()` query builders in critical paths.
+- **Data Access:** All runtime data access uses `bun:sqlite` with raw parameterized SQL (`db.prepare()`).
+
+## Why?
+Our **FAFCAS** (Fast, Autonomous, Fractionally-Connected Agent System) architecture demands raw IO performance. We treat SQLite as a raw storage engine. The overhead of an ORM query builder—even a light one—is unnecessary.
+
+## ⛔ Agent Guardrail
+If you are an AI Agent reading this:
+- **DO NOT** refactor the existing `ResonanceDB` SQL queries to use Drizzle.
+- **DO NOT** introduce Drizzle usage into `GraphEngine` or `VectorEngine`.
+- **ONLY** modify this folder if the Database Schema (tables/columns) needs to change.
+
+## Workflow for Schema Changes
+1. Edit `schema.ts`
+2. Run `bunx drizzle-kit generate`
+3. Run migrations (automated via scripts)
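To make that split concrete, here is a minimal sketch of what the rule above means in practice. It is illustrative only: the `nodes` table comes from the schema added in this diff, but the query and the domain value are hypothetical.

```typescript
import { Database } from "bun:sqlite";

// Runtime path: raw parameterized SQL through bun:sqlite, never the ORM.
const db = new Database(".amalfa/resonance.db");
const stmt = db.prepare("SELECT id, title FROM nodes WHERE domain = ?");
const rows = stmt.all("semantic"); // hypothetical domain value

// Schema path: tables are only ever defined in drizzle/schema.ts;
// `bunx drizzle-kit generate` then emits the SQL migration files below.
```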
package/src/resonance/drizzle/migrations/0000_happy_thaddeus_ross.sql
ADDED
@@ -0,0 +1,30 @@
+CREATE TABLE `edges` (
+  `source` text NOT NULL,
+  `target` text NOT NULL,
+  `type` text NOT NULL,
+  `confidence` real DEFAULT 1,
+  `veracity` real DEFAULT 1,
+  `context_source` text,
+  PRIMARY KEY(`source`, `target`, `type`)
+);
+--> statement-breakpoint
+CREATE INDEX `idx_edges_source` ON `edges` (`source`);--> statement-breakpoint
+CREATE INDEX `idx_edges_target` ON `edges` (`target`);--> statement-breakpoint
+CREATE TABLE `ember_state` (
+  `file_path` text PRIMARY KEY NOT NULL,
+  `last_analyzed` text,
+  `sidecar_created` integer,
+  `confidence` real
+);
+--> statement-breakpoint
+CREATE TABLE `nodes` (
+  `id` text PRIMARY KEY NOT NULL,
+  `type` text,
+  `title` text,
+  `domain` text,
+  `layer` text,
+  `embedding` blob,
+  `hash` text,
+  `meta` text,
+  `date` text
+);
package/src/resonance/drizzle/migrations/meta/0000_snapshot.json
ADDED
@@ -0,0 +1,199 @@
+{
+  "version": "6",
+  "dialect": "sqlite",
+  "id": "577c9f07-f198-49d2-9fa3-f222a6ecee23",
+  "prevId": "00000000-0000-0000-0000-000000000000",
+  "tables": {
+    "edges": {
+      "name": "edges",
+      "columns": {
+        "source": {
+          "name": "source",
+          "type": "text",
+          "primaryKey": false,
+          "notNull": true,
+          "autoincrement": false
+        },
+        "target": {
+          "name": "target",
+          "type": "text",
+          "primaryKey": false,
+          "notNull": true,
+          "autoincrement": false
+        },
+        "type": {
+          "name": "type",
+          "type": "text",
+          "primaryKey": false,
+          "notNull": true,
+          "autoincrement": false
+        },
+        "confidence": {
+          "name": "confidence",
+          "type": "real",
+          "primaryKey": false,
+          "notNull": false,
+          "autoincrement": false,
+          "default": 1
+        },
+        "veracity": {
+          "name": "veracity",
+          "type": "real",
+          "primaryKey": false,
+          "notNull": false,
+          "autoincrement": false,
+          "default": 1
+        },
+        "context_source": {
+          "name": "context_source",
+          "type": "text",
+          "primaryKey": false,
+          "notNull": false,
+          "autoincrement": false
+        }
+      },
+      "indexes": {
+        "idx_edges_source": {
+          "name": "idx_edges_source",
+          "columns": ["source"],
+          "isUnique": false
+        },
+        "idx_edges_target": {
+          "name": "idx_edges_target",
+          "columns": ["target"],
+          "isUnique": false
+        }
+      },
+      "foreignKeys": {},
+      "compositePrimaryKeys": {
+        "edges_source_target_type_pk": {
+          "columns": ["source", "target", "type"],
+          "name": "edges_source_target_type_pk"
+        }
+      },
+      "uniqueConstraints": {},
+      "checkConstraints": {}
+    },
+    "ember_state": {
+      "name": "ember_state",
+      "columns": {
+        "file_path": {
+          "name": "file_path",
+          "type": "text",
+          "primaryKey": true,
+          "notNull": true,
+          "autoincrement": false
+        },
+        "last_analyzed": {
+          "name": "last_analyzed",
+          "type": "text",
+          "primaryKey": false,
+          "notNull": false,
+          "autoincrement": false
+        },
+        "sidecar_created": {
+          "name": "sidecar_created",
+          "type": "integer",
+          "primaryKey": false,
+          "notNull": false,
+          "autoincrement": false
+        },
+        "confidence": {
+          "name": "confidence",
+          "type": "real",
+          "primaryKey": false,
+          "notNull": false,
+          "autoincrement": false
+        }
+      },
+      "indexes": {},
+      "foreignKeys": {},
+      "compositePrimaryKeys": {},
+      "uniqueConstraints": {},
+      "checkConstraints": {}
+    },
+    "nodes": {
+      "name": "nodes",
+      "columns": {
+        "id": {
+          "name": "id",
+          "type": "text",
+          "primaryKey": true,
+          "notNull": true,
+          "autoincrement": false
+        },
+        "type": {
+          "name": "type",
+          "type": "text",
+          "primaryKey": false,
+          "notNull": false,
+          "autoincrement": false
+        },
+        "title": {
+          "name": "title",
+          "type": "text",
+          "primaryKey": false,
+          "notNull": false,
+          "autoincrement": false
+        },
+        "domain": {
+          "name": "domain",
+          "type": "text",
+          "primaryKey": false,
+          "notNull": false,
+          "autoincrement": false
+        },
+        "layer": {
+          "name": "layer",
+          "type": "text",
+          "primaryKey": false,
+          "notNull": false,
+          "autoincrement": false
+        },
+        "embedding": {
+          "name": "embedding",
+          "type": "blob",
+          "primaryKey": false,
+          "notNull": false,
+          "autoincrement": false
+        },
+        "hash": {
+          "name": "hash",
+          "type": "text",
+          "primaryKey": false,
+          "notNull": false,
+          "autoincrement": false
+        },
+        "meta": {
+          "name": "meta",
+          "type": "text",
+          "primaryKey": false,
+          "notNull": false,
+          "autoincrement": false
+        },
+        "date": {
+          "name": "date",
+          "type": "text",
+          "primaryKey": false,
+          "notNull": false,
+          "autoincrement": false
+        }
+      },
+      "indexes": {},
+      "foreignKeys": {},
+      "compositePrimaryKeys": {},
+      "uniqueConstraints": {},
+      "checkConstraints": {}
+    }
+  },
+  "views": {},
+  "enums": {},
+  "_meta": {
+    "schemas": {},
+    "tables": {},
+    "columns": {}
+  },
+  "internal": {
+    "indexes": {}
+  }
+}
package/src/resonance/drizzle/schema.ts
ADDED
@@ -0,0 +1,60 @@
+import {
+  blob,
+  index,
+  integer,
+  primaryKey,
+  real,
+  sqliteTable,
+  text,
+} from "drizzle-orm/sqlite-core";
+
+/**
+ * NODES Table
+ * Core entity storage. Now "Hollow" (no content).
+ */
+export const nodes = sqliteTable("nodes", {
+  id: text("id").primaryKey(),
+  type: text("type"),
+  title: text("title"),
+  domain: text("domain"),
+  layer: text("layer"),
+  // Embeddings are stored as raw BLOBs (Float32Array bytes)
+  embedding: blob("embedding"),
+  hash: text("hash"),
+  meta: text("meta"), // JSON string
+  date: text("date"), // ISO string or YYYY-MM-DD
+});
+
+/**
+ * EDGES Table
+ * Defines relationships between nodes.
+ */
+export const edges = sqliteTable(
+  "edges",
+  {
+    source: text("source").notNull(),
+    target: text("target").notNull(),
+    type: text("type").notNull(),
+    confidence: real("confidence").default(1.0),
+    veracity: real("veracity").default(1.0),
+    contextSource: text("context_source"),
+  },
+  (table) => ({
+    // Composite Primary Key
+    pk: primaryKey({ columns: [table.source, table.target, table.type] }),
+    // Indices for traversal speed
+    sourceIdx: index("idx_edges_source").on(table.source),
+    targetIdx: index("idx_edges_target").on(table.target),
+  }),
+);
+
+/**
+ * EMBER STATE Table (Pilot)
+ * Tracks the state of the Ember Service (automated enrichment).
+ */
+export const emberState = sqliteTable("ember_state", {
+  filePath: text("file_path").primaryKey(),
+  lastAnalyzed: text("last_analyzed"),
+  sidecarCreated: integer("sidecar_created", { mode: "boolean" }),
+  confidence: real("confidence"),
+});
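Since `embedding` is declared as a raw BLOB of Float32Array bytes, a consumer reading it back through bun:sqlite has to reinterpret the bytes itself. A hedged sketch of that decode step follows; the query and the node id are assumptions for illustration, not package code.

```typescript
import { Database } from "bun:sqlite";

const db = new Database(".amalfa/resonance.db");
const row = db
  .prepare("SELECT embedding FROM nodes WHERE id = ?")
  .get("some-node-id") as { embedding: Uint8Array | null } | null;

// bun:sqlite returns BLOB columns as Uint8Array; view the same bytes
// as 32-bit floats (4 bytes per component) to recover the vector.
const vector =
  row?.embedding != null
    ? new Float32Array(
        row.embedding.buffer,
        row.embedding.byteOffset,
        row.embedding.byteLength / 4,
      )
    : null;
```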
package/src/resonance/services/README.md
ADDED
@@ -0,0 +1,42 @@
+# Resonance Services Directory
+
+## Purpose
+Service implementations for the Resonance engine - Amalfa's semantic memory and knowledge graph system. These services handle core operations like database interactions, graph traversal, and semantic queries.
+
+## Key Services
+
+| Service | Purpose |
+|---------|---------|
+| `DatabaseFactory` | Factory for creating database connections |
+| `GraphService` | Knowledge graph operations (nodes, edges, queries) |
+| `MemoryService` | Semantic memory storage and retrieval |
+| `EmbeddingService` | Text embedding generation and management |
+
+## Key Files
+
+- `index.ts` - Main exports of all services
+- `database.ts` - Database connection and query services
+- `graph.ts` - Knowledge graph traversal and manipulation
+- `semantic.ts` - Semantic similarity and search services
+
+## Patterns
+
+- Services are stateless where possible
+- Dependency injection for testability
+- Async/await for all I/O operations
+- Error handling with context-rich messages
+
+## Related
+
+- See also: `src/resonance/README.md` for overall resonance documentation
+- See also: `src/resonance/types/` for type definitions
+- See also: `src/resonance/pipeline/` for data processing pipelines
+
+## ⚠️ Stability
+This module is stable and intentionally designed.
+Do NOT refactor, rewrite, or change the architecture without:
+1. Consulting the user first
+2. Having a documented, compelling reason
+3. Understanding WHY the current design exists
+
+If something looks "wrong," it may be intentional. Ask before you chop.

package/src/resonance/types/README.md
ADDED
@@ -0,0 +1,24 @@
+# Resonance Types Directory
+
+## Purpose
+Type definitions specific to the Resonance engine - Amalfa's semantic memory and knowledge graph system.
+
+## Key Files
+
+- `index.ts` - Main exports of resonance types
+- `graph.ts` - Knowledge graph node and edge types
+- `semantic.ts` - Semantic embedding and similarity types
+
+## Related
+
+- See also: `src/resonance/README.md` for overall resonance documentation
+- See also: `src/types/` for core Amalfa types
+
+## ⚠️ Stability
+This module is stable and intentionally designed.
+Do NOT refactor, rewrite, or change the architecture without:
+1. Consulting the user first
+2. Having a documented, compelling reason
+3. Understanding WHY the current design exists
+
+If something looks "wrong," it may be intentional. Ask before you chop.

package/src/types/README.md
ADDED
@@ -0,0 +1,33 @@
+amalfa/src/types/README.md
+```
+
+# Type Definitions
+
+## Purpose
+The `types/` directory contains TypeScript type definitions used throughout the Amalfa application. These types ensure type safety and provide a single source of truth for data structures.
+
+## Key Files
+
+| File | Purpose |
+|------|---------|
+| `index.ts` | Main export barrel for all types |
+| `config.ts` | Configuration-related type definitions |
+| `resonance.ts` | Resonance engine type definitions |
+| `daemon.ts` | Daemon service type definitions |
+| `cli.ts` | CLI command type definitions |
+
+## Patterns
+
+- Use interfaces for object shapes
+- Use type aliases for unions and intersections
+- Export all types from `index.ts` for easy importing
+- Keep types focused and composable
+
+## ⚠️ Stability
+This module is stable and intentionally designed.
+Do NOT refactor, rewrite, or change the architecture without:
+1. Consulting the user first
+2. Having a documented, compelling reason
+3. Understanding WHY the current design exists
+
+If something looks "wrong," it may be intentional. Ask before you chop.

package/src/utils/README.md
ADDED
@@ -0,0 +1,30 @@
+
+# Utils Directory
+
+## Purpose
+The `utils/` directory contains shared utility functions and helper modules used throughout the Amalfa application.
+
+## Key Files
+
+| File | Purpose |
+|------|---------|
+| `index.ts` | Main exports of utility functions |
+| `file.ts` | File system operations |
+| `logger.ts` | Logging utilities |
+| `validation.ts` | Common validation helpers |
+
+## Patterns
+
+- Pure functions where possible
+- Reusable across multiple modules
+- Well-documented with JSDoc comments
+- Side effects are clearly isolated
+
+## ⚠️ Stability
+This module is stable and intentionally designed.
+Do NOT refactor, rewrite, or change the architecture without:
+1. Consulting the user first
+2. Having a documented, compelling reason
+3. Understanding WHY the current design exists
+
+If something looks "wrong," it may be intentional. Ask before you chop.
package/src/pipeline/SemanticHarvester.ts
REMOVED
@@ -1,222 +0,0 @@
-/**
- * SemanticHarvester: TypeScript Bridge to Python Sieve+Net Pipeline
- *
- * Invokes the Python harvester via subprocess and loads the resulting
- * knowledge_graph.json artifact for integration with ResonanceDB.
- *
- * @example
- * const harvester = new SemanticHarvester();
- * const graph = await harvester.harvest("playbooks/");
- * await harvester.loadIntoResonance(graph);
- */
-
-import { existsSync } from "node:fs";
-import { join } from "node:path";
-import { getLogger } from "@src/utils/Logger";
-import { $ } from "bun";
-
-export interface SemanticNode {
-  name: string;
-  type: "concept" | "document";
-  uri?: string;
-}
-
-export interface SemanticEdge {
-  source: string;
-  rel: string;
-  target: string;
-  confidence_score: number;
-  context_source?: string;
-}
-
-export interface KnowledgeGraph {
-  nodes: Record<string, { type: string; uri?: string }>;
-  edges: SemanticEdge[];
-}
-
-export class SemanticHarvester {
-  private readonly ingestDir: string;
-  private readonly venvPython: string;
-  private log = getLogger("Harvester");
-
-  constructor(projectRoot?: string) {
-    const root = projectRoot ?? process.cwd();
-    this.ingestDir = join(root, "ingest");
-    this.venvPython = join(this.ingestDir, ".venv", "bin", "python");
-  }
-
-  /**
-   * Check if the Python environment is ready.
-   */
-  async isReady(): Promise<boolean> {
-    // Check venv exists
-    if (!existsSync(this.venvPython)) {
-      this.log.warn(
-        "⚠️ Python venv not found. Run: cd ingest && python3 -m venv .venv && .venv/bin/pip install -r requirements.txt",
-      );
-      return false;
-    }
-
-    // Check classifier model exists
-    const classifierPath = join(this.ingestDir, "polyvis_classifier_v1");
-    if (!existsSync(classifierPath)) {
-      this.log.warn(
-        "⚠️ Classifier not trained. Run: cd ingest && .venv/bin/python train_classifier.py",
-      );
-      return false;
-    }
-
-    return true;
-  }
-
-  /**
-   * Harvest semantic triples from a file or directory.
-   *
-   * @param target - Path to file or directory to process
-   * @returns The extracted knowledge graph
-   */
-  async harvest(target?: string): Promise<KnowledgeGraph> {
-    if (!(await this.isReady())) {
-      throw new Error("SemanticHarvester not ready. Check Python environment.");
-    }
-
-    this.log.info("🌾 Running Python Harvester...");
-
-    const harvesterScript = join(this.ingestDir, "harvester.py");
-    const args = target ? [harvesterScript, target] : [harvesterScript];
-
-    try {
-      const result = await $`${this.venvPython} ${args}`.quiet();
-
-      if (result.exitCode !== 0) {
-        this.log.error(
-          { stderr: result.stderr.toString() },
-          "Harvester Failed",
-        );
-        throw new Error(`Harvester exited with code ${result.exitCode}`);
-      }
-
-      this.log.info(
-        { output: result.stdout.toString().trim() },
-        "Harvester Success",
-      );
-    } catch (error) {
-      this.log.error({ err: error }, "Harvester Execution Error");
-      throw error;
-    }
-
-    // Load the artifact
-    const artifactPath = join(this.ingestDir, "knowledge_graph.json");
-    const artifact = await Bun.file(artifactPath).json();
-
-    return artifact as KnowledgeGraph;
-  }
-
-  /**
-   * Get statistics about an extracted knowledge graph.
-   */
-  getStats(graph: KnowledgeGraph): {
-    nodes: number;
-    edges: number;
-    concepts: number;
-    documents: number;
-  } {
-    const nodes = Object.keys(graph.nodes).length;
-    const edges = graph.edges.length;
-    const concepts = Object.values(graph.nodes).filter(
-      (n) => n.type === "concept",
-    ).length;
-    const documents = Object.values(graph.nodes).filter(
-      (n) => n.type === "document",
-    ).length;
-
-    return { nodes, edges, concepts, documents };
-  }
-
-  /**
-   * Load a harvested knowledge graph into ResonanceDB.
-   *
-   * @param graph - The extracted knowledge graph from harvest()
-   * @returns Statistics about the loaded data
-   */
-  async loadIntoResonance(
-    graph: KnowledgeGraph,
-  ): Promise<{ nodesLoaded: number; edgesLoaded: number }> {
-    // Lazy import to avoid circular dependencies
-    const { ResonanceDB } = await import("@src/resonance/db");
-
-    const db = ResonanceDB.init();
-    let nodesLoaded = 0;
-    let edgesLoaded = 0;
-
-    try {
-      db.beginTransaction();
-
-      // Load nodes
-      for (const [name, meta] of Object.entries(graph.nodes)) {
-        const nodeId = `semantic:${name.toLowerCase().replace(/\s+/g, "-")}`;
-        db.insertNode({
-          id: nodeId,
-          type: meta.type,
-          label: name,
-          domain: "semantic",
-          layer: "extracted",
-          meta: { uri: meta.uri, originalName: name },
-        });
-        nodesLoaded++;
-      }
-
-      // Load edges
-      for (const edge of graph.edges) {
-        const sourceId = `semantic:${edge.source.toLowerCase().replace(/\s+/g, "-")}`;
-        const targetId = `semantic:${edge.target.toLowerCase().replace(/\s+/g, "-")}`;
-
-        db.insertSemanticEdge(
-          sourceId,
-          targetId,
-          edge.rel.toLowerCase(),
-          edge.confidence_score,
-          1.0, // Default veracity
-          edge.context_source,
-        );
-        edgesLoaded++;
-      }
-
-      db.commit();
-      this.log.info(
-        { nodes: nodesLoaded, edges: edgesLoaded },
-        "✅ Loaded Knowledge Graph into ResonanceDB",
-      );
-    } catch (error) {
-      db.rollback();
-      throw error;
-    } finally {
-      db.close();
-    }
-
-    return { nodesLoaded, edgesLoaded };
-  }
-}
-
-// --- CLI Test ---
-if (import.meta.main) {
-  const harvester = new SemanticHarvester();
-  // For CLI output, we can probably rely on the logger since it goes to stderr.
-  // Maybe we want pure console.log for "user facing" CLI output?
-  // But Logger.ts is configured to use pino. pino writes JSON.
-  // If the user runs this manually, they might pipe to pino-pretty.
-  // Let's keep it structured.
-
-  const log = getLogger("CLI");
-
-  log.info("Checking readiness...");
-  const ready = await harvester.isReady();
-  log.info({ ready }, "Readiness Check");
-
-  if (ready) {
-    const target = process.argv[2];
-    const graph = await harvester.harvest(target);
-    const stats = harvester.getStats(graph);
-    log.info({ stats }, "📊 Extraction Stats");
-  }
-}

package/src/resonance/pipeline/README.md
REMOVED
@@ -1,7 +0,0 @@
-# 🔄 Resonance Pipeline
-
-Data processing and extraction steps for the Resonance Engine.
-
-## Contents
-- **`extract.ts`**: Extracts high-value terms from the knowledge graph for frontend use.
-- **`transform_docs.ts`**: Prepares markdown documents for ingestion.