amalfa 1.0.15 → 1.0.17
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +34 -19
- package/bun.lock +3 -0
- package/package.json +2 -1
- package/src/cli.ts +115 -11
- package/src/config/defaults.ts +20 -0
- package/src/core/EdgeWeaver.ts +31 -0
- package/src/daemon/index.ts +5 -16
- package/src/mcp/index.ts +3 -2
- package/src/pipeline/AmalfaIngestor.ts +33 -16
- package/src/pipeline/PreFlightAnalyzer.ts +5 -1
- package/src/resonance/services/vector-daemon.ts +4 -2
- package/src/utils/DaemonManager.ts +8 -6
- package/src/utils/Notifications.ts +65 -0
- package/src/utils/ServiceLifecycle.ts +11 -0
- package/src/utils/StatsTracker.ts +212 -0
- package/src/utils/ZombieDefense.ts +10 -0
- package/amalfa.svg +0 -87
package/README.md
CHANGED

@@ -229,34 +229,47 @@ Agents generate knowledge through structured reflection. Amalfa provides semanti

 ## Implementation Status

-###
+### ✅ Core Functionality (v1.0 - Released)

--
--
--
--
--
+- ✅ **MCP Server** - stdio transport, tools, resources
+- ✅ **Vector Search** - FastEmbed embeddings (384-dim), semantic search
+- ✅ **Database** - SQLite with hollow nodes, FAFCAS protocol
+- ✅ **Ingestion Pipeline** - Markdown → nodes + embeddings
+- ✅ **CLI** - init, serve, stats, doctor, servers, daemon, vector
+- ✅ **Service Management** - Vector daemon, file watcher, lifecycle
+- ✅ **Pre-flight Validation** - Check markdown before ingestion

-### Phase
+### 🚧 Phase 1: Auto-Augmentation (In Progress)
+
+- [ ] Entity extraction from markdown
+- [ ] Auto-linking (wiki-style [[links]])
+- [ ] Tag extraction and indexing
+- [ ] Git-based auditing for augmentations
+- [ ] Automated file watcher updates
+
+### 📋 Phase 2: Latent Space Organization (Planned)

 - [ ] Document clustering (HDBSCAN)
 - [ ] Cluster label generation
 - [ ] Confidence-based tagging
 - [ ] Topic modeling (BERTopic)
+- [ ] Self-organizing taxonomy

-### Phase 3:
+### 🔗 Phase 3: Graph Intelligence (Planned)

-- [ ] K-nearest neighbor
+- [ ] K-nearest neighbor recommendations
 - [ ] Suggested reading lists
-- [ ] Temporal
+- [ ] Temporal sequence tracking
 - [ ] Backlink maintenance
+- [ ] Graph traversal tools

-### Phase 4: Learning from
+### 🎯 Phase 4: Learning from Feedback (Future)

-- [ ] Track human edits
+- [ ] Track human edits to augmentations
 - [ ] Adjust confidence thresholds
-- [ ] Improve extraction
-- [ ] Weekly digest
+- [ ] Improve extraction heuristics
+- [ ] Weekly knowledge digest
+- [ ] Multi-agent coordination

 ---

@@ -332,12 +345,14 @@ bun run format # Biome format

 ## Contributing

-Amalfa is in
+Amalfa is in active development. Contributions are welcome!

-**
-- ⭐ Star the repo if
--
--
+**How to contribute:**
+- ⭐ Star the repo if you find it useful
+- 🐛 Report bugs or request features via issues
+- 📝 Improve documentation
+- 🚀 Submit PRs for new features or fixes
+- 💬 Join discussions about the vision and roadmap

 ---

package/bun.lock
CHANGED

@@ -5,6 +5,7 @@
       "name": "polyvis",
       "dependencies": {
         "@modelcontextprotocol/sdk": "1.25.0",
+        "amalfa": "^1.0.16",
         "fastembed": "2.0.0",
         "pino": "^10.1.0",
       },

@@ -61,6 +62,8 @@

     "ajv-formats": ["ajv-formats@3.0.1", "", { "dependencies": { "ajv": "^8.0.0" } }, "sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ=="],

+    "amalfa": ["amalfa@1.0.16", "", { "dependencies": { "@modelcontextprotocol/sdk": "1.25.0", "fastembed": "2.0.0", "pino": "^10.1.0" }, "bin": { "amalfa": "src/cli.ts" } }, "sha512-bpURaOxfbUR8j2t8R06gJgVurVXMCigLbD8f9TY+9PraF4vXj2Flpy1yc6RKA1S6SnB4fNiLX+BteKbvh2Cavw=="],
+
     "atomic-sleep": ["atomic-sleep@1.0.0", "", {}, "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ=="],

     "body-parser": ["body-parser@2.2.1", "", { "dependencies": { "bytes": "^3.1.2", "content-type": "^1.0.5", "debug": "^4.4.3", "http-errors": "^2.0.0", "iconv-lite": "^0.7.0", "on-finished": "^2.4.1", "qs": "^6.14.0", "raw-body": "^3.0.1", "type-is": "^2.0.1" } }, "sha512-nfDwkulwiZYQIGwxdy0RUmowMhKcFVcYXUU7m4QlKYim1rUtg83xm2yjZ40QjDuc291AJjjeSc9b++AWHSgSHw=="],

package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "amalfa",
-  "version": "1.0.
+  "version": "1.0.17",
   "description": "Local-first knowledge graph engine for AI agents. Transforms markdown into searchable memory with MCP protocol.",
   "license": "MIT",
   "homepage": "https://github.com/pjsvis/amalfa#readme",

@@ -54,6 +54,7 @@
   },
   "dependencies": {
     "@modelcontextprotocol/sdk": "1.25.0",
+    "amalfa": "^1.0.17",
     "fastembed": "2.0.0",
     "pino": "^10.1.0"
   }

package/src/cli.ts
CHANGED

@@ -3,7 +3,7 @@ import { existsSync, statSync } from "node:fs";
 import { join } from "node:path";
 import { spawn } from "node:child_process";

-const VERSION = "1.0.
+const VERSION = "1.0.16";

 // Database path loaded from config (lazy loaded per command)
 let DB_PATH: string | null = null;

@@ -23,6 +23,7 @@ Commands:
   init [--force]    Initialize database from markdown files
   serve             Start MCP server (stdio transport)
   stats             Show database statistics
+  validate          Validate database health (pre-publish gate)
   doctor            Check installation and configuration
   setup-mcp         Generate MCP configuration JSON
   daemon <action>   Manage file watcher (start|stop|status|restart)

@@ -67,9 +68,7 @@ async function checkDatabase(): Promise<boolean> {

 To initialize AMALFA:
   1. Create markdown files in ./docs/ (or your preferred location)
-  2. Run: amalfa init
-
-For now, you can manually create the .amalfa/ directory and database.
+  2. Run: amalfa init
 `);
     return false;
   }

@@ -129,7 +128,7 @@ Source: ./docs (markdown files)
 Last modified: ${new Date(statSync(dbPath).mtime).toISOString()}

 🔍 To search: Use with Claude Desktop or other MCP client
-📝 To update: Run 'amalfa daemon start'
+📝 To update: Run 'amalfa daemon start' to watch for file changes
 `);
   } catch (error) {
     console.error("❌ Failed to read database statistics:", error);

@@ -174,7 +173,7 @@ async function cmdInit() {
     for (const issue of report.issues.filter((i) => i.severity === "error")) {
       console.error(`  - ${issue.path}: ${issue.details}`);
     }
-
+    console.error("\nSee .amalfa/logs/pre-flight.log for details and recommendations");
     console.error("\nFix these issues and try again.");
     process.exit(1);
   }

@@ -185,14 +184,14 @@ async function cmdInit() {
     for (const issue of report.issues.filter((i) => i.severity === "warning")) {
       console.warn(`  - ${issue.path}: ${issue.details}`);
     }
-
+    console.warn("\nSee .amalfa/logs/pre-flight.log for recommendations");
     console.warn("\nTo proceed anyway, use: amalfa init --force");
     process.exit(1);
   }

   if (report.validFiles === 0) {
     console.error("\n❌ No valid markdown files found");
-
+    console.error("See .amalfa/logs/pre-flight.log for details");
     process.exit(1);
   }

@@ -226,6 +225,21 @@ async function cmdInit() {
   db.close();

   if (result.success) {
+    // Record database snapshot for tracking
+    const { StatsTracker } = await import("./utils/StatsTracker");
+    const tracker = new StatsTracker();
+    const fileSize = statSync(dbPath).size;
+    const dbSizeMB = fileSize / 1024 / 1024;
+
+    await tracker.recordSnapshot({
+      timestamp: new Date().toISOString(),
+      nodes: result.stats.nodes,
+      edges: result.stats.edges,
+      embeddings: result.stats.vectors,
+      dbSizeMB,
+      version: VERSION,
+    });
+
     console.log("\n✅ Initialization complete!");
     console.log("\n📊 Summary:");
     console.log(`  Files processed: ${result.stats.files}`);

@@ -233,6 +247,7 @@ async function cmdInit() {
     console.log(`  Edges created: ${result.stats.edges}`);
     console.log(`  Embeddings: ${result.stats.vectors}`);
     console.log(`  Duration: ${result.stats.durationSec.toFixed(2)}s\n`);
+    console.log("📊 Snapshot saved to: .amalfa/stats-history.json\n");
     console.log("Next steps:");
     console.log("  amalfa serve   # Start MCP server");
     console.log("  amalfa daemon  # Watch for file changes (coming soon)");

@@ -341,11 +356,12 @@ async function cmdSetupMcp() {

 async function cmdServers() {
   const showDot = args.includes("--dot");
+  const { AMALFA_DIRS } = await import("./config/defaults");

   const SERVICES = [
-    { name: "MCP Server", pidFile: "
-    { name: "Vector Daemon", pidFile: "
-    { name: "File Watcher", pidFile: "
+    { name: "MCP Server", pidFile: join(AMALFA_DIRS.runtime, "mcp.pid"), port: "stdio", id: "mcp", cmd: "amalfa serve" },
+    { name: "Vector Daemon", pidFile: join(AMALFA_DIRS.runtime, "vector-daemon.pid"), port: "3010", id: "vector", cmd: "amalfa vector start" },
+    { name: "File Watcher", pidFile: join(AMALFA_DIRS.runtime, "daemon.pid"), port: "-", id: "watcher", cmd: "amalfa daemon start" },
   ];

   async function isRunning(pid: number): Promise<boolean> {

@@ -461,6 +477,90 @@ async function cmdServers() {
   console.log("\n💡 Commands: amalfa serve | amalfa vector start | amalfa daemon start\n");
 }

+async function cmdValidate() {
+  console.log("🛡️ AMALFA Database Validation\n");
+
+  // Check database exists
+  if (!(await checkDatabase())) {
+    console.error("\n❌ Validation failed: Database not found");
+    process.exit(1);
+  }
+
+  const dbPath = await getDbPath();
+  const { ResonanceDB } = await import("./resonance/db");
+  const { StatsTracker } = await import("./utils/StatsTracker");
+
+  const db = new ResonanceDB(dbPath);
+  const tracker = new StatsTracker();
+
+  try {
+    // Get current stats
+    const stats = db.getStats();
+    const fileSize = statSync(dbPath).size;
+    const dbSizeMB = fileSize / 1024 / 1024;
+
+    const currentSnapshot = {
+      timestamp: new Date().toISOString(),
+      nodes: stats.nodes,
+      edges: stats.edges,
+      embeddings: stats.vectors,
+      dbSizeMB,
+      version: VERSION,
+    };
+
+    // Validate against history
+    const validation = tracker.validate(currentSnapshot);
+
+    console.log("📊 Current State:");
+    console.log(`  Nodes: ${stats.nodes}`);
+    console.log(`  Edges: ${stats.edges}`);
+    console.log(`  Embeddings: ${stats.vectors}`);
+    console.log(`  Database size: ${dbSizeMB.toFixed(2)} MB\n`);
+
+    if (validation.errors.length > 0) {
+      console.error("❌ ERRORS (Must Fix):");
+      for (const error of validation.errors) {
+        console.error(`  - ${error}`);
+      }
+      console.error("");
+    }
+
+    if (validation.warnings.length > 0) {
+      console.warn("⚠️ WARNINGS:");
+      for (const warning of validation.warnings) {
+        console.warn(`  - ${warning}`);
+      }
+      console.warn("");
+    }
+
+    // Show historical trend
+    const snapshots = tracker.getAllSnapshots();
+    if (snapshots.length > 1) {
+      console.log(tracker.getSummary());
+      console.log("");
+    }
+
+    if (validation.valid) {
+      console.log("✅ Validation passed! Database is healthy.");
+      if (validation.warnings.length > 0) {
+        console.log("\n💡 Consider addressing warnings before publishing.");
+      }
+    } else {
+      console.error("❌ Validation failed! Database has critical issues.");
+      console.error("\nFix errors before publishing:");
+      console.error("  - Run: amalfa init");
+      db.close();
+      process.exit(1);
+    }
+
+    db.close();
+  } catch (error) {
+    db.close();
+    console.error("❌ Validation failed:", error);
+    process.exit(1);
+  }
+}
+
 async function cmdDoctor() {
   console.log("🩺 AMALFA Health Check\n");

@@ -554,6 +654,10 @@ async function main() {
       await cmdDoctor();
       break;

+    case "validate":
+      await cmdValidate();
+      break;
+
     case "init":
       await cmdInit();
       break;

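The new validate command is built as a publish gate: it exits non-zero when the StatsTracker checks report errors or the database is missing, and zero when the graph looks healthy. A hypothetical wiring of the two commands (the release-script placement is an assumption, not part of the package's own scripts):

    amalfa init        # rebuild the graph and record a snapshot in .amalfa/stats-history.json
    amalfa validate    # exits 1 on an empty database or missing embeddings, 0 when healthy
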
package/src/config/defaults.ts
CHANGED

@@ -3,6 +3,26 @@
  * Default settings that can be overridden via amalfa.config.{ts,js,json}
  */

+import { mkdirSync, existsSync } from "node:fs";
+import { join } from "node:path";
+
+/** AMALFA directory structure */
+export const AMALFA_DIRS = {
+  base: ".amalfa",
+  get logs() { return join(this.base, "logs"); },
+  get runtime() { return join(this.base, "runtime"); },
+} as const;
+
+/** Initialize AMALFA directory structure */
+export function initAmalfaDirs(): void {
+  const dirs = [AMALFA_DIRS.base, AMALFA_DIRS.logs, AMALFA_DIRS.runtime];
+  for (const dir of dirs) {
+    if (!existsSync(dir)) {
+      mkdirSync(dir, { recursive: true });
+    }
+  }
+}
+
 export interface AmalfaConfig {
   /** @deprecated Use sources array instead */
   source?: string;

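A minimal usage sketch of the two helpers added above; the resolved paths follow directly from the getters, and the "@src" import alias is the one the other services in this diff use:

    import { join } from "node:path";
    import { AMALFA_DIRS, initAmalfaDirs } from "@src/config/defaults";

    initAmalfaDirs(); // creates .amalfa/, .amalfa/logs/ and .amalfa/runtime/ if they are missing
    const pidFile = join(AMALFA_DIRS.runtime, "mcp.pid"); // ".amalfa/runtime/mcp.pid"
    const logFile = join(AMALFA_DIRS.logs, "mcp.log");    // ".amalfa/logs/mcp.log"
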
package/src/core/EdgeWeaver.ts
CHANGED

@@ -48,6 +48,7 @@ export class EdgeWeaver {
     this.processTags(sourceNodeId, content);
     this.processWikiLinks(sourceNodeId, content);
     this.processMetadataTags(sourceNodeId, content);
+    this.processMarkdownLinks(sourceNodeId, content);
   }

   private processTags(sourceId: string, content: string): void {

@@ -127,6 +128,36 @@
     }
   }

+  private processMarkdownLinks(sourceId: string, content: string): void {
+    // Match standard markdown links: [text](./file.md) or [text](file.md)
+    const matches = content.matchAll(/\[([^\]]+)\]\(([^)]+\.md)\)/g);
+
+    for (const match of matches) {
+      if (!match[2]) continue;
+      let linkPath = match[2].trim();
+
+      // Skip external links (http/https)
+      if (linkPath.startsWith("http://") || linkPath.startsWith("https://")) {
+        continue;
+      }
+
+      // Extract filename from path
+      const filename = linkPath.split("/").pop();
+      if (!filename) continue;
+
+      // Convert filename to node ID (same logic as ingestion)
+      const targetId = filename
+        .replace(".md", "")
+        .toLowerCase()
+        .replace(/[^a-z0-9-]/g, "-");
+
+      // Insert edge only if target node exists in lexicon
+      if (this.lexicon.has(targetId)) {
+        this.safeInsertEdge(sourceId, targetId, "LINKS_TO");
+      }
+    }
+  }
+
   private safeInsertEdge(source: string, target: string, type: string) {
     const check = LouvainGate.check(this.db.getRawDb(), source, target);
     if (check.allowed) {

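To illustrate what the new pass picks up, here is a small sketch of the same regex and slug logic applied to a single link; the file name is a made-up example, not one from the package:

    const content = "See the [design notes](./docs/Design Notes.md) for background.";
    const match = [...content.matchAll(/\[([^\]]+)\]\(([^)]+\.md)\)/g)][0];
    const filename = match?.[2]?.split("/").pop();   // "Design Notes.md"
    const targetId = filename
      ?.replace(".md", "")
      .toLowerCase()
      .replace(/[^a-z0-9-]/g, "-");                  // "design-notes"
    // An edge of type "LINKS_TO" from the source node to "design-notes" is only
    // inserted when that ID is already in the lexicon, i.e. the target file was ingested.
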
package/src/daemon/index.ts
CHANGED

@@ -7,11 +7,12 @@
 import { existsSync } from "node:fs";
 import { watch } from "node:fs";
 import { join } from "node:path";
-import { loadConfig } from "@src/config/defaults";
+import { loadConfig, AMALFA_DIRS } from "@src/config/defaults";
 import { AmalfaIngestor } from "@src/pipeline/AmalfaIngestor";
 import { ResonanceDB } from "@src/resonance/db";
 import { getLogger } from "@src/utils/Logger";
 import { ServiceLifecycle } from "@src/utils/ServiceLifecycle";
+import { sendNotification } from "@src/utils/Notifications";

 const args = process.argv.slice(2);
 const command = args[0] || "serve";

@@ -20,8 +21,8 @@ const log = getLogger("AmalfaDaemon");
 // Service lifecycle management
 const lifecycle = new ServiceLifecycle({
   name: "AMALFA-Daemon",
-  pidFile: "
-  logFile: "
+  pidFile: join(AMALFA_DIRS.runtime, "daemon.pid"),
+  logFile: join(AMALFA_DIRS.logs, "daemon.log"),
   entryPoint: "src/daemon/index.ts",
 });

@@ -154,7 +155,7 @@ function triggerIngestion(debounceMs: number) {
         retryQueue.delete(file);
       }

-      // Send notification
+      // Send notification
       await sendNotification(
         "AMALFA",
         `Knowledge graph updated (${batchSize} file${batchSize > 1 ? "s" : ""})`,

@@ -209,17 +210,5 @@ function triggerIngestion(debounceMs: number) {
   }, debounceMs);
 }

-/**
- * Send native notification (macOS)
- */
-async function sendNotification(title: string, message: string) {
-  try {
-    const script = `display notification "${message}" with title "${title}"`;
-    await Bun.spawn(["osascript", "-e", script]);
-  } catch (e) {
-    log.debug({ err: e }, "Failed to send notification");
-  }
-}
-
 // Run service lifecycle dispatcher
 await lifecycle.run(command, main);

package/src/mcp/index.ts
CHANGED

@@ -14,6 +14,7 @@ import { ResonanceDB } from "@src/resonance/db";
 import { DaemonManager } from "../utils/DaemonManager";
 import { getLogger } from "../utils/Logger";
 import { ServiceLifecycle } from "../utils/ServiceLifecycle";
+import { AMALFA_DIRS } from "@src/config/defaults";

 const args = process.argv.slice(2);
 const command = args[0] || "serve";

@@ -23,8 +24,8 @@ const log = getLogger("MCP");

 const lifecycle = new ServiceLifecycle({
   name: "MCP",
-  pidFile: "
-  logFile: "
+  pidFile: join(AMALFA_DIRS.runtime, "mcp.pid"),
+  logFile: join(AMALFA_DIRS.logs, "mcp.log"),
   entryPoint: "src/mcp/index.ts",
 });

package/src/pipeline/AmalfaIngestor.ts
CHANGED

@@ -66,25 +66,25 @@ export class AmalfaIngestor {
     };
   }

-  //
-
-
+  // TWO-PASS INGESTION:
+  // Pass 1: Create all nodes (without edges)
+  // Pass 2: Create edges (now that all nodes exist in lexicon)

-  // Process files in batches
   const BATCH_SIZE = 50;
   let processedCount = 0;

-
-
-
+  // PASS 1: Nodes only
+  for (let i = 0; i < files.length; i++) {
+    const filePath = files[i];
+    if (!filePath) continue;

-
-
-
-
+    // Start batch transaction
+    if (i % BATCH_SIZE === 0) {
+      this.db.beginTransaction();
+    }

-
-
+    await this.processFile(filePath, embedder, null, tokenizer); // null = skip edge weaving
+    processedCount++;

     // Progress indicator
     if (processedCount % 10 === 0 || processedCount === files.length) {

@@ -98,6 +98,21 @@ export class AmalfaIngestor {
     }
   }

+  // PASS 2: Edges (now lexicon is populated)
+  const lexicon = this.buildLexicon();
+  const weaver = new EdgeWeaver(this.db, lexicon);
+
+  console.log("\n🔗 Creating edges...");
+  this.db.beginTransaction();
+  for (const filePath of files) {
+    if (!filePath) continue;
+    const content = await Bun.file(filePath).text();
+    const filename = filePath.split("/").pop() || "unknown";
+    const id = filename.replace(".md", "").toLowerCase().replace(/[^a-z0-9-]/g, "-");
+    weaver.weave(id, content);
+  }
+  this.db.commit();
+
   // Force WAL checkpoint for persistence
   this.log.info("💾 Forcing WAL checkpoint...");
   this.db.getRawDb().run("PRAGMA wal_checkpoint(TRUNCATE);");

@@ -192,7 +207,7 @@ export class AmalfaIngestor {
   private async processFile(
     filePath: string,
     embedder: Embedder,
-    weaver: EdgeWeaver,
+    weaver: EdgeWeaver | null,
     tokenizer: TokenizerService,
   ): Promise<void> {
     try {

@@ -249,8 +264,10 @@ export class AmalfaIngestor {

     this.db.insertNode(node);

-    // Weave edges
-    weaver
+    // Weave edges (only if weaver provided - skipped in pass 1)
+    if (weaver) {
+      weaver.weave(id, content);
+    }
   } catch (e) {
     this.log.warn({ err: e, file: filePath }, "⚠️ Failed to process file");
   }

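In outline, ingestion now runs the two passes sketched below. This is a simplified view of the class's internal flow, not a public API; idFromFilename is shorthand for the filename-to-slug logic shown in pass 2 above, not a real helper in the package:

    // Pass 1: nodes only - processFile() gets a null weaver, so every file becomes
    // a node (and embedding) before any edge is attempted.
    for (const filePath of files) {
      await this.processFile(filePath, embedder, null, tokenizer);
    }

    // Pass 2: edges only - by now the lexicon contains every node ID, so LINKS_TO
    // targets that appear later in the file list are no longer silently dropped.
    const weaver = new EdgeWeaver(this.db, this.buildLexicon());
    for (const filePath of files) {
      weaver.weave(idFromFilename(filePath), await Bun.file(filePath).text());
    }

The ordering matters because edges are only inserted when the target ID already exists in the lexicon, so edge weaving has to wait until every node has been created.
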
package/src/pipeline/PreFlightAnalyzer.ts
CHANGED

@@ -7,13 +7,14 @@
  * - Empty or invalid files
  * - Estimated resource usage
  *
- * Generates .amalfa
+ * Generates .amalfa/logs/pre-flight.log with recommendations.
  */

 import { existsSync, lstatSync, readdirSync, realpathSync, statSync, writeFileSync } from "node:fs";
 import { join, relative } from "node:path";
 import { getLogger } from "@src/utils/Logger";
 import type { AmalfaConfig } from "@src/config/defaults";
+import { AMALFA_DIRS, initAmalfaDirs } from "@src/config/defaults";

 const log = getLogger("PreFlightAnalyzer");

@@ -48,9 +49,12 @@ export class PreFlightAnalyzer {
   private config: AmalfaConfig;
   private visitedPaths = new Set<string>();
   private issues: FileIssue[] = [];
+  private logPath = join(AMALFA_DIRS.logs, "pre-flight.log");

   constructor(config: AmalfaConfig) {
     this.config = config;
+    // Ensure .amalfa directories exist
+    initAmalfaDirs();
   }

   /**

package/src/resonance/services/vector-daemon.ts
CHANGED

@@ -5,10 +5,12 @@
  */

 import { serve } from "bun";
+import { join } from "node:path";
 import { EmbeddingModel, FlagEmbedding } from "fastembed";
 import { toFafcas } from "@src/resonance/db";
 import { getLogger } from "@src/utils/Logger";
 import { ServiceLifecycle } from "@src/utils/ServiceLifecycle";
+import { AMALFA_DIRS } from "@src/config/defaults";

 const log = getLogger("VectorDaemon");
 const PORT = Number(process.env.VECTOR_PORT || 3010);

@@ -16,8 +18,8 @@ const PORT = Number(process.env.VECTOR_PORT || 3010);
 // Service lifecycle management
 const lifecycle = new ServiceLifecycle({
   name: "Vector-Daemon",
-  pidFile: "
-  logFile: "
+  pidFile: join(AMALFA_DIRS.runtime, "vector-daemon.pid"),
+  logFile: join(AMALFA_DIRS.logs, "vector-daemon.log"),
   entryPoint: "src/resonance/services/vector-daemon.ts",
 });

package/src/utils/DaemonManager.ts
CHANGED

@@ -1,5 +1,7 @@
 import { existsSync } from "node:fs";
+import { join } from "node:path";
 import { ServiceLifecycle } from "./ServiceLifecycle";
+import { AMALFA_DIRS } from "@src/config/defaults";

 export interface DaemonStatus {
   running: boolean;

@@ -18,15 +20,15 @@ export class DaemonManager {
   constructor() {
     this.vectorLifecycle = new ServiceLifecycle({
       name: "Vector-Daemon",
-      pidFile: "
-      logFile: "
+      pidFile: join(AMALFA_DIRS.runtime, "vector-daemon.pid"),
+      logFile: join(AMALFA_DIRS.logs, "vector-daemon.log"),
       entryPoint: "src/resonance/services/vector-daemon.ts",
     });

     this.watcherLifecycle = new ServiceLifecycle({
       name: "File-Watcher",
-      pidFile: "
-      logFile: "
+      pidFile: join(AMALFA_DIRS.runtime, "daemon.pid"),
+      logFile: join(AMALFA_DIRS.logs, "daemon.log"),
       entryPoint: "src/daemon/index.ts",
     });
   }

@@ -63,7 +65,7 @@
    * Check if vector daemon is running
    */
   async checkVectorDaemon(): Promise<DaemonStatus> {
-    const pid = await this.readPid("
+    const pid = await this.readPid(join(AMALFA_DIRS.runtime, "vector-daemon.pid"));
     if (!pid) {
       return { running: false };
     }

@@ -96,7 +98,7 @@
    * Check if file watcher daemon is running
    */
   async checkFileWatcher(): Promise<DaemonStatus> {
-    const pid = await this.readPid("
+    const pid = await this.readPid(join(AMALFA_DIRS.runtime, "daemon.pid"));
     if (!pid) {
       return { running: false };
     }

package/src/utils/Notifications.ts
ADDED

@@ -0,0 +1,65 @@
+import { spawn } from "node:child_process";
+import { platform } from "node:os";
+
+/**
+ * Send a native desktop notification
+ * Works on macOS (osascript) and Linux (notify-send)
+ */
+export async function sendNotification(
+  title: string,
+  message: string,
+): Promise<void> {
+  const os = platform();
+
+  try {
+    if (os === "darwin") {
+      // macOS: Use osascript
+      const script = `display notification "${escapeForAppleScript(message)}" with title "${escapeForAppleScript(title)}"`;
+      await execCommand("osascript", ["-e", script]);
+    } else if (os === "linux") {
+      // Linux: Use notify-send (requires libnotify)
+      await execCommand("notify-send", [title, message]);
+    } else {
+      // Windows/other: Not supported yet
+      // Could add Windows toast notifications via PowerShell if needed
+    }
+  } catch (_e) {
+    // Silently fail - notifications are non-critical
+    // Could log to debug if needed
+  }
+}
+
+/**
+ * Escape strings for AppleScript
+ */
+function escapeForAppleScript(str: string): string {
+  return str.replace(/\\/g, "\\\\").replace(/"/g, '\\"');
+}
+
+/**
+ * Execute a command and wait for completion
+ */
+function execCommand(command: string, args: string[]): Promise<void> {
+  return new Promise((resolve, reject) => {
+    const proc = spawn(command, args, {
+      stdio: "ignore",
+      detached: false,
+    });
+
+    proc.on("close", (code) => {
+      if (code === 0) {
+        resolve();
+      } else {
+        reject(new Error(`Command exited with code ${code}`));
+      }
+    });
+
+    proc.on("error", reject);
+
+    // Timeout after 5 seconds
+    setTimeout(() => {
+      proc.kill();
+      reject(new Error("Notification timeout"));
+    }, 5000);
+  });
+}

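Callers use this as a single fire-and-forget await; failures are swallowed inside the helper, so call sites such as the file watcher above need no try/catch. The message below is modelled on the daemon's ingestion notification, with an illustrative file count:

    import { sendNotification } from "@src/utils/Notifications";

    await sendNotification("AMALFA", "Knowledge graph updated (3 files)");
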
package/src/utils/ServiceLifecycle.ts
CHANGED

@@ -1,6 +1,8 @@
 import { existsSync } from "node:fs";
 import { unlink } from "node:fs/promises";
+import { join } from "node:path";
 import { ZombieDefense } from "./ZombieDefense";
+import { AMALFA_DIRS, initAmalfaDirs } from "@src/config/defaults";

 export interface ServiceConfig {
   name: string; // e.g. "Daemon"

@@ -25,6 +27,9 @@ export class ServiceLifecycle {
    * Start the service in the background (detached).
    */
   async start() {
+    // Ensure .amalfa directories exist
+    initAmalfaDirs();
+
     // Enforce clean state first (kill duplicates)
     await ZombieDefense.assertClean(this.config.name, true);

@@ -126,11 +131,17 @@
    * Use this to wrap your actual server startup code.
    */
   async serve(serverLogic: () => Promise<void>, checkZombies = true) {
+    // Ensure .amalfa directories exist
+    initAmalfaDirs();
+
     // Enforce clean state (ensure we aren't running as a zombie of ourselves)
     if (checkZombies) {
       await ZombieDefense.assertClean(`${this.config.name} (Serve)`);
     }

+    // Write PID file for this serving process
+    await Bun.write(this.config.pidFile, process.pid.toString());
+
     // Register cleanup handlers to remove PID file on exit/crash/kill
     let cleanupCalled = false;
     const cleanup = async (signal?: string) => {

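The three services in this release all drive the lifecycle the same way; here is a sketch using the vector daemon's values from this diff, with the server body left as a placeholder:

    import { join } from "node:path";
    import { AMALFA_DIRS } from "@src/config/defaults";
    import { ServiceLifecycle } from "@src/utils/ServiceLifecycle";

    const lifecycle = new ServiceLifecycle({
      name: "Vector-Daemon",
      pidFile: join(AMALFA_DIRS.runtime, "vector-daemon.pid"),
      logFile: join(AMALFA_DIRS.logs, "vector-daemon.log"),
      entryPoint: "src/resonance/services/vector-daemon.ts",
    });

    await lifecycle.serve(async () => {
      // ... start the embedding HTTP server here ...
    });
    // serve() now creates the .amalfa directories, writes this process's PID to
    // pidFile, and removes it again through the cleanup handlers on exit.
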
package/src/utils/StatsTracker.ts
ADDED

@@ -0,0 +1,212 @@
+import { existsSync, mkdirSync } from "node:fs";
+import { join } from "node:path";
+import { AMALFA_DIRS } from "@src/config/defaults";
+
+export interface DatabaseSnapshot {
+  timestamp: string;
+  nodes: number;
+  edges: number;
+  embeddings: number;
+  dbSizeMB: number;
+  version?: string;
+}
+
+export interface StatsHistory {
+  snapshots: DatabaseSnapshot[];
+  created: string;
+  lastUpdated: string;
+}
+
+/**
+ * Tracks database statistics over time for health monitoring
+ * and pre-publish validation gates
+ */
+export class StatsTracker {
+  private statsFile: string;
+
+  constructor() {
+    this.statsFile = join(AMALFA_DIRS.base, "stats-history.json");
+  }
+
+  /**
+   * Load historical stats from disk
+   */
+  private loadHistory(): StatsHistory {
+    if (!existsSync(this.statsFile)) {
+      return {
+        snapshots: [],
+        created: new Date().toISOString(),
+        lastUpdated: new Date().toISOString(),
+      };
+    }
+
+    try {
+      const content = Bun.file(this.statsFile).text();
+      return JSON.parse(content as unknown as string) as StatsHistory;
+    } catch {
+      // If file is corrupted, start fresh
+      return {
+        snapshots: [],
+        created: new Date().toISOString(),
+        lastUpdated: new Date().toISOString(),
+      };
+    }
+  }
+
+  /**
+   * Save historical stats to disk
+   */
+  private async saveHistory(history: StatsHistory): Promise<void> {
+    // Ensure .amalfa directory exists
+    if (!existsSync(AMALFA_DIRS.base)) {
+      mkdirSync(AMALFA_DIRS.base, { recursive: true });
+    }
+
+    await Bun.write(this.statsFile, JSON.stringify(history, null, 2));
+  }
+
+  /**
+   * Record a new database snapshot
+   */
+  async recordSnapshot(snapshot: DatabaseSnapshot): Promise<void> {
+    const history = this.loadHistory();
+
+    // Add timestamp if not provided
+    if (!snapshot.timestamp) {
+      snapshot.timestamp = new Date().toISOString();
+    }
+
+    history.snapshots.push(snapshot);
+    history.lastUpdated = snapshot.timestamp;
+
+    // Keep only last 100 snapshots to avoid unbounded growth
+    if (history.snapshots.length > 100) {
+      history.snapshots = history.snapshots.slice(-100);
+    }
+
+    await this.saveHistory(history);
+  }
+
+  /**
+   * Get the most recent snapshot
+   */
+  getLatestSnapshot(): DatabaseSnapshot | null {
+    const history = this.loadHistory();
+    return history.snapshots.length > 0
+      ? history.snapshots[history.snapshots.length - 1]
+      : null;
+  }
+
+  /**
+   * Get all snapshots
+   */
+  getAllSnapshots(): DatabaseSnapshot[] {
+    const history = this.loadHistory();
+    return history.snapshots;
+  }
+
+  /**
+   * Validate current database state against history
+   * Returns validation result with any warnings or errors
+   */
+  validate(current: DatabaseSnapshot): {
+    valid: boolean;
+    warnings: string[];
+    errors: string[];
+  } {
+    const warnings: string[] = [];
+    const errors: string[] = [];
+
+    // Critical: Database must have content
+    if (current.nodes === 0) {
+      errors.push("Database has 0 nodes - database is empty");
+    }
+
+    if (current.embeddings === 0) {
+      errors.push("Database has 0 embeddings - vector search will not work");
+    }
+
+    // Get previous snapshot for comparison
+    const latest = this.getLatestSnapshot();
+    if (latest) {
+      // Warning: Significant regression in metrics
+      if (current.nodes < latest.nodes * 0.8) {
+        warnings.push(
+          `Node count decreased significantly: ${latest.nodes} → ${current.nodes} (-${Math.round(((latest.nodes - current.nodes) / latest.nodes) * 100)}%)`,
+        );
+      }
+
+      if (current.edges < latest.edges * 0.8) {
+        warnings.push(
+          `Edge count decreased significantly: ${latest.edges} → ${current.edges} (-${Math.round(((latest.edges - current.edges) / latest.edges) * 100)}%)`,
+        );
+      }
+
+      // Info: Metrics should generally increase over time
+      if (current.nodes === latest.nodes && current.edges === latest.edges) {
+        warnings.push(
+          "No growth in nodes or edges since last snapshot - is documentation being updated?",
+        );
+      }
+    }
+
+    // Edge density check: edges should be proportional to nodes
+    const edgeDensity = current.nodes > 0 ? current.edges / current.nodes : 0;
+    if (edgeDensity < 0.1) {
+      warnings.push(
+        `Low edge density: ${edgeDensity.toFixed(2)} edges per node - documents may be poorly linked`,
+      );
+    }
+
+    // Embedding coverage check
+    if (current.embeddings < current.nodes * 0.9) {
+      warnings.push(
+        `Only ${Math.round((current.embeddings / current.nodes) * 100)}% of nodes have embeddings - some nodes may not be searchable`,
+      );
+    }
+
+    return {
+      valid: errors.length === 0,
+      warnings,
+      errors,
+    };
+  }
+
+  /**
+   * Get stats file path for external access
+   */
+  getStatsFilePath(): string {
+    return this.statsFile;
+  }
+
+  /**
+   * Display a summary of historical trends
+   */
+  getSummary(): string {
+    const history = this.loadHistory();
+    if (history.snapshots.length === 0) {
+      return "No historical data available";
+    }
+
+    const latest = history.snapshots[history.snapshots.length - 1];
+    const oldest = history.snapshots[0];
+
+    const nodeGrowth = latest.nodes - oldest.nodes;
+    const edgeGrowth = latest.edges - oldest.edges;
+
+    const summary = [
+      "📈 Database Growth Summary",
+      `  First snapshot: ${new Date(oldest.timestamp).toLocaleString()}`,
+      `  Latest snapshot: ${new Date(latest.timestamp).toLocaleString()}`,
+      `  Total snapshots: ${history.snapshots.length}`,
+      "",
+      "📊 Current State:",
+      `  Nodes: ${latest.nodes} (${nodeGrowth >= 0 ? "+" : ""}${nodeGrowth} from first)`,
+      `  Edges: ${latest.edges} (${edgeGrowth >= 0 ? "+" : ""}${edgeGrowth} from first)`,
+      `  Embeddings: ${latest.embeddings}`,
+      `  Database size: ${latest.dbSizeMB.toFixed(2)} MB`,
+    ];
+
+    return summary.join("\n");
+  }
+}

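A condensed sketch of how the CLI drives this tracker: amalfa init records a snapshot and amalfa validate replays the checks above. The numbers are placeholders, not real figures from any run:

    import { StatsTracker } from "@src/utils/StatsTracker";

    const tracker = new StatsTracker();
    await tracker.recordSnapshot({
      timestamp: new Date().toISOString(),
      nodes: 120,        // placeholder values
      edges: 340,
      embeddings: 120,
      dbSizeMB: 4.2,
      version: "1.0.17",
    });

    const result = tracker.validate({
      timestamp: new Date().toISOString(),
      nodes: 0, edges: 0, embeddings: 0, dbSizeMB: 0,
    });
    // result.valid === false: zero nodes and zero embeddings are hard errors, while
    // >20% shrinkage, flat growth, low edge density (<0.1 edges/node) and <90%
    // embedding coverage only raise warnings.
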
package/src/utils/ZombieDefense.ts
CHANGED

@@ -82,6 +82,16 @@ export const ZombieDefense = {
     const match = p.match(/\s+(\d+)\s+/);
     if (match?.[1] && protectedPids.has(match[1])) return;

+    // Ignore CLI commands that are just launching services (parent processes)
+    if (
+      p.includes("src/cli.ts") ||
+      p.includes("cli.ts daemon") ||
+      p.includes("cli.ts vector") ||
+      p.includes("cli.ts servers")
+    ) {
+      return;
+    }
+
     // Strict Filter: Must be in our CWD or explicit bun run
     if (!p.includes(process.cwd()) && !p.includes("bun run")) return;

package/amalfa.svg
DELETED

@@ -1,87 +0,0 @@
(Removed: an 87-line Graphviz-generated SVG titled "workflow". It depicted Brief (Task Spec) → guides → Work (Implementation) → captures → Debrief (Lessons Learned) → abstracts to → Playbook (Codified Pattern) → indexed as → AMALFA Knowledge Graph, with the knowledge graph "informs next task" back to Brief and a self-loop labelled "semantic search / vector embeddings / graph traversal".)