amalfa 1.0.24 → 1.0.26
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/config/defaults.ts +18 -1
- package/src/daemon/sonar-agent.ts +102 -1
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "amalfa",
-  "version": "1.0.24",
+  "version": "1.0.26",
   "description": "Local-first knowledge graph engine for AI agents. Transforms markdown into searchable memory with MCP protocol.",
   "license": "MIT",
   "homepage": "https://github.com/pjsvis/amalfa#readme",

package/src/config/defaults.ts
CHANGED
@@ -15,11 +15,28 @@ export const AMALFA_DIRS = {
   get runtime() {
     return join(this.base, "runtime");
   },
+  get agent() {
+    return join(this.base, "agent");
+  },
+  get tasks() {
+    return {
+      pending: join(this.base, "agent", "tasks", "pending"),
+      processing: join(this.base, "agent", "tasks", "processing"),
+      completed: join(this.base, "agent", "tasks", "completed"),
+    };
+  },
 } as const;
 
 /** Initialize AMALFA directory structure */
 export function initAmalfaDirs(): void {
-  const dirs = [
+  const dirs = [
+    AMALFA_DIRS.base,
+    AMALFA_DIRS.logs,
+    AMALFA_DIRS.runtime,
+    AMALFA_DIRS.tasks.pending,
+    AMALFA_DIRS.tasks.processing,
+    AMALFA_DIRS.tasks.completed,
+  ];
   for (const dir of dirs) {
     if (!existsSync(dir)) {
       mkdirSync(dir, { recursive: true });

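For orientation, the new getters resolve to a three-stage task queue under the agent directory, and initAmalfaDirs() now creates those folders on startup. A minimal sketch of what a consumer sees (the value of AMALFA_DIRS.base is defined elsewhere in defaults.ts and is shown here only as <base>):

// Sketch: paths produced by the new task-queue getters
import { AMALFA_DIRS, initAmalfaDirs } from "@src/config/defaults";

initAmalfaDirs(); // recursive mkdir also creates the agent/ and agent/tasks/ parents

console.log(AMALFA_DIRS.agent);            // <base>/agent
console.log(AMALFA_DIRS.tasks.pending);    // <base>/agent/tasks/pending
console.log(AMALFA_DIRS.tasks.processing); // <base>/agent/tasks/processing
console.log(AMALFA_DIRS.tasks.completed);  // <base>/agent/tasks/completed
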
package/src/daemon/sonar-agent.ts
CHANGED
@@ -5,9 +5,11 @@
  */
 
 import { join } from "path";
+import { readdirSync, existsSync, renameSync, writeFileSync } from "node:fs";
 import { loadConfig, AMALFA_DIRS } from "@src/config/defaults";
 import { getLogger } from "@src/utils/Logger";
 import { ServiceLifecycle } from "@src/utils/ServiceLifecycle";
+import { sendNotification } from "@src/utils/Notifications";
 import {
   checkOllamaHealth,
   discoverOllamaCapabilities,

@@ -483,7 +485,11 @@ Return JSON:
 
   const content = response.message.content;
   try {
-
+    const parsed = JSON.parse(content);
+    if (!parsed.snippet && !parsed.context) {
+      throw new Error("Missing snippet/context in JSON");
+    }
+    return parsed;
   } catch {
     // Fallback: return simple snippet
     const words = result.content.split(" ");

@@ -768,6 +774,101 @@ async function main() {
 
   log.info(`✅ HTTP server listening on port ${port}`);
   log.info("⏳ Daemon ready to handle requests");
+
+  // Task Watcher Loop
+  log.info(`👀 Watching for tasks in ${AMALFA_DIRS.tasks.pending}`);
+
+  // Check every 5 seconds
+  setInterval(async () => {
+    try {
+      await processPendingTasks();
+    } catch (error) {
+      log.error({ error }, "Task processing error");
+    }
+  }, 5000);
+}
+
+/**
+ * Scan and process pending tasks
+ */
+async function processPendingTasks() {
+  if (!ollamaAvailable) return;
+
+  const pendingDir = AMALFA_DIRS.tasks.pending;
+  if (!existsSync(pendingDir)) return;
+
+  const files = readdirSync(pendingDir);
+  for (const file of files) {
+    if (!file.endsWith(".json")) continue;
+
+    const taskPath = join(pendingDir, file);
+    const processingPath = join(AMALFA_DIRS.tasks.processing, file);
+
+    try {
+      // Move to processing
+      renameSync(taskPath, processingPath);
+      log.info({ file }, "🔄 Processing task...");
+
+      const taskContent = await Bun.file(processingPath).json();
+      const report = await executeTask(taskContent);
+
+      // Save report
+      const reportName = file.replace(".json", "-report.md");
+      const reportPath = join(AMALFA_DIRS.tasks.completed, reportName);
+      writeFileSync(reportPath, report);
+
+      // Move original task to completed
+      const completedPath = join(AMALFA_DIRS.tasks.completed, file);
+      renameSync(processingPath, completedPath);
+
+      log.info({ file }, "✅ Task completed");
+
+      // Notification
+      if (taskContent.notify !== false) {
+        await sendNotification("Sonar Agent", `Task Complete: ${file}`);
+      }
+    } catch (error) {
+      log.error({ file, error }, "❌ Task failed");
+      // Move back to pending? Or to a failed dir? For now, leave in processing or move to failed could be better.
+      // Let's create a failed report so user knows.
+      const reportName = file.replace(".json", "-FAILED.md");
+      const reportPath = join(AMALFA_DIRS.tasks.completed, reportName);
+      writeFileSync(reportPath, `# Task Failed\n\nError: ${error}`);
+
+      // Move to completed so we don't loop forever
+      const completedPath = join(AMALFA_DIRS.tasks.completed, file);
+      renameSync(processingPath, completedPath);
+    }
+  }
+}
+
+/**
+ * Execute a specific task based on its type
+ */
+async function executeTask(task: any): Promise<string> {
+  const startTime = Date.now();
+  let output = `# Task Report: ${task.type}\nDate: ${new Date().toISOString()}\n\n`;
+
+  if (task.type === "enhance_batch") {
+    const limit = task.limit || 10;
+    output += `## Objective\nEnhance ${limit} documents with metadata.\n\n`;
+
+    const result = await handleBatchEnhancement(limit);
+
+    output += `## Results\n`;
+    output += `- Total: ${result.total}\n`;
+    output += `- Successful: ${result.successful}\n`;
+    output += `- Failed: ${result.failed}\n\n`;
+    output += `Check daemon logs for detailed errors per document.\n`;
+  } else {
+    output += `Error: Unknown task type '${task.type}'\n`;
+  }
+
+  const duration = ((Date.now() - startTime) / 1000).toFixed(1);
+  output += `\n---\n**Duration:** ${duration}s\n`;
+
+  return output;
 }
 
 // Run service lifecycle dispatcher