amalfa 1.0.19 → 1.0.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +10 -3
- package/src/cli/enhance-commands.ts +81 -0
- package/src/cli/list-scripts.ts +67 -0
- package/src/cli/sonar-chat.ts +95 -0
- package/src/cli.ts +349 -95
- package/src/config/defaults.ts +137 -28
- package/src/config/scripts-registry.json +72 -0
- package/src/core/MarkdownMasker.ts +1 -1
- package/src/daemon/index.ts +44 -27
- package/src/daemon/sonar-agent.ts +774 -0
- package/src/mcp/index.ts +145 -23
- package/src/pipeline/AmalfaIngestor.ts +40 -17
- package/src/pipeline/PreFlightAnalyzer.ts +2 -2
- package/src/pipeline/SemanticHarvester.ts +2 -2
- package/src/resonance/db.ts +13 -0
- package/src/resonance/services/embedder.ts +1 -1
- package/src/resonance/services/vector-daemon.ts +1 -1
- package/src/utils/DaemonManager.ts +70 -6
- package/src/utils/Notifications.ts +2 -2
- package/src/utils/ServiceLifecycle.ts +198 -211
- package/src/utils/StatsTracker.ts +10 -6
- package/src/utils/ollama-discovery.ts +190 -0
- package/src/utils/sonar-client.ts +294 -0
- package/.biomeignore +0 -19
- package/amalfa.config.example.ts +0 -113
- package/biome.json +0 -49
- package/bun.lock +0 -369
- package/src/utils/ZombieDefense.ts +0 -258
- package/tsconfig.json +0 -46
|
@@ -0,0 +1,774 @@
|
|
|
1
|
+
#!/usr/bin/env bun
|
|
2
|
+
/**
|
|
3
|
+
* AMALFA Sonar Multi-Purpose Sub-Agent
|
|
4
|
+
* Daemon for search intelligence, metadata enhancement, and interactive chat
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
import { join } from "path";
|
|
8
|
+
import { loadConfig, AMALFA_DIRS } from "@src/config/defaults";
|
|
9
|
+
import { getLogger } from "@src/utils/Logger";
|
|
10
|
+
import { ServiceLifecycle } from "@src/utils/ServiceLifecycle";
|
|
11
|
+
import {
|
|
12
|
+
checkOllamaHealth,
|
|
13
|
+
discoverOllamaCapabilities,
|
|
14
|
+
} from "@src/utils/ollama-discovery";
|
|
15
|
+
|
|
16
|
+
// CLI arguments: first positional token selects the lifecycle command
// (defaults to "serve" when the daemon is launched with no arguments).
const args = process.argv.slice(2);
const command = args[0] || "serve";
const log = getLogger("SonarAgent");

// Database initialization
import { ResonanceDB } from "@src/resonance/db";
// Absolute path to the resonance database; assigned in main() after config load.
let DB_PATH: string;

// Service lifecycle management (PID file, log file, daemon entry point)
const lifecycle = new ServiceLifecycle({
  name: "SonarAgent",
  pidFile: join(AMALFA_DIRS.runtime, "sonar.pid"),
  logFile: join(AMALFA_DIRS.logs, "sonar.log"),
  entryPoint: "src/daemon/sonar-agent.ts",
});

// Global state
let server: Bun.Server<unknown> | null = null;
// Set at startup from discoverOllamaCapabilities(); gates every inference handler.
let ollamaAvailable = false;
// Effective model name; main() may override from config or discovery.
let ollamaModel = "phi3:latest";

// Chat Session Management — in-memory only; sessions are lost on restart.
const chatSessions = new Map<string, ChatSession>();
|
|
40
|
+
|
|
41
|
+
/**
 * One interactive chat session, keyed by sessionId in chatSessions.
 * messages[0] is always the system prompt created in handleChat.
 */
interface ChatSession {
  id: string;
  messages: Message[];
  startedAt: Date;
}

/**
 * Message interface for chat API
 */
interface Message {
  role: "system" | "user" | "assistant";
  content: string;
}

/**
 * Request options for Ollama API
 */
interface RequestOptions {
  temperature?: number;
  num_predict?: number;
  stream?: boolean;
  format?: "json"; // Enable GBNF-constrained JSON output
}
|
|
64
|
+
|
|
65
|
+
/**
|
|
66
|
+
* Call Ollama HTTP API for inference
|
|
67
|
+
* This is the preferred method for inference (faster, supports streaming)
|
|
68
|
+
*/
|
|
69
|
+
async function callOllama(
|
|
70
|
+
messages: Message[],
|
|
71
|
+
options: RequestOptions = {},
|
|
72
|
+
): Promise<{ message: Message }> {
|
|
73
|
+
const config = await loadConfig();
|
|
74
|
+
// @ts-ignore
|
|
75
|
+
const hostArgs = config.sonar || config.phi3 || {};
|
|
76
|
+
const host = hostArgs.host || "localhost:11434";
|
|
77
|
+
// Use discovered model if available, otherwise config or default
|
|
78
|
+
const model = ollamaModel || hostArgs.model || "phi3:latest";
|
|
79
|
+
|
|
80
|
+
// Extract format from options to put at root level of request
|
|
81
|
+
const { format, ...modelOptions } = options;
|
|
82
|
+
|
|
83
|
+
const response = await fetch(`http://${host}/api/chat`, {
|
|
84
|
+
method: "POST",
|
|
85
|
+
headers: { "Content-Type": "application/json" },
|
|
86
|
+
body: JSON.stringify({
|
|
87
|
+
model,
|
|
88
|
+
messages,
|
|
89
|
+
stream: false,
|
|
90
|
+
format, // Pass format (e.g. "json") to enable GBNF grammar
|
|
91
|
+
options: {
|
|
92
|
+
temperature: 0.1,
|
|
93
|
+
num_predict: 200,
|
|
94
|
+
...modelOptions,
|
|
95
|
+
},
|
|
96
|
+
}),
|
|
97
|
+
});
|
|
98
|
+
|
|
99
|
+
if (!response.ok) {
|
|
100
|
+
throw new Error(`Ollama API error: ${response.statusText}`);
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
return (await response.json()) as { message: Message };
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
/**
|
|
107
|
+
* Handle search analysis task
|
|
108
|
+
* Analyzes query intent, entities, and technical level
|
|
109
|
+
*/
|
|
110
|
+
async function handleSearchAnalysis(query: string): Promise<unknown> {
|
|
111
|
+
if (!ollamaAvailable) {
|
|
112
|
+
throw new Error("Sonar is not available");
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
try {
|
|
116
|
+
const response = await callOllama(
|
|
117
|
+
[
|
|
118
|
+
{
|
|
119
|
+
role: "system",
|
|
120
|
+
content: "You are a search query analyzer. Return JSON only.",
|
|
121
|
+
},
|
|
122
|
+
{
|
|
123
|
+
role: "user",
|
|
124
|
+
content: `Analyze this query: "${query}"
|
|
125
|
+
|
|
126
|
+
Return JSON:
|
|
127
|
+
{
|
|
128
|
+
"intent": "implementation|conceptual|example",
|
|
129
|
+
"entities": ["term1", "term2"],
|
|
130
|
+
"technical_level": "high|medium|low",
|
|
131
|
+
"suggested_queries": ["query1", "query2"]
|
|
132
|
+
}`,
|
|
133
|
+
},
|
|
134
|
+
],
|
|
135
|
+
{
|
|
136
|
+
temperature: 0.1,
|
|
137
|
+
num_predict: 200,
|
|
138
|
+
format: "json", // Force valid JSON output
|
|
139
|
+
},
|
|
140
|
+
);
|
|
141
|
+
|
|
142
|
+
// Parse JSON response
|
|
143
|
+
const content = response.message.content;
|
|
144
|
+
try {
|
|
145
|
+
return JSON.parse(content);
|
|
146
|
+
} catch {
|
|
147
|
+
// Fallback if not JSON
|
|
148
|
+
return {
|
|
149
|
+
intent: "unknown",
|
|
150
|
+
entities: [],
|
|
151
|
+
technical_level: "medium",
|
|
152
|
+
suggested_queries: [],
|
|
153
|
+
};
|
|
154
|
+
}
|
|
155
|
+
} catch (error) {
|
|
156
|
+
log.error({ error, query }, "Search analysis failed");
|
|
157
|
+
throw error;
|
|
158
|
+
}
|
|
159
|
+
}
|
|
160
|
+
|
|
161
|
+
/**
|
|
162
|
+
* Handle metadata enhancement task
|
|
163
|
+
* Comprehensive document analysis for enhanced metadata
|
|
164
|
+
*/
|
|
165
|
+
async function handleMetadataEnhancement(docId: string): Promise<unknown> {
|
|
166
|
+
if (!ollamaAvailable) {
|
|
167
|
+
throw new Error("Sonar is not available");
|
|
168
|
+
}
|
|
169
|
+
|
|
170
|
+
try {
|
|
171
|
+
// Connect to DB and fetch node source path
|
|
172
|
+
const db = new ResonanceDB(DB_PATH);
|
|
173
|
+
const node = db.getNode(docId);
|
|
174
|
+
if (!node) {
|
|
175
|
+
throw new Error(`Node not found: ${docId}`);
|
|
176
|
+
}
|
|
177
|
+
|
|
178
|
+
const meta = node.meta || {};
|
|
179
|
+
const sourcePath = meta.source as string | undefined;
|
|
180
|
+
if (!sourcePath) {
|
|
181
|
+
throw new Error(`No source file for node: ${docId}`);
|
|
182
|
+
}
|
|
183
|
+
|
|
184
|
+
// Read content from filesystem
|
|
185
|
+
const file = Bun.file(sourcePath);
|
|
186
|
+
if (!(await file.exists())) {
|
|
187
|
+
throw new Error(`File not found: ${sourcePath}`);
|
|
188
|
+
}
|
|
189
|
+
const content = await file.text();
|
|
190
|
+
|
|
191
|
+
const response = await callOllama(
|
|
192
|
+
[
|
|
193
|
+
{
|
|
194
|
+
role: "system",
|
|
195
|
+
content:
|
|
196
|
+
"You are a document analyzer. Extract comprehensive metadata.",
|
|
197
|
+
},
|
|
198
|
+
{
|
|
199
|
+
role: "user",
|
|
200
|
+
content: `Analyze this document comprehensively:
|
|
201
|
+
|
|
202
|
+
Content: ${content}
|
|
203
|
+
|
|
204
|
+
Return JSON:
|
|
205
|
+
{
|
|
206
|
+
"themes": ["theme1", "theme2"],
|
|
207
|
+
"code_patterns": ["pattern1", "pattern2"],
|
|
208
|
+
"summary": "2-3 sentence summary",
|
|
209
|
+
"doc_type": "implementation|conceptual|architecture|reference",
|
|
210
|
+
"technical_depth": "deep|medium|shallow",
|
|
211
|
+
"audience": "developer|user|architect",
|
|
212
|
+
"related_docs": ["doc1", "doc2"]
|
|
213
|
+
}`,
|
|
214
|
+
},
|
|
215
|
+
],
|
|
216
|
+
{
|
|
217
|
+
temperature: 0.2,
|
|
218
|
+
num_predict: 500,
|
|
219
|
+
format: "json", // Force valid JSON output
|
|
220
|
+
},
|
|
221
|
+
);
|
|
222
|
+
|
|
223
|
+
// Save enhanced metadata back to DB
|
|
224
|
+
const contentStr = response.message.content;
|
|
225
|
+
let enhancedMeta: Record<string, unknown>;
|
|
226
|
+
try {
|
|
227
|
+
enhancedMeta = JSON.parse(contentStr);
|
|
228
|
+
} catch {
|
|
229
|
+
enhancedMeta = {
|
|
230
|
+
themes: [],
|
|
231
|
+
code_patterns: [],
|
|
232
|
+
summary: "",
|
|
233
|
+
doc_type: "unknown",
|
|
234
|
+
technical_depth: "medium",
|
|
235
|
+
audience: "developer",
|
|
236
|
+
related_docs: [],
|
|
237
|
+
};
|
|
238
|
+
}
|
|
239
|
+
|
|
240
|
+
// Update node metadata
|
|
241
|
+
const newMeta = {
|
|
242
|
+
...node.meta,
|
|
243
|
+
sonar_enhanced: true,
|
|
244
|
+
sonar_enhanced_at: new Date().toISOString(),
|
|
245
|
+
...enhancedMeta,
|
|
246
|
+
};
|
|
247
|
+
|
|
248
|
+
db.updateNodeMeta(docId, newMeta);
|
|
249
|
+
return enhancedMeta;
|
|
250
|
+
} catch (error) {
|
|
251
|
+
log.error({ error, docId }, "Metadata enhancement failed");
|
|
252
|
+
throw error;
|
|
253
|
+
}
|
|
254
|
+
}
|
|
255
|
+
|
|
256
|
+
/**
|
|
257
|
+
* Handle batch enhancement task
|
|
258
|
+
* Processes multiple documents for metadata enhancement
|
|
259
|
+
*/
|
|
260
|
+
async function handleBatchEnhancement(limit = 50): Promise<{
|
|
261
|
+
successful: number;
|
|
262
|
+
failed: number;
|
|
263
|
+
total: number;
|
|
264
|
+
}> {
|
|
265
|
+
if (!ollamaAvailable) {
|
|
266
|
+
throw new Error("Sonar is not available");
|
|
267
|
+
}
|
|
268
|
+
|
|
269
|
+
const db = new ResonanceDB(DB_PATH);
|
|
270
|
+
|
|
271
|
+
// Find unenhanced nodes
|
|
272
|
+
// Note: We need to query nodes that don't have 'sonar_enhanced' in meta
|
|
273
|
+
const allNodes = db.getRawDb().query("SELECT id, meta FROM nodes").all() as {
|
|
274
|
+
id: string;
|
|
275
|
+
meta: string;
|
|
276
|
+
}[];
|
|
277
|
+
|
|
278
|
+
const unenhanced = allNodes
|
|
279
|
+
.filter((row) => {
|
|
280
|
+
try {
|
|
281
|
+
const meta = JSON.parse(row.meta);
|
|
282
|
+
// Check for sonar_enhanced OR phi3_enhanced (migration)
|
|
283
|
+
return !meta.sonar_enhanced && !meta.phi3_enhanced;
|
|
284
|
+
} catch {
|
|
285
|
+
return false;
|
|
286
|
+
}
|
|
287
|
+
})
|
|
288
|
+
.map((row) => ({ id: row.id }));
|
|
289
|
+
|
|
290
|
+
const batch = unenhanced.slice(0, limit);
|
|
291
|
+
|
|
292
|
+
log.info(`🔄 Enhancing ${batch.length} docs with Sonar...`);
|
|
293
|
+
|
|
294
|
+
const results = await Promise.allSettled(
|
|
295
|
+
batch.map((node) => handleMetadataEnhancement(node.id)),
|
|
296
|
+
);
|
|
297
|
+
|
|
298
|
+
const successful = results.filter((r) => r.status === "fulfilled").length;
|
|
299
|
+
const failed = results.filter((r) => r.status === "rejected").length;
|
|
300
|
+
|
|
301
|
+
log.info(`✅ Enhanced: ${successful}, ❌ Failed: ${failed}`);
|
|
302
|
+
|
|
303
|
+
return { successful, failed, total: batch.length };
|
|
304
|
+
}
|
|
305
|
+
|
|
306
|
+
/**
|
|
307
|
+
* Handle result re-ranking
|
|
308
|
+
* Re-ranks search results based on query intent and context
|
|
309
|
+
*/
|
|
310
|
+
async function handleResultReranking(
|
|
311
|
+
results: Array<{ id: string; content: string; score: number }>,
|
|
312
|
+
query: string,
|
|
313
|
+
intent?: string,
|
|
314
|
+
): Promise<
|
|
315
|
+
Array<{ id: string; content: string; score: number; relevance_score: number }>
|
|
316
|
+
> {
|
|
317
|
+
if (!ollamaAvailable) {
|
|
318
|
+
throw new Error("Sonar is not available");
|
|
319
|
+
}
|
|
320
|
+
|
|
321
|
+
try {
|
|
322
|
+
const response = await callOllama(
|
|
323
|
+
[
|
|
324
|
+
{
|
|
325
|
+
role: "system",
|
|
326
|
+
content:
|
|
327
|
+
"You are a search result re-ranker. Analyze relevance and provide scores.",
|
|
328
|
+
},
|
|
329
|
+
{
|
|
330
|
+
role: "user",
|
|
331
|
+
content: `Re-rank these search results for query: "${query}"${
|
|
332
|
+
intent ? `\nQuery intent: ${intent}` : ""
|
|
333
|
+
}
|
|
334
|
+
|
|
335
|
+
Results:
|
|
336
|
+
${results.map((r, i) => `${i + 1}. ${r.content.slice(0, 200)}`).join("\n")}
|
|
337
|
+
|
|
338
|
+
Return JSON array with relevance scores (0.0 to 1.0):
|
|
339
|
+
[
|
|
340
|
+
{"index": 1, "relevance": 0.95, "reason": "Direct match"},
|
|
341
|
+
{"index": 2, "relevance": 0.7, "reason": "Related concept"}
|
|
342
|
+
]`,
|
|
343
|
+
},
|
|
344
|
+
],
|
|
345
|
+
{
|
|
346
|
+
temperature: 0.2,
|
|
347
|
+
num_predict: 300,
|
|
348
|
+
format: "json", // Force valid JSON output
|
|
349
|
+
},
|
|
350
|
+
);
|
|
351
|
+
|
|
352
|
+
const content = response.message.content;
|
|
353
|
+
try {
|
|
354
|
+
const rankings = JSON.parse(content);
|
|
355
|
+
|
|
356
|
+
// Apply rankings to results
|
|
357
|
+
return results.map((result, idx) => {
|
|
358
|
+
const ranking = rankings.find(
|
|
359
|
+
(r: { index: number }) => r.index === idx + 1,
|
|
360
|
+
);
|
|
361
|
+
return {
|
|
362
|
+
...result,
|
|
363
|
+
relevance_score: ranking?.relevance || 0.5,
|
|
364
|
+
};
|
|
365
|
+
});
|
|
366
|
+
} catch {
|
|
367
|
+
// Fallback: return original scores
|
|
368
|
+
return results.map((r) => ({ ...r, relevance_score: r.score }));
|
|
369
|
+
}
|
|
370
|
+
} catch (error) {
|
|
371
|
+
log.error({ error, query }, "Result re-ranking failed");
|
|
372
|
+
throw error;
|
|
373
|
+
}
|
|
374
|
+
}
|
|
375
|
+
|
|
376
|
+
/**
|
|
377
|
+
* Handle chat request
|
|
378
|
+
* Maintains session context and converses with user
|
|
379
|
+
*/
|
|
380
|
+
async function handleChat(
|
|
381
|
+
sessionId: string,
|
|
382
|
+
userMessage: string,
|
|
383
|
+
): Promise<{ message: Message; sessionId: string }> {
|
|
384
|
+
if (!ollamaAvailable) {
|
|
385
|
+
throw new Error("Sonar is not available");
|
|
386
|
+
}
|
|
387
|
+
|
|
388
|
+
// Get or create session
|
|
389
|
+
let session = chatSessions.get(sessionId);
|
|
390
|
+
if (!session) {
|
|
391
|
+
session = {
|
|
392
|
+
id: sessionId,
|
|
393
|
+
messages: [
|
|
394
|
+
{
|
|
395
|
+
role: "system",
|
|
396
|
+
content: `You are AMALFA Corpus Assistant. Help users understand and explore their knowledge base.
|
|
397
|
+
Current Date: ${new Date().toISOString().split("T")[0]}
|
|
398
|
+
|
|
399
|
+
User can ask you about:
|
|
400
|
+
1. Corpus structure and themes
|
|
401
|
+
2. What you're currently working on
|
|
402
|
+
3. Search for documents by theme/type
|
|
403
|
+
4. Guide enhancement process
|
|
404
|
+
5. Natural language queries to knowledge base`,
|
|
405
|
+
},
|
|
406
|
+
],
|
|
407
|
+
startedAt: new Date(),
|
|
408
|
+
};
|
|
409
|
+
chatSessions.set(sessionId, session);
|
|
410
|
+
}
|
|
411
|
+
|
|
412
|
+
// Add user message
|
|
413
|
+
session.messages.push({ role: "user", content: userMessage });
|
|
414
|
+
|
|
415
|
+
// Maintain context window (keep system msg + last 10 messages)
|
|
416
|
+
const contextMessages = [
|
|
417
|
+
session.messages[0],
|
|
418
|
+
...session.messages.slice(-10),
|
|
419
|
+
].filter((m): m is Message => m !== undefined);
|
|
420
|
+
|
|
421
|
+
try {
|
|
422
|
+
// NOTE: No format: "json" for chat! We want natural language.
|
|
423
|
+
const response = await callOllama(contextMessages, {
|
|
424
|
+
temperature: 0.7,
|
|
425
|
+
num_predict: 500,
|
|
426
|
+
});
|
|
427
|
+
|
|
428
|
+
// Add assistant response to history
|
|
429
|
+
session.messages.push(response.message);
|
|
430
|
+
|
|
431
|
+
return {
|
|
432
|
+
message: response.message,
|
|
433
|
+
sessionId: session.id,
|
|
434
|
+
};
|
|
435
|
+
} catch (error) {
|
|
436
|
+
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
437
|
+
log.error({ err: error, sessionId, errorMessage }, "Chat request failed");
|
|
438
|
+
throw error;
|
|
439
|
+
}
|
|
440
|
+
}
|
|
441
|
+
|
|
442
|
+
/**
|
|
443
|
+
* Handle context extraction
|
|
444
|
+
* Generates smart snippets with context awareness
|
|
445
|
+
*/
|
|
446
|
+
async function handleContextExtraction(
|
|
447
|
+
result: { id: string; content: string },
|
|
448
|
+
query: string,
|
|
449
|
+
): Promise<{ snippet: string; context: string; confidence: number }> {
|
|
450
|
+
if (!ollamaAvailable) {
|
|
451
|
+
throw new Error("Sonar is not available");
|
|
452
|
+
}
|
|
453
|
+
|
|
454
|
+
try {
|
|
455
|
+
const response = await callOllama(
|
|
456
|
+
[
|
|
457
|
+
{
|
|
458
|
+
role: "system",
|
|
459
|
+
content:
|
|
460
|
+
"You are a context extractor. Provide relevant snippets with context.",
|
|
461
|
+
},
|
|
462
|
+
{
|
|
463
|
+
role: "user",
|
|
464
|
+
content: `Extract relevant context for query: "${query}"
|
|
465
|
+
|
|
466
|
+
Content:
|
|
467
|
+
${result.content}
|
|
468
|
+
|
|
469
|
+
Return JSON:
|
|
470
|
+
{
|
|
471
|
+
"snippet": "Most relevant 2-3 sentences",
|
|
472
|
+
"context": "Brief explanation of relevance",
|
|
473
|
+
"confidence": 0.9
|
|
474
|
+
}`,
|
|
475
|
+
},
|
|
476
|
+
],
|
|
477
|
+
{
|
|
478
|
+
temperature: 0.1,
|
|
479
|
+
num_predict: 200,
|
|
480
|
+
format: "json", // Force valid JSON output
|
|
481
|
+
},
|
|
482
|
+
);
|
|
483
|
+
|
|
484
|
+
const content = response.message.content;
|
|
485
|
+
try {
|
|
486
|
+
return JSON.parse(content);
|
|
487
|
+
} catch {
|
|
488
|
+
// Fallback: return simple snippet
|
|
489
|
+
const words = result.content.split(" ");
|
|
490
|
+
const snippet = words.slice(0, 50).join(" ");
|
|
491
|
+
return {
|
|
492
|
+
snippet,
|
|
493
|
+
context: "Full content available",
|
|
494
|
+
confidence: 0.5,
|
|
495
|
+
};
|
|
496
|
+
}
|
|
497
|
+
} catch (error) {
|
|
498
|
+
log.error({ error, resultId: result.id }, "Context extraction failed");
|
|
499
|
+
throw error;
|
|
500
|
+
}
|
|
501
|
+
}
|
|
502
|
+
|
|
503
|
+
/**
 * Main daemon logic
 *
 * Boot sequence: load config → exit if Sonar is disabled → probe Ollama and
 * pick a model → install signal handlers → start the HTTP API server.
 * The daemon still starts when Ollama is down so /health can report the
 * outage; inference endpoints then fail with "Sonar is not available".
 */
async function main() {
  const config = await loadConfig();
  // DB path resolved relative to the daemon's working directory.
  DB_PATH = join(process.cwd(), config.database);

  // "phi3" is the legacy config key; "sonar" takes precedence when both exist.
  // @ts-ignore
  const isEnabled = config.sonar?.enabled ?? config.phi3?.enabled;

  if (!isEnabled) {
    log.warn("⚠️ Sonar is disabled in configuration. Exiting.");
    process.exit(0);
  }

  log.info("🚀 Sonar Agent starting...");

  // Check Ollama availability
  log.info("🔍 Checking Ollama availability...");
  const capabilities = await discoverOllamaCapabilities();
  ollamaAvailable = capabilities.available;

  if (ollamaAvailable) {
    log.info("✅ Ollama is available and healthy");
    // Use discovered preferred model (e.g., tinydolphin) unless overridden in config
    // @ts-ignore
    ollamaModel =
      config.sonar?.model ||
      config.phi3?.model ||
      capabilities.model ||
      "phi3:latest";
    log.info(`✅ Using model: ${ollamaModel}`);
  } else {
    log.warn("⚠️ Ollama is not available");
    log.warn(" Sonar features will be disabled");
    log.info(" Install: curl -fsSL https://ollama.ai/install.sh | sh");
    log.info(" Then run: ollama pull phi3:latest (or minidolphin)");
  }

  log.info("✅ Sonar Agent ready");

  // Register signal handlers for graceful shutdown
  const shutdown = async (signal: string) => {
    log.info(`🛑 Received ${signal}, shutting down...`);
    if (server) {
      server.stop();
      server = null;
    }
    process.exit(0);
  };

  process.on("SIGTERM", () => shutdown("SIGTERM"));
  process.on("SIGINT", () => shutdown("SIGINT"));

  // Start HTTP server
  // @ts-ignore
  const port = (config.sonar || config.phi3)?.port || 3012;

  log.info(`🚀 Starting HTTP server on port ${port}`);
  log.info("📋 Available endpoints:");
  log.info(" POST /search/analyze - Query analysis");
  log.info(" POST /search/rerank - Result re-ranking");
  log.info(" POST /search/context - Smart snippet generation");
  log.info(" GET /health - Health check");

  server = Bun.serve({
    port,
    async fetch(req) {
      const url = new URL(req.url);

      // CORS headers applied to every response, including errors.
      const corsHeaders = {
        "Access-Control-Allow-Origin": "*",
        "Access-Control-Allow-Methods": "GET, POST, OPTIONS",
        "Access-Control-Allow-Headers": "Content-Type",
      };

      // Handle preflight requests
      if (req.method === "OPTIONS") {
        return new Response(null, { headers: corsHeaders });
      }

      // Health check endpoint — performs a live Ollama probe on each call;
      // ollama_available reflects the startup-time discovery, not this probe.
      if (url.pathname === "/health") {
        const healthy = await checkOllamaHealth();
        return Response.json(
          {
            status: healthy ? "healthy" : "unhealthy",
            ollama_available: ollamaAvailable,
            model: ollamaModel,
          },
          { headers: corsHeaders },
        );
      }

      // Search analysis endpoint
      if (url.pathname === "/search/analyze" && req.method === "POST") {
        try {
          const body = (await req.json()) as { query: unknown };
          const { query } = body;

          if (!query || typeof query !== "string") {
            return Response.json(
              { error: "Missing or invalid 'query' parameter" },
              { status: 400, headers: corsHeaders },
            );
          }

          const analysis = await handleSearchAnalysis(query);
          return Response.json(analysis, { headers: corsHeaders });
        } catch (error) {
          log.error({ error }, "Search analysis failed");
          return Response.json(
            { error: error instanceof Error ? error.message : "Unknown error" },
            { status: 500, headers: corsHeaders },
          );
        }
      }

      // Result re-ranking endpoint
      if (url.pathname === "/search/rerank" && req.method === "POST") {
        try {
          const body = (await req.json()) as {
            results: unknown;
            query: unknown;
            intent: unknown;
          };
          const { results, query, intent } = body;

          if (
            !results ||
            !Array.isArray(results) ||
            !query ||
            typeof query !== "string"
          ) {
            return Response.json(
              { error: "Missing or invalid 'results' parameter" },
              { status: 400, headers: corsHeaders },
            );
          }

          const ranked = await handleResultReranking(
            results as { id: string; content: string; score: number }[],
            query,
            intent as string | undefined,
          );
          return Response.json(ranked, { headers: corsHeaders });
        } catch (error) {
          log.error({ error }, "Result re-ranking failed");
          return Response.json(
            { error: error instanceof Error ? error.message : "Unknown error" },
            { status: 500, headers: corsHeaders },
          );
        }
      }

      // Context extraction endpoint
      if (url.pathname === "/search/context" && req.method === "POST") {
        try {
          const body = (await req.json()) as {
            result: unknown;
            query: unknown;
          };
          const { result, query } = body;

          if (!result || !query || typeof query !== "string") {
            return Response.json(
              { error: "Missing 'result' or 'query' parameter" },
              { status: 400, headers: corsHeaders },
            );
          }

          const context = await handleContextExtraction(
            result as { id: string; content: string },
            query,
          );
          return Response.json(context, { headers: corsHeaders });
        } catch (error) {
          log.error({ error }, "Context extraction failed");
          return Response.json(
            { error: error instanceof Error ? error.message : "Unknown error" },
            { status: 500, headers: corsHeaders },
          );
        }
      }

      // Metadata enhancement endpoint
      if (url.pathname === "/metadata/enhance" && req.method === "POST") {
        try {
          const body = (await req.json()) as { docId: unknown };
          const { docId } = body;

          if (!docId || typeof docId !== "string") {
            return Response.json(
              { error: "Missing 'docId' parameter" },
              { status: 400, headers: corsHeaders },
            );
          }

          const enhancement = await handleMetadataEnhancement(docId);
          return Response.json(enhancement, { headers: corsHeaders });
        } catch (error) {
          log.error({ error }, "Metadata enhancement endpoint failed");
          return Response.json(
            { error: error instanceof Error ? error.message : "Unknown error" },
            { status: 500, headers: corsHeaders },
          );
        }
      }

      // Batch enhancement endpoint — non-numeric or absent limit falls back to 50.
      if (url.pathname === "/metadata/batch" && req.method === "POST") {
        try {
          const body = (await req.json()) as { limit: unknown };
          const limit = typeof body.limit === "number" ? body.limit : 50;

          const result = await handleBatchEnhancement(limit);
          return Response.json(result, { headers: corsHeaders });
        } catch (error) {
          log.error({ error }, "Batch enhancement endpoint failed");
          return Response.json(
            { error: error instanceof Error ? error.message : "Unknown error" },
            { status: 500, headers: corsHeaders },
          );
        }
      }

      // Chat endpoint — a fresh session id is minted when the client omits one.
      if (url.pathname === "/chat" && req.method === "POST") {
        try {
          const body = (await req.json()) as {
            sessionId?: unknown;
            message: unknown;
          };
          const sessionId =
            typeof body.sessionId === "string"
              ? body.sessionId
              : crypto.randomUUID();
          const message = body.message;

          if (!message || typeof message !== "string") {
            return Response.json(
              { error: "Missing 'message' parameter" },
              { status: 400, headers: corsHeaders },
            );
          }

          const response = await handleChat(sessionId, message);
          return Response.json(response, { headers: corsHeaders });
        } catch (error) {
          log.error({ error }, "Chat endpoint failed");
          return Response.json(
            { error: error instanceof Error ? error.message : "Unknown error" },
            { status: 500, headers: corsHeaders },
          );
        }
      }

      // 404 for unknown endpoints
      return Response.json(
        { error: "Not found" },
        { status: 404, headers: corsHeaders },
      );
    },
  });

  log.info(`✅ HTTP server listening on port ${port}`);
  log.info("⏳ Daemon ready to handle requests");
}
|
|
772
|
+
|
|
773
|
+
// Run service lifecycle dispatcher: the CLI command (defaults to "serve")
// selects the action; main() is the foreground entry point it invokes.
await lifecycle.run(command, main);
|