morpheus-cli 0.9.5 → 0.9.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +63 -43
- package/dist/channels/discord.js +3 -6
- package/dist/channels/telegram.js +3 -6
- package/dist/cli/commands/restart.js +15 -0
- package/dist/cli/commands/start.js +16 -0
- package/dist/config/manager.js +61 -0
- package/dist/config/paths.js +1 -0
- package/dist/config/schemas.js +11 -3
- package/dist/http/api.js +3 -0
- package/dist/http/routers/link.js +239 -0
- package/dist/http/routers/skills.js +1 -8
- package/dist/runtime/apoc.js +1 -1
- package/dist/runtime/audit/repository.js +1 -1
- package/dist/runtime/link-chunker.js +214 -0
- package/dist/runtime/link-repository.js +301 -0
- package/dist/runtime/link-search.js +298 -0
- package/dist/runtime/link-worker.js +284 -0
- package/dist/runtime/link.js +295 -0
- package/dist/runtime/memory/sati/service.js +1 -1
- package/dist/runtime/neo.js +1 -1
- package/dist/runtime/oracle.js +81 -44
- package/dist/runtime/scaffold.js +4 -17
- package/dist/runtime/skills/__tests__/loader.test.js +7 -10
- package/dist/runtime/skills/__tests__/registry.test.js +2 -18
- package/dist/runtime/skills/__tests__/tool.test.js +55 -224
- package/dist/runtime/skills/index.js +1 -2
- package/dist/runtime/skills/loader.js +0 -2
- package/dist/runtime/skills/registry.js +8 -20
- package/dist/runtime/skills/schema.js +0 -4
- package/dist/runtime/skills/tool.js +42 -209
- package/dist/runtime/smiths/delegator.js +1 -1
- package/dist/runtime/smiths/registry.js +1 -1
- package/dist/runtime/tasks/worker.js +12 -44
- package/dist/runtime/trinity.js +1 -1
- package/dist/types/config.js +14 -0
- package/dist/ui/assets/AuditDashboard-93LCGHG1.js +1 -0
- package/dist/ui/assets/{Chat-BNtutgja.js → Chat-CK5sNcQ1.js} +8 -8
- package/dist/ui/assets/{Chronos-3C8RPZcl.js → Chronos-m2h--GEe.js} +1 -1
- package/dist/ui/assets/{ConfirmationModal-ZQPBeJ2Z.js → ConfirmationModal-Dd5pUJme.js} +1 -1
- package/dist/ui/assets/{Dashboard-CqkHzr2F.js → Dashboard-ODwl7d-a.js} +1 -1
- package/dist/ui/assets/{DeleteConfirmationModal-CioxFWn_.js → DeleteConfirmationModal-CCcojDmr.js} +1 -1
- package/dist/ui/assets/Documents-dWnSoxFO.js +7 -0
- package/dist/ui/assets/{Logs-DBVanS0O.js → Logs-Dc9Z2LBj.js} +1 -1
- package/dist/ui/assets/{MCPManager-vXfL3P2U.js → MCPManager-CMkb8vMn.js} +1 -1
- package/dist/ui/assets/{ModelPricing-DyfdunLT.js → ModelPricing-DtHPPbEQ.js} +1 -1
- package/dist/ui/assets/{Notifications-VL-vep6d.js → Notifications-BPvo-DWP.js} +1 -1
- package/dist/ui/assets/{Pagination-oTGieBLM.js → Pagination-BHZKk42X.js} +1 -1
- package/dist/ui/assets/{SatiMemories-jaadkW0U.js → SatiMemories-BUPu1Lxr.js} +1 -1
- package/dist/ui/assets/SessionAudit-CFKF4DA8.js +9 -0
- package/dist/ui/assets/Settings-C4JrXfsR.js +47 -0
- package/dist/ui/assets/{Skills-DE3zziXL.js → Skills-BUlvJgJ4.js} +1 -1
- package/dist/ui/assets/{Smiths-pmogN1mU.js → Smiths-CDtJdY0I.js} +1 -1
- package/dist/ui/assets/{Tasks-Bs8s34Jc.js → Tasks-DK_cOsNK.js} +1 -1
- package/dist/ui/assets/{TrinityDatabases-D7uihcdp.js → TrinityDatabases-X07by-19.js} +1 -1
- package/dist/ui/assets/{UsageStats-B9gePLZ0.js → UsageStats-dYcgckLq.js} +1 -1
- package/dist/ui/assets/{WebhookManager-B2L3rCLM.js → WebhookManager-DDw5eX2R.js} +1 -1
- package/dist/ui/assets/{audit-Cggeu9mM.js → audit-DZ5WLUEm.js} +1 -1
- package/dist/ui/assets/{chronos-D3-sWhfU.js → chronos-B_HI4mlq.js} +1 -1
- package/dist/ui/assets/{config-CBqRUPgn.js → config-B-YxlVrc.js} +1 -1
- package/dist/ui/assets/index-DVjwJ8jT.css +1 -0
- package/dist/ui/assets/{index-zKplfrXZ.js → index-DfJwcKqG.js} +5 -5
- package/dist/ui/assets/{mcp-uL1R9hyA.js → mcp-k-_pwbqA.js} +1 -1
- package/dist/ui/assets/{skills-jmw8yTJs.js → skills-xMXangks.js} +1 -1
- package/dist/ui/assets/{stats-HOms6GnM.js → stats-C4QZIv5O.js} +1 -1
- package/dist/ui/assets/{vendor-icons-DMd9RGvJ.js → vendor-icons-NHF9HNeN.js} +1 -1
- package/dist/ui/index.html +3 -3
- package/dist/ui/sw.js +1 -1
- package/package.json +3 -1
- package/dist/runtime/__tests__/keymaker.test.js +0 -148
- package/dist/runtime/keymaker.js +0 -157
- package/dist/ui/assets/AuditDashboard-DliJ1CX0.js +0 -1
- package/dist/ui/assets/SessionAudit-BsXrWlwz.js +0 -9
- package/dist/ui/assets/Settings-B4eezRcg.js +0 -47
- package/dist/ui/assets/index-D4fzIKy1.css +0 -1
|
@@ -0,0 +1,284 @@
|
|
|
1
|
+
import { homedir } from 'os';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import fs from 'fs-extra';
|
|
4
|
+
import { LinkRepository } from './link-repository.js';
|
|
5
|
+
import { LinkSearch } from './link-search.js';
|
|
6
|
+
import { hashFile, processDocument, isSupportedFormat } from './link-chunker.js';
|
|
7
|
+
import { EmbeddingService } from './memory/embedding.service.js';
|
|
8
|
+
import { ConfigManager } from '../config/manager.js';
|
|
9
|
+
import { DisplayManager } from './display.js';
|
|
10
|
+
/**
 * LinkWorker - Background worker for document indexing.
 *
 * Scans the ~/.morpheus/docs folder on a configurable interval, processes
 * new/changed documents (parse -> chunk -> embed), and removes documents
 * deleted from disk from the index.
 *
 * Singleton: obtain via LinkWorker.getInstance().
 */
export class LinkWorker {
    static instance = null;
    repository;
    search;
    // Lazily resolved on first tick — EmbeddingService.getInstance() is async.
    embeddingService = null;
    // Timer handle for the periodic scan; null while stopped.
    intervalId = null;
    isRunning = false;
    display = DisplayManager.getInstance();
    // Absolute path of the watched folder (~/.morpheus/docs).
    docsPath;
    constructor() {
        this.repository = LinkRepository.getInstance();
        this.search = LinkSearch.getInstance();
        this.docsPath = path.join(homedir(), '.morpheus', 'docs');
    }
    /** Lazily create and return the singleton worker. */
    static getInstance() {
        if (!LinkWorker.instance) {
            LinkWorker.instance = new LinkWorker();
        }
        return LinkWorker.instance;
    }
    /** Inject a replacement instance (e.g. for tests). */
    static setInstance(instance) {
        LinkWorker.instance = instance;
    }
    /** Stop and discard the singleton so the next getInstance() builds a fresh one. */
    static resetInstance() {
        if (LinkWorker.instance) {
            LinkWorker.instance.stop();
        }
        LinkWorker.instance = null;
    }
    /**
     * Fire one scan cycle, converting failures into error logs so a rejected
     * tick never becomes an unhandled rejection inside a timer callback.
     * @param {string} label - Human-readable phase name used in the log line
     *   ('initial scan' or 'tick').
     */
    runTickSafely(label) {
        this.tick().catch(err => {
            this.display.log(`LinkWorker ${label} failed: ${err.message}`, { source: 'Link', level: 'error' });
        });
    }
    /**
     * Start the background worker with interval-based scanning.
     * Idempotent: calling start() while already running is a no-op.
     */
    start() {
        if (this.isRunning)
            return;
        const config = ConfigManager.getInstance().getLinkConfig();
        const intervalMs = config.scan_interval_ms;
        this.isRunning = true;
        this.display.log('LinkWorker started', { source: 'Link' });
        // Run initial scan immediately so documents are picked up at boot.
        this.runTickSafely('initial scan');
        // Schedule periodic scans.
        this.intervalId = setInterval(() => this.runTickSafely('tick'), intervalMs);
    }
    /**
     * Stop the background worker and clear the scan timer.
     */
    stop() {
        if (this.intervalId) {
            clearInterval(this.intervalId);
            this.intervalId = null;
        }
        this.isRunning = false;
        this.display.log('LinkWorker stopped', { source: 'Link' });
    }
    /**
     * Update the scan interval (hot-reload). Only takes effect while the
     * worker is running; when stopped there is no timer to reschedule.
     * @param {number} intervalMs - New delay between scans, in milliseconds.
     */
    updateInterval(intervalMs) {
        if (this.intervalId) {
            clearInterval(this.intervalId);
            this.intervalId = setInterval(() => this.runTickSafely('tick'), intervalMs);
            this.display.log(`LinkWorker interval updated to ${intervalMs}ms`, { source: 'Link' });
        }
    }
    /**
     * Perform a single scan cycle: index new/changed files, drop deleted ones.
     * Per-file failures are counted and logged; they do not abort the cycle.
     * @returns {Promise<{indexed: number, removed: number, errors: number}>}
     */
    async tick() {
        // Ensure embedding service is initialized.
        if (!this.embeddingService) {
            this.embeddingService = await EmbeddingService.getInstance();
        }
        // Ensure docs folder exists.
        await fs.ensureDir(this.docsPath);
        const stats = {
            indexed: 0,
            removed: 0,
            errors: 0,
        };
        try {
            // Scan for new/changed documents.
            const files = await this.scanFolder();
            this.display.log(`LinkWorker found ${files.length} files`, { source: 'Link', level: 'debug' });
            for (const filePath of files) {
                try {
                    const result = await this.processDocument(filePath);
                    if (result === 'indexed') {
                        stats.indexed++;
                    }
                    else if (result === 'error') {
                        stats.errors++;
                    }
                    // 'skipped' (unchanged file) is intentionally not counted.
                }
                catch (err) {
                    this.display.log(`Failed to process ${filePath}: ${err.message}`, { source: 'Link', level: 'error' });
                    stats.errors++;
                }
            }
            // Remove documents whose backing file is gone.
            stats.removed = await this.removeDeletedDocuments(files);
            if (stats.indexed > 0 || stats.removed > 0) {
                this.display.log(`LinkWorker: indexed ${stats.indexed}, removed ${stats.removed}, errors ${stats.errors}`, { source: 'Link', level: 'info' });
            }
        }
        catch (err) {
            this.display.log(`LinkWorker tick error: ${err.message}`, { source: 'Link', level: 'error' });
            stats.errors++;
        }
        return stats;
    }
    /**
     * Scan the docs folder (non-recursively) for supported files.
     * @returns {Promise<string[]>} Absolute paths of supported files.
     */
    async scanFolder() {
        const files = [];
        const entries = await fs.readdir(this.docsPath, { withFileTypes: true });
        for (const entry of entries) {
            if (entry.isFile()) {
                const filePath = path.join(this.docsPath, entry.name);
                if (isSupportedFormat(filePath)) {
                    files.push(filePath);
                }
            }
        }
        return files;
    }
    /**
     * Process a single document: hash-check, size-check, parse, chunk, embed.
     * @param {string} filePath - Absolute path of the file to process.
     * @returns {Promise<'indexed'|'skipped'|'error'>}
     */
    async processDocument(filePath) {
        const existingDoc = this.repository.getDocumentByPath(filePath);
        // Calculate file hash.
        let fileHash;
        try {
            fileHash = await hashFile(filePath);
        }
        catch (err) {
            // File might not be readable.
            if (existingDoc) {
                this.repository.updateDocumentStatus(existingDoc.id, 'error', `Failed to read file: ${err.message}`);
            }
            return 'error';
        }
        // Skip if the document is already indexed with the same content hash.
        if (existingDoc && existingDoc.file_hash === fileHash && existingDoc.status === 'indexed') {
            return 'skipped';
        }
        // Get file stats.
        const stats = await fs.stat(filePath);
        const fileSize = stats.size;
        // Reject files over the configured size limit.
        const config = ConfigManager.getInstance().getLinkConfig();
        const maxSizeBytes = config.max_file_size_mb * 1024 * 1024;
        if (fileSize > maxSizeBytes) {
            if (existingDoc) {
                this.repository.updateDocumentStatus(existingDoc.id, 'error', `File exceeds max size of ${config.max_file_size_mb}MB`);
            }
            return 'error';
        }
        // Create or update the document record.
        const filename = path.basename(filePath);
        let document;
        if (existingDoc) {
            // Re-index path: purge old chunks/embeddings first.
            // NOTE(review): the stored file_hash is not refreshed here for
            // existing documents — presumably handled downstream; if not, a
            // changed file would be re-indexed on every tick. Verify.
            this.repository.deleteChunksByDocument(existingDoc.id);
            this.repository.deleteEmbeddingsByDocument(existingDoc.id);
            this.repository.updateDocumentStatus(existingDoc.id, 'indexing');
            document = existingDoc;
        }
        else {
            // Create new document.
            document = this.repository.createDocument({
                filename,
                file_path: filePath,
                file_hash: fileHash,
                file_size: fileSize,
            });
        }
        try {
            // Index the document.
            await this.indexDocument(document.id, filePath, fileHash);
            // Update status to indexed.
            const chunks = this.repository.getChunksByDocument(document.id);
            this.repository.updateDocumentChunkCount(document.id, chunks.length);
            // Fixed: interpolation was broken ("$(unknown)") — log the actual filename.
            this.display.log(`Indexed document: ${filename} (${chunks.length} chunks)`, { source: 'Link', level: 'debug' });
            return 'indexed';
        }
        catch (err) {
            this.repository.updateDocumentStatus(document.id, 'error', err.message);
            return 'error';
        }
    }
    /**
     * Index a document: parse, chunk, and generate embeddings.
     * @param {string} documentId - Repository ID of the document record.
     * @param {string} filePath - Absolute path of the file on disk.
     * @param {string} fileHash - Hash computed before processing began.
     * @throws {Error} If the file changed mid-processing (hash mismatch).
     */
    async indexDocument(documentId, filePath, fileHash) {
        const config = ConfigManager.getInstance().getLinkConfig();
        const chunkSize = config.chunk_size;
        // Parse and chunk the document.
        const processed = await processDocument(filePath, chunkSize);
        // Verify hash matches (file might have changed during processing).
        if (processed.hash !== fileHash) {
            throw new Error('File changed during processing - hash mismatch');
        }
        // Create chunks in the database.
        const chunkInputs = processed.chunks.map(chunk => ({
            document_id: documentId,
            position: chunk.position,
            content: chunk.content,
            char_start: chunk.char_start,
            char_end: chunk.char_end,
        }));
        this.repository.createChunks(chunkInputs);
        // Re-read chunks to get their repository-assigned IDs.
        const chunks = this.repository.getChunksByDocument(documentId);
        // Generate embeddings for each chunk.
        await this.generateEmbeddings(chunks);
    }
    /**
     * Generate embeddings for chunks using Sati's EmbeddingService,
     * batching to limit peak memory, then persist them.
     * @param {Array<{id: string, content: string}>} chunks
     */
    async generateEmbeddings(chunks) {
        if (!this.embeddingService) {
            this.embeddingService = await EmbeddingService.getInstance();
        }
        const embeddings = [];
        // Process in batches to avoid memory issues.
        const batchSize = 50;
        for (let i = 0; i < chunks.length; i += batchSize) {
            const batch = chunks.slice(i, i + batchSize);
            const batchEmbeddings = await Promise.all(batch.map(async (chunk) => {
                const embedding = await this.embeddingService.generate(chunk.content);
                return { chunk_id: chunk.id, embedding };
            }));
            embeddings.push(...batchEmbeddings);
        }
        // Store embeddings in the database.
        this.repository.createEmbeddings(embeddings);
    }
    /**
     * Remove indexed documents whose file no longer exists in the docs folder.
     * @param {string[]} existingFiles - Paths found by the current scan.
     * @returns {Promise<number>} Count of documents removed.
     */
    async removeDeletedDocuments(existingFiles) {
        const existingPaths = new Set(existingFiles);
        const documents = this.repository.listDocuments();
        let removed = 0;
        for (const doc of documents) {
            if (!existingPaths.has(doc.file_path)) {
                // Document file no longer exists - remove from index.
                this.repository.deleteDocument(doc.id);
                removed++;
                this.display.log(`Removed deleted document: ${doc.filename}`, { source: 'Link', level: 'debug' });
            }
        }
        return removed;
    }
}
|
|
@@ -0,0 +1,295 @@
|
|
|
1
|
+
import { HumanMessage, SystemMessage, AIMessage } from "@langchain/core/messages";
|
|
2
|
+
import { z } from "zod";
|
|
3
|
+
import { DynamicStructuredTool } from "@langchain/core/tools";
|
|
4
|
+
import { ConfigManager } from '../config/manager.js';
|
|
5
|
+
import { LinkRepository } from './link-repository.js';
|
|
6
|
+
import { LinkSearch } from './link-search.js';
|
|
7
|
+
import { ProviderFactory } from './providers/factory.js';
|
|
8
|
+
import { ProviderError } from './errors.js';
|
|
9
|
+
import { DisplayManager } from './display.js';
|
|
10
|
+
import { TaskRequestContext } from './tasks/context.js';
|
|
11
|
+
import { extractRawUsage, persistAgentMessage, buildAgentResult, emitToolAuditEvents } from './subagent-utils.js';
|
|
12
|
+
import { buildDelegationTool } from './tools/delegation-utils.js';
|
|
13
|
+
const LINK_BASE_DESCRIPTION = `Delegate to Link, the documentation specialist subagent.

Link has access to indexed user documents (PDFs, Markdown, TXT, DOCX) stored in ~/.morpheus/docs.
It uses an LLM to search, reason over, and synthesize answers from document content.

Use this tool when the user asks about information that might be in their uploaded documents.
Input should be a natural language query or question about the user's documentation.`;
/**
 * Render the indexed-document catalog as a text section suitable for
 * appending to the delegation tool's description.
 *
 * @param {object} repository - LinkRepository (anything with listDocuments()).
 * @returns {string} A "\n\nIndexed documents: ..." section; a fallback line
 *   when the catalog is empty or the repository lookup throws.
 */
function buildDocumentCatalogSection(repository) {
    try {
        const indexedDocs = repository.listDocuments('indexed');
        if (indexedDocs.length > 0) {
            const catalog = indexedDocs
                .map((doc) => `- ${doc.filename} (${doc.chunk_count} chunks)`)
                .join('\n');
            return `\n\nIndexed documents:\n${catalog}`;
        }
        return '\n\nIndexed documents: none currently indexed.';
    }
    catch {
        // Catalog rendering is best-effort; never let it break tool setup.
        return '\n\nIndexed documents: unable to retrieve list.';
    }
}
|
|
33
|
+
/**
 * Link - Documentation Specialist Subagent
 *
 * Provides RAG (Retrieval-Augmented Generation) capabilities over user documents.
 * Uses a ReactAgent with an LLM to reason over search results and synthesize answers.
 */
export class Link {
    // Singleton instance; created on first getInstance().
    static instance = null;
    // Session ID used as a fallback target when execute() gets none explicitly.
    static currentSessionId = undefined;
    // Cached delegation tool handed to Oracle; its description is refreshed
    // with the current document catalog (see refreshDelegateCatalog).
    static _delegateTool = null;
    config;
    agentConfig;
    repository;
    search;
    // Underlying ReactAgent; lazily built in initialize().
    agent;
    display = DisplayManager.getInstance();
    constructor(config) {
        this.config = config;
        this.agentConfig = ConfigManager.getInstance().getLinkConfig();
        this.repository = LinkRepository.getInstance();
        this.search = LinkSearch.getInstance();
    }
    // Lazily create the singleton; falls back to the global config when none given.
    static getInstance(config) {
        if (!Link.instance) {
            if (!config) {
                config = ConfigManager.getInstance().get();
            }
            Link.instance = new Link(config);
        }
        return Link.instance;
    }
    // Drop both the singleton and the cached delegation tool.
    static resetInstance() {
        Link.instance = null;
        Link._delegateTool = null;
    }
    static setSessionId(id) {
        Link.currentSessionId = id;
    }
    /**
     * Build the internal search tools that the ReactAgent will use:
     * broad search, document listing, and per-document search.
     * @returns {DynamicStructuredTool[]}
     */
    buildTools() {
        // Captured locally so the tool closures don't depend on `this`.
        const search = this.search;
        const repository = this.repository;
        const agentConfig = this.agentConfig;
        const searchTool = new DynamicStructuredTool({
            name: 'link_search_documents',
            description: 'Search ALL indexed user documents using hybrid vector + keyword search. Returns the most relevant document chunks for a given query. Use this for broad searches when you don\'t know which document contains the answer.',
            schema: z.object({
                query: z.string().describe('The search query to find relevant document passages'),
                limit: z.number().optional().describe('Maximum number of results to return (default: max_results from config)'),
            }),
            func: async ({ query, limit }) => {
                // Config supplies the defaults for result count and score cutoff.
                const maxResults = limit ?? agentConfig.max_results;
                const threshold = agentConfig.score_threshold;
                const results = await search.search(query, maxResults, threshold);
                if (results.length === 0) {
                    return `No relevant documents found for query: "${query}"`;
                }
                // Number each passage and attach provenance so the LLM can cite it.
                const formatted = results
                    .map((r, i) => `[${i + 1}] Source: ${r.filename} (chunk ${r.position}, score: ${r.score.toFixed(3)})\n${r.content}`)
                    .join('\n\n---\n\n');
                return `Found ${results.length} relevant passages:\n\n${formatted}`;
            },
        });
        const listDocumentsTool = new DynamicStructuredTool({
            name: 'link_list_documents',
            description: 'List indexed documents. Use this to find documents by filename before searching within a specific one. Supports optional name filter (case-insensitive partial match).',
            schema: z.object({
                name_filter: z.string().optional().describe('Optional partial filename to filter by (case-insensitive). E.g. "CV", "contrato", "readme"'),
            }),
            func: async ({ name_filter }) => {
                const docs = repository.listDocuments('indexed');
                if (docs.length === 0) {
                    return 'No indexed documents found.';
                }
                let filtered = docs;
                if (name_filter) {
                    // Case-insensitive substring match on the filename.
                    const lower = name_filter.toLowerCase();
                    filtered = docs.filter(d => d.filename.toLowerCase().includes(lower));
                }
                if (filtered.length === 0) {
                    // No match: show the full catalog so the LLM can retry with a real name.
                    const allNames = docs.map(d => `- ${d.filename}`).join('\n');
                    return `No documents matching "${name_filter}". Available documents:\n${allNames}`;
                }
                // Include IDs so link_search_in_document can be called next.
                const lines = filtered.map(d => `- [${d.id}] ${d.filename} (${d.chunk_count} chunks)`);
                return `Found ${filtered.length} document(s):\n${lines.join('\n')}`;
            },
        });
        const searchInDocumentTool = new DynamicStructuredTool({
            name: 'link_search_in_document',
            description: 'Search within a SPECIFIC document by its ID. Use this when you know which document to search (e.g. after using link_list_documents to find it). More precise than link_search_documents for targeted queries.',
            schema: z.object({
                document_id: z.string().describe('The document ID to search within (get this from link_list_documents)'),
                query: z.string().describe('The search query to find relevant passages within this document'),
                limit: z.number().optional().describe('Maximum number of results (default: max_results from config)'),
            }),
            func: async ({ document_id, query, limit }) => {
                const doc = repository.getDocument(document_id);
                if (!doc) {
                    return `Document not found: ${document_id}`;
                }
                const maxResults = limit ?? agentConfig.max_results;
                const threshold = agentConfig.score_threshold;
                const results = await search.searchInDocument(query, document_id, maxResults, threshold);
                if (results.length === 0) {
                    return `No relevant passages found in "${doc.filename}" for query: "${query}"`;
                }
                const formatted = results
                    .map((r, i) => `[${i + 1}] (chunk ${r.position}, score: ${r.score.toFixed(3)})\n${r.content}`)
                    .join('\n\n---\n\n');
                return `Found ${results.length} passages in "${doc.filename}":\n\n${formatted}`;
            },
        });
        return [listDocumentsTool, searchTool, searchInDocumentTool];
    }
    /**
     * Initialize the subagent: repository + search backends, tools, and the
     * underlying provider agent. Also refreshes the delegate-tool catalog.
     * @throws {ProviderError} When the provider agent cannot be created.
     */
    async initialize() {
        this.repository.initialize();
        await this.search.initialize();
        const linkConfig = this.agentConfig;
        const personality = linkConfig.personality || 'documentation_specialist';
        const tools = this.buildTools();
        // Update delegate tool description with current document catalog.
        if (Link._delegateTool) {
            const full = `${LINK_BASE_DESCRIPTION}${buildDocumentCatalogSection(this.repository)}`;
            Link._delegateTool.description = full;
        }
        this.display.log(`Link initialized with personality: ${personality}.`, { source: 'Link' });
        try {
            this.agent = await ProviderFactory.create(linkConfig, tools);
        }
        catch (err) {
            // Wrap so callers see which provider failed and why.
            throw new ProviderError(linkConfig.provider, err, 'Link subagent initialization failed');
        }
    }
    /**
     * Search documents for relevant information (used internally by search tool and HTTP API).
     * @param {string} query - Natural-language search query.
     * @param {number} [limit] - Max results; defaults to config max_results.
     * @returns {Promise<{results: object[], total: number}>} Plain-object results.
     */
    async searchDocuments(query, limit) {
        const maxResults = limit ?? this.agentConfig.max_results;
        const threshold = this.agentConfig.score_threshold;
        const results = await this.search.search(query, maxResults, threshold);
        // Project onto a stable plain-object shape for the HTTP API.
        return {
            results: results.map(r => ({
                chunk_id: r.chunk_id,
                content: r.content,
                document_id: r.document_id,
                filename: r.filename,
                position: r.position,
                score: r.score,
            })),
            total: results.length,
        };
    }
    /**
     * Execute a query using the LLM-powered ReactAgent.
     * @param {string} task - The delegated task/question.
     * @param {string} [context] - Optional extra context appended to the prompt.
     * @param {string} [sessionId] - Session to persist the reply under.
     * @param {object} [taskContext] - Origin metadata (channel, message, user).
     * @throws {ProviderError} When the agent invocation fails.
     */
    async execute(task, context, sessionId, taskContext) {
        const linkConfig = this.agentConfig;
        // Lazy init on first use.
        if (!this.agent) {
            await this.initialize();
        }
        this.display.log(`Executing delegated task in Link: ${task.slice(0, 80)}...`, {
            source: 'Link',
        });
        const personality = linkConfig.personality || 'documentation_specialist';
        // NOTE: this is a runtime prompt string — do not reformat.
        const systemMessage = new SystemMessage(`
You are Link, ${personality === 'documentation_specialist' ? 'a documentation specialist and knowledge synthesizer' : personality}, a subagent in Morpheus.

You have access to the user's indexed documents via the link_search_documents tool.

Rules:
1. ALWAYS search the documents before answering. Never answer from general knowledge alone when documents may contain relevant information.
2. Synthesize search results into a clear, natural response. Do not just dump raw chunks.
3. Cite sources by filename when referencing specific information (e.g., "According to readme.md, ...").
4. If no relevant documents are found, clearly state that no matching documentation was found.
5. NEVER fabricate or invent document content. Only report what the search actually returns.
6. If the query is ambiguous, search with multiple relevant terms to maximize coverage.
7. Keep responses concise and focused on the user's question.
8. Respond in the language requested by the user. If not explicit, use the dominant language of the task/context.

## Tool Selection Strategy
- When the user refers to a SPECIFIC document (by name or partial name like "meu currículo", "CV", "contrato"):
  1. First call **link_list_documents** with a name filter to find the document ID.
  2. Then call **link_search_in_document** with that document ID for a targeted search.
- When the user asks a general question without referencing a specific document:
  - Use **link_search_documents** for a broad search across all documents.
- When unsure which document contains the answer, start with **link_search_documents**, then narrow down with **link_search_in_document** if results point to a specific file.

${context ? `Context:\n${context}` : ''}
`);
        const userMessage = new HumanMessage(task);
        const messages = [systemMessage, userMessage];
        try {
            // Origin metadata propagated to tool calls via AsyncLocalStorage-style context.
            const invokeContext = {
                origin_channel: taskContext?.origin_channel ?? 'api',
                session_id: taskContext?.session_id ?? sessionId ?? 'default',
                origin_message_id: taskContext?.origin_message_id,
                origin_user_id: taskContext?.origin_user_id,
            };
            // Remember the input length so only NEW messages are audited below.
            const inputCount = messages.length;
            const startMs = Date.now();
            const response = await TaskRequestContext.run(invokeContext, () => this.agent.invoke({ messages }, { recursionLimit: 25 }));
            const durationMs = Date.now() - startMs;
            const lastMessage = response.messages[response.messages.length - 1];
            // Content may be structured (multi-part); stringify in that case.
            const content = typeof lastMessage.content === 'string'
                ? lastMessage.content
                : JSON.stringify(lastMessage.content);
            const rawUsage = extractRawUsage(lastMessage);
            // Step count = number of AI turns taken by the ReactAgent.
            const stepCount = response.messages.filter((m) => m instanceof AIMessage).length;
            // Fallback chain for the persistence target session.
            const targetSession = sessionId ?? Link.currentSessionId ?? 'link';
            await persistAgentMessage('link', content, linkConfig, targetSession, rawUsage, durationMs);
            // Audit only the messages generated during this invocation.
            emitToolAuditEvents(response.messages.slice(inputCount), targetSession, 'link');
            this.display.log('Link task completed.', { source: 'Link' });
            return buildAgentResult(content, linkConfig, rawUsage, durationMs, stepCount);
        }
        catch (err) {
            throw new ProviderError(linkConfig.provider, err, 'Link task execution failed');
        }
    }
    /**
     * Create the delegation tool for Oracle.
     * Cached at class level so the same tool object can have its description
     * refreshed in place as the document catalog changes.
     */
    createDelegateTool() {
        if (!Link._delegateTool) {
            Link._delegateTool = buildDelegationTool({
                name: 'link_delegate',
                description: LINK_BASE_DESCRIPTION,
                agentKey: 'link',
                agentLabel: 'Link',
                auditAgent: 'link',
                // Read execution_mode fresh each call so config hot-reload applies.
                isSync: () => ConfigManager.getInstance().getLinkConfig().execution_mode === 'sync',
                notifyText: '📚 Link is searching your documentation...',
                executeSync: (task, context, sessionId, ctx) => Link.getInstance().execute(task, context, sessionId, {
                    origin_channel: ctx?.origin_channel ?? 'api',
                    session_id: sessionId,
                    origin_message_id: ctx?.origin_message_id,
                    origin_user_id: ctx?.origin_user_id,
                }),
            });
        }
        return Link._delegateTool;
    }
    /**
     * Refresh the delegate tool description with current document catalog.
     * No-op if the delegate tool has not been created yet.
     */
    static async refreshDelegateCatalog() {
        if (Link._delegateTool) {
            try {
                const repository = LinkRepository.getInstance();
                const full = `${LINK_BASE_DESCRIPTION}${buildDocumentCatalogSection(repository)}`;
                Link._delegateTool.description = full;
            }
            catch { /* non-critical */ }
        }
    }
    // Re-read config and rebuild the agent (config hot-reload).
    async reload() {
        this.config = ConfigManager.getInstance().get();
        this.agentConfig = ConfigManager.getInstance().getLinkConfig();
        this.agent = undefined;
        await this.initialize();
    }
}
|
|
@@ -97,7 +97,7 @@ export class SatiService {
|
|
|
97
97
|
console.warn('[SatiService] Failed to persist input log:', e);
|
|
98
98
|
}
|
|
99
99
|
const satiStartMs = Date.now();
|
|
100
|
-
const response = await agent.invoke({ messages }, { recursionLimit:
|
|
100
|
+
const response = await agent.invoke({ messages }, { recursionLimit: 10 });
|
|
101
101
|
const satiDurationMs = Date.now() - satiStartMs;
|
|
102
102
|
const lastMessage = response.messages[response.messages.length - 1];
|
|
103
103
|
let content = lastMessage.content.toString();
|
package/dist/runtime/neo.js
CHANGED
|
@@ -137,7 +137,7 @@ ${context ? `Context:\n${context}` : ""}
|
|
|
137
137
|
};
|
|
138
138
|
const inputCount = messages.length;
|
|
139
139
|
const startMs = Date.now();
|
|
140
|
-
const response = await TaskRequestContext.run(invokeContext, () => this.agent.invoke({ messages }, { recursionLimit:
|
|
140
|
+
const response = await TaskRequestContext.run(invokeContext, () => this.agent.invoke({ messages }, { recursionLimit: 10 }));
|
|
141
141
|
const durationMs = Date.now() - startMs;
|
|
142
142
|
const lastMessage = response.messages[response.messages.length - 1];
|
|
143
143
|
const content = typeof lastMessage.content === "string"
|