morpheus-cli 0.5.3 → 0.5.5

@@ -180,38 +180,39 @@ PHASE 1 — Query Design
  PHASE 2 — Source Discovery
  1. Call browser_search.
  2. Collect results.
- 3. Prioritize:
- - Official sources
- - Major authoritative publications
+ 3. Prioritize official sources and major publications.
  4. Reformulate query if necessary.
+ 5. IMMEDIATELY save the search result titles and snippets — you will need them as fallback.

- PHASE 3 — Source Validation (MANDATORY)
- 1. Open at least 3 distinct URLs with browser_navigate.
- 2. Read actual page content.
- 3. NEVER rely only on search snippets.
- 4. Ignore inaccessible pages.
+ PHASE 3 — Source Validation
+ 1. Try to open up to 3 distinct URLs with browser_navigate.
+ - For news/sports/media sites (GE, Globo, UOL, Terra, ESPN, etc.): ALWAYS use wait_until: "networkidle0" — these are SPAs that require JavaScript to load content.
+ - For simple/static pages: use wait_until: "domcontentloaded".
+ 2. Read actual page content from accessible pages.
+ 3. Ignore inaccessible pages (timeouts, bot blocks, errors).
+ 4. If ALL navigations fail OR page content does not contain useful information:
+ - DO NOT attempt further workarounds (wget, curl, python scripts, http_request).
+ - Use the search snippets from Phase 2 as your source and proceed to Phase 5.

  PHASE 4 — Cross-Verification
- 1. Extract relevant information from each source.
- 2. Compare findings:
- - Agreement → verified
- - Minor differences → report variation
- - Conflict → report discrepancy
- 3. Require confirmation from at least 2 reliable sources.
- 4. If not confirmed, state clearly:
- "Information could not be confidently verified."
+ 1. Extract relevant information from each accessible source.
+ 2. Compare findings across sources when possible.
+ 3. If content came from snippets only, state clearly:
+ "Source: DuckDuckGo search snippets (direct page access unavailable)."
 
  PHASE 5 — Structured Report
  Include:
- - Direct answer
- - Short explanation
- - Source URLs
+ - Direct answer based ONLY on what was found online
+ - Source URLs (from search results or navigated pages)
  - Confidence level (High / Medium / Low)

- ANTI-HALLUCINATION RULES
- - Never answer from prior knowledge without verification.
- - Never stop after reading only one source.
- - Treat time-sensitive information as volatile.
+ ABSOLUTE RULES — NEVER VIOLATE
+ 1. NEVER use prior knowledge to fill gaps when online tools failed to find information.
+ 2. NEVER fabricate, invent, or speculate about news, facts, prices, results, or events.
+ 3. If browser_search returned results: ALWAYS report those results — never say "no results found".
+ 4. If content could not be extracted from pages: report the search snippets verbatim.
+ 5. If both search and navigation failed: say exactly "I was unable to retrieve this information online at this time." Stop there. Do not continue with "based on general knowledge...".
+ 6. Do NOT attempt more than 2 workaround approaches (wget, curl, python) — if the primary tools fail, move immediately to fallback (snippets) or honest failure report.
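The wait_until values introduced in PHASE 3 match the page-load lifecycle events used by Puppeteer-style navigation. As a rough sketch only, assuming browser_navigate wraps a Puppeteer page.goto call (an assumption; this diff does not show that tool's implementation), the two modes behave as follows:

import puppeteer from 'puppeteer';

const browser = await puppeteer.launch();
const page = await browser.newPage();

// 'domcontentloaded' resolves as soon as the initial HTML is parsed. Fast, but an
// SPA shell may still be empty because its JavaScript has not fetched content yet.
await page.goto('https://example.com/', { waitUntil: 'domcontentloaded' });

// 'networkidle0' resolves only after the page has had no network connections for
// 500 ms, giving client-rendered news/sports sites time to load their content.
await page.goto('https://ge.globo.com/', { waitUntil: 'networkidle0', timeout: 30_000 });

console.log(await page.title());
await browser.close();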
 
@@ -40,6 +40,15 @@ function wrapToolWithSanitizedSchema(tool) {
  }
  return tool;
  }
+ /** Timeout (ms) for connecting to each MCP server and fetching its tools list. */
+ const MCP_CONNECT_TIMEOUT_MS = 15_000;
+ /**
+ * Returns a promise that rejects after `ms` milliseconds with a timeout error.
+ * Used to guard `client.getTools()` against servers that never respond.
+ */
+ function connectTimeout(serverName, ms) {
+ return new Promise((_, reject) => setTimeout(() => reject(new Error(`MCP server '${serverName}' timed out after ${ms}ms`)), ms));
+ }
  export class Construtor {
  static async probe() {
  const mcpServers = await loadMCPConfig();
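A note on how the new guard behaves: Promise.race settles with whichever promise settles first, so if the 15-second timer fires before client.getTools() resolves, the await throws and the surrounding catch block takes over, while the underlying getTools() call simply keeps running in the background. The timer created by connectTimeout is never cleared, so it stays scheduled for the full MCP_CONNECT_TIMEOUT_MS even when getTools() succeeds quickly. A common variant, shown here only as a sketch with a hypothetical helper name and not as what the package ships, cancels the timer once the race is decided:

// Variant sketch: same Promise.race guard, but the losing timer is cleared so it
// cannot hold the Node event loop open after getTools() settles.
function getToolsWithTimeout(client, serverName, ms) {
  let timer;
  const timeout = new Promise((_, reject) => {
    timer = setTimeout(
      () => reject(new Error(`MCP server '${serverName}' timed out after ${ms}ms`)),
      ms,
    );
  });
  return Promise.race([client.getTools(), timeout]).finally(() => clearTimeout(timer));
}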
@@ -50,7 +59,10 @@ export class Construtor {
  mcpServers: { [serverName]: serverConfig },
  onConnectionError: "ignore",
  });
- const tools = await client.getTools();
+ const tools = await Promise.race([
+ client.getTools(),
+ connectTimeout(serverName, MCP_CONNECT_TIMEOUT_MS),
+ ]);
  results.push({ name: serverName, ok: true, toolCount: tools.length });
  }
  catch (error) {
@@ -63,7 +75,6 @@ export class Construtor {
  const display = DisplayManager.getInstance();
  const mcpServers = await loadMCPConfig();
  const serverCount = Object.keys(mcpServers).length;
- // console.log(mcpServers);
  if (serverCount === 0) {
  display.log('No MCP servers configured in mcps.json', { level: 'info', source: 'Construtor' });
  return [];
@@ -78,13 +89,14 @@ export class Construtor {
  onConnectionError: "ignore",
  });
  try {
- const tools = await client.getTools();
+ const tools = await Promise.race([
+ client.getTools(),
+ connectTimeout(serverName, MCP_CONNECT_TIMEOUT_MS),
+ ]);
  // Rename tools to include server prefix to avoid collisions
  tools.forEach(tool => {
- const originalName = tool.name;
- const newName = `${serverName}_${originalName}`;
+ const newName = `${serverName}_${tool.name}`;
  Object.defineProperty(tool, "name", { value: newName });
- const shortDesc = tool.description && typeof tool.description === 'string' ? tool.description.slice(0, 100) + '...' : '';
  display.log(`Loaded MCP tool: ${tool.name} (from ${serverName})`, { level: 'info', source: 'Construtor' });
  });
  // Sanitize tool schemas to remove fields not supported by Gemini
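The rename above writes the prefixed name with Object.defineProperty rather than plain assignment. One plausible reason, assumed here rather than stated anywhere in this diff, is that a tool object may expose name as a read-only accessor or non-writable property, which a plain assignment cannot override:

// Illustration with a hypothetical tool class, not code from the package.
class FakeTool {
  get name() { return 'search'; } // read-only accessor on the prototype
}

const tool = new FakeTool();
// tool.name = 'github_search';   // TypeError in strict mode: no setter defined

// Defining an own data property on the instance shadows the prototype accessor:
Object.defineProperty(tool, 'name', { value: 'github_search' });
console.log(tool.name); // 'github_search'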
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "morpheus-cli",
- "version": "0.5.3",
+ "version": "0.5.5",
  "description": "Morpheus is a local AI agent for developers, running as a CLI daemon that connects to LLMs, local tools, and MCPs, enabling interaction via Terminal, Telegram, and Discord. Inspired by the character Morpheus from *The Matrix*, the project acts as an intelligent orchestrator, bridging the gap between the developer and complex systems.",
  "bin": {
  "morpheus": "./bin/morpheus.js"