npcsh-0.1.2-py3-none-any.whl → npcsh-1.1.13-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- npcsh/_state.py +3508 -0
- npcsh/alicanto.py +65 -0
- npcsh/build.py +291 -0
- npcsh/completion.py +206 -0
- npcsh/config.py +163 -0
- npcsh/corca.py +50 -0
- npcsh/execution.py +185 -0
- npcsh/guac.py +46 -0
- npcsh/mcp_helpers.py +357 -0
- npcsh/mcp_server.py +299 -0
- npcsh/npc.py +323 -0
- npcsh/npc_team/alicanto.npc +2 -0
- npcsh/npc_team/alicanto.png +0 -0
- npcsh/npc_team/corca.npc +12 -0
- npcsh/npc_team/corca.png +0 -0
- npcsh/npc_team/corca_example.png +0 -0
- npcsh/npc_team/foreman.npc +7 -0
- npcsh/npc_team/frederic.npc +6 -0
- npcsh/npc_team/frederic4.png +0 -0
- npcsh/npc_team/guac.png +0 -0
- npcsh/npc_team/jinxs/code/python.jinx +11 -0
- npcsh/npc_team/jinxs/code/sh.jinx +34 -0
- npcsh/npc_team/jinxs/code/sql.jinx +16 -0
- npcsh/npc_team/jinxs/modes/alicanto.jinx +194 -0
- npcsh/npc_team/jinxs/modes/corca.jinx +249 -0
- npcsh/npc_team/jinxs/modes/guac.jinx +317 -0
- npcsh/npc_team/jinxs/modes/plonk.jinx +214 -0
- npcsh/npc_team/jinxs/modes/pti.jinx +170 -0
- npcsh/npc_team/jinxs/modes/spool.jinx +161 -0
- npcsh/npc_team/jinxs/modes/wander.jinx +186 -0
- npcsh/npc_team/jinxs/modes/yap.jinx +262 -0
- npcsh/npc_team/jinxs/npc_studio/npc-studio.jinx +77 -0
- npcsh/npc_team/jinxs/utils/agent.jinx +17 -0
- npcsh/npc_team/jinxs/utils/chat.jinx +44 -0
- npcsh/npc_team/jinxs/utils/cmd.jinx +44 -0
- npcsh/npc_team/jinxs/utils/compress.jinx +140 -0
- npcsh/npc_team/jinxs/utils/core/build.jinx +65 -0
- npcsh/npc_team/jinxs/utils/core/compile.jinx +50 -0
- npcsh/npc_team/jinxs/utils/core/help.jinx +52 -0
- npcsh/npc_team/jinxs/utils/core/init.jinx +41 -0
- npcsh/npc_team/jinxs/utils/core/jinxs.jinx +32 -0
- npcsh/npc_team/jinxs/utils/core/set.jinx +40 -0
- npcsh/npc_team/jinxs/utils/edit_file.jinx +94 -0
- npcsh/npc_team/jinxs/utils/load_file.jinx +35 -0
- npcsh/npc_team/jinxs/utils/ots.jinx +61 -0
- npcsh/npc_team/jinxs/utils/roll.jinx +68 -0
- npcsh/npc_team/jinxs/utils/sample.jinx +56 -0
- npcsh/npc_team/jinxs/utils/search.jinx +130 -0
- npcsh/npc_team/jinxs/utils/serve.jinx +26 -0
- npcsh/npc_team/jinxs/utils/sleep.jinx +116 -0
- npcsh/npc_team/jinxs/utils/trigger.jinx +61 -0
- npcsh/npc_team/jinxs/utils/usage.jinx +33 -0
- npcsh/npc_team/jinxs/utils/vixynt.jinx +144 -0
- npcsh/npc_team/kadiefa.npc +3 -0
- npcsh/npc_team/kadiefa.png +0 -0
- npcsh/npc_team/npcsh.ctx +18 -0
- npcsh/npc_team/npcsh_sibiji.png +0 -0
- npcsh/npc_team/plonk.npc +2 -0
- npcsh/npc_team/plonk.png +0 -0
- npcsh/npc_team/plonkjr.npc +2 -0
- npcsh/npc_team/plonkjr.png +0 -0
- npcsh/npc_team/sibiji.npc +3 -0
- npcsh/npc_team/sibiji.png +0 -0
- npcsh/npc_team/spool.png +0 -0
- npcsh/npc_team/yap.png +0 -0
- npcsh/npcsh.py +296 -112
- npcsh/parsing.py +118 -0
- npcsh/plonk.py +54 -0
- npcsh/pti.py +54 -0
- npcsh/routes.py +139 -0
- npcsh/spool.py +48 -0
- npcsh/ui.py +199 -0
- npcsh/wander.py +62 -0
- npcsh/yap.py +50 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/agent.jinx +17 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/alicanto.jinx +194 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/alicanto.npc +2 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/alicanto.png +0 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/build.jinx +65 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/chat.jinx +44 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/cmd.jinx +44 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/compile.jinx +50 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/compress.jinx +140 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/corca.jinx +249 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/corca.npc +12 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/corca.png +0 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/corca_example.png +0 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/edit_file.jinx +94 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/foreman.npc +7 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/frederic.npc +6 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/frederic4.png +0 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/guac.jinx +317 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/guac.png +0 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/help.jinx +52 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/init.jinx +41 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/jinxs.jinx +32 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/kadiefa.npc +3 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/kadiefa.png +0 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/load_file.jinx +35 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/npc-studio.jinx +77 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/npcsh.ctx +18 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/npcsh_sibiji.png +0 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/ots.jinx +61 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/plonk.jinx +214 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/plonk.npc +2 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/plonk.png +0 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/plonkjr.npc +2 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/plonkjr.png +0 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/pti.jinx +170 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/python.jinx +11 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/roll.jinx +68 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/sample.jinx +56 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/search.jinx +130 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/serve.jinx +26 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/set.jinx +40 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/sh.jinx +34 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/sibiji.npc +3 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/sibiji.png +0 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/sleep.jinx +116 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/spool.jinx +161 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/spool.png +0 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/sql.jinx +16 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/trigger.jinx +61 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/usage.jinx +33 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/vixynt.jinx +144 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/wander.jinx +186 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/yap.jinx +262 -0
- npcsh-1.1.13.data/data/npcsh/npc_team/yap.png +0 -0
- npcsh-1.1.13.dist-info/METADATA +522 -0
- npcsh-1.1.13.dist-info/RECORD +135 -0
- {npcsh-0.1.2.dist-info → npcsh-1.1.13.dist-info}/WHEEL +1 -1
- npcsh-1.1.13.dist-info/entry_points.txt +9 -0
- {npcsh-0.1.2.dist-info → npcsh-1.1.13.dist-info/licenses}/LICENSE +1 -1
- npcsh/command_history.py +0 -81
- npcsh/helpers.py +0 -36
- npcsh/llm_funcs.py +0 -295
- npcsh/main.py +0 -5
- npcsh/modes.py +0 -343
- npcsh/npc_compiler.py +0 -124
- npcsh-0.1.2.dist-info/METADATA +0 -99
- npcsh-0.1.2.dist-info/RECORD +0 -14
- npcsh-0.1.2.dist-info/entry_points.txt +0 -2
- {npcsh-0.1.2.dist-info → npcsh-1.1.13.dist-info}/top_level.txt +0 -0
npcsh/mcp_helpers.py
ADDED
@@ -0,0 +1,357 @@
#!/usr/bin/env python
"""
Raw MCP client with no exception handling and full visibility.
"""

import asyncio
import os
import sys
import json
try:
    import inspect
except:
    pass
from typing import Optional, List, Dict, Any
from contextlib import AsyncExitStack

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

from npcpy.gen.response import get_litellm_response
from npcsh._state import (
    NPCSH_CHAT_MODEL,
    NPCSH_CHAT_PROVIDER,
    NPCSH_API_URL,
)


class MCPClient:
    """
    Raw MCP Client with no exception handling.
    """

    def __init__(
        self,
        model: str = NPCSH_CHAT_MODEL,
        provider: str = NPCSH_CHAT_PROVIDER,
        api_url: str = NPCSH_API_URL,
        api_key: Optional[str] = None,
        debug: bool = True,
    ):
        self.model = model
        self.provider = provider
        self.api_url = api_url
        self.api_key = api_key
        self.debug = debug
        self.session = None
        self.exit_stack = AsyncExitStack()
        self.tools = []
        self.available_tools = []

    def _log(self, message: str) -> None:
        """Log debug messages."""
        if self.debug:
            print(f"[MCP Client] {message}")

    async def connect_to_server(self, server_script_path: str) -> None:
        """
        Connect to an MCP server.

        Args:
            server_script_path: Path to the server script (.py or .js)
        """
        self._log(f"Connecting to server: {server_script_path}")

        command = "python" if server_script_path.endswith('.py') else "node"
        server_params = StdioServerParameters(
            command=command,
            args=[server_script_path],
            env=None
        )

        stdio_transport = await self.exit_stack.enter_async_context(stdio_client(server_params))
        read, write = stdio_transport

        self.session = await self.exit_stack.enter_async_context(ClientSession(read, write))

        await self.session.initialize()

        response = await self.session.list_tools()
        self.tools = response.tools

        for tool in self.tools:
            print(f"\nJinx: {tool.name}")
            print(f"Description: {tool.description}")

            for attribute_name in dir(tool):
                if not attribute_name.startswith('_'):
                    attribute = getattr(tool, attribute_name)
                    if not callable(attribute):
                        print(f" {attribute_name}: {attribute}")

            if hasattr(tool, 'source'):
                print(f"Source: {tool.source}")

            try:
                tool_module = inspect.getmodule(tool)
                if tool_module:
                    print(f"Module: {tool_module.__name__}")
                    if hasattr(tool_module, tool.name):
                        tool_func = getattr(tool_module, tool.name)
                        if callable(tool_func):
                            print(f"Function signature: {inspect.signature(tool_func)}")
            except:
                pass

        self.available_tools = []
        for tool in self.tools:
            schema = getattr(tool, "inputSchema", {})

            tool_info = {
                "type": "function",
                "function": {
                    "name": tool.name,
                    "description": tool.description,
                    "parameters": schema
                }
            }
            self.available_tools.append(tool_info)

            print(f"\nJinx schema for {tool.name}:")
            print(json.dumps(schema, indent=2))

        tool_names = [tool.name for tool in self.tools]
        self._log(f"Available tools: {', '.join(tool_names)}")

    async def process_query(
        self,
        query: str,
        messages: Optional[List[Dict[str, str]]] = None,
        stream: bool = False
    ) -> Dict[str, Any]:
        """
        Process a query using the LLM and available tools.

        Args:
            query: User query
            messages: Optional conversation history
            stream: Whether to stream the response

        Returns:
            Dict with response text and updated messages
        """
        self._log(f"Processing query: {query}")

        if messages is None:
            messages = []

        current_messages = messages.copy()
        if not current_messages or current_messages[-1]["role"] != "user":
            current_messages.append({"role": "user", "content": query})
        elif current_messages[-1]["role"] == "user":
            current_messages[-1]["content"] = query

        self._log("Making initial LLM call with tools")
        response = get_litellm_response(
            model=self.model,
            provider=self.provider,
            api_url=self.api_url,
            api_key=self.api_key,
            messages=current_messages,
            tools=self.available_tools,
            stream=False
        )

        print("\nLLM Response:")
        print(json.dumps(response, indent=2, default=str))

        response_content = response.get("response", "")
        tool_calls = response.get("tool_calls", [])

        print("\nJinx Calls:")
        print(json.dumps(tool_calls, indent=2, default=str))

        final_text = []

        if response_content and not tool_calls:
            final_text.append(response_content)

            current_messages.append({
                "role": "assistant",
                "content": response_content
            })

        if tool_calls:
            self._log(f"Processing {len(tool_calls)} tool calls")

            assistant_message = {
                "role": "assistant",
                "content": response_content if response_content else None,
                "tool_calls": []
            }

            for tool_call in tool_calls:
                if isinstance(tool_call, dict):
                    tool_id = tool_call.get("id", "")
                    tool_name = tool_call.get("function", {}).get("name", "")
                    tool_args = tool_call.get("function", {}).get("arguments", {})
                else:
                    tool_id = getattr(tool_call, "id", "")
                    tool_name = getattr(tool_call.function, "name", "")
                    tool_args = getattr(tool_call.function, "arguments", {})

                if isinstance(tool_args, str):
                    print(f"\nJinx args is string: {tool_args}")
                    tool_args = json.loads(tool_args)
                    print(f"Parsed to: {tool_args}")

                assistant_message["tool_calls"].append({
                    "id": tool_id,
                    "type": "function",
                    "function": {
                        "name": tool_name,
                        "arguments": json.dumps(tool_args) if isinstance(tool_args, dict) else tool_args
                    }
                })

                self._log(f"Executing tool: {tool_name} with args: {tool_args}")
                print(f"\nExecuting tool call:")
                print(f" Jinx name: {tool_name}")
                print(f" Jinx args: {tool_args}")
                print(f" Jinx args type: {type(tool_args)}")

                final_text.append(f"[Calling tool {tool_name} with args {tool_args}]")

                result = await self.session.call_tool(tool_name, tool_args)

                print("\nJinx Result:")
                print(f" Result: {result}")
                print(f" Content: {result.content}")
                print(f" Content type: {type(result.content)}")

                tool_result = result.content

                if hasattr(tool_result, 'text'):
                    print(f" TextContent detected, text: {tool_result.text}")
                    tool_result = tool_result.text
                elif isinstance(tool_result, list) and all(hasattr(item, 'text') for item in tool_result):
                    print(f" List of TextContent detected")
                    tool_result = [item.text for item in tool_result]

                current_messages.append(assistant_message)
                current_messages.append({
                    "role": "tool",
                    "tool_call_id": tool_id,
                    "content": json.dumps(tool_result) if not isinstance(tool_result, str) else str(tool_result)
                })

                print("\nUpdated Messages:")
                print(json.dumps(current_messages, indent=2, default=str))

                self._log("Getting final response after tool calls")
                final_response = get_litellm_response(
                    model=self.model,
                    provider=self.provider,
                    api_url=self.api_url,
                    api_key=self.api_key,
                    messages=current_messages,
                    stream=stream
                )

                final_text.append(final_response.get("response", ""))

                current_messages.append({
                    "role": "assistant",
                    "content": final_response.get("response", "")
                })

        return {
            "response": "\n".join(final_text),
            "messages": current_messages
        }

    async def chat_loop(self):
        """Run an interactive chat loop"""
        print("\nMCP Client Started!")
        print("Type your queries or 'quit' to exit.")

        messages = []

        while True:
            query = input("\nQuery: ").strip()

            if query.lower() == 'quit':
                break

            result = await self.process_query(query, messages)
            messages = result.get("messages", [])

            print("\nResponse:")
            print(result.get("response", ""))

    async def cleanup(self):
        """Clean up resources"""
        self._log("Cleaning up resources")
        await self.exit_stack.aclose()


async def main():
    """Entry point for the MCP client."""
    if len(sys.argv) < 2:
        print("Usage: python raw_mcp_client.py <path_to_server_script>")
        sys.exit(1)

    server_script = sys.argv[1]

    client = MCPClient()

    await client.connect_to_server(server_script)

    await client.chat_loop()

    await client.cleanup()


if __name__ == "__main__":
    asyncio.run(main())
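For orientation, a minimal sketch of how the new client could be driven programmatically, assuming the 1.1.13 wheel is installed; the server path and query string below are illustrative placeholders, not part of this diff:

    # Hypothetical usage sketch of the MCPClient added above.
    import asyncio
    from npcsh.mcp_helpers import MCPClient

    async def demo():
        client = MCPClient(debug=True)  # model/provider default to npcsh._state settings
        # Any .py or .js MCP server script path works; this one is illustrative.
        await client.connect_to_server("npcsh/mcp_server.py")
        result = await client.process_query("search my memories for 'release notes'")
        print(result["response"])   # process_query returns {"response": ..., "messages": ...}
        await client.cleanup()

    asyncio.run(demo())

The same loop is what chat_loop() runs interactively: it calls process_query with the accumulated messages and prints the "response" field.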
npcsh/mcp_server.py
ADDED
@@ -0,0 +1,299 @@

"""
Enhanced MCP server that incorporates functionality from npcpy.routes,
npcpy.llm_funcs, and npcpy.npc_compiler as tools.
"""

import os
import subprocess
import json
import asyncio

from typing import Optional, Dict, Any, List, Union, Callable

from mcp.server.fastmcp import FastMCP
import importlib

from sqlalchemy import text

import os
import subprocess
import json
import asyncio
try:
    import inspect
except:
    pass
from typing import Optional, Dict, Any, List, Union, Callable, get_type_hints

from functools import wraps
import sys

from npcpy.llm_funcs import generate_group_candidates, abstract, extract_facts, zoom_in, execute_llm_command, gen_image
from npcpy.memory.search import search_similar_texts, execute_search_command, execute_rag_command, answer_with_rag, execute_brainblast_command
from npcpy.data.load import load_file_contents
from npcpy.memory.command_history import CommandHistory
from npcpy.data.image import capture_screenshot
from npcpy.data.web import search_web

from npcsh._state import NPCSH_DB_PATH

command_history = CommandHistory(db=NPCSH_DB_PATH)

mcp = FastMCP("npcsh_mcp")

DEFAULT_WORKSPACE = os.path.join(os.getcwd(), "workspace")
os.makedirs(DEFAULT_WORKSPACE, exist_ok=True)


@mcp.tool()
async def add_memory(
    npc_name: str,
    team_name: str,
    content: str,
    memory_type: str = "observation",
    directory_path: str = None
) -> str:
    """
    Add a memory entry to the database.

    Args:
        npc_name: Name of the NPC this memory belongs to
        team_name: Name of the team the NPC belongs to
        content: The memory content to store
        memory_type: Type of memory (observation, preference, achievement, etc.)
        directory_path: Directory path context (defaults to current working directory)

    Returns:
        Success message with memory ID or error message
    """
    if directory_path is None:
        directory_path = os.getcwd()

    try:
        from npcpy.memory.command_history import generate_message_id
        message_id = generate_message_id()

        memory_id = command_history.add_memory_to_database(
            message_id=message_id,
            conversation_id='mcp_direct',
            npc=npc_name,
            team=team_name,
            directory_path=directory_path,
            initial_memory=content,
            status='active',
            model=None,
            provider=None
        )
        return f"Memory created successfully with ID: {memory_id}"
    except Exception as e:
        return f"Error creating memory: {str(e)}"


@mcp.tool()
async def search_memory(
    query: str,
    npc_name: str = None,
    team_name: str = None,
    directory_path: str = None,
    status_filter: str = None,
    limit: int = 10
) -> str:
    """
    Search memories in the database.

    Args:
        query: Search query text
        npc_name: Filter by specific NPC (optional)
        team_name: Filter by specific team (optional)
        directory_path: Filter by directory path (optional)
        status_filter: Filter by memory status (active, archived, etc.)
        limit: Maximum number of results to return

    Returns:
        JSON string of matching memories or error message
    """
    if directory_path is None:
        directory_path = os.getcwd()

    try:
        results = command_history.search_memory(
            query=query,
            npc=npc_name,
            team=team_name,
            directory_path=directory_path,
            status_filter=status_filter,
            limit=limit
        )
        return json.dumps(results, indent=2)
    except Exception as e:
        return f"Error searching memories: {str(e)}"


@mcp.tool()
async def query_npcsh_database(sql_query: str) -> str:
    """
    Execute a SQL query against the npcsh_history.db database.

    Args:
        sql_query: SQL query to execute (SELECT statements only for safety)

    Returns:
        JSON string of query results or error message
    """
    # Safety check - only allow SELECT queries
    if not sql_query.strip().upper().startswith('SELECT'):
        return "Error: Only SELECT queries are allowed for safety"

    try:
        with command_history.engine.connect() as conn:
            result = conn.execute(text(sql_query))
            rows = result.fetchall()

            if not rows:
                return "Query executed successfully but returned no results"

            # Convert to list of dictionaries
            columns = result.keys()
            results = []
            for row in rows:
                row_dict = dict(zip(columns, row))
                results.append(row_dict)

            return json.dumps(results, indent=2, default=str)
    except Exception as e:
        return f"Database query error: {str(e)}"


@mcp.tool()
async def run_server_command(command: str, wd: str) -> str:
    """
    Run a terminal command in the workspace.

    Args:
        command: The shell command to run
        wd: The working directory to run the command in

    Returns:
        The command output or an error message.
    """
    try:
        result = subprocess.run(
            command,
            cwd=wd,
            shell=True,
            capture_output=True,
            text=True,
            timeout=30
        )
        return result.stdout or result.stderr or "Command completed with no output"
    except subprocess.TimeoutExpired:
        return "Command timed out after 30 seconds"
    except Exception as e:
        return str(e)


def make_async_wrapper(func: Callable) -> Callable:
    """Create an async wrapper for sync functions."""

    @wraps(func)
    async def async_wrapper(**kwargs):
        func_name = func.__name__
        print(f"MCP SERVER DEBUG: {func_name} called with kwargs={kwargs}", flush=True)

        try:
            result = func(**kwargs)
            print(f"MCP SERVER DEBUG: {func_name} returned type={type(result)}, result={result[:500] if isinstance(result, str) else result}", flush=True)
            return result

        except Exception as e:
            print(f"MCP SERVER DEBUG: {func_name} exception: {e}", flush=True)
            import traceback
            traceback.print_exc()
            return f"Error in {func_name}: {e}"

    async_wrapper.__name__ = func.__name__
    async_wrapper.__doc__ = func.__doc__
    async_wrapper.__annotations__ = func.__annotations__

    return async_wrapper


def register_module_tools(module_name: str) -> None:
    """
    Register all suitable functions from a module as MCP tools with improved argument handling.
    """
    functions = load_module_functions(module_name)
    for func in functions:
        if not func.__doc__:
            print(f"Skipping function without docstring: {func.__name__}")
            continue

        async_func = make_async_wrapper(func)

        try:
            mcp.tool()(async_func)
            print(f"Registered tool: {func.__name__}")
        except Exception as e:
            print(f"Failed to register {func.__name__}: {e}")


def load_module_functions(module_name: str) -> List[Callable]:
    """
    Dynamically load functions from a module.
    """
    try:
        module = importlib.import_module(module_name)

        functions = []
        for name, func in inspect.getmembers(module, callable):
            if not name.startswith('_'):
                if inspect.isfunction(func) or inspect.ismethod(func):
                    functions.append(func)
        return functions
    except ImportError as e:
        print(f"Warning: Could not import module {module_name}: {e}")
        return []


print("Loading tools from npcpy modules...")


def register_selected_npcpy_tools():
    tools = [
        gen_image,
        load_file_contents,
        capture_screenshot,
        search_web, ]

    for func in tools:
        if not (getattr(func, "__doc__", None) and func.__doc__.strip()):
            fallback_doc = f"Tool wrapper for {func.__name__}."
            try:
                func.__doc__ = fallback_doc
            except Exception:
                pass

        try:
            async_func = make_async_wrapper(func)
            mcp.tool()(async_func)
            print(f"Registered npcpy tool: {func.__name__}")
        except Exception as e:
            print(f"Failed to register npcpy tool {func.__name__}: {e}")


register_selected_npcpy_tools()


if __name__ == "__main__":
    print(f"Starting enhanced NPCPY MCP server...")
    print(f"Workspace: {DEFAULT_WORKSPACE}")

    mcp.run(transport="stdio")
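The server exposes tools by decorating async functions with FastMCP and serving them over stdio. A compact sketch of that same registration pattern, hedged as an illustration only: the server name "example_server" and the echo tool below are placeholders and do not appear in the package.

    # Hypothetical sketch of the FastMCP pattern used by mcp_server.py above.
    from mcp.server.fastmcp import FastMCP

    mcp = FastMCP("example_server")

    @mcp.tool()
    async def echo(text: str) -> str:
        """Return the given text unchanged (the docstring becomes the tool description)."""
        return text

    if __name__ == "__main__":
        mcp.run(transport="stdio")  # same stdio transport the npcsh server uses

This is the transport the MCPClient in npcsh/mcp_helpers.py connects to when given a server script path.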