npcpy 1.0.26__py3-none-any.whl → 1.2.32__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- npcpy/__init__.py +0 -7
- npcpy/data/audio.py +16 -99
- npcpy/data/image.py +43 -42
- npcpy/data/load.py +83 -124
- npcpy/data/text.py +28 -28
- npcpy/data/video.py +8 -32
- npcpy/data/web.py +51 -23
- npcpy/ft/diff.py +110 -0
- npcpy/ft/ge.py +115 -0
- npcpy/ft/memory_trainer.py +171 -0
- npcpy/ft/model_ensembler.py +357 -0
- npcpy/ft/rl.py +360 -0
- npcpy/ft/sft.py +248 -0
- npcpy/ft/usft.py +128 -0
- npcpy/gen/audio_gen.py +24 -0
- npcpy/gen/embeddings.py +13 -13
- npcpy/gen/image_gen.py +262 -117
- npcpy/gen/response.py +615 -415
- npcpy/gen/video_gen.py +53 -7
- npcpy/llm_funcs.py +1869 -437
- npcpy/main.py +1 -1
- npcpy/memory/command_history.py +844 -510
- npcpy/memory/kg_vis.py +833 -0
- npcpy/memory/knowledge_graph.py +892 -1845
- npcpy/memory/memory_processor.py +81 -0
- npcpy/memory/search.py +188 -90
- npcpy/mix/debate.py +192 -3
- npcpy/npc_compiler.py +1672 -801
- npcpy/npc_sysenv.py +593 -1266
- npcpy/serve.py +3120 -0
- npcpy/sql/ai_function_tools.py +257 -0
- npcpy/sql/database_ai_adapters.py +186 -0
- npcpy/sql/database_ai_functions.py +163 -0
- npcpy/sql/model_runner.py +19 -19
- npcpy/sql/npcsql.py +706 -507
- npcpy/sql/sql_model_compiler.py +156 -0
- npcpy/tools.py +183 -0
- npcpy/work/plan.py +13 -279
- npcpy/work/trigger.py +3 -3
- npcpy-1.2.32.dist-info/METADATA +803 -0
- npcpy-1.2.32.dist-info/RECORD +54 -0
- npcpy/data/dataframes.py +0 -171
- npcpy/memory/deep_research.py +0 -125
- npcpy/memory/sleep.py +0 -557
- npcpy/modes/_state.py +0 -78
- npcpy/modes/alicanto.py +0 -1075
- npcpy/modes/guac.py +0 -785
- npcpy/modes/mcp_npcsh.py +0 -822
- npcpy/modes/npc.py +0 -213
- npcpy/modes/npcsh.py +0 -1158
- npcpy/modes/plonk.py +0 -409
- npcpy/modes/pti.py +0 -234
- npcpy/modes/serve.py +0 -1637
- npcpy/modes/spool.py +0 -312
- npcpy/modes/wander.py +0 -549
- npcpy/modes/yap.py +0 -572
- npcpy/npc_team/alicanto.npc +0 -2
- npcpy/npc_team/alicanto.png +0 -0
- npcpy/npc_team/assembly_lines/test_pipeline.py +0 -181
- npcpy/npc_team/corca.npc +0 -13
- npcpy/npc_team/foreman.npc +0 -7
- npcpy/npc_team/frederic.npc +0 -6
- npcpy/npc_team/frederic4.png +0 -0
- npcpy/npc_team/guac.png +0 -0
- npcpy/npc_team/jinxs/automator.jinx +0 -18
- npcpy/npc_team/jinxs/bash_executer.jinx +0 -31
- npcpy/npc_team/jinxs/calculator.jinx +0 -11
- npcpy/npc_team/jinxs/edit_file.jinx +0 -96
- npcpy/npc_team/jinxs/file_chat.jinx +0 -14
- npcpy/npc_team/jinxs/gui_controller.jinx +0 -28
- npcpy/npc_team/jinxs/image_generation.jinx +0 -29
- npcpy/npc_team/jinxs/internet_search.jinx +0 -30
- npcpy/npc_team/jinxs/local_search.jinx +0 -152
- npcpy/npc_team/jinxs/npcsh_executor.jinx +0 -31
- npcpy/npc_team/jinxs/python_executor.jinx +0 -8
- npcpy/npc_team/jinxs/screen_cap.jinx +0 -25
- npcpy/npc_team/jinxs/sql_executor.jinx +0 -33
- npcpy/npc_team/kadiefa.npc +0 -3
- npcpy/npc_team/kadiefa.png +0 -0
- npcpy/npc_team/npcsh.ctx +0 -9
- npcpy/npc_team/npcsh_sibiji.png +0 -0
- npcpy/npc_team/plonk.npc +0 -2
- npcpy/npc_team/plonk.png +0 -0
- npcpy/npc_team/plonkjr.npc +0 -2
- npcpy/npc_team/plonkjr.png +0 -0
- npcpy/npc_team/sibiji.npc +0 -5
- npcpy/npc_team/sibiji.png +0 -0
- npcpy/npc_team/spool.png +0 -0
- npcpy/npc_team/templates/analytics/celona.npc +0 -0
- npcpy/npc_team/templates/hr_support/raone.npc +0 -0
- npcpy/npc_team/templates/humanities/eriane.npc +0 -4
- npcpy/npc_team/templates/it_support/lineru.npc +0 -0
- npcpy/npc_team/templates/marketing/slean.npc +0 -4
- npcpy/npc_team/templates/philosophy/maurawa.npc +0 -0
- npcpy/npc_team/templates/sales/turnic.npc +0 -4
- npcpy/npc_team/templates/software/welxor.npc +0 -0
- npcpy/npc_team/yap.png +0 -0
- npcpy/routes.py +0 -958
- npcpy/work/mcp_helpers.py +0 -357
- npcpy/work/mcp_server.py +0 -194
- npcpy-1.0.26.data/data/npcpy/npc_team/alicanto.npc +0 -2
- npcpy-1.0.26.data/data/npcpy/npc_team/alicanto.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/automator.jinx +0 -18
- npcpy-1.0.26.data/data/npcpy/npc_team/bash_executer.jinx +0 -31
- npcpy-1.0.26.data/data/npcpy/npc_team/calculator.jinx +0 -11
- npcpy-1.0.26.data/data/npcpy/npc_team/celona.npc +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/corca.npc +0 -13
- npcpy-1.0.26.data/data/npcpy/npc_team/edit_file.jinx +0 -96
- npcpy-1.0.26.data/data/npcpy/npc_team/eriane.npc +0 -4
- npcpy-1.0.26.data/data/npcpy/npc_team/file_chat.jinx +0 -14
- npcpy-1.0.26.data/data/npcpy/npc_team/foreman.npc +0 -7
- npcpy-1.0.26.data/data/npcpy/npc_team/frederic.npc +0 -6
- npcpy-1.0.26.data/data/npcpy/npc_team/frederic4.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/guac.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/gui_controller.jinx +0 -28
- npcpy-1.0.26.data/data/npcpy/npc_team/image_generation.jinx +0 -29
- npcpy-1.0.26.data/data/npcpy/npc_team/internet_search.jinx +0 -30
- npcpy-1.0.26.data/data/npcpy/npc_team/kadiefa.npc +0 -3
- npcpy-1.0.26.data/data/npcpy/npc_team/kadiefa.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/lineru.npc +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/local_search.jinx +0 -152
- npcpy-1.0.26.data/data/npcpy/npc_team/maurawa.npc +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/npcsh.ctx +0 -9
- npcpy-1.0.26.data/data/npcpy/npc_team/npcsh_executor.jinx +0 -31
- npcpy-1.0.26.data/data/npcpy/npc_team/npcsh_sibiji.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/plonk.npc +0 -2
- npcpy-1.0.26.data/data/npcpy/npc_team/plonk.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/plonkjr.npc +0 -2
- npcpy-1.0.26.data/data/npcpy/npc_team/plonkjr.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/python_executor.jinx +0 -8
- npcpy-1.0.26.data/data/npcpy/npc_team/raone.npc +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/screen_cap.jinx +0 -25
- npcpy-1.0.26.data/data/npcpy/npc_team/sibiji.npc +0 -5
- npcpy-1.0.26.data/data/npcpy/npc_team/sibiji.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/slean.npc +0 -4
- npcpy-1.0.26.data/data/npcpy/npc_team/spool.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/sql_executor.jinx +0 -33
- npcpy-1.0.26.data/data/npcpy/npc_team/test_pipeline.py +0 -181
- npcpy-1.0.26.data/data/npcpy/npc_team/turnic.npc +0 -4
- npcpy-1.0.26.data/data/npcpy/npc_team/welxor.npc +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/yap.png +0 -0
- npcpy-1.0.26.dist-info/METADATA +0 -827
- npcpy-1.0.26.dist-info/RECORD +0 -139
- npcpy-1.0.26.dist-info/entry_points.txt +0 -11
- /npcpy/{modes → ft}/__init__.py +0 -0
- {npcpy-1.0.26.dist-info → npcpy-1.2.32.dist-info}/WHEEL +0 -0
- {npcpy-1.0.26.dist-info → npcpy-1.2.32.dist-info}/licenses/LICENSE +0 -0
- {npcpy-1.0.26.dist-info → npcpy-1.2.32.dist-info}/top_level.txt +0 -0
npcpy/work/mcp_helpers.py
DELETED
@@ -1,357 +0,0 @@
-#!/usr/bin/env python
-"""
-Raw MCP client with no exception handling and full visibility.
-"""
-
-import asyncio
-import os
-import sys
-import json
-try:
-    import inspect
-except:
-    pass
-from typing import Optional, List, Dict, Any
-from contextlib import AsyncExitStack
-
-# MCP imports
-from mcp import ClientSession, StdioServerParameters
-from mcp.client.stdio import stdio_client
-
-# Local imports from npcpy
-from npcpy.gen.response import get_litellm_response
-from npcpy.npc_sysenv import (
-    NPCSH_CHAT_MODEL,
-    NPCSH_CHAT_PROVIDER,
-    NPCSH_API_URL,
-)
-
-class MCPClient:
-    """
-    Raw MCP Client with no exception handling.
-    """
-
-    def __init__(
-        self,
-        model: str = NPCSH_CHAT_MODEL,
-        provider: str = NPCSH_CHAT_PROVIDER,
-        api_url: str = NPCSH_API_URL,
-        api_key: Optional[str] = None,
-        debug: bool = True,
-    ):
-        self.model = model
-        self.provider = provider
-        self.api_url = api_url
-        self.api_key = api_key
-        self.debug = debug
-        self.session = None
-        self.exit_stack = AsyncExitStack()
-        self.tools = []
-        self.available_tools = []
-
-    def _log(self, message: str) -> None:
-        """Log debug messages."""
-        if self.debug:
-            print(f"[MCP Client] {message}")
-
-    async def connect_to_server(self, server_script_path: str) -> None:
-        """
-        Connect to an MCP server.
-
-        Args:
-            server_script_path: Path to the server script (.py or .js)
-        """
-        self._log(f"Connecting to server: {server_script_path}")
-
-        # Configure server parameters
-        command = "python" if server_script_path.endswith('.py') else "node"
-        server_params = StdioServerParameters(
-            command=command,
-            args=[server_script_path],
-            env=None
-        )
-
-        # Set up the connection
-        stdio_transport = await self.exit_stack.enter_async_context(stdio_client(server_params))
-        read, write = stdio_transport
-
-        # Create the session
-        self.session = await self.exit_stack.enter_async_context(ClientSession(read, write))
-
-        # Initialize the session
-        await self.session.initialize()
-
-        # List available tools
-        response = await self.session.list_tools()
-        self.tools = response.tools
-
-        # Display tool details for debugging
-        for tool in self.tools:
-            print(f"\nJinx: {tool.name}")
-            print(f"Description: {tool.description}")
-
-            # Print all attributes
-            for attribute_name in dir(tool):
-                if not attribute_name.startswith('_'):
-                    attribute = getattr(tool, attribute_name)
-                    if not callable(attribute):
-                        print(f"  {attribute_name}: {attribute}")
-
-            # Check if the tool has source or function definition
-            if hasattr(tool, 'source'):
-                print(f"Source: {tool.source}")
-
-            # Try to inspect the tool function
-            try:
-                tool_module = inspect.getmodule(tool)
-                if tool_module:
-                    print(f"Module: {tool_module.__name__}")
-                    if hasattr(tool_module, tool.name):
-                        tool_func = getattr(tool_module, tool.name)
-                        if callable(tool_func):
-                            print(f"Function signature: {inspect.signature(tool_func)}")
-            except:
-                pass
-
-        # Convert tools to the format expected by the LLM
-        self.available_tools = []
-        for tool in self.tools:
-            # Use inputSchema if available, otherwise create a default schema
-            schema = getattr(tool, "inputSchema", {})
-
-            # Create tool definition for LLM
-            tool_info = {
-                "type": "function",
-                "function": {
-                    "name": tool.name,
-                    "description": tool.description,
-                    "parameters": schema
-                }
-            }
-            self.available_tools.append(tool_info)
-
-            # Print the schema for debugging
-            print(f"\nJinx schema for {tool.name}:")
-            print(json.dumps(schema, indent=2))
-
-        tool_names = [tool.name for tool in self.tools]
-        self._log(f"Available tools: {', '.join(tool_names)}")
-
-    async def process_query(
-        self,
-        query: str,
-        messages: Optional[List[Dict[str, str]]] = None,
-        stream: bool = False
-    ) -> Dict[str, Any]:
-        """
-        Process a query using the LLM and available tools.
-
-        Args:
-            query: User query
-            messages: Optional conversation history
-            stream: Whether to stream the response
-
-        Returns:
-            Dict with response text and updated messages
-        """
-        self._log(f"Processing query: {query}")
-
-        # Initialize or update messages
-        if messages is None:
-            messages = []
-
-        current_messages = messages.copy()
-        if not current_messages or current_messages[-1]["role"] != "user":
-            current_messages.append({"role": "user", "content": query})
-        elif current_messages[-1]["role"] == "user":
-            current_messages[-1]["content"] = query
-
-        # Initial LLM call with tools
-        self._log("Making initial LLM call with tools")
-        response = get_litellm_response(
-            model=self.model,
-            provider=self.provider,
-            api_url=self.api_url,
-            api_key=self.api_key,
-            messages=current_messages,
-            tools=self.available_tools,
-            stream=False  # Don't stream the initial call
-        )
-
-        # Print full response for debugging
-        print("\nLLM Response:")
-        print(json.dumps(response, indent=2, default=str))
-
-        # Extract response content and tool calls
-        response_content = response.get("response", "")
-        tool_calls = response.get("tool_calls", [])
-
-        # Print tool calls for debugging
-        print("\nJinx Calls:")
-        print(json.dumps(tool_calls, indent=2, default=str))
-
-        # Create final text buffer
-        final_text = []
-
-        # If we have plain text response with no tool calls
-        if response_content and not tool_calls:
-            final_text.append(response_content)
-
-            # Update messages with assistant response
-            current_messages.append({
-                "role": "assistant",
-                "content": response_content
-            })
-
-        # Process tool calls if any
-        if tool_calls:
-            self._log(f"Processing {len(tool_calls)} tool calls")
-
-            # Get the assistant message with tool calls
-            assistant_message = {
-                "role": "assistant",
-                "content": response_content if response_content else None,
-                "tool_calls": []
-            }
-
-            # Process each tool call
-            for tool_call in tool_calls:
-                # Extract tool info based on format
-                if isinstance(tool_call, dict):
-                    tool_id = tool_call.get("id", "")
-                    tool_name = tool_call.get("function", {}).get("name", "")
-                    tool_args = tool_call.get("function", {}).get("arguments", {})
-                else:
-                    # Assume object with attributes
-                    tool_id = getattr(tool_call, "id", "")
-                    tool_name = getattr(tool_call.function, "name", "")
-                    tool_args = getattr(tool_call.function, "arguments", {})
-
-                # Parse arguments if it's a string
-                if isinstance(tool_args, str):
-                    print(f"\nJinx args is string: {tool_args}")
-                    tool_args = json.loads(tool_args)
-                    print(f"Parsed to: {tool_args}")
-
-                # Add tool call to assistant message
-                assistant_message["tool_calls"].append({
-                    "id": tool_id,
-                    "type": "function",
-                    "function": {
-                        "name": tool_name,
-                        "arguments": json.dumps(tool_args) if isinstance(tool_args, dict) else tool_args
-                    }
-                })
-
-                # Execute tool call
-                self._log(f"Executing tool: {tool_name} with args: {tool_args}")
-                print(f"\nExecuting tool call:")
-                print(f"  Jinx name: {tool_name}")
-                print(f"  Jinx args: {tool_args}")
-                print(f"  Jinx args type: {type(tool_args)}")
-
-                final_text.append(f"[Calling tool {tool_name} with args {tool_args}]")
-
-                # Call the tool with the arguments exactly as received
-                result = await self.session.call_tool(tool_name, tool_args)
-
-                # Print full result for debugging
-                print("\nJinx Result:")
-                print(f"  Result: {result}")
-                print(f"  Content: {result.content}")
-                print(f"  Content type: {type(result.content)}")
-
-                tool_result = result.content
-
-                # Handle TextContent objects
-                if hasattr(tool_result, 'text'):
-                    print(f"  TextContent detected, text: {tool_result.text}")
-                    tool_result = tool_result.text
-                elif isinstance(tool_result, list) and all(hasattr(item, 'text') for item in tool_result):
-                    print(f"  List of TextContent detected")
-                    tool_result = [item.text for item in tool_result]
-
-                # Add tool result to messages
-                current_messages.append(assistant_message)
-                current_messages.append({
-                    "role": "tool",
-                    "tool_call_id": tool_id,
-                    "content": json.dumps(tool_result) if not isinstance(tool_result, str) else str(tool_result)
-                })
-
-                # Print updated messages for debugging
-                print("\nUpdated Messages:")
-                print(json.dumps(current_messages, indent=2, default=str))
-
-            # Get final response with tool results
-            self._log("Getting final response after tool calls")
-            final_response = get_litellm_response(
-                model=self.model,
-                provider=self.provider,
-                api_url=self.api_url,
-                api_key=self.api_key,
-                messages=current_messages,
-                stream=stream
-            )
-
-            final_text.append(final_response.get("response", ""))
-
-            # Update messages with final assistant response
-            current_messages.append({
-                "role": "assistant",
-                "content": final_response.get("response", "")
-            })
-
-        return {
-            "response": "\n".join(final_text),
-            "messages": current_messages
-        }
-
-    async def chat_loop(self):
-        """Run an interactive chat loop"""
-        print("\nMCP Client Started!")
-        print("Type your queries or 'quit' to exit.")
-
-        messages = []
-
-        while True:
-            query = input("\nQuery: ").strip()
-
-            if query.lower() == 'quit':
-                break
-
-            # Process the query
-            result = await self.process_query(query, messages)
-            messages = result.get("messages", [])
-
-            # Display the response
-            print("\nResponse:")
-            print(result.get("response", ""))
-
-    async def cleanup(self):
-        """Clean up resources"""
-        self._log("Cleaning up resources")
-        await self.exit_stack.aclose()
-
-async def main():
-    """Entry point for the MCP client."""
-    if len(sys.argv) < 2:
-        print("Usage: python raw_mcp_client.py <path_to_server_script>")
-        sys.exit(1)
-
-    server_script = sys.argv[1]
-
-    # Create and configure the client
-    client = MCPClient()
-
-    # Connect to the server
-    await client.connect_to_server(server_script)
-
-    # Run the interactive chat loop
-    await client.chat_loop()
-
-    # Clean up resources
-    await client.cleanup()
-
-if __name__ == "__main__":
-    asyncio.run(main())
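For reference, the deleted client was normally driven from its own `main()` entry point ("Usage: python raw_mcp_client.py <path_to_server_script>"). A minimal sketch of programmatic use, based only on the class as defined above in npcpy <= 1.0.26; the server path `./server.py` is a hypothetical placeholder:

```python
# Sketch against the deleted npcpy.work.mcp_helpers module (npcpy <= 1.0.26);
# "./server.py" stands in for any MCP server script.
import asyncio
from npcpy.work.mcp_helpers import MCPClient

async def demo():
    client = MCPClient(debug=True)
    await client.connect_to_server("./server.py")   # spawns the server over stdio
    result = await client.process_query("What tools do you have?")
    print(result["response"])                       # joined final text, incl. tool-call traces
    await client.cleanup()                          # closes the AsyncExitStack

asyncio.run(demo())
```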
npcpy/work/mcp_server.py
DELETED
@@ -1,194 +0,0 @@
-#!/usr/bin/env python
-"""
-Enhanced MCP server that incorporates functionality from npcpy.routes,
-npcpy.llm_funcs, and npcpy.npc_compiler as tools.
-"""
-
-import os
-import subprocess
-import json
-import asyncio
-
-from typing import Optional, Dict, Any, List, Union, Callable
-# MCP imports
-from mcp.server.fastmcp import FastMCP
-import importlib
-# npcpy imports
-from npcpy.gen.response import get_litellm_response
-from npcpy.npc_sysenv import (
-    NPCSH_CHAT_MODEL,
-    NPCSH_CHAT_PROVIDER,
-    NPCSH_API_URL,
-    NPCSH_IMAGE_GEN_MODEL,
-    NPCSH_IMAGE_GEN_PROVIDER,
-    NPCSH_VIDEO_GEN_MODEL,
-    NPCSH_VIDEO_GEN_PROVIDER,
-    get_model_and_provider,
-    lookup_provider,
-)
-
-import os
-import subprocess
-import json
-import asyncio
-try:
-    import inspect
-except:
-    pass
-from typing import Optional, Dict, Any, List, Union, Callable, get_type_hints
-# Add these imports to the top of your file
-from functools import wraps
-# Initialize the MCP server
-mcp = FastMCP("npcpy_enhanced")
-
-# Define the default workspace
-DEFAULT_WORKSPACE = os.path.join(os.getcwd(), "workspace")
-os.makedirs(DEFAULT_WORKSPACE, exist_ok=True)
-
-# ==================== SYSTEM TOOLS ====================
-
-@mcp.tool()
-async def run_server_command(command: str) -> str:
-    """
-    Run a terminal command in the workspace.
-
-    Args:
-        command: The shell command to run
-
-    Returns:
-        The command output or an error message.
-    """
-    try:
-        result = subprocess.run(
-            command,
-            cwd=DEFAULT_WORKSPACE,
-            shell=True,
-            capture_output=True,
-            text=True
-        )
-        return result.stdout or result.stderr
-    except Exception as e:
-        return str(e)
-def make_async_wrapper(func: Callable) -> Callable:
-    """Create an async wrapper for sync functions that fixes schema validation issues."""
-
-    @wraps(func)
-    async def async_wrapper(*args, **kwargs):
-        # Direct parameter dict case (most common failure)
-        if len(args) == 1 and isinstance(args[0], dict):
-            params = args[0]
-
-            # Fix for search_web - add required kwargs parameter
-            if "kwargs" not in params:
-                # Create a new dict with the kwargs parameter added
-                params = {**params, "kwargs": ""}
-
-            # Call the function with the parameters
-            if asyncio.iscoroutinefunction(func):
-                return await func(**params)
-            else:
-                return await asyncio.to_thread(func, **params)
-
-        # Normal function call or other cases
-        if asyncio.iscoroutinefunction(func):
-            return await func(*args, **kwargs)
-        else:
-            return await asyncio.to_thread(func, *args, **kwargs)
-
-    # Preserve function metadata
-    async_wrapper.__name__ = func.__name__
-    async_wrapper.__doc__ = func.__doc__
-    async_wrapper.__annotations__ = func.__annotations__
-
-    return async_wrapper
-
-# Update your register_module_tools function to use this improved wrapper
-def register_module_tools(module_name: str) -> None:
-    """
-    Register all suitable functions from a module as MCP tools with improved argument handling.
-    """
-    functions = load_module_functions(module_name)
-    for func in functions:
-        # Skip functions that don't have docstrings
-        if not func.__doc__:
-            print(f"Skipping function without docstring: {func.__name__}")
-            continue
-
-        # Create async wrapper with improved argument handling
-        async_func = make_async_wrapper(func)
-
-        # Register as MCP tool
-        try:
-            mcp.tool()(async_func)
-            print(f"Registered tool: {func.__name__}")
-        except Exception as e:
-            print(f"Failed to register {func.__name__}: {e}")
-def load_module_functions(module_name: str) -> List[Callable]:
-    """
-    Dynamically load functions from a module.
-    """
-    try:
-        module = importlib.import_module(module_name)
-        # Get all callables from the module that don't start with underscore
-        functions = []
-        for name, func in inspect.getmembers(module, callable):
-            if not name.startswith('_'):
-                # Check if it's a function, not a class
-                if inspect.isfunction(func) or inspect.ismethod(func):
-                    functions.append(func)
-        return functions
-    except ImportError as e:
-        print(f"Warning: Could not import module {module_name}: {e}")
-        return []
-
-print("Loading tools from npcpy modules...")
-
-# Load modules from npcpy.routes
-try:
-    from npcpy.routes import routes
-    for route_name, route_func in routes.items():
-        if callable(route_func):
-            async_func = make_async_wrapper(route_func)
-            try:
-                mcp.tool()(async_func)
-                print(f"Registered route: {route_name}")
-            except Exception as e:
-                print(f"Failed to register route {route_name}: {e}")
-except ImportError as e:
-    print(f"Warning: Could not import routes: {e}")
-
-
-# Load npc_compiler functions
-print("Loading functions from npcpy.npc_compiler...")
-try:
-    import importlib.util
-    if importlib.util.find_spec("npcpy.npc_compiler"):
-        register_module_tools("npcpy.npc_compiler")
-except ImportError:
-    print("npcpy.npc_compiler not found, skipping...")
-
-# Load npc_sysenv functions
-#print("Loading functions from npcpy.npc_sysenv...")
-#register_module_tools("npcpy.npc_sysenv")
-register_module_tools("npcpy.memory.search")
-
-register_module_tools("npcpy.work.plan")
-register_module_tools("npcpy.work.trigger")
-register_module_tools("npcpy.work.desktop")
-
-#print("Loading functions from npcpy.command_history...")
-#register_module_tools("npcpy.memory.command_history")
-
-
-#print("Loading functions from npcpy.npc_sysenv...")
-#register_module_tools("npcpy.llm_funcs")
-
-
-# ==================== MAIN ENTRY POINT ====================
-
-if __name__ == "__main__":
-    print(f"Starting enhanced NPCPY MCP server...")
-    print(f"Workspace: {DEFAULT_WORKSPACE}")
-
-    # Run the server
-    mcp.run(transport="stdio")
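The deleted server mixed two registration styles: an explicit `@mcp.tool()` decorator for `run_server_command`, and bulk registration of npcpy module functions via `make_async_wrapper` plus `register_module_tools`. A minimal sketch of the same FastMCP pattern in isolation; `add_numbers` is a hypothetical example tool, not part of npcpy:

```python
# Minimal FastMCP server following the registration pattern above;
# add_numbers is a made-up example tool used only for illustration.
from mcp.server.fastmcp import FastMCP

mcp = FastMCP("demo_server")

@mcp.tool()
async def add_numbers(a: float, b: float) -> float:
    """Add two numbers and return their sum."""
    return a + b

if __name__ == "__main__":
    mcp.run(transport="stdio")  # same stdio transport the npcpy server used
```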
Binary file

automator.jinx
DELETED
@@ -1,18 +0,0 @@
-jinx_name: automator
-description: Issue npc shell requests. Uses one of the NPC macros.
-inputs:
-  - request
-  - type
-steps:
-  - engine: "python"
-    code: |
-      type = '{{type}}'
-      request = '{{request}}'
-      if type == 'plan':
-          from npcpy.work.plan import execute_plan_command
-          output = execute_plan_command(request, npc=npc)
-      elif type == 'trigger':
-          from npcpy.work.trigger import execute_trigger_command
-          output = execute_trigger_command(request, npc=npc)
-      else:
-          raise ValueError("Invalid type. Must be 'plan' or 'trigger'.")
bash_executer.jinx
DELETED
@@ -1,31 +0,0 @@
-jinx_name: bash_executor
-description: Execute bash queries.
-inputs:
-  - bash_command
-  - user_request
-steps:
-  - engine: python
-    code: |
-      import subprocess
-      import os
-      cmd = '{{bash_command}}'  # Properly quote the command input
-      def run_command(cmd):
-          process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-          stdout, stderr = process.communicate()
-          if stderr:
-              print(f"Error: {stderr.decode('utf-8')}")
-              return stderr
-          return stdout
-      result = run_command(cmd)
-      output = result.decode('utf-8')
-
-  - engine: natural
-    code: |
-
-      Here is the result of the bash command:
-      ```
-      {{ output }}
-      ```
-      This was the original user request: {{ user_request }}
-
-      Please provide a response accordingly.
File without changes

corca.npc
DELETED
@@ -1,13 +0,0 @@
-name: corca
-primary_directive: |
-  You are corca, a distinguished member of the NPC team.
-  Your expertise is in the area of software development and
-  you have a knack for thinking through problems carefully.
-  You favor solutions that prioritize simplicity and clarity and
-  ought to always consider how some suggestion may increase rather than reduce tech debt
-  unnecessarily. Now, the key is in this last term, "unnecessarily".
-  You must distinguish carefully and when in doubt, opt to ask for further
-  information or clarification with concrete clear options that make it
-  easy for a user to choose.
-model: gpt-4o-mini
-provider: openai