npcsh 0.3.32__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (93)
  1. npcsh/_state.py +942 -0
  2. npcsh/alicanto.py +1074 -0
  3. npcsh/guac.py +785 -0
  4. npcsh/mcp_helpers.py +357 -0
  5. npcsh/mcp_npcsh.py +822 -0
  6. npcsh/mcp_server.py +184 -0
  7. npcsh/npc.py +218 -0
  8. npcsh/npcsh.py +1161 -0
  9. npcsh/plonk.py +387 -269
  10. npcsh/pti.py +234 -0
  11. npcsh/routes.py +958 -0
  12. npcsh/spool.py +315 -0
  13. npcsh/wander.py +550 -0
  14. npcsh/yap.py +573 -0
  15. npcsh-1.0.0.dist-info/METADATA +596 -0
  16. npcsh-1.0.0.dist-info/RECORD +21 -0
  17. {npcsh-0.3.32.dist-info → npcsh-1.0.0.dist-info}/WHEEL +1 -1
  18. npcsh-1.0.0.dist-info/entry_points.txt +9 -0
  19. {npcsh-0.3.32.dist-info → npcsh-1.0.0.dist-info}/licenses/LICENSE +1 -1
  20. npcsh/audio.py +0 -569
  21. npcsh/audio_gen.py +0 -1
  22. npcsh/cli.py +0 -543
  23. npcsh/command_history.py +0 -566
  24. npcsh/conversation.py +0 -54
  25. npcsh/data_models.py +0 -46
  26. npcsh/dataframes.py +0 -171
  27. npcsh/embeddings.py +0 -168
  28. npcsh/helpers.py +0 -646
  29. npcsh/image.py +0 -298
  30. npcsh/image_gen.py +0 -79
  31. npcsh/knowledge_graph.py +0 -1006
  32. npcsh/llm_funcs.py +0 -2195
  33. npcsh/load_data.py +0 -83
  34. npcsh/main.py +0 -5
  35. npcsh/model_runner.py +0 -189
  36. npcsh/npc_compiler.py +0 -2879
  37. npcsh/npc_sysenv.py +0 -388
  38. npcsh/npc_team/assembly_lines/test_pipeline.py +0 -181
  39. npcsh/npc_team/corca.npc +0 -13
  40. npcsh/npc_team/foreman.npc +0 -7
  41. npcsh/npc_team/npcsh.ctx +0 -11
  42. npcsh/npc_team/sibiji.npc +0 -4
  43. npcsh/npc_team/templates/analytics/celona.npc +0 -0
  44. npcsh/npc_team/templates/hr_support/raone.npc +0 -0
  45. npcsh/npc_team/templates/humanities/eriane.npc +0 -4
  46. npcsh/npc_team/templates/it_support/lineru.npc +0 -0
  47. npcsh/npc_team/templates/marketing/slean.npc +0 -4
  48. npcsh/npc_team/templates/philosophy/maurawa.npc +0 -0
  49. npcsh/npc_team/templates/sales/turnic.npc +0 -4
  50. npcsh/npc_team/templates/software/welxor.npc +0 -0
  51. npcsh/npc_team/tools/bash_executer.tool +0 -32
  52. npcsh/npc_team/tools/calculator.tool +0 -8
  53. npcsh/npc_team/tools/code_executor.tool +0 -16
  54. npcsh/npc_team/tools/generic_search.tool +0 -27
  55. npcsh/npc_team/tools/image_generation.tool +0 -25
  56. npcsh/npc_team/tools/local_search.tool +0 -149
  57. npcsh/npc_team/tools/npcsh_executor.tool +0 -9
  58. npcsh/npc_team/tools/screen_cap.tool +0 -27
  59. npcsh/npc_team/tools/sql_executor.tool +0 -26
  60. npcsh/response.py +0 -272
  61. npcsh/search.py +0 -252
  62. npcsh/serve.py +0 -1467
  63. npcsh/shell.py +0 -524
  64. npcsh/shell_helpers.py +0 -3919
  65. npcsh/stream.py +0 -233
  66. npcsh/video.py +0 -52
  67. npcsh/video_gen.py +0 -69
  68. npcsh-0.3.32.data/data/npcsh/npc_team/bash_executer.tool +0 -32
  69. npcsh-0.3.32.data/data/npcsh/npc_team/calculator.tool +0 -8
  70. npcsh-0.3.32.data/data/npcsh/npc_team/celona.npc +0 -0
  71. npcsh-0.3.32.data/data/npcsh/npc_team/code_executor.tool +0 -16
  72. npcsh-0.3.32.data/data/npcsh/npc_team/corca.npc +0 -13
  73. npcsh-0.3.32.data/data/npcsh/npc_team/eriane.npc +0 -4
  74. npcsh-0.3.32.data/data/npcsh/npc_team/foreman.npc +0 -7
  75. npcsh-0.3.32.data/data/npcsh/npc_team/generic_search.tool +0 -27
  76. npcsh-0.3.32.data/data/npcsh/npc_team/image_generation.tool +0 -25
  77. npcsh-0.3.32.data/data/npcsh/npc_team/lineru.npc +0 -0
  78. npcsh-0.3.32.data/data/npcsh/npc_team/local_search.tool +0 -149
  79. npcsh-0.3.32.data/data/npcsh/npc_team/maurawa.npc +0 -0
  80. npcsh-0.3.32.data/data/npcsh/npc_team/npcsh.ctx +0 -11
  81. npcsh-0.3.32.data/data/npcsh/npc_team/npcsh_executor.tool +0 -9
  82. npcsh-0.3.32.data/data/npcsh/npc_team/raone.npc +0 -0
  83. npcsh-0.3.32.data/data/npcsh/npc_team/screen_cap.tool +0 -27
  84. npcsh-0.3.32.data/data/npcsh/npc_team/sibiji.npc +0 -4
  85. npcsh-0.3.32.data/data/npcsh/npc_team/slean.npc +0 -4
  86. npcsh-0.3.32.data/data/npcsh/npc_team/sql_executor.tool +0 -26
  87. npcsh-0.3.32.data/data/npcsh/npc_team/test_pipeline.py +0 -181
  88. npcsh-0.3.32.data/data/npcsh/npc_team/turnic.npc +0 -4
  89. npcsh-0.3.32.data/data/npcsh/npc_team/welxor.npc +0 -0
  90. npcsh-0.3.32.dist-info/METADATA +0 -779
  91. npcsh-0.3.32.dist-info/RECORD +0 -78
  92. npcsh-0.3.32.dist-info/entry_points.txt +0 -3
  93. {npcsh-0.3.32.dist-info → npcsh-1.0.0.dist-info}/top_level.txt +0 -0
npcsh/mcp_helpers.py ADDED
@@ -0,0 +1,357 @@
+#!/usr/bin/env python
+"""
+Raw MCP client with no exception handling and full visibility.
+"""
+
+import asyncio
+import os
+import sys
+import json
+try:
+    import inspect
+except:
+    pass
+from typing import Optional, List, Dict, Any
+from contextlib import AsyncExitStack
+
+# MCP imports
+from mcp import ClientSession, StdioServerParameters
+from mcp.client.stdio import stdio_client
+
+# Local imports from npcpy
+from npcpy.gen.response import get_litellm_response
+from npcsh._state import (
+    NPCSH_CHAT_MODEL,
+    NPCSH_CHAT_PROVIDER,
+    NPCSH_API_URL,
+)
+
+class MCPClient:
+    """
+    Raw MCP Client with no exception handling.
+    """
+
+    def __init__(
+        self,
+        model: str = NPCSH_CHAT_MODEL,
+        provider: str = NPCSH_CHAT_PROVIDER,
+        api_url: str = NPCSH_API_URL,
+        api_key: Optional[str] = None,
+        debug: bool = True,
+    ):
+        self.model = model
+        self.provider = provider
+        self.api_url = api_url
+        self.api_key = api_key
+        self.debug = debug
+        self.session = None
+        self.exit_stack = AsyncExitStack()
+        self.tools = []
+        self.available_tools = []
+
+    def _log(self, message: str) -> None:
+        """Log debug messages."""
+        if self.debug:
+            print(f"[MCP Client] {message}")
+
+    async def connect_to_server(self, server_script_path: str) -> None:
+        """
+        Connect to an MCP server.
+
+        Args:
+            server_script_path: Path to the server script (.py or .js)
+        """
+        self._log(f"Connecting to server: {server_script_path}")
+
+        # Configure server parameters
+        command = "python" if server_script_path.endswith('.py') else "node"
+        server_params = StdioServerParameters(
+            command=command,
+            args=[server_script_path],
+            env=None
+        )
+
+        # Set up the connection
+        stdio_transport = await self.exit_stack.enter_async_context(stdio_client(server_params))
+        read, write = stdio_transport
+
+        # Create the session
+        self.session = await self.exit_stack.enter_async_context(ClientSession(read, write))
+
+        # Initialize the session
+        await self.session.initialize()
+
+        # List available tools
+        response = await self.session.list_tools()
+        self.tools = response.tools
+
+        # Display tool details for debugging
+        for tool in self.tools:
+            print(f"\nJinx: {tool.name}")
+            print(f"Description: {tool.description}")
+
+            # Print all attributes
+            for attribute_name in dir(tool):
+                if not attribute_name.startswith('_'):
+                    attribute = getattr(tool, attribute_name)
+                    if not callable(attribute):
+                        print(f" {attribute_name}: {attribute}")
+
+            # Check if the tool has source or function definition
+            if hasattr(tool, 'source'):
+                print(f"Source: {tool.source}")
+
+            # Try to inspect the tool function
+            try:
+                tool_module = inspect.getmodule(tool)
+                if tool_module:
+                    print(f"Module: {tool_module.__name__}")
+                    if hasattr(tool_module, tool.name):
+                        tool_func = getattr(tool_module, tool.name)
+                        if callable(tool_func):
+                            print(f"Function signature: {inspect.signature(tool_func)}")
+            except:
+                pass
+
+        # Convert tools to the format expected by the LLM
+        self.available_tools = []
+        for tool in self.tools:
+            # Use inputSchema if available, otherwise create a default schema
+            schema = getattr(tool, "inputSchema", {})
+
+            # Create tool definition for LLM
+            tool_info = {
+                "type": "function",
+                "function": {
+                    "name": tool.name,
+                    "description": tool.description,
+                    "parameters": schema
+                }
+            }
+            self.available_tools.append(tool_info)
+
+            # Print the schema for debugging
+            print(f"\nJinx schema for {tool.name}:")
+            print(json.dumps(schema, indent=2))
+
+        tool_names = [tool.name for tool in self.tools]
+        self._log(f"Available tools: {', '.join(tool_names)}")
+
+    async def process_query(
+        self,
+        query: str,
+        messages: Optional[List[Dict[str, str]]] = None,
+        stream: bool = False
+    ) -> Dict[str, Any]:
+        """
+        Process a query using the LLM and available tools.
+
+        Args:
+            query: User query
+            messages: Optional conversation history
+            stream: Whether to stream the response
+
+        Returns:
+            Dict with response text and updated messages
+        """
+        self._log(f"Processing query: {query}")
+
+        # Initialize or update messages
+        if messages is None:
+            messages = []
+
+        current_messages = messages.copy()
+        if not current_messages or current_messages[-1]["role"] != "user":
+            current_messages.append({"role": "user", "content": query})
+        elif current_messages[-1]["role"] == "user":
+            current_messages[-1]["content"] = query
+
+        # Initial LLM call with tools
+        self._log("Making initial LLM call with tools")
+        response = get_litellm_response(
+            model=self.model,
+            provider=self.provider,
+            api_url=self.api_url,
+            api_key=self.api_key,
+            messages=current_messages,
+            tools=self.available_tools,
+            stream=False  # Don't stream the initial call
+        )
+
+        # Print full response for debugging
+        print("\nLLM Response:")
+        print(json.dumps(response, indent=2, default=str))
+
+        # Extract response content and tool calls
+        response_content = response.get("response", "")
+        tool_calls = response.get("tool_calls", [])
+
+        # Print tool calls for debugging
+        print("\nJinx Calls:")
+        print(json.dumps(tool_calls, indent=2, default=str))
+
+        # Create final text buffer
+        final_text = []
+
+        # If we have plain text response with no tool calls
+        if response_content and not tool_calls:
+            final_text.append(response_content)
+
+            # Update messages with assistant response
+            current_messages.append({
+                "role": "assistant",
+                "content": response_content
+            })
+
+        # Process tool calls if any
+        if tool_calls:
+            self._log(f"Processing {len(tool_calls)} tool calls")
+
+            # Get the assistant message with tool calls
+            assistant_message = {
+                "role": "assistant",
+                "content": response_content if response_content else None,
+                "tool_calls": []
+            }
+
+            # Process each tool call
+            for tool_call in tool_calls:
+                # Extract tool info based on format
+                if isinstance(tool_call, dict):
+                    tool_id = tool_call.get("id", "")
+                    tool_name = tool_call.get("function", {}).get("name", "")
+                    tool_args = tool_call.get("function", {}).get("arguments", {})
+                else:
+                    # Assume object with attributes
+                    tool_id = getattr(tool_call, "id", "")
+                    tool_name = getattr(tool_call.function, "name", "")
+                    tool_args = getattr(tool_call.function, "arguments", {})
+
+                # Parse arguments if it's a string
+                if isinstance(tool_args, str):
+                    print(f"\nJinx args is string: {tool_args}")
+                    tool_args = json.loads(tool_args)
+                    print(f"Parsed to: {tool_args}")
+
+                # Add tool call to assistant message
+                assistant_message["tool_calls"].append({
+                    "id": tool_id,
+                    "type": "function",
+                    "function": {
+                        "name": tool_name,
+                        "arguments": json.dumps(tool_args) if isinstance(tool_args, dict) else tool_args
+                    }
+                })
+
+                # Execute tool call
+                self._log(f"Executing tool: {tool_name} with args: {tool_args}")
+                print(f"\nExecuting tool call:")
+                print(f" Jinx name: {tool_name}")
+                print(f" Jinx args: {tool_args}")
+                print(f" Jinx args type: {type(tool_args)}")
+
+                final_text.append(f"[Calling tool {tool_name} with args {tool_args}]")
+
+                # Call the tool with the arguments exactly as received
+                result = await self.session.call_tool(tool_name, tool_args)
+
+                # Print full result for debugging
+                print("\nJinx Result:")
+                print(f" Result: {result}")
+                print(f" Content: {result.content}")
+                print(f" Content type: {type(result.content)}")
+
+                tool_result = result.content
+
+                # Handle TextContent objects
+                if hasattr(tool_result, 'text'):
+                    print(f" TextContent detected, text: {tool_result.text}")
+                    tool_result = tool_result.text
+                elif isinstance(tool_result, list) and all(hasattr(item, 'text') for item in tool_result):
+                    print(f" List of TextContent detected")
+                    tool_result = [item.text for item in tool_result]
+
+                # Add tool result to messages
+                current_messages.append(assistant_message)
+                current_messages.append({
+                    "role": "tool",
+                    "tool_call_id": tool_id,
+                    "content": json.dumps(tool_result) if not isinstance(tool_result, str) else str(tool_result)
+                })
+
+                # Print updated messages for debugging
+                print("\nUpdated Messages:")
+                print(json.dumps(current_messages, indent=2, default=str))
+
+            # Get final response with tool results
+            self._log("Getting final response after tool calls")
+            final_response = get_litellm_response(
+                model=self.model,
+                provider=self.provider,
+                api_url=self.api_url,
+                api_key=self.api_key,
+                messages=current_messages,
+                stream=stream
+            )
+
+            final_text.append(final_response.get("response", ""))
+
+            # Update messages with final assistant response
+            current_messages.append({
+                "role": "assistant",
+                "content": final_response.get("response", "")
+            })
+
+        return {
+            "response": "\n".join(final_text),
+            "messages": current_messages
+        }
+
+    async def chat_loop(self):
+        """Run an interactive chat loop"""
+        print("\nMCP Client Started!")
+        print("Type your queries or 'quit' to exit.")
+
+        messages = []
+
+        while True:
+            query = input("\nQuery: ").strip()
+
+            if query.lower() == 'quit':
+                break
+
+            # Process the query
+            result = await self.process_query(query, messages)
+            messages = result.get("messages", [])
+
+            # Display the response
+            print("\nResponse:")
+            print(result.get("response", ""))
+
+    async def cleanup(self):
+        """Clean up resources"""
+        self._log("Cleaning up resources")
+        await self.exit_stack.aclose()
+
+async def main():
+    """Entry point for the MCP client."""
+    if len(sys.argv) < 2:
+        print("Usage: python raw_mcp_client.py <path_to_server_script>")
+        sys.exit(1)
+
+    server_script = sys.argv[1]
+
+    # Create and configure the client
+    client = MCPClient()
+
+    # Connect to the server
+    await client.connect_to_server(server_script)
+
+    # Run the interactive chat loop
+    await client.chat_loop()
+
+    # Clean up resources
+    await client.cleanup()
+
+if __name__ == "__main__":
+    asyncio.run(main())
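For orientation, the added module above is driven interactively via main() and chat_loop(); a minimal sketch of driving the same MCPClient programmatically for a single query is shown below. This is illustrative only and not part of the package: the server script path and the example query are placeholders, and only methods defined in the diffed module (connect_to_server, process_query, cleanup) are used.

    import asyncio
    from npcsh.mcp_helpers import MCPClient

    async def run_once(server_script: str, query: str) -> None:
        # Instantiate with the package defaults (NPCSH_CHAT_MODEL, etc.); debug=False quiets _log output
        client = MCPClient(debug=False)
        # Connect to the MCP server over stdio and discover its tools
        await client.connect_to_server(server_script)
        try:
            # One LLM round trip, including any tool calls the model requests
            result = await client.process_query(query)
            print(result["response"])
        finally:
            # Always release the stdio transport and session
            await client.cleanup()

    if __name__ == "__main__":
        # Placeholder server path and query, for illustration only
        asyncio.run(run_once("path/to/mcp_server.py", "What tools are available?"))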