npcsh 1.0.26__py3-none-any.whl → 1.0.28__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in that registry.
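For readers who want to double-check a comparison like this against the registry artifacts themselves, an equivalent file-by-file diff can be produced locally with only the Python standard library. The sketch below is illustrative, not the tool that generated this page; it assumes both wheels have already been downloaded (for example with pip download npcsh==1.0.26 --no-deps -d wheels/ and likewise for 1.0.28), and the wheels/ directory and filenames are assumptions.

# Minimal local reproduction sketch (assumed local filenames).
import difflib
import zipfile

OLD = "wheels/npcsh-1.0.26-py3-none-any.whl"
NEW = "wheels/npcsh-1.0.28-py3-none-any.whl"

def wheel_members(path):
    # Map each archive member name to its raw bytes.
    with zipfile.ZipFile(path) as zf:
        return {name: zf.read(name) for name in zf.namelist()}

old_files, new_files = wheel_members(OLD), wheel_members(NEW)

for name in sorted(set(old_files) | set(new_files)):
    if old_files.get(name) == new_files.get(name):
        continue  # identical members contribute no hunks
    old_lines = old_files.get(name, b"").decode("utf-8", errors="replace").splitlines()
    new_lines = new_files.get(name, b"").decode("utf-8", errors="replace").splitlines()
    print("\n".join(difflib.unified_diff(old_lines, new_lines,
                                         fromfile=name, tofile=name, lineterm="")))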
Files changed (50)
  1. npcsh/_state.py +115 -111
  2. npcsh/alicanto.py +88 -88
  3. npcsh/corca.py +423 -95
  4. npcsh/guac.py +110 -107
  5. npcsh/mcp_helpers.py +45 -45
  6. npcsh/mcp_server.py +16 -17
  7. npcsh/npc.py +16 -17
  8. npcsh/npc_team/jinxs/bash_executer.jinx +1 -1
  9. npcsh/npc_team/jinxs/edit_file.jinx +6 -6
  10. npcsh/npc_team/jinxs/image_generation.jinx +5 -5
  11. npcsh/npc_team/jinxs/screen_cap.jinx +2 -2
  12. npcsh/npcsh.py +15 -6
  13. npcsh/plonk.py +8 -8
  14. npcsh/routes.py +77 -77
  15. npcsh/spool.py +13 -13
  16. npcsh/wander.py +37 -37
  17. npcsh/yap.py +72 -72
  18. {npcsh-1.0.26.data → npcsh-1.0.28.data}/data/npcsh/npc_team/bash_executer.jinx +1 -1
  19. {npcsh-1.0.26.data → npcsh-1.0.28.data}/data/npcsh/npc_team/edit_file.jinx +6 -6
  20. {npcsh-1.0.26.data → npcsh-1.0.28.data}/data/npcsh/npc_team/image_generation.jinx +5 -5
  21. {npcsh-1.0.26.data → npcsh-1.0.28.data}/data/npcsh/npc_team/screen_cap.jinx +2 -2
  22. {npcsh-1.0.26.dist-info → npcsh-1.0.28.dist-info}/METADATA +1 -1
  23. npcsh-1.0.28.dist-info/RECORD +73 -0
  24. npcsh-1.0.26.dist-info/RECORD +0 -73
  25. {npcsh-1.0.26.data → npcsh-1.0.28.data}/data/npcsh/npc_team/alicanto.npc +0 -0
  26. {npcsh-1.0.26.data → npcsh-1.0.28.data}/data/npcsh/npc_team/alicanto.png +0 -0
  27. {npcsh-1.0.26.data → npcsh-1.0.28.data}/data/npcsh/npc_team/corca.npc +0 -0
  28. {npcsh-1.0.26.data → npcsh-1.0.28.data}/data/npcsh/npc_team/corca.png +0 -0
  29. {npcsh-1.0.26.data → npcsh-1.0.28.data}/data/npcsh/npc_team/foreman.npc +0 -0
  30. {npcsh-1.0.26.data → npcsh-1.0.28.data}/data/npcsh/npc_team/frederic.npc +0 -0
  31. {npcsh-1.0.26.data → npcsh-1.0.28.data}/data/npcsh/npc_team/frederic4.png +0 -0
  32. {npcsh-1.0.26.data → npcsh-1.0.28.data}/data/npcsh/npc_team/guac.png +0 -0
  33. {npcsh-1.0.26.data → npcsh-1.0.28.data}/data/npcsh/npc_team/internet_search.jinx +0 -0
  34. {npcsh-1.0.26.data → npcsh-1.0.28.data}/data/npcsh/npc_team/kadiefa.npc +0 -0
  35. {npcsh-1.0.26.data → npcsh-1.0.28.data}/data/npcsh/npc_team/kadiefa.png +0 -0
  36. {npcsh-1.0.26.data → npcsh-1.0.28.data}/data/npcsh/npc_team/npcsh.ctx +0 -0
  37. {npcsh-1.0.26.data → npcsh-1.0.28.data}/data/npcsh/npc_team/npcsh_sibiji.png +0 -0
  38. {npcsh-1.0.26.data → npcsh-1.0.28.data}/data/npcsh/npc_team/plonk.npc +0 -0
  39. {npcsh-1.0.26.data → npcsh-1.0.28.data}/data/npcsh/npc_team/plonk.png +0 -0
  40. {npcsh-1.0.26.data → npcsh-1.0.28.data}/data/npcsh/npc_team/plonkjr.npc +0 -0
  41. {npcsh-1.0.26.data → npcsh-1.0.28.data}/data/npcsh/npc_team/plonkjr.png +0 -0
  42. {npcsh-1.0.26.data → npcsh-1.0.28.data}/data/npcsh/npc_team/python_executor.jinx +0 -0
  43. {npcsh-1.0.26.data → npcsh-1.0.28.data}/data/npcsh/npc_team/sibiji.npc +0 -0
  44. {npcsh-1.0.26.data → npcsh-1.0.28.data}/data/npcsh/npc_team/sibiji.png +0 -0
  45. {npcsh-1.0.26.data → npcsh-1.0.28.data}/data/npcsh/npc_team/spool.png +0 -0
  46. {npcsh-1.0.26.data → npcsh-1.0.28.data}/data/npcsh/npc_team/yap.png +0 -0
  47. {npcsh-1.0.26.dist-info → npcsh-1.0.28.dist-info}/WHEEL +0 -0
  48. {npcsh-1.0.26.dist-info → npcsh-1.0.28.dist-info}/entry_points.txt +0 -0
  49. {npcsh-1.0.26.dist-info → npcsh-1.0.28.dist-info}/licenses/LICENSE +0 -0
  50. {npcsh-1.0.26.dist-info → npcsh-1.0.28.dist-info}/top_level.txt +0 -0
npcsh/corca.py CHANGED
@@ -5,6 +5,8 @@ import shlex
 import argparse
 from contextlib import AsyncExitStack
 from typing import Optional, Callable, Dict, Any, Tuple, List
+import shutil
+import traceback
 
 try:
     from mcp import ClientSession, StdioServerParameters
@@ -29,16 +31,26 @@ from npcsh._state import (
     readline_safe_prompt,
     setup_shell,
     should_skip_kg_processing,
-
+    NPCSH_CHAT_PROVIDER,
+    NPCSH_CHAT_MODEL,
 )
 import yaml
-
+from pathlib import Path
 
 class MCPClientNPC:
     def __init__(self, debug: bool = True):
         self.debug = debug
         self.session: Optional[ClientSession] = None
-        self._exit_stack = asyncio.new_event_loop().run_until_complete(self._init_stack())
+        try:
+            self._loop = asyncio.get_event_loop()
+            if self._loop.is_closed():
+                self._loop = asyncio.new_event_loop()
+                asyncio.set_event_loop(self._loop)
+        except RuntimeError:
+            self._loop = asyncio.new_event_loop()
+            asyncio.set_event_loop(self._loop)
+
+        self._exit_stack = self._loop.run_until_complete(self._init_stack())
         self.available_tools_llm: List[Dict[str, Any]] = []
         self.tool_map: Dict[str, Callable] = {}
         self.server_script_path: Optional[str] = None
@@ -66,9 +78,9 @@ class MCPClientNPC:
 
         server_params = StdioServerParameters(
             command=cmd_parts[0],
-            args=['-c', f'import sys; sys.path.pop(0) if sys.path[0] == "{os.path.dirname(abs_path)}" else None; exec(open("{abs_path}").read())'],
+            args=[abs_path],
             env=os.environ.copy(),
-            cwd=os.path.dirname(os.path.dirname(abs_path)) # Run from project root
+            cwd=Path(abs_path).parent
         )
         if self.session:
             await self._exit_stack.aclose()
@@ -95,43 +107,35 @@ class MCPClientNPC:
             }
             self.available_tools_llm.append(tool_def)
 
-            async def execute_tool(tool_name: str, args: dict):
-                if not self.session:
-                    return {"error": "No MCP session"}
-
-                print(f"DEBUG: About to call MCP tool {tool_name}")
-                try:
-                    # Add a timeout
-                    result = await asyncio.wait_for(
-                        self.session.call_tool(tool_name, args),
-                        timeout=30.0
-                    )
-                    print(f"DEBUG: MCP tool {tool_name} returned: {type(result)}")
-                    return result
-                except asyncio.TimeoutError:
-                    print(f"DEBUG: Tool {tool_name} timed out after 30 seconds")
-                    return {"error": f"Tool {tool_name} timed out"}
-                except Exception as e:
-                    print(f"DEBUG: Tool {tool_name} error: {e}")
-                    return {"error": str(e)}
-
-            def make_tool_func(tool_name):
+            def make_tool_func(tool_name_closure):
                 async def tool_func(**kwargs):
-                    print(f"DEBUG: Tool wrapper called for {tool_name} with {kwargs}")
-                    # Clean up None string values
-                    cleaned_kwargs = {}
-                    for k, v in kwargs.items():
-                        if v == 'None':
-                            cleaned_kwargs[k] = None
-                        else:
-                            cleaned_kwargs[k] = v
-                    result = await execute_tool(tool_name, cleaned_kwargs)
-                    print(f"DEBUG: Tool wrapper got result: {type(result)}")
-                    return result
+                    if not self.session:
+                        return {"error": "No MCP session"}
+
+                    self._log(f"About to call MCP tool {tool_name_closure}")
+                    try:
+                        cleaned_kwargs = {}
+                        for k, v in kwargs.items():
+                            if v == 'None':
+                                cleaned_kwargs[k] = None
+                            else:
+                                cleaned_kwargs[k] = v
+                        result = await asyncio.wait_for(
+                            self.session.call_tool(tool_name_closure, cleaned_kwargs),
+                            timeout=30.0
+                        )
+                        self._log(f"MCP tool {tool_name_closure} returned: {type(result)}")
+                        return result
+                    except asyncio.TimeoutError:
+                        self._log(f"Tool {tool_name_closure} timed out after 30 seconds", "red")
+                        return {"error": f"Tool {tool_name_closure} timed out"}
+                    except Exception as e:
+                        self._log(f"Tool {tool_name_closure} error: {e}", "red")
+                        return {"error": str(e)}
 
                 def sync_wrapper(**kwargs):
-                    print(f"DEBUG: Sync wrapper called for {tool_name}")
-                    return asyncio.run(tool_func(**kwargs))
+                    self._log(f"Sync wrapper called for {tool_name_closure}")
+                    return self._loop.run_until_complete(tool_func(**kwargs))
 
                 return sync_wrapper
             self.tool_map[mcp_tool.name] = make_tool_func(mcp_tool.name)
@@ -139,10 +143,12 @@ class MCPClientNPC:
         self._log(f"Connection successful. Tools: {', '.join(tool_names) if tool_names else 'None'}")
 
     def connect_sync(self, server_script_path: str) -> bool:
-        loop = asyncio.get_event_loop_policy().get_event_loop()
+        loop = self._loop
         if loop.is_closed():
-            loop = asyncio.new_event_loop()
-            asyncio.set_event_loop(loop)
+            self._loop = asyncio.new_event_loop()
+            asyncio.set_event_loop(self._loop)
+            loop = self._loop
+
         try:
             loop.run_until_complete(self._connect_async(server_script_path))
             return True
@@ -153,26 +159,28 @@ class MCPClientNPC:
     def disconnect_sync(self):
         if self.session:
             self._log("Disconnecting MCP session.")
-            loop = asyncio.get_event_loop_policy().get_event_loop()
+            loop = self._loop
             if not loop.is_closed():
                 try:
                     async def close_session():
                         await self.session.close()
+                        await self._exit_stack.aclose()
                     loop.run_until_complete(close_session())
                 except RuntimeError:
                     pass
+                except Exception as e:
+                    print(f"Error during MCP client disconnect: {e}", file=sys.stderr)
             self.session = None
+            self._exit_stack = None
 
 
 def process_mcp_stream(stream_response, active_npc):
-    """Process streaming response and extract content + tool calls for both Ollama and OpenAI providers"""
     collected_content = ""
     tool_calls = []
 
     interrupted = False
 
-    # Save cursor position at the start
-    sys.stdout.write('\033[s') # Save cursor position
+    sys.stdout.write('\033[s')
     sys.stdout.flush()
     try:
         for chunk in stream_response:
@@ -200,7 +208,6 @@ def process_mcp_stream(stream_response, active_npc):
                 collected_content += chunk.message.content
                 print(chunk.message.content, end='', flush=True)
 
-            # Handle OpenAI-style responses (including gpt-oss)
             else:
                 if hasattr(chunk, 'choices') and chunk.choices:
                     delta = chunk.choices[0].delta
@@ -233,23 +240,30 @@ def process_mcp_stream(stream_response, active_npc):
    except KeyboardInterrupt:
        interrupted = True
        print('\n⚠️ Stream interrupted by user')
-    if interrupted:
-        str_output += "\n\n[⚠️ Response interrupted by user]"
-    # Always restore cursor position and clear everything after it
-    sys.stdout.write('\033[u') # Restore cursor position
-    sys.stdout.write('\033[J') # Clear from cursor down
+
+    sys.stdout.write('\033[u')
+    sys.stdout.write('\033[J')
    sys.stdout.flush()
 
-    # Now render the markdown at the restored position
    render_markdown(collected_content)
    print('\n')
    return collected_content, tool_calls
 
-def execute_command_corca(command: str, state: ShellState, command_history) -> Tuple[ShellState, Any]:
-    mcp_tools = []
+def execute_command_corca(command: str, state: ShellState, command_history, selected_mcp_tools_names: Optional[List[str]] = None) -> Tuple[ShellState, Any]:
+    mcp_tools_for_llm = []
 
    if hasattr(state, 'mcp_client') and state.mcp_client and state.mcp_client.session:
-        mcp_tools = state.mcp_client.available_tools_llm
+        all_available_mcp_tools = state.mcp_client.available_tools_llm
+
+        if selected_mcp_tools_names and len(selected_mcp_tools_names) > 0:
+            mcp_tools_for_llm = [
+                tool_def for tool_def in all_available_mcp_tools
+                if tool_def['function']['name'] in selected_mcp_tools_names
+            ]
+            if not mcp_tools_for_llm:
+                cprint("Warning: No selected MCP tools found or matched. Corca will proceed without tools.", "yellow", file=sys.stderr)
+        else:
+            mcp_tools_for_llm = all_available_mcp_tools
    else:
        cprint("Warning: Corca agent has no tools. No MCP server connected.", "yellow", file=sys.stderr)
 
@@ -257,11 +271,9 @@ def execute_command_corca(command: str, state: ShellState, command_history) -> T
 
     response_dict = get_llm_response(
         prompt=command,
-        model=active_npc.model or state.chat_model,
-        provider=active_npc.provider or state.chat_provider,
         npc=state.npc,
         messages=state.messages,
-        tools=mcp_tools,
+        tools=mcp_tools_for_llm,
         auto_process_tool_calls=False,
         stream=state.stream_output
     )
@@ -288,6 +300,88 @@ def execute_command_corca(command: str, state: ShellState, command_history) -> T
         "messages": state.messages
     }
 
+
+def _resolve_and_copy_mcp_server_path(
+    explicit_path: Optional[str],
+    current_path: Optional[str],
+    team_ctx_mcp_servers: Optional[List[Dict[str, str]]],
+    interactive: bool = False,
+    auto_copy_bypass: bool = False # <-- New parameter
+) -> Optional[str]:
+    default_mcp_server_name = "mcp_server.py"
+    npcsh_default_template_path = Path(__file__).parent / default_mcp_server_name
+
+    def _copy_template_if_missing(destination_dir: Path, description: str) -> Optional[Path]:
+        destination_file = destination_dir / default_mcp_server_name
+        if not npcsh_default_template_path.exists():
+            cprint(f"Error: Default {default_mcp_server_name} template not found at {npcsh_default_template_path}", "red")
+            return None
+
+        if not destination_file.exists():
+            # Check auto_copy_bypass first
+            if auto_copy_bypass or not interactive: # If bypass is true OR not interactive, auto-copy
+                destination_dir.mkdir(parents=True, exist_ok=True)
+                shutil.copy(npcsh_default_template_path, destination_file)
+                print(colored(f"Automatically copied default {default_mcp_server_name} to {destination_file}", "green"))
+                return destination_file
+            else: # Only ask if interactive and no bypass
+                choice = input(colored(f"No {default_mcp_server_name} found in {description}. Copy default template to {destination_file}? (y/N): ", "yellow")).strip().lower()
+                if choice == 'y':
+                    destination_dir.mkdir(parents=True, exist_ok=True)
+                    shutil.copy(npcsh_default_template_path, destination_file)
+                    print(colored(f"Copied default {default_mcp_server_name} to {destination_file}", "green"))
+                    return destination_file
+                else:
+                    print(colored("Skipping copy.", "yellow"))
+                    return None
+        return destination_file
+
+    if explicit_path:
+        abs_explicit_path = Path(explicit_path).expanduser().resolve()
+        if abs_explicit_path.exists():
+            print(f"Using explicit MCP server path: {abs_explicit_path}")
+            return str(abs_explicit_path)
+        else:
+            cprint(f"Warning: Explicit MCP server path not found: {abs_explicit_path}", "yellow")
+
+    if team_ctx_mcp_servers:
+        for server_entry in team_ctx_mcp_servers:
+            server_path_from_ctx = server_entry.get("value")
+            if server_path_from_ctx:
+                abs_ctx_path = Path(server_path_from_ctx).expanduser().resolve()
+                if abs_ctx_path.exists():
+                    print(f"Using MCP server path from team context: {abs_ctx_path}")
+                    return str(abs_ctx_path)
+                else:
+                    cprint(f"Warning: MCP server path from team context not found: {abs_ctx_path}", "yellow")
+
+    if current_path:
+        project_npc_team_dir = Path(current_path).resolve() / "npc_team"
+        project_mcp_server_file = project_npc_team_dir / default_mcp_server_name
+
+        if project_mcp_server_file.exists():
+            print(f"Using project-specific MCP server path: {project_mcp_server_file}")
+            return str(project_mcp_server_file)
+        else:
+            copied_path = _copy_template_if_missing(project_npc_team_dir, "project's npc_team directory")
+            if copied_path:
+                return str(copied_path)
+
+    global_npc_team_dir = Path.home() / ".npcsh" / "npc_team"
+    global_mcp_server_file = global_npc_team_dir / default_mcp_server_name
+
+    if global_mcp_server_file.exists():
+        print(f"Using global MCP server path: {global_mcp_server_file}")
+        return str(global_mcp_server_file)
+    else:
+        copied_path = _copy_template_if_missing(global_npc_team_dir, "global npc_team directory")
+        if copied_path:
+            return str(copied_path)
+
+    cprint("No MCP server script found in any expected location.", "yellow")
+    return None
+
+
 def print_corca_welcome_message():
     turq = "\033[38;2;64;224;208m"
     chrome = "\033[38;2;211;211;211m"
@@ -307,6 +401,54 @@ An MCP-powered shell for advanced agentic workflows.
 """
     )
 
+def create_corca_state_and_mcp_client(conversation_id, command_history, npc=None, team=None,
+                                      current_path=None, mcp_server_path_from_request: Optional[str] = None):
+    from npcsh._state import ShellState
+
+    state = ShellState(
+        conversation_id=conversation_id,
+        stream_output=True,
+        current_mode="corca",
+        chat_model=os.environ.get("NPCSH_CHAT_MODEL", "gemma3:4b"),
+        chat_provider=os.environ.get("NPCSH_CHAT_PROVIDER", "ollama"),
+        current_path=current_path or os.getcwd(),
+        npc=npc,
+        team=team
+    )
+    state.command_history = command_history
+
+    # Read NPCSH_CORCA_AUTO_COPY_MCP_SERVER from environment for non-interactive calls
+    auto_copy_bypass = os.getenv("NPCSH_CORCA_AUTO_COPY_MCP_SERVER", "false").lower() == "true"
+
+    resolved_server_path = _resolve_and_copy_mcp_server_path(
+        explicit_path=mcp_server_path_from_request,
+        current_path=current_path,
+        team_ctx_mcp_servers=team.team_ctx.get('mcp_servers', []) if team and hasattr(team, 'team_ctx') else None,
+        interactive=False, # Always non-interactive for Flask API calls
+        auto_copy_bypass=auto_copy_bypass # Pass env var setting
+    )
+
+    state.mcp_client = None
+    if resolved_server_path:
+        try:
+            client_instance = MCPClientNPC()
+            if client_instance.connect_sync(resolved_server_path):
+                state.mcp_client = client_instance
+                print(f"Successfully connected MCP client for {conversation_id} to {resolved_server_path}")
+            else:
+                print(f"Failed to connect MCP client for {conversation_id} to {resolved_server_path}. Tools will be unavailable.")
+        except ImportError:
+            print("WARNING: npcsh.corca or MCPClientNPC not found. Cannot initialize MCP client.", file=sys.stderr)
+        except FileNotFoundError as e:
+            print(f"MCP Client Error: {e}")
+        except ValueError as e:
+            print(f"MCP Client Error: {e}")
+        except Exception as e:
+            print(f"An unexpected error occurred during MCP client initialization: {e}")
+            traceback.print_exc()
+
+    return state
+
 
 def process_corca_result(
     user_input: str,
@@ -358,7 +500,6 @@ def process_corca_result(
             tool_args = {}
 
         try:
-            print(f" Calling MCP tool: {tool_name} with args: {tool_args}")
 
             loop = asyncio.get_event_loop()
             if loop.is_closed():
@@ -369,9 +510,6 @@
                 result_state.mcp_client.session.call_tool(tool_name, tool_args)
             )
 
-            print(f"DEBUG: MCP result type: {type(mcp_result)}")
-            print(f"DEBUG: MCP result: {mcp_result}")
-            print(f"DEBUG: MCP result attributes: {dir(mcp_result)}")
 
             tool_content = ""
             if hasattr(mcp_result, 'content') and mcp_result.content:
@@ -473,26 +611,16 @@ def process_corca_result(
                     result_state.mcp_client.session.call_tool(tool_name, tool_args)
                 )
 
-                print(f"DEBUG: MCP result type: {type(mcp_result)}")
-                print(f"DEBUG: MCP result: {mcp_result}")
-                print(f"DEBUG: MCP result.isError: {mcp_result.isError}")
-                print(f"DEBUG: MCP result.meta: {mcp_result.meta}")
-                print(f"DEBUG: MCP result.content length: {len(mcp_result.content)}")
 
                 tool_content = ""
                 if hasattr(mcp_result, 'content') and mcp_result.content:
                     for i, content_item in enumerate(mcp_result.content):
-                        print(f"DEBUG: content_item[{i}] full object: {repr(content_item)}")
-                        print(f"DEBUG: content_item[{i}] text attribute: '{content_item.text}'")
-                        print(f"DEBUG: content_item[{i}] text length: {len(content_item.text) if content_item.text else 0}")
 
                         if hasattr(content_item, 'text') and content_item.text:
                             tool_content += content_item.text
                         elif hasattr(content_item, 'data'):
-                            print(f"DEBUG: content_item[{i}] has data: {content_item.data}")
                            tool_content += str(content_item.data)
                        else:
-                            print(f"DEBUG: content_item[{i}] converting to string: {str(content_item)}")
                            tool_content += str(content_item)
                    result_state.messages.append({
                        "role": "tool",
@@ -589,8 +717,11 @@ def process_corca_result(
 
    Current Context: "{current_context}".
 
-    Respond with JSON: {{"suggestion": "Your sentence."
-    }}"""
+    Respond with JSON: """ + """
+    {
+    "suggestion": "Your sentence.
+    }
+    """
        response = get_llm_response(prompt, npc=active_npc, format="json")
        suggestion = response.get("response", {}).get("suggestion")
 
@@ -609,9 +740,175 @@ def process_corca_result(
        import traceback
        print(colored(f"Could not generate team suggestions: {e}", "yellow"))
        traceback.print_exc()
-
-def enter_corca_mode(command: str,
-                     **kwargs):
+
+
+
+
+
+def _read_npcsh_global_env() -> Dict[str, str]:
+    global_env_file = Path(".npcsh_global")
+    env_vars = {}
+    if global_env_file.exists():
+        try:
+            with open(global_env_file, 'r') as f:
+                for line in f:
+                    line = line.strip()
+                    if line and '=' in line and not line.startswith('#'):
+                        key, value = line.split('=', 1)
+                        env_vars[key.strip()] = value.strip()
+        except Exception as e:
+            print(f"Warning: Could not read .npcsh_global: {e}")
+    return env_vars
+
+def _write_to_npcsh_global(key: str, value: str) -> None:
+    global_env_file = Path(".npcsh_global")
+    env_vars = _read_npcsh_global_env()
+    env_vars[key] = value
+
+    try:
+        with open(global_env_file, 'w') as f:
+            for k, v in env_vars.items():
+                f.write(f"{k}={v}\n")
+    except Exception as e:
+        print(f"Warning: Could not write to .npcsh_global: {e}")
+
+def _resolve_and_copy_mcp_server_path(
+    explicit_path: Optional[str],
+    current_path: Optional[str],
+    team_ctx_mcp_servers: Optional[List[Dict[str, str]]],
+    interactive: bool = False,
+    auto_copy_bypass: bool = False,
+    force_global: bool = False
+) -> Optional[str]:
+    default_mcp_server_name = "mcp_server.py"
+    npcsh_default_template_path = Path(__file__).parent / default_mcp_server_name
+
+    global_env = _read_npcsh_global_env()
+    prefer_global = global_env.get("NPCSH_PREFER_GLOBAL_MCP_SERVER", "false").lower() == "true"
+
+    def _copy_template_if_missing(destination_dir: Path, description: str) -> Optional[Path]:
+        destination_file = destination_dir / default_mcp_server_name
+        if not npcsh_default_template_path.exists():
+            cprint(f"Error: Default {default_mcp_server_name} template not found at {npcsh_default_template_path}", "red")
+            return None
+
+        if not destination_file.exists():
+            if auto_copy_bypass or not interactive:
+                destination_dir.mkdir(parents=True, exist_ok=True)
+                shutil.copy(npcsh_default_template_path, destination_file)
+                print(colored(f"Automatically copied default {default_mcp_server_name} to {destination_file}", "green"))
+                return destination_file
+            else:
+                choice = input(colored(f"No {default_mcp_server_name} found in {description}. Copy default template to {destination_file}? (y/N/g for global): ", "yellow")).strip().lower()
+                if choice == 'y':
+                    destination_dir.mkdir(parents=True, exist_ok=True)
+                    shutil.copy(npcsh_default_template_path, destination_file)
+                    print(colored(f"Copied default {default_mcp_server_name} to {destination_file}", "green"))
+                    return destination_file
+                elif choice == 'g':
+                    _write_to_npcsh_global("NPCSH_PREFER_GLOBAL_MCP_SERVER", "true")
+                    print(colored("Set preference to use global MCP server.", "green"))
+                    return None
+                else:
+                    print(colored("Skipping copy.", "yellow"))
+                    return None
+        return destination_file
+
+    if explicit_path:
+        abs_explicit_path = Path(explicit_path).expanduser().resolve()
+        if abs_explicit_path.exists():
+            print(f"Using explicit MCP server path: {abs_explicit_path}")
+            return str(abs_explicit_path)
+        else:
+            cprint(f"Warning: Explicit MCP server path not found: {abs_explicit_path}", "yellow")
+
+    if team_ctx_mcp_servers:
+        for server_entry in team_ctx_mcp_servers:
+            server_path_from_ctx = server_entry.get("value")
+            if server_path_from_ctx:
+                abs_ctx_path = Path(server_path_from_ctx).expanduser().resolve()
+                if abs_ctx_path.exists():
+                    print(f"Using MCP server path from team context: {abs_ctx_path}")
+                    return str(abs_ctx_path)
+                else:
+                    cprint(f"Warning: MCP server path from team context not found: {abs_ctx_path}", "yellow")
+
+    if not (force_global or prefer_global):
+        if current_path:
+            project_npc_team_dir = Path(current_path).resolve() / "npc_team"
+            project_mcp_server_file = project_npc_team_dir / default_mcp_server_name
+
+            if project_mcp_server_file.exists():
+                print(f"Using project-specific MCP server path: {project_mcp_server_file}")
+                return str(project_mcp_server_file)
+            else:
+                copied_path = _copy_template_if_missing(project_npc_team_dir, "project's npc_team directory")
+                if copied_path:
+                    return str(copied_path)
+
+    global_npc_team_dir = Path.home() / ".npcsh" / "npc_team"
+    global_mcp_server_file = global_npc_team_dir / default_mcp_server_name
+
+    if global_mcp_server_file.exists():
+        print(f"Using global MCP server path: {global_mcp_server_file}")
+        return str(global_mcp_server_file)
+    else:
+        copied_path = _copy_template_if_missing(global_npc_team_dir, "global npc_team directory")
+        if copied_path:
+            return str(copied_path)
+
+    cprint("No MCP server script found in any expected location.", "yellow")
+    return None
+
+def create_corca_state_and_mcp_client(conversation_id, command_history, npc=None, team=None,
+                                      current_path=None, mcp_server_path_from_request: Optional[str] = None):
+    from npcsh._state import ShellState
+
+    state = ShellState(
+        conversation_id=conversation_id,
+        stream_output=True,
+        current_mode="corca",
+        chat_model=os.environ.get("NPCSH_CHAT_MODEL", "gemma3:4b"),
+        chat_provider=os.environ.get("NPCSH_CHAT_PROVIDER", "ollama"),
+        current_path=current_path or os.getcwd(),
+        npc=npc,
+        team=team
+    )
+    state.command_history = command_history
+
+    auto_copy_bypass = os.getenv("NPCSH_CORCA_AUTO_COPY_MCP_SERVER", "false").lower() == "true"
+
+    resolved_server_path = _resolve_and_copy_mcp_server_path(
+        explicit_path=mcp_server_path_from_request,
+        current_path=current_path,
+        team_ctx_mcp_servers=team.team_ctx.get('mcp_servers', []) if team and hasattr(team, 'team_ctx') else None,
+        interactive=False,
+        auto_copy_bypass=auto_copy_bypass,
+        force_global=False
+    )
+
+    state.mcp_client = None
+    if resolved_server_path:
+        try:
+            client_instance = MCPClientNPC()
+            if client_instance.connect_sync(resolved_server_path):
+                state.mcp_client = client_instance
+                print(f"Successfully connected MCP client for {conversation_id} to {resolved_server_path}")
+            else:
+                print(f"Failed to connect MCP client for {conversation_id} to {resolved_server_path}. Tools will be unavailable.")
+        except ImportError:
+            print("WARNING: npcsh.corca or MCPClientNPC not found. Cannot initialize MCP client.", file=sys.stderr)
+        except FileNotFoundError as e:
+            print(f"MCP Client Error: {e}")
+        except ValueError as e:
+            print(f"MCP Client Error: {e}")
+        except Exception as e:
+            print(f"An unexpected error occurred during MCP client initialization: {e}")
+            traceback.print_exc()
+
+    return state
+
+def enter_corca_mode(command: str, **kwargs):
    state: ShellState = kwargs.get('shell_state')
    command_history: CommandHistory = kwargs.get('command_history')
 
@@ -619,28 +916,43 @@ def enter_corca_mode(command: str,
        return {"output": "Error: Corca mode requires shell state and history.", "messages": kwargs.get('messages', [])}
 
    all_command_parts = shlex.split(command)
-    parsed_args = all_command_parts[1:]
-
    parser = argparse.ArgumentParser(prog="/corca", description="Enter Corca MCP-powered mode.")
    parser.add_argument("--mcp-server-path", type=str, help="Path to an MCP server script.")
+    parser.add_argument("-g", "--global", dest="force_global", action="store_true", help="Force use of global MCP server.")
 
    try:
-        args = parser.parse_args(parsed_args)
+        known_args, remaining_args = parser.parse_known_args(all_command_parts[1:])
    except SystemExit:
        return {"output": "Invalid arguments for /corca. See /help corca.", "messages": state.messages}
 
    print_corca_welcome_message()
 
-    mcp_client = MCPClientNPC()
-    server_path = args.mcp_server_path
-    if not server_path and state.team and hasattr(state.team, 'team_ctx'):
-        server_path = state.team.team_ctx.get('mcp_server')
-
-    if server_path:
-        if mcp_client.connect_sync(server_path):
-            state.mcp_client = mcp_client
+    auto_copy_bypass = os.getenv("NPCSH_CORCA_AUTO_COPY_MCP_SERVER", "false").lower() == "true"
+
+    resolved_server_path = _resolve_and_copy_mcp_server_path(
+        explicit_path=known_args.mcp_server_path,
+        current_path=state.current_path,
+        team_ctx_mcp_servers=state.team.team_ctx.get('mcp_servers', []) if state.team and hasattr(state.team, 'team_ctx') else None,
+        interactive=True,
+        auto_copy_bypass=auto_copy_bypass,
+        force_global=known_args.force_global
+    )
+
+    mcp_client = None
+    if resolved_server_path:
+        try:
+            mcp_client = MCPClientNPC()
+            if mcp_client.connect_sync(resolved_server_path):
+                state.mcp_client = mcp_client
+            else:
+                cprint(f"Failed to connect to MCP server at {resolved_server_path}. Corca mode will have limited agent functionality.", "yellow")
+                state.mcp_client = None
+        except Exception as e:
+            cprint(f"Error connecting to MCP server: {e}. Corca mode will have limited agent functionality.", "red")
+            traceback.print_exc()
+            state.mcp_client = None
    else:
-        cprint("No MCP server path provided. Corca mode will have limited agent functionality.", "yellow")
+        cprint("No MCP server path provided or found. Corca mode will have limited agent functionality.", "yellow")
        state.mcp_client = None
 
    while True:
@@ -652,7 +964,11 @@ def enter_corca_mode(command: str,
        prompt_str = f"{colored(os.path.basename(state.current_path), 'blue')}:{prompt_npc_name}🦌> "
        prompt = readline_safe_prompt(prompt_str)
 
-        user_input = get_multiline_input(prompt).strip()
+        if remaining_args:
+            user_input = " ".join(remaining_args)
+            remaining_args = []
+        else:
+            user_input = get_multiline_input(prompt).strip()
 
        if user_input.lower() in ["exit", "quit", "done"]:
            break
@@ -681,14 +997,15 @@ def enter_corca_mode(command: str,
 
    render_markdown("\n# Exiting Corca Mode")
    return {"output": "", "messages": state.messages}
+
 def main():
    parser = argparse.ArgumentParser(description="Corca - An MCP-powered npcsh shell.")
    parser.add_argument("--mcp-server-path", type=str, help="Path to an MCP server script to connect to.")
+    parser.add_argument("-g", "--global", dest="force_global", action="store_true", help="Force use of global MCP server.")
    args = parser.parse_args()
 
    command_history, team, default_npc = setup_shell()
 
-    # Override default_npc with corca priority
    project_team_path = os.path.abspath('./npc_team/')
    global_team_path = os.path.expanduser('~/.npcsh/npc_team/')
 
@@ -703,9 +1020,17 @@ def main():
                        db_conn=command_history.engine)
    print('Team Default: ', team.provider, team.model)
    if default_npc.model is None:
-        default_npc.model = team.model
+        if team.model is not None:
+            default_npc.model = team.model
+        else:
+            default_npc.model = NPCSH_CHAT_MODEL
+
    if default_npc.provider is None:
-        default_npc.provider = team.provider
+        if team.provider is not None:
+            default_npc.provider = team.provider
+        else:
+            default_npc.provider = NPCSH_CHAT_PROVIDER
+
    from npcsh._state import initial_state
    initial_shell_state = initial_state
    initial_shell_state.team = team
@@ -714,6 +1039,8 @@ def main():
    fake_command_str = "/corca"
    if args.mcp_server_path:
        fake_command_str = f'/corca --mcp-server-path "{args.mcp_server_path}"'
+    elif args.force_global:
+        fake_command_str = "/corca --global"
 
    kwargs = {
        'command': fake_command_str,
@@ -722,5 +1049,6 @@
    }
 
    enter_corca_mode(**kwargs)
+
 if __name__ == "__main__":
-    main()
+    main()
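The most consequential behavioral change in corca.py above is that MCPClientNPC now creates or adopts a single event loop in __init__ and drives its synchronous tool wrappers through self._loop.run_until_complete(...) instead of calling asyncio.run(...) per tool invocation, which would spin up and close a fresh loop on every call and leave loop-bound objects pointing at a dead loop. The standalone sketch below illustrates only that loop-reuse pattern; the class and method names are illustrative and are not part of npcsh or the MCP SDK.

import asyncio

class PersistentLoopClient:
    # Illustrative only: keep one event loop for the object's lifetime.
    def __init__(self):
        try:
            self._loop = asyncio.get_event_loop()
            if self._loop.is_closed():
                self._loop = asyncio.new_event_loop()
                asyncio.set_event_loop(self._loop)
        except RuntimeError:
            # No usable current loop (e.g. newer Python outside a running loop).
            self._loop = asyncio.new_event_loop()
            asyncio.set_event_loop(self._loop)

    async def _call_tool(self, name: str, args: dict) -> dict:
        await asyncio.sleep(0)  # stand-in for an awaited remote/tool call
        return {"tool": name, "args": args}

    def call_tool_sync(self, name: str, **kwargs) -> dict:
        # Reuse the same loop on every call, mirroring the corca.py change;
        # asyncio.run() here would create and tear down a new loop each time.
        return self._loop.run_until_complete(self._call_tool(name, kwargs))

if __name__ == "__main__":
    client = PersistentLoopClient()
    print(client.call_tool_sync("echo", text="hello"))
    print(client.call_tool_sync("echo", text="again"))  # same loop, still open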