npcsh 1.0.26__py3-none-any.whl → 1.0.27__py3-none-any.whl

This diff shows the changes between publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (50)
  1. npcsh/_state.py +105 -105
  2. npcsh/alicanto.py +88 -88
  3. npcsh/corca.py +423 -81
  4. npcsh/guac.py +110 -107
  5. npcsh/mcp_helpers.py +45 -45
  6. npcsh/mcp_server.py +16 -17
  7. npcsh/npc.py +16 -17
  8. npcsh/npc_team/jinxs/bash_executer.jinx +1 -1
  9. npcsh/npc_team/jinxs/edit_file.jinx +6 -6
  10. npcsh/npc_team/jinxs/image_generation.jinx +5 -5
  11. npcsh/npc_team/jinxs/screen_cap.jinx +2 -2
  12. npcsh/npcsh.py +5 -2
  13. npcsh/plonk.py +8 -8
  14. npcsh/routes.py +77 -77
  15. npcsh/spool.py +13 -13
  16. npcsh/wander.py +37 -37
  17. npcsh/yap.py +72 -72
  18. {npcsh-1.0.26.data → npcsh-1.0.27.data}/data/npcsh/npc_team/bash_executer.jinx +1 -1
  19. {npcsh-1.0.26.data → npcsh-1.0.27.data}/data/npcsh/npc_team/edit_file.jinx +6 -6
  20. {npcsh-1.0.26.data → npcsh-1.0.27.data}/data/npcsh/npc_team/image_generation.jinx +5 -5
  21. {npcsh-1.0.26.data → npcsh-1.0.27.data}/data/npcsh/npc_team/screen_cap.jinx +2 -2
  22. {npcsh-1.0.26.dist-info → npcsh-1.0.27.dist-info}/METADATA +1 -1
  23. npcsh-1.0.27.dist-info/RECORD +73 -0
  24. npcsh-1.0.26.dist-info/RECORD +0 -73
  25. {npcsh-1.0.26.data → npcsh-1.0.27.data}/data/npcsh/npc_team/alicanto.npc +0 -0
  26. {npcsh-1.0.26.data → npcsh-1.0.27.data}/data/npcsh/npc_team/alicanto.png +0 -0
  27. {npcsh-1.0.26.data → npcsh-1.0.27.data}/data/npcsh/npc_team/corca.npc +0 -0
  28. {npcsh-1.0.26.data → npcsh-1.0.27.data}/data/npcsh/npc_team/corca.png +0 -0
  29. {npcsh-1.0.26.data → npcsh-1.0.27.data}/data/npcsh/npc_team/foreman.npc +0 -0
  30. {npcsh-1.0.26.data → npcsh-1.0.27.data}/data/npcsh/npc_team/frederic.npc +0 -0
  31. {npcsh-1.0.26.data → npcsh-1.0.27.data}/data/npcsh/npc_team/frederic4.png +0 -0
  32. {npcsh-1.0.26.data → npcsh-1.0.27.data}/data/npcsh/npc_team/guac.png +0 -0
  33. {npcsh-1.0.26.data → npcsh-1.0.27.data}/data/npcsh/npc_team/internet_search.jinx +0 -0
  34. {npcsh-1.0.26.data → npcsh-1.0.27.data}/data/npcsh/npc_team/kadiefa.npc +0 -0
  35. {npcsh-1.0.26.data → npcsh-1.0.27.data}/data/npcsh/npc_team/kadiefa.png +0 -0
  36. {npcsh-1.0.26.data → npcsh-1.0.27.data}/data/npcsh/npc_team/npcsh.ctx +0 -0
  37. {npcsh-1.0.26.data → npcsh-1.0.27.data}/data/npcsh/npc_team/npcsh_sibiji.png +0 -0
  38. {npcsh-1.0.26.data → npcsh-1.0.27.data}/data/npcsh/npc_team/plonk.npc +0 -0
  39. {npcsh-1.0.26.data → npcsh-1.0.27.data}/data/npcsh/npc_team/plonk.png +0 -0
  40. {npcsh-1.0.26.data → npcsh-1.0.27.data}/data/npcsh/npc_team/plonkjr.npc +0 -0
  41. {npcsh-1.0.26.data → npcsh-1.0.27.data}/data/npcsh/npc_team/plonkjr.png +0 -0
  42. {npcsh-1.0.26.data → npcsh-1.0.27.data}/data/npcsh/npc_team/python_executor.jinx +0 -0
  43. {npcsh-1.0.26.data → npcsh-1.0.27.data}/data/npcsh/npc_team/sibiji.npc +0 -0
  44. {npcsh-1.0.26.data → npcsh-1.0.27.data}/data/npcsh/npc_team/sibiji.png +0 -0
  45. {npcsh-1.0.26.data → npcsh-1.0.27.data}/data/npcsh/npc_team/spool.png +0 -0
  46. {npcsh-1.0.26.data → npcsh-1.0.27.data}/data/npcsh/npc_team/yap.png +0 -0
  47. {npcsh-1.0.26.dist-info → npcsh-1.0.27.dist-info}/WHEEL +0 -0
  48. {npcsh-1.0.26.dist-info → npcsh-1.0.27.dist-info}/entry_points.txt +0 -0
  49. {npcsh-1.0.26.dist-info → npcsh-1.0.27.dist-info}/licenses/LICENSE +0 -0
  50. {npcsh-1.0.26.dist-info → npcsh-1.0.27.dist-info}/top_level.txt +0 -0
npcsh/corca.py CHANGED
@@ -5,6 +5,8 @@ import shlex
  import argparse
  from contextlib import AsyncExitStack
  from typing import Optional, Callable, Dict, Any, Tuple, List
+ import shutil
+ import traceback

  try:
  from mcp import ClientSession, StdioServerParameters
@@ -29,16 +31,26 @@ from npcsh._state import (
  readline_safe_prompt,
  setup_shell,
  should_skip_kg_processing,
-
+ NPCSH_CHAT_PROVIDER,
+ NPCSH_CHAT_MODEL,
  )
  import yaml
-
+ from pathlib import Path

  class MCPClientNPC:
  def __init__(self, debug: bool = True):
  self.debug = debug
  self.session: Optional[ClientSession] = None
- self._exit_stack = asyncio.new_event_loop().run_until_complete(self._init_stack())
+ try:
+ self._loop = asyncio.get_event_loop()
+ if self._loop.is_closed():
+ self._loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(self._loop)
+ except RuntimeError:
+ self._loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(self._loop)
+
+ self._exit_stack = self._loop.run_until_complete(self._init_stack())
  self.available_tools_llm: List[Dict[str, Any]] = []
  self.tool_map: Dict[str, Callable] = {}
  self.server_script_path: Optional[str] = None
@@ -66,9 +78,9 @@ class MCPClientNPC:

  server_params = StdioServerParameters(
  command=cmd_parts[0],
- args=['-c', f'import sys; sys.path.pop(0) if sys.path[0] == "{os.path.dirname(abs_path)}" else None; exec(open("{abs_path}").read())'],
+ args=[abs_path],
  env=os.environ.copy(),
- cwd=os.path.dirname(os.path.dirname(abs_path)) # Run from project root
+ cwd=Path(abs_path).parent
  )
  if self.session:
  await self._exit_stack.aclose()
@@ -95,43 +107,35 @@ class MCPClientNPC:
  }
  self.available_tools_llm.append(tool_def)

- async def execute_tool(tool_name: str, args: dict):
- if not self.session:
- return {"error": "No MCP session"}
-
- print(f"DEBUG: About to call MCP tool {tool_name}")
- try:
- # Add a timeout
- result = await asyncio.wait_for(
- self.session.call_tool(tool_name, args),
- timeout=30.0
- )
- print(f"DEBUG: MCP tool {tool_name} returned: {type(result)}")
- return result
- except asyncio.TimeoutError:
- print(f"DEBUG: Tool {tool_name} timed out after 30 seconds")
- return {"error": f"Tool {tool_name} timed out"}
- except Exception as e:
- print(f"DEBUG: Tool {tool_name} error: {e}")
- return {"error": str(e)}
-
- def make_tool_func(tool_name):
+ def make_tool_func(tool_name_closure):
  async def tool_func(**kwargs):
- print(f"DEBUG: Tool wrapper called for {tool_name} with {kwargs}")
- # Clean up None string values
- cleaned_kwargs = {}
- for k, v in kwargs.items():
- if v == 'None':
- cleaned_kwargs[k] = None
- else:
- cleaned_kwargs[k] = v
- result = await execute_tool(tool_name, cleaned_kwargs)
- print(f"DEBUG: Tool wrapper got result: {type(result)}")
- return result
+ if not self.session:
+ return {"error": "No MCP session"}
+
+ self._log(f"About to call MCP tool {tool_name_closure}")
+ try:
+ cleaned_kwargs = {}
+ for k, v in kwargs.items():
+ if v == 'None':
+ cleaned_kwargs[k] = None
+ else:
+ cleaned_kwargs[k] = v
+ result = await asyncio.wait_for(
+ self.session.call_tool(tool_name_closure, cleaned_kwargs),
+ timeout=30.0
+ )
+ self._log(f"MCP tool {tool_name_closure} returned: {type(result)}")
+ return result
+ except asyncio.TimeoutError:
+ self._log(f"Tool {tool_name_closure} timed out after 30 seconds", "red")
+ return {"error": f"Tool {tool_name_closure} timed out"}
+ except Exception as e:
+ self._log(f"Tool {tool_name_closure} error: {e}", "red")
+ return {"error": str(e)}

  def sync_wrapper(**kwargs):
- print(f"DEBUG: Sync wrapper called for {tool_name}")
- return asyncio.run(tool_func(**kwargs))
+ self._log(f"Sync wrapper called for {tool_name_closure}")
+ return self._loop.run_until_complete(tool_func(**kwargs))

  return sync_wrapper
  self.tool_map[mcp_tool.name] = make_tool_func(mcp_tool.name)
@@ -139,10 +143,12 @@ class MCPClientNPC:
  self._log(f"Connection successful. Tools: {', '.join(tool_names) if tool_names else 'None'}")

  def connect_sync(self, server_script_path: str) -> bool:
- loop = asyncio.get_event_loop_policy().get_event_loop()
+ loop = self._loop
  if loop.is_closed():
- loop = asyncio.new_event_loop()
- asyncio.set_event_loop(loop)
+ self._loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(self._loop)
+ loop = self._loop
+
  try:
  loop.run_until_complete(self._connect_async(server_script_path))
  return True
@@ -153,26 +159,28 @@
  def disconnect_sync(self):
  if self.session:
  self._log("Disconnecting MCP session.")
- loop = asyncio.get_event_loop_policy().get_event_loop()
+ loop = self._loop
  if not loop.is_closed():
  try:
  async def close_session():
  await self.session.close()
+ await self._exit_stack.aclose()
  loop.run_until_complete(close_session())
  except RuntimeError:
  pass
+ except Exception as e:
+ print(f"Error during MCP client disconnect: {e}", file=sys.stderr)
  self.session = None
+ self._exit_stack = None


  def process_mcp_stream(stream_response, active_npc):
- """Process streaming response and extract content + tool calls for both Ollama and OpenAI providers"""
  collected_content = ""
  tool_calls = []

  interrupted = False

- # Save cursor position at the start
- sys.stdout.write('\033[s') # Save cursor position
+ sys.stdout.write('\033[s')
  sys.stdout.flush()
  try:
  for chunk in stream_response:
@@ -200,7 +208,6 @@ def process_mcp_stream(stream_response, active_npc):
  collected_content += chunk.message.content
  print(chunk.message.content, end='', flush=True)

- # Handle OpenAI-style responses (including gpt-oss)
  else:
  if hasattr(chunk, 'choices') and chunk.choices:
  delta = chunk.choices[0].delta
@@ -233,23 +240,30 @@ def process_mcp_stream(stream_response, active_npc):
  except KeyboardInterrupt:
  interrupted = True
  print('\n⚠️ Stream interrupted by user')
- if interrupted:
- str_output += "\n\n[⚠️ Response interrupted by user]"
- # Always restore cursor position and clear everything after it
- sys.stdout.write('\033[u') # Restore cursor position
- sys.stdout.write('\033[J') # Clear from cursor down
+
+ sys.stdout.write('\033[u')
+ sys.stdout.write('\033[J')
  sys.stdout.flush()

- # Now render the markdown at the restored position
  render_markdown(collected_content)
  print('\n')
  return collected_content, tool_calls

- def execute_command_corca(command: str, state: ShellState, command_history) -> Tuple[ShellState, Any]:
- mcp_tools = []
+ def execute_command_corca(command: str, state: ShellState, command_history, selected_mcp_tools_names: Optional[List[str]] = None) -> Tuple[ShellState, Any]:
+ mcp_tools_for_llm = []

  if hasattr(state, 'mcp_client') and state.mcp_client and state.mcp_client.session:
- mcp_tools = state.mcp_client.available_tools_llm
+ all_available_mcp_tools = state.mcp_client.available_tools_llm
+
+ if selected_mcp_tools_names and len(selected_mcp_tools_names) > 0:
+ mcp_tools_for_llm = [
+ tool_def for tool_def in all_available_mcp_tools
+ if tool_def['function']['name'] in selected_mcp_tools_names
+ ]
+ if not mcp_tools_for_llm:
+ cprint("Warning: No selected MCP tools found or matched. Corca will proceed without tools.", "yellow", file=sys.stderr)
+ else:
+ mcp_tools_for_llm = all_available_mcp_tools
  else:
  cprint("Warning: Corca agent has no tools. No MCP server connected.", "yellow", file=sys.stderr)

@@ -257,11 +271,9 @@ def execute_command_corca(command: str, state: ShellState, command_history) -> T

  response_dict = get_llm_response(
  prompt=command,
- model=active_npc.model or state.chat_model,
- provider=active_npc.provider or state.chat_provider,
  npc=state.npc,
  messages=state.messages,
- tools=mcp_tools,
+ tools=mcp_tools_for_llm,
  auto_process_tool_calls=False,
  stream=state.stream_output
  )
@@ -288,6 +300,88 @@ def execute_command_corca(command: str, state: ShellState, command_history) -> T
  "messages": state.messages
  }

+
+ def _resolve_and_copy_mcp_server_path(
+ explicit_path: Optional[str],
+ current_path: Optional[str],
+ team_ctx_mcp_servers: Optional[List[Dict[str, str]]],
+ interactive: bool = False,
+ auto_copy_bypass: bool = False # <-- New parameter
+ ) -> Optional[str]:
+ default_mcp_server_name = "mcp_server.py"
+ npcsh_default_template_path = Path(__file__).parent / default_mcp_server_name
+
+ def _copy_template_if_missing(destination_dir: Path, description: str) -> Optional[Path]:
+ destination_file = destination_dir / default_mcp_server_name
+ if not npcsh_default_template_path.exists():
+ cprint(f"Error: Default {default_mcp_server_name} template not found at {npcsh_default_template_path}", "red")
+ return None
+
+ if not destination_file.exists():
+ # Check auto_copy_bypass first
+ if auto_copy_bypass or not interactive: # If bypass is true OR not interactive, auto-copy
+ destination_dir.mkdir(parents=True, exist_ok=True)
+ shutil.copy(npcsh_default_template_path, destination_file)
+ print(colored(f"Automatically copied default {default_mcp_server_name} to {destination_file}", "green"))
+ return destination_file
+ else: # Only ask if interactive and no bypass
+ choice = input(colored(f"No {default_mcp_server_name} found in {description}. Copy default template to {destination_file}? (y/N): ", "yellow")).strip().lower()
+ if choice == 'y':
+ destination_dir.mkdir(parents=True, exist_ok=True)
+ shutil.copy(npcsh_default_template_path, destination_file)
+ print(colored(f"Copied default {default_mcp_server_name} to {destination_file}", "green"))
+ return destination_file
+ else:
+ print(colored("Skipping copy.", "yellow"))
+ return None
+ return destination_file
+
+ if explicit_path:
+ abs_explicit_path = Path(explicit_path).expanduser().resolve()
+ if abs_explicit_path.exists():
+ print(f"Using explicit MCP server path: {abs_explicit_path}")
+ return str(abs_explicit_path)
+ else:
+ cprint(f"Warning: Explicit MCP server path not found: {abs_explicit_path}", "yellow")
+
+ if team_ctx_mcp_servers:
+ for server_entry in team_ctx_mcp_servers:
+ server_path_from_ctx = server_entry.get("value")
+ if server_path_from_ctx:
+ abs_ctx_path = Path(server_path_from_ctx).expanduser().resolve()
+ if abs_ctx_path.exists():
+ print(f"Using MCP server path from team context: {abs_ctx_path}")
+ return str(abs_ctx_path)
+ else:
+ cprint(f"Warning: MCP server path from team context not found: {abs_ctx_path}", "yellow")
+
+ if current_path:
+ project_npc_team_dir = Path(current_path).resolve() / "npc_team"
+ project_mcp_server_file = project_npc_team_dir / default_mcp_server_name
+
+ if project_mcp_server_file.exists():
+ print(f"Using project-specific MCP server path: {project_mcp_server_file}")
+ return str(project_mcp_server_file)
+ else:
+ copied_path = _copy_template_if_missing(project_npc_team_dir, "project's npc_team directory")
+ if copied_path:
+ return str(copied_path)
+
+ global_npc_team_dir = Path.home() / ".npcsh" / "npc_team"
+ global_mcp_server_file = global_npc_team_dir / default_mcp_server_name
+
+ if global_mcp_server_file.exists():
+ print(f"Using global MCP server path: {global_mcp_server_file}")
+ return str(global_mcp_server_file)
+ else:
+ copied_path = _copy_template_if_missing(global_npc_team_dir, "global npc_team directory")
+ if copied_path:
+ return str(copied_path)
+
+ cprint("No MCP server script found in any expected location.", "yellow")
+ return None
+
+
  def print_corca_welcome_message():
  turq = "\033[38;2;64;224;208m"
  chrome = "\033[38;2;211;211;211m"
@@ -307,6 +401,54 @@ An MCP-powered shell for advanced agentic workflows.
  """
  )

+ def create_corca_state_and_mcp_client(conversation_id, command_history, npc=None, team=None,
+ current_path=None, mcp_server_path_from_request: Optional[str] = None):
+ from npcsh._state import ShellState
+
+ state = ShellState(
+ conversation_id=conversation_id,
+ stream_output=True,
+ current_mode="corca",
+ chat_model=os.environ.get("NPCSH_CHAT_MODEL", "gemma3:4b"),
+ chat_provider=os.environ.get("NPCSH_CHAT_PROVIDER", "ollama"),
+ current_path=current_path or os.getcwd(),
+ npc=npc,
+ team=team
+ )
+ state.command_history = command_history
+
+ # Read NPCSH_CORCA_AUTO_COPY_MCP_SERVER from environment for non-interactive calls
+ auto_copy_bypass = os.getenv("NPCSH_CORCA_AUTO_COPY_MCP_SERVER", "false").lower() == "true"
+
+ resolved_server_path = _resolve_and_copy_mcp_server_path(
+ explicit_path=mcp_server_path_from_request,
+ current_path=current_path,
+ team_ctx_mcp_servers=team.team_ctx.get('mcp_servers', []) if team and hasattr(team, 'team_ctx') else None,
+ interactive=False, # Always non-interactive for Flask API calls
+ auto_copy_bypass=auto_copy_bypass # Pass env var setting
+ )
+
+ state.mcp_client = None
+ if resolved_server_path:
+ try:
+ client_instance = MCPClientNPC()
+ if client_instance.connect_sync(resolved_server_path):
+ state.mcp_client = client_instance
+ print(f"Successfully connected MCP client for {conversation_id} to {resolved_server_path}")
+ else:
+ print(f"Failed to connect MCP client for {conversation_id} to {resolved_server_path}. Tools will be unavailable.")
+ except ImportError:
+ print("WARNING: npcsh.corca or MCPClientNPC not found. Cannot initialize MCP client.", file=sys.stderr)
+ except FileNotFoundError as e:
+ print(f"MCP Client Error: {e}")
+ except ValueError as e:
+ print(f"MCP Client Error: {e}")
+ except Exception as e:
+ print(f"An unexpected error occurred during MCP client initialization: {e}")
+ traceback.print_exc()
+
+ return state
+

  def process_corca_result(
  user_input: str,
@@ -589,8 +731,11 @@ def process_corca_result(

  Current Context: "{current_context}".

- Respond with JSON: {{"suggestion": "Your sentence."
- }}"""
+ Respond with JSON: """ + """
+ {
+ "suggestion": "Your sentence.
+ }
+ """
  response = get_llm_response(prompt, npc=active_npc, format="json")
  suggestion = response.get("response", {}).get("suggestion")

@@ -609,9 +754,175 @@ def process_corca_result(
  import traceback
  print(colored(f"Could not generate team suggestions: {e}", "yellow"))
  traceback.print_exc()
-
- def enter_corca_mode(command: str,
- **kwargs):
+
+
+
+
+
+ def _read_npcsh_global_env() -> Dict[str, str]:
+ global_env_file = Path(".npcsh_global")
+ env_vars = {}
+ if global_env_file.exists():
+ try:
+ with open(global_env_file, 'r') as f:
+ for line in f:
+ line = line.strip()
+ if line and '=' in line and not line.startswith('#'):
+ key, value = line.split('=', 1)
+ env_vars[key.strip()] = value.strip()
+ except Exception as e:
+ print(f"Warning: Could not read .npcsh_global: {e}")
+ return env_vars
+
+ def _write_to_npcsh_global(key: str, value: str) -> None:
+ global_env_file = Path(".npcsh_global")
+ env_vars = _read_npcsh_global_env()
+ env_vars[key] = value
+
+ try:
+ with open(global_env_file, 'w') as f:
+ for k, v in env_vars.items():
+ f.write(f"{k}={v}\n")
+ except Exception as e:
+ print(f"Warning: Could not write to .npcsh_global: {e}")
+
+ def _resolve_and_copy_mcp_server_path(
+ explicit_path: Optional[str],
+ current_path: Optional[str],
+ team_ctx_mcp_servers: Optional[List[Dict[str, str]]],
+ interactive: bool = False,
+ auto_copy_bypass: bool = False,
+ force_global: bool = False
+ ) -> Optional[str]:
+ default_mcp_server_name = "mcp_server.py"
+ npcsh_default_template_path = Path(__file__).parent / default_mcp_server_name
+
+ global_env = _read_npcsh_global_env()
+ prefer_global = global_env.get("NPCSH_PREFER_GLOBAL_MCP_SERVER", "false").lower() == "true"
+
+ def _copy_template_if_missing(destination_dir: Path, description: str) -> Optional[Path]:
+ destination_file = destination_dir / default_mcp_server_name
+ if not npcsh_default_template_path.exists():
+ cprint(f"Error: Default {default_mcp_server_name} template not found at {npcsh_default_template_path}", "red")
+ return None
+
+ if not destination_file.exists():
+ if auto_copy_bypass or not interactive:
+ destination_dir.mkdir(parents=True, exist_ok=True)
+ shutil.copy(npcsh_default_template_path, destination_file)
+ print(colored(f"Automatically copied default {default_mcp_server_name} to {destination_file}", "green"))
+ return destination_file
+ else:
+ choice = input(colored(f"No {default_mcp_server_name} found in {description}. Copy default template to {destination_file}? (y/N/g for global): ", "yellow")).strip().lower()
+ if choice == 'y':
+ destination_dir.mkdir(parents=True, exist_ok=True)
+ shutil.copy(npcsh_default_template_path, destination_file)
+ print(colored(f"Copied default {default_mcp_server_name} to {destination_file}", "green"))
+ return destination_file
+ elif choice == 'g':
+ _write_to_npcsh_global("NPCSH_PREFER_GLOBAL_MCP_SERVER", "true")
+ print(colored("Set preference to use global MCP server.", "green"))
+ return None
+ else:
+ print(colored("Skipping copy.", "yellow"))
+ return None
+ return destination_file
+
+ if explicit_path:
+ abs_explicit_path = Path(explicit_path).expanduser().resolve()
+ if abs_explicit_path.exists():
+ print(f"Using explicit MCP server path: {abs_explicit_path}")
+ return str(abs_explicit_path)
+ else:
+ cprint(f"Warning: Explicit MCP server path not found: {abs_explicit_path}", "yellow")
+
+ if team_ctx_mcp_servers:
+ for server_entry in team_ctx_mcp_servers:
+ server_path_from_ctx = server_entry.get("value")
+ if server_path_from_ctx:
+ abs_ctx_path = Path(server_path_from_ctx).expanduser().resolve()
+ if abs_ctx_path.exists():
+ print(f"Using MCP server path from team context: {abs_ctx_path}")
+ return str(abs_ctx_path)
+ else:
+ cprint(f"Warning: MCP server path from team context not found: {abs_ctx_path}", "yellow")
+
+ if not (force_global or prefer_global):
+ if current_path:
+ project_npc_team_dir = Path(current_path).resolve() / "npc_team"
+ project_mcp_server_file = project_npc_team_dir / default_mcp_server_name
+
+ if project_mcp_server_file.exists():
+ print(f"Using project-specific MCP server path: {project_mcp_server_file}")
+ return str(project_mcp_server_file)
+ else:
+ copied_path = _copy_template_if_missing(project_npc_team_dir, "project's npc_team directory")
+ if copied_path:
+ return str(copied_path)
+
+ global_npc_team_dir = Path.home() / ".npcsh" / "npc_team"
+ global_mcp_server_file = global_npc_team_dir / default_mcp_server_name
+
+ if global_mcp_server_file.exists():
+ print(f"Using global MCP server path: {global_mcp_server_file}")
+ return str(global_mcp_server_file)
+ else:
+ copied_path = _copy_template_if_missing(global_npc_team_dir, "global npc_team directory")
+ if copied_path:
+ return str(copied_path)
+
+ cprint("No MCP server script found in any expected location.", "yellow")
+ return None
+
+ def create_corca_state_and_mcp_client(conversation_id, command_history, npc=None, team=None,
+ current_path=None, mcp_server_path_from_request: Optional[str] = None):
+ from npcsh._state import ShellState
+
+ state = ShellState(
+ conversation_id=conversation_id,
+ stream_output=True,
+ current_mode="corca",
+ chat_model=os.environ.get("NPCSH_CHAT_MODEL", "gemma3:4b"),
+ chat_provider=os.environ.get("NPCSH_CHAT_PROVIDER", "ollama"),
+ current_path=current_path or os.getcwd(),
+ npc=npc,
+ team=team
+ )
+ state.command_history = command_history
+
+ auto_copy_bypass = os.getenv("NPCSH_CORCA_AUTO_COPY_MCP_SERVER", "false").lower() == "true"
+
+ resolved_server_path = _resolve_and_copy_mcp_server_path(
+ explicit_path=mcp_server_path_from_request,
+ current_path=current_path,
+ team_ctx_mcp_servers=team.team_ctx.get('mcp_servers', []) if team and hasattr(team, 'team_ctx') else None,
+ interactive=False,
+ auto_copy_bypass=auto_copy_bypass,
+ force_global=False
+ )
+
+ state.mcp_client = None
+ if resolved_server_path:
+ try:
+ client_instance = MCPClientNPC()
+ if client_instance.connect_sync(resolved_server_path):
+ state.mcp_client = client_instance
+ print(f"Successfully connected MCP client for {conversation_id} to {resolved_server_path}")
+ else:
+ print(f"Failed to connect MCP client for {conversation_id} to {resolved_server_path}. Tools will be unavailable.")
+ except ImportError:
+ print("WARNING: npcsh.corca or MCPClientNPC not found. Cannot initialize MCP client.", file=sys.stderr)
+ except FileNotFoundError as e:
+ print(f"MCP Client Error: {e}")
+ except ValueError as e:
+ print(f"MCP Client Error: {e}")
+ except Exception as e:
+ print(f"An unexpected error occurred during MCP client initialization: {e}")
+ traceback.print_exc()
+
+ return state
+
+ def enter_corca_mode(command: str, **kwargs):
  state: ShellState = kwargs.get('shell_state')
  command_history: CommandHistory = kwargs.get('command_history')

@@ -619,28 +930,43 @@ def enter_corca_mode(command: str,
  return {"output": "Error: Corca mode requires shell state and history.", "messages": kwargs.get('messages', [])}

  all_command_parts = shlex.split(command)
- parsed_args = all_command_parts[1:]
-
  parser = argparse.ArgumentParser(prog="/corca", description="Enter Corca MCP-powered mode.")
  parser.add_argument("--mcp-server-path", type=str, help="Path to an MCP server script.")
+ parser.add_argument("-g", "--global", dest="force_global", action="store_true", help="Force use of global MCP server.")

  try:
- args = parser.parse_args(parsed_args)
+ known_args, remaining_args = parser.parse_known_args(all_command_parts[1:])
  except SystemExit:
  return {"output": "Invalid arguments for /corca. See /help corca.", "messages": state.messages}

  print_corca_welcome_message()

- mcp_client = MCPClientNPC()
- server_path = args.mcp_server_path
- if not server_path and state.team and hasattr(state.team, 'team_ctx'):
- server_path = state.team.team_ctx.get('mcp_server')
-
- if server_path:
- if mcp_client.connect_sync(server_path):
- state.mcp_client = mcp_client
+ auto_copy_bypass = os.getenv("NPCSH_CORCA_AUTO_COPY_MCP_SERVER", "false").lower() == "true"
+
+ resolved_server_path = _resolve_and_copy_mcp_server_path(
+ explicit_path=known_args.mcp_server_path,
+ current_path=state.current_path,
+ team_ctx_mcp_servers=state.team.team_ctx.get('mcp_servers', []) if state.team and hasattr(state.team, 'team_ctx') else None,
+ interactive=True,
+ auto_copy_bypass=auto_copy_bypass,
+ force_global=known_args.force_global
+ )
+
+ mcp_client = None
+ if resolved_server_path:
+ try:
+ mcp_client = MCPClientNPC()
+ if mcp_client.connect_sync(resolved_server_path):
+ state.mcp_client = mcp_client
+ else:
+ cprint(f"Failed to connect to MCP server at {resolved_server_path}. Corca mode will have limited agent functionality.", "yellow")
+ state.mcp_client = None
+ except Exception as e:
+ cprint(f"Error connecting to MCP server: {e}. Corca mode will have limited agent functionality.", "red")
+ traceback.print_exc()
+ state.mcp_client = None
  else:
- cprint("No MCP server path provided. Corca mode will have limited agent functionality.", "yellow")
+ cprint("No MCP server path provided or found. Corca mode will have limited agent functionality.", "yellow")
  state.mcp_client = None

  while True:
@@ -652,7 +978,11 @@ def enter_corca_mode(command: str,
  prompt_str = f"{colored(os.path.basename(state.current_path), 'blue')}:{prompt_npc_name}🦌> "
  prompt = readline_safe_prompt(prompt_str)

- user_input = get_multiline_input(prompt).strip()
+ if remaining_args:
+ user_input = " ".join(remaining_args)
+ remaining_args = []
+ else:
+ user_input = get_multiline_input(prompt).strip()

  if user_input.lower() in ["exit", "quit", "done"]:
  break
@@ -681,14 +1011,15 @@ def enter_corca_mode(command: str,

  render_markdown("\n# Exiting Corca Mode")
  return {"output": "", "messages": state.messages}
+
  def main():
  parser = argparse.ArgumentParser(description="Corca - An MCP-powered npcsh shell.")
  parser.add_argument("--mcp-server-path", type=str, help="Path to an MCP server script to connect to.")
+ parser.add_argument("-g", "--global", dest="force_global", action="store_true", help="Force use of global MCP server.")
  args = parser.parse_args()

  command_history, team, default_npc = setup_shell()

- # Override default_npc with corca priority
  project_team_path = os.path.abspath('./npc_team/')
  global_team_path = os.path.expanduser('~/.npcsh/npc_team/')

@@ -703,9 +1034,17 @@ def main():
  db_conn=command_history.engine)
  print('Team Default: ', team.provider, team.model)
  if default_npc.model is None:
- default_npc.model = team.model
+ if team.model is not None:
+ default_npc.model = team.model
+ else:
+ default_npc.model = NPCSH_CHAT_MODEL
+
  if default_npc.provider is None:
- default_npc.provider = team.provider
+ if team.provider is not None:
+ default_npc.provider = team.provider
+ else:
+ default_npc.provider = NPCSH_CHAT_PROVIDER
+
  from npcsh._state import initial_state
  initial_shell_state = initial_state
  initial_shell_state.team = team
@@ -714,6 +1053,8 @@ def main():
  fake_command_str = "/corca"
  if args.mcp_server_path:
  fake_command_str = f'/corca --mcp-server-path "{args.mcp_server_path}"'
+ elif args.force_global:
+ fake_command_str = "/corca --global"

  kwargs = {
  'command': fake_command_str,
@@ -722,5 +1063,6 @@ def main():
  }

  enter_corca_mode(**kwargs)
+
  if __name__ == "__main__":
- main()
+ main()