code-puppy 0.0.130__py3-none-any.whl → 0.0.132__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- code_puppy/command_line/mcp_commands.py +591 -106
- code_puppy/mcp/blocking_startup.py +404 -0
- code_puppy/mcp/captured_stdio_server.py +282 -0
- code_puppy/mcp/config_wizard.py +151 -117
- code_puppy/mcp/managed_server.py +55 -1
- code_puppy/mcp/server_registry_catalog.py +346 -46
- code_puppy/mcp/system_tools.py +214 -0
- code_puppy/messaging/__init__.py +4 -0
- code_puppy/messaging/message_queue.py +86 -0
- code_puppy/messaging/renderers.py +94 -0
- code_puppy/tui/app.py +24 -1
- code_puppy/tui/components/chat_view.py +33 -18
- code_puppy/tui/components/human_input_modal.py +171 -0
- code_puppy/tui/screens/__init__.py +3 -1
- code_puppy/tui/screens/mcp_install_wizard.py +593 -0
- {code_puppy-0.0.130.dist-info → code_puppy-0.0.132.dist-info}/METADATA +1 -1
- {code_puppy-0.0.130.dist-info → code_puppy-0.0.132.dist-info}/RECORD +21 -16
- {code_puppy-0.0.130.data → code_puppy-0.0.132.data}/data/code_puppy/models.json +0 -0
- {code_puppy-0.0.130.dist-info → code_puppy-0.0.132.dist-info}/WHEEL +0 -0
- {code_puppy-0.0.130.dist-info → code_puppy-0.0.132.dist-info}/entry_points.txt +0 -0
- {code_puppy-0.0.130.dist-info → code_puppy-0.0.132.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,214 @@
|
|
|
1
|
+
"""
|
|
2
|
+
System tool detection and validation for MCP server requirements.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import shutil
|
|
6
|
+
import subprocess
|
|
7
|
+
from typing import Dict, List, Optional, Tuple
|
|
8
|
+
from dataclasses import dataclass
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
@dataclass
class ToolInfo:
    """Information about a detected system tool."""
    # Tool name as queried, e.g. "node" or "python3".
    name: str
    # True when the executable was found on PATH (version probing may
    # still have failed -- see `error`).
    available: bool
    # Parsed version string, if the version command succeeded.
    version: Optional[str] = None
    # Absolute path to the executable, when found.
    path: Optional[str] = None
    # Human-readable reason when detection or version lookup failed.
    error: Optional[str] = None
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class SystemToolDetector:
|
|
22
|
+
"""Detect and validate system tools required by MCP servers."""
|
|
23
|
+
|
|
24
|
+
# Tool version commands
|
|
25
|
+
VERSION_COMMANDS = {
|
|
26
|
+
"node": ["node", "--version"],
|
|
27
|
+
"npm": ["npm", "--version"],
|
|
28
|
+
"npx": ["npx", "--version"],
|
|
29
|
+
"python": ["python", "--version"],
|
|
30
|
+
"python3": ["python3", "--version"],
|
|
31
|
+
"pip": ["pip", "--version"],
|
|
32
|
+
"pip3": ["pip3", "--version"],
|
|
33
|
+
"git": ["git", "--version"],
|
|
34
|
+
"docker": ["docker", "--version"],
|
|
35
|
+
"java": ["java", "-version"],
|
|
36
|
+
"go": ["go", "version"],
|
|
37
|
+
"rust": ["rustc", "--version"],
|
|
38
|
+
"cargo": ["cargo", "--version"],
|
|
39
|
+
"julia": ["julia", "--version"],
|
|
40
|
+
"R": ["R", "--version"],
|
|
41
|
+
"php": ["php", "--version"],
|
|
42
|
+
"ruby": ["ruby", "--version"],
|
|
43
|
+
"perl": ["perl", "--version"],
|
|
44
|
+
"swift": ["swift", "--version"],
|
|
45
|
+
"dotnet": ["dotnet", "--version"],
|
|
46
|
+
"jupyter": ["jupyter", "--version"],
|
|
47
|
+
"code": ["code", "--version"], # VS Code
|
|
48
|
+
"vim": ["vim", "--version"],
|
|
49
|
+
"emacs": ["emacs", "--version"],
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
@classmethod
|
|
53
|
+
def detect_tool(cls, tool_name: str) -> ToolInfo:
|
|
54
|
+
"""Detect if a tool is available and get its version."""
|
|
55
|
+
# First check if tool is in PATH
|
|
56
|
+
tool_path = shutil.which(tool_name)
|
|
57
|
+
|
|
58
|
+
if not tool_path:
|
|
59
|
+
return ToolInfo(
|
|
60
|
+
name=tool_name,
|
|
61
|
+
available=False,
|
|
62
|
+
error=f"{tool_name} not found in PATH"
|
|
63
|
+
)
|
|
64
|
+
|
|
65
|
+
# Try to get version
|
|
66
|
+
version_cmd = cls.VERSION_COMMANDS.get(tool_name)
|
|
67
|
+
version = None
|
|
68
|
+
error = None
|
|
69
|
+
|
|
70
|
+
if version_cmd:
|
|
71
|
+
try:
|
|
72
|
+
# Run version command
|
|
73
|
+
result = subprocess.run(
|
|
74
|
+
version_cmd,
|
|
75
|
+
capture_output=True,
|
|
76
|
+
text=True,
|
|
77
|
+
timeout=10
|
|
78
|
+
)
|
|
79
|
+
|
|
80
|
+
if result.returncode == 0:
|
|
81
|
+
# Parse version from output
|
|
82
|
+
output = result.stdout.strip() or result.stderr.strip()
|
|
83
|
+
version = cls._parse_version(tool_name, output)
|
|
84
|
+
else:
|
|
85
|
+
error = f"Version check failed: {result.stderr.strip()}"
|
|
86
|
+
|
|
87
|
+
except subprocess.TimeoutExpired:
|
|
88
|
+
error = "Version check timed out"
|
|
89
|
+
except Exception as e:
|
|
90
|
+
error = f"Version check error: {str(e)}"
|
|
91
|
+
|
|
92
|
+
return ToolInfo(
|
|
93
|
+
name=tool_name,
|
|
94
|
+
available=True,
|
|
95
|
+
version=version,
|
|
96
|
+
path=tool_path,
|
|
97
|
+
error=error
|
|
98
|
+
)
|
|
99
|
+
|
|
100
|
+
@classmethod
|
|
101
|
+
def detect_tools(cls, tool_names: List[str]) -> Dict[str, ToolInfo]:
|
|
102
|
+
"""Detect multiple tools."""
|
|
103
|
+
return {name: cls.detect_tool(name) for name in tool_names}
|
|
104
|
+
|
|
105
|
+
@classmethod
|
|
106
|
+
def _parse_version(cls, tool_name: str, output: str) -> Optional[str]:
|
|
107
|
+
"""Parse version string from command output."""
|
|
108
|
+
if not output:
|
|
109
|
+
return None
|
|
110
|
+
|
|
111
|
+
# Common version patterns
|
|
112
|
+
import re
|
|
113
|
+
|
|
114
|
+
# Try to find version pattern like "v1.2.3" or "1.2.3"
|
|
115
|
+
version_patterns = [
|
|
116
|
+
r'v?(\d+\.\d+\.\d+(?:\.\d+)?)', # Standard semver
|
|
117
|
+
r'(\d+\.\d+\.\d+)', # Simple version
|
|
118
|
+
r'version\s+v?(\d+\.\d+\.\d+)', # "version 1.2.3"
|
|
119
|
+
r'v?(\d+\.\d+)', # Major.minor only
|
|
120
|
+
]
|
|
121
|
+
|
|
122
|
+
for pattern in version_patterns:
|
|
123
|
+
match = re.search(pattern, output, re.IGNORECASE)
|
|
124
|
+
if match:
|
|
125
|
+
return match.group(1)
|
|
126
|
+
|
|
127
|
+
# If no pattern matches, return first line (common for many tools)
|
|
128
|
+
first_line = output.split('\n')[0].strip()
|
|
129
|
+
if len(first_line) < 100: # Reasonable length for a version string
|
|
130
|
+
return first_line
|
|
131
|
+
|
|
132
|
+
return None
|
|
133
|
+
|
|
134
|
+
@classmethod
|
|
135
|
+
def check_package_dependencies(cls, packages: List[str]) -> Dict[str, bool]:
|
|
136
|
+
"""Check if package dependencies are available."""
|
|
137
|
+
results = {}
|
|
138
|
+
|
|
139
|
+
for package in packages:
|
|
140
|
+
available = False
|
|
141
|
+
|
|
142
|
+
# Try different package managers/methods
|
|
143
|
+
if package.startswith('@') or '/' in package:
|
|
144
|
+
# Likely npm package
|
|
145
|
+
available = cls._check_npm_package(package)
|
|
146
|
+
elif package in ['jupyter', 'pandas', 'numpy', 'matplotlib']:
|
|
147
|
+
# Python packages
|
|
148
|
+
available = cls._check_python_package(package)
|
|
149
|
+
else:
|
|
150
|
+
# Try both npm and python
|
|
151
|
+
available = cls._check_npm_package(package) or cls._check_python_package(package)
|
|
152
|
+
|
|
153
|
+
results[package] = available
|
|
154
|
+
|
|
155
|
+
return results
|
|
156
|
+
|
|
157
|
+
@classmethod
|
|
158
|
+
def _check_npm_package(cls, package: str) -> bool:
|
|
159
|
+
"""Check if an npm package is available."""
|
|
160
|
+
try:
|
|
161
|
+
result = subprocess.run(
|
|
162
|
+
["npm", "list", "-g", package],
|
|
163
|
+
capture_output=True,
|
|
164
|
+
text=True,
|
|
165
|
+
timeout=10
|
|
166
|
+
)
|
|
167
|
+
return result.returncode == 0
|
|
168
|
+
except:
|
|
169
|
+
return False
|
|
170
|
+
|
|
171
|
+
@classmethod
|
|
172
|
+
def _check_python_package(cls, package: str) -> bool:
|
|
173
|
+
"""Check if a Python package is available."""
|
|
174
|
+
try:
|
|
175
|
+
import importlib
|
|
176
|
+
importlib.import_module(package)
|
|
177
|
+
return True
|
|
178
|
+
except ImportError:
|
|
179
|
+
return False
|
|
180
|
+
|
|
181
|
+
@classmethod
|
|
182
|
+
def get_installation_suggestions(cls, tool_name: str) -> List[str]:
|
|
183
|
+
"""Get installation suggestions for a missing tool."""
|
|
184
|
+
suggestions = {
|
|
185
|
+
"node": [
|
|
186
|
+
"Install Node.js from https://nodejs.org",
|
|
187
|
+
"Or use package manager: brew install node (macOS) / sudo apt install nodejs (Ubuntu)"
|
|
188
|
+
],
|
|
189
|
+
"npm": ["Usually comes with Node.js - install Node.js first"],
|
|
190
|
+
"npx": ["Usually comes with npm 5.2+ - update npm: npm install -g npm"],
|
|
191
|
+
"python": [
|
|
192
|
+
"Install Python from https://python.org",
|
|
193
|
+
"Or use package manager: brew install python (macOS) / sudo apt install python3 (Ubuntu)"
|
|
194
|
+
],
|
|
195
|
+
"python3": ["Same as python - install Python 3.x"],
|
|
196
|
+
"pip": ["Usually comes with Python - try: python -m ensurepip"],
|
|
197
|
+
"pip3": ["Usually comes with Python 3 - try: python3 -m ensurepip"],
|
|
198
|
+
"git": [
|
|
199
|
+
"Install Git from https://git-scm.com",
|
|
200
|
+
"Or use package manager: brew install git (macOS) / sudo apt install git (Ubuntu)"
|
|
201
|
+
],
|
|
202
|
+
"docker": ["Install Docker from https://docker.com"],
|
|
203
|
+
"java": [
|
|
204
|
+
"Install OpenJDK from https://openjdk.java.net",
|
|
205
|
+
"Or use package manager: brew install openjdk (macOS) / sudo apt install default-jdk (Ubuntu)"
|
|
206
|
+
],
|
|
207
|
+
"jupyter": ["Install with pip: pip install jupyter"],
|
|
208
|
+
}
|
|
209
|
+
|
|
210
|
+
return suggestions.get(tool_name, [f"Please install {tool_name} manually"])
|
|
211
|
+
|
|
212
|
+
|
|
213
|
+
# Global detector instance shared by callers that don't need their own.
# (All methods are classmethods, so the instance carries no state.)
detector = SystemToolDetector()
|
code_puppy/messaging/__init__.py
CHANGED
|
@@ -10,12 +10,14 @@ from .message_queue import (
|
|
|
10
10
|
emit_info,
|
|
11
11
|
emit_message,
|
|
12
12
|
emit_planned_next_steps,
|
|
13
|
+
emit_prompt,
|
|
13
14
|
emit_success,
|
|
14
15
|
emit_system_message,
|
|
15
16
|
emit_tool_output,
|
|
16
17
|
emit_warning,
|
|
17
18
|
get_buffered_startup_messages,
|
|
18
19
|
get_global_queue,
|
|
20
|
+
provide_prompt_response,
|
|
19
21
|
)
|
|
20
22
|
from .queue_console import QueueConsole, get_queue_console
|
|
21
23
|
from .renderers import InteractiveRenderer, SynchronousInteractiveRenderer, TUIRenderer
|
|
@@ -37,6 +39,8 @@ __all__ = [
|
|
|
37
39
|
"emit_planned_next_steps",
|
|
38
40
|
"emit_agent_response",
|
|
39
41
|
"emit_system_message",
|
|
42
|
+
"emit_prompt",
|
|
43
|
+
"provide_prompt_response",
|
|
40
44
|
"get_buffered_startup_messages",
|
|
41
45
|
"InteractiveRenderer",
|
|
42
46
|
"TUIRenderer",
|
|
@@ -37,6 +37,9 @@ class MessageType(Enum):
|
|
|
37
37
|
AGENT_RESPONSE = "agent_response"
|
|
38
38
|
AGENT_STATUS = "agent_status"
|
|
39
39
|
|
|
40
|
+
# Human interaction types
|
|
41
|
+
HUMAN_INPUT_REQUEST = "human_input_request"
|
|
42
|
+
|
|
40
43
|
# System types
|
|
41
44
|
SYSTEM = "system"
|
|
42
45
|
DEBUG = "debug"
|
|
@@ -71,6 +74,8 @@ class MessageQueue:
|
|
|
71
74
|
self._startup_buffer = [] # Buffer messages before any renderer starts
|
|
72
75
|
self._has_active_renderer = False
|
|
73
76
|
self._event_loop = None # Store reference to the event loop
|
|
77
|
+
self._prompt_responses = {} # Store responses to human input requests
|
|
78
|
+
self._prompt_id_counter = 0 # Counter for unique prompt IDs
|
|
74
79
|
|
|
75
80
|
def start(self):
|
|
76
81
|
"""Start the queue processing."""
|
|
@@ -192,6 +197,53 @@ class MessageQueue:
|
|
|
192
197
|
"""Mark that no renderer is currently active."""
|
|
193
198
|
self._has_active_renderer = False
|
|
194
199
|
|
|
200
|
+
def create_prompt_request(self, prompt_text: str) -> str:
    """Register a new human-input request and return its unique ID.

    Emits a HUMAN_INPUT_REQUEST message carrying the ID so an active
    renderer can collect the user's answer.
    """
    self._prompt_id_counter += 1
    new_id = f"prompt_{self._prompt_id_counter}"

    # Broadcast the request through the normal message path.
    self.emit(
        UIMessage(
            type=MessageType.HUMAN_INPUT_REQUEST,
            content=prompt_text,
            metadata={"prompt_id": new_id},
        )
    )
    return new_id
|
|
214
|
+
|
|
215
|
+
def wait_for_prompt_response(self, prompt_id: str, timeout: Optional[float] = None) -> str:
    """Block until a response for *prompt_id* arrives.

    Polls the shared response map because answers are delivered from a
    different thread (the renderer). Raises TimeoutError when *timeout*
    seconds elapse without a response; timeout=None waits forever.
    """
    import time

    from code_puppy.state_management import is_tui_mode

    start_time = time.time()
    # Poll faster in TUI mode so the UI event loop gets control sooner.
    sleep_interval = 0.05 if is_tui_mode() else 0.1

    while True:
        # EAFP pop: consume each response exactly once, atomically.
        try:
            return self._prompt_responses.pop(prompt_id)
        except KeyError:
            pass

        if timeout and (time.time() - start_time) > timeout:
            raise TimeoutError(
                f"No response received for prompt {prompt_id} within {timeout} seconds"
            )

        time.sleep(sleep_interval)
|
|
239
|
+
|
|
240
|
+
def provide_prompt_response(self, prompt_id: str, response: str):
    """Deliver *response* for the request identified by *prompt_id*.

    Called from the renderer thread; wait_for_prompt_response() polls
    the shared map and consumes the entry. (The previous version also
    printed a [DEBUG] line to stdout, which corrupts the TUI display.)
    """
    self._prompt_responses[prompt_id] = response
|
|
246
|
+
|
|
195
247
|
|
|
196
248
|
# Global message queue instance
|
|
197
249
|
_global_queue: Optional[MessageQueue] = None
|
|
@@ -286,3 +338,37 @@ def emit_divider(content: str = "[dim]" + "─" * 100 + "\n" + "[/dim]", **metad
|
|
|
286
338
|
emit_message(MessageType.DIVIDER, content, **metadata)
|
|
287
339
|
else:
|
|
288
340
|
pass
|
|
341
|
+
|
|
342
|
+
|
|
343
|
+
def emit_prompt(prompt_text: str, timeout: Optional[float] = None) -> str:
    """Emit a human input request and wait for the user's response.

    In interactive (non-TUI) mode the prompt is displayed and the answer
    is read directly from stdin; in TUI mode the request is routed
    through the global message queue so the TUI renderer can answer it.
    """
    from code_puppy.state_management import is_tui_mode

    # In interactive mode, use direct input instead of the queue system
    if not is_tui_mode():
        # Emit the prompt as a message for display
        from code_puppy.messaging import emit_info
        emit_info(f"[yellow]{prompt_text}[/yellow]")

        try:
            # Try to use rich console for better formatting
            from rich.console import Console
            console = Console()
            return console.input("[cyan]>>> [/cyan]")
        except Exception:
            # Narrow fallback only: a bare except here used to swallow
            # KeyboardInterrupt, making Ctrl+C unusable at the prompt.
            return input(">>> ")

    # In TUI mode, use the queue system
    queue = get_global_queue()
    prompt_id = queue.create_prompt_request(prompt_text)
    return queue.wait_for_prompt_response(prompt_id, timeout)
|
|
369
|
+
|
|
370
|
+
|
|
371
|
+
def provide_prompt_response(prompt_id: str, response: str):
    """Forward a human-input answer to the global message queue."""
    get_global_queue().provide_prompt_response(prompt_id, response)
|
|
@@ -81,6 +81,11 @@ class InteractiveRenderer(MessageRenderer):
|
|
|
81
81
|
|
|
82
82
|
async def render_message(self, message: UIMessage):
|
|
83
83
|
"""Render a message using Rich console."""
|
|
84
|
+
# Handle human input requests
|
|
85
|
+
if message.type == MessageType.HUMAN_INPUT_REQUEST:
|
|
86
|
+
await self._handle_human_input_request(message)
|
|
87
|
+
return
|
|
88
|
+
|
|
84
89
|
# Convert message type to appropriate Rich styling
|
|
85
90
|
if message.type == MessageType.ERROR:
|
|
86
91
|
style = "bold red"
|
|
@@ -125,6 +130,15 @@ class InteractiveRenderer(MessageRenderer):
|
|
|
125
130
|
if hasattr(self.console.file, "flush"):
|
|
126
131
|
self.console.file.flush()
|
|
127
132
|
|
|
133
|
+
async def _handle_human_input_request(self, message: UIMessage):
    """Display a human-input request on the console.

    This async renderer has no interactive input path yet, so the
    request is only rendered, not answered.
    """
    console = self.console
    console.print(f"[bold cyan]INPUT REQUESTED:[/bold cyan] {message.content}")
    # Flush immediately so the prompt is visible without further output.
    out = console.file
    if hasattr(out, "flush"):
        out.flush()
|
|
141
|
+
|
|
128
142
|
|
|
129
143
|
class TUIRenderer(MessageRenderer):
|
|
130
144
|
"""Renderer for TUI mode that adds messages to the chat view."""
|
|
@@ -142,6 +156,11 @@ class TUIRenderer(MessageRenderer):
|
|
|
142
156
|
if not self.tui_app:
|
|
143
157
|
return
|
|
144
158
|
|
|
159
|
+
# Handle human input requests
|
|
160
|
+
if message.type == MessageType.HUMAN_INPUT_REQUEST:
|
|
161
|
+
await self._handle_human_input_request(message)
|
|
162
|
+
return
|
|
163
|
+
|
|
145
164
|
# Extract group_id from message metadata (fixing the key name)
|
|
146
165
|
group_id = message.metadata.get("message_group") if message.metadata else None
|
|
147
166
|
|
|
@@ -199,6 +218,48 @@ class TUIRenderer(MessageRenderer):
|
|
|
199
218
|
# Default to system message
|
|
200
219
|
self.tui_app.add_system_message(content_str, message_group=group_id)
|
|
201
220
|
|
|
221
|
+
async def _handle_human_input_request(self, message: UIMessage):
    """Handle a human input request in TUI mode.

    A real input modal is not wired up yet, so the prompt is surfaced in
    the chat view and an empty response is sent back immediately to
    unblock the thread waiting in wait_for_prompt_response().

    Fixes over the previous version: removed the print()-based [DEBUG]
    logging and traceback.print_exc(), both of which write to stdout and
    corrupt the Textual display, and extract prompt_id exactly once.
    """
    from code_puppy.messaging import provide_prompt_response

    prompt_id = message.metadata.get("prompt_id") if message.metadata else None

    try:
        if not self.tui_app:
            # Nowhere to display the prompt: just unblock the requester.
            if prompt_id:
                provide_prompt_response(prompt_id, "")
            return

        if not prompt_id:
            self.tui_app.add_error_message("Error: Invalid human input request")
            return

        # Fallback display instead of a modal (modal support pending).
        self.tui_app.add_system_message(f"[yellow]INPUT NEEDED:[/yellow] {str(message.content)}")
        self.tui_app.add_system_message("[dim]This would normally show a modal, but using fallback to prevent crashes[/dim]")

        # Provide an empty response to release the waiting thread.
        provide_prompt_response(prompt_id, "")

    except Exception:
        # Never let a rendering failure leave the requester hanging.
        if prompt_id:
            try:
                provide_prompt_response(prompt_id, "")
            except Exception:
                pass  # Can't do anything more
|
|
262
|
+
|
|
202
263
|
|
|
203
264
|
class SynchronousInteractiveRenderer:
|
|
204
265
|
"""
|
|
@@ -262,6 +323,11 @@ class SynchronousInteractiveRenderer:
|
|
|
262
323
|
|
|
263
324
|
def _render_message(self, message: UIMessage):
|
|
264
325
|
"""Render a message using Rich console."""
|
|
326
|
+
# Handle human input requests
|
|
327
|
+
if message.type == MessageType.HUMAN_INPUT_REQUEST:
|
|
328
|
+
self._handle_human_input_request(message)
|
|
329
|
+
return
|
|
330
|
+
|
|
265
331
|
# Convert message type to appropriate Rich styling
|
|
266
332
|
if message.type == MessageType.ERROR:
|
|
267
333
|
style = "bold red"
|
|
@@ -303,3 +369,31 @@ class SynchronousInteractiveRenderer:
|
|
|
303
369
|
# This fixes the issue where messages don't appear until user input
|
|
304
370
|
if hasattr(self.console.file, "flush"):
|
|
305
371
|
self.console.file.flush()
|
|
372
|
+
|
|
373
|
+
def _handle_human_input_request(self, message: UIMessage):
    """Prompt the user on the console and deliver their answer.

    Displays the request, reads one line from stdin, and hands the result
    back to the message queue so the caller blocked in
    wait_for_prompt_response() can continue. An empty answer is sent on
    Ctrl+C/Ctrl+D or input failure so nobody hangs.
    """
    # Import up front: previously this was imported inside the try block
    # *after* input(), so an EOFError/KeyboardInterrupt handler raised a
    # NameError instead of delivering the empty response.
    from .message_queue import provide_prompt_response

    prompt_id = message.metadata.get("prompt_id") if message.metadata else None
    if not prompt_id:
        self.console.print("[bold red]Error: Invalid human input request[/bold red]")
        return

    # Display the prompt
    self.console.print(f"[bold cyan]{message.content}[/bold cyan]")
    if hasattr(self.console.file, "flush"):
        self.console.file.flush()

    # Get user input
    try:
        # Use basic input for now - could be enhanced with prompt_toolkit later
        response = input(">>> ")
        # Provide the response back to the queue
        provide_prompt_response(prompt_id, response)
    except (EOFError, KeyboardInterrupt):
        # Handle Ctrl+C or Ctrl+D
        provide_prompt_response(prompt_id, "")
    except Exception as e:
        self.console.print(f"[bold red]Error getting input: {e}[/bold red]")
        provide_prompt_response(prompt_id, "")
|
code_puppy/tui/app.py
CHANGED
|
@@ -46,7 +46,7 @@ from .. import state_management
|
|
|
46
46
|
# Import shared message classes
|
|
47
47
|
from .messages import CommandSelected, HistoryEntrySelected
|
|
48
48
|
from .models import ChatMessage, MessageType
|
|
49
|
-
from .screens import HelpScreen, SettingsScreen, ToolsScreen
|
|
49
|
+
from .screens import HelpScreen, SettingsScreen, ToolsScreen, MCPInstallWizardScreen
|
|
50
50
|
|
|
51
51
|
|
|
52
52
|
class CodePuppyTUI(App):
|
|
@@ -82,6 +82,7 @@ class CodePuppyTUI(App):
|
|
|
82
82
|
Binding("ctrl+4", "show_tools", "Tools"),
|
|
83
83
|
Binding("ctrl+5", "focus_input", "Focus Prompt"),
|
|
84
84
|
Binding("ctrl+6", "focus_chat", "Focus Response"),
|
|
85
|
+
Binding("ctrl+t", "open_mcp_wizard", "MCP Install Wizard"),
|
|
85
86
|
]
|
|
86
87
|
|
|
87
88
|
# Reactive variables for app state
|
|
@@ -629,6 +630,28 @@ class CodePuppyTUI(App):
|
|
|
629
630
|
|
|
630
631
|
self.push_screen(SettingsScreen(), handle_settings_result)
|
|
631
632
|
|
|
633
|
+
def action_open_mcp_wizard(self) -> None:
    """Open the MCP Install Wizard."""

    def handle_wizard_result(result):
        if not result:
            return
        if result.get("success"):
            # Announce the successful installation.
            self.add_system_message(result.get("message", "MCP server installed successfully"))
            server_name = result.get("server_name")
            if server_name:
                # Nudge the user toward actually starting the new server.
                self.add_system_message(f"💡 Use '/mcp start {server_name}' to start the server")
        elif "cancelled" not in result.get("message", "").lower():
            # Surface failures, but stay quiet on user cancellation.
            self.add_error_message(result.get("message", "MCP installation failed"))

    self.push_screen(MCPInstallWizardScreen(), handle_wizard_result)
|
|
654
|
+
|
|
632
655
|
def process_initial_command(self) -> None:
|
|
633
656
|
"""Process the initial command provided when starting the TUI."""
|
|
634
657
|
if self.initial_command:
|
|
@@ -11,7 +11,7 @@ from rich.syntax import Syntax
|
|
|
11
11
|
from rich.text import Text
|
|
12
12
|
from textual import on
|
|
13
13
|
from textual.containers import Vertical, VerticalScroll
|
|
14
|
-
from textual.widgets import Static
|
|
14
|
+
from textual.widgets import Static, Collapsible
|
|
15
15
|
|
|
16
16
|
from ..models import ChatMessage, MessageType
|
|
17
17
|
from .copy_button import CopyButton
|
|
@@ -30,14 +30,16 @@ class ChatView(VerticalScroll):
|
|
|
30
30
|
}
|
|
31
31
|
|
|
32
32
|
.user-message {
|
|
33
|
-
background:
|
|
33
|
+
background: $primary-darken-3;
|
|
34
34
|
color: #ffffff;
|
|
35
35
|
margin: 0 0 1 0;
|
|
36
36
|
margin-top: 0;
|
|
37
|
-
padding:
|
|
38
|
-
padding-top:
|
|
37
|
+
padding: 1;
|
|
38
|
+
padding-top: 1;
|
|
39
39
|
text-wrap: wrap;
|
|
40
|
-
border:
|
|
40
|
+
border: none;
|
|
41
|
+
border-left: thick $accent;
|
|
42
|
+
text-style: bold;
|
|
41
43
|
}
|
|
42
44
|
|
|
43
45
|
.agent-message {
|
|
@@ -48,7 +50,7 @@ class ChatView(VerticalScroll):
|
|
|
48
50
|
padding: 0;
|
|
49
51
|
padding-top: 0;
|
|
50
52
|
text-wrap: wrap;
|
|
51
|
-
border:
|
|
53
|
+
border: none;
|
|
52
54
|
}
|
|
53
55
|
|
|
54
56
|
.system-message {
|
|
@@ -60,7 +62,7 @@ class ChatView(VerticalScroll):
|
|
|
60
62
|
padding-top: 0;
|
|
61
63
|
text-style: italic;
|
|
62
64
|
text-wrap: wrap;
|
|
63
|
-
border:
|
|
65
|
+
border: none;
|
|
64
66
|
}
|
|
65
67
|
|
|
66
68
|
.error-message {
|
|
@@ -71,7 +73,7 @@ class ChatView(VerticalScroll):
|
|
|
71
73
|
padding: 0;
|
|
72
74
|
padding-top: 0;
|
|
73
75
|
text-wrap: wrap;
|
|
74
|
-
border:
|
|
76
|
+
border: none;
|
|
75
77
|
}
|
|
76
78
|
|
|
77
79
|
.agent_reasoning-message {
|
|
@@ -83,7 +85,7 @@ class ChatView(VerticalScroll):
|
|
|
83
85
|
padding-top: 0;
|
|
84
86
|
text-wrap: wrap;
|
|
85
87
|
text-style: italic;
|
|
86
|
-
border:
|
|
88
|
+
border: none;
|
|
87
89
|
}
|
|
88
90
|
|
|
89
91
|
.planned_next_steps-message {
|
|
@@ -95,7 +97,7 @@ class ChatView(VerticalScroll):
|
|
|
95
97
|
padding-top: 0;
|
|
96
98
|
text-wrap: wrap;
|
|
97
99
|
text-style: italic;
|
|
98
|
-
border:
|
|
100
|
+
border: none;
|
|
99
101
|
}
|
|
100
102
|
|
|
101
103
|
.agent_response-message {
|
|
@@ -106,7 +108,7 @@ class ChatView(VerticalScroll):
|
|
|
106
108
|
padding: 0;
|
|
107
109
|
padding-top: 0;
|
|
108
110
|
text-wrap: wrap;
|
|
109
|
-
border:
|
|
111
|
+
border: none;
|
|
110
112
|
}
|
|
111
113
|
|
|
112
114
|
.info-message {
|
|
@@ -117,7 +119,7 @@ class ChatView(VerticalScroll):
|
|
|
117
119
|
padding: 0;
|
|
118
120
|
padding-top: 0;
|
|
119
121
|
text-wrap: wrap;
|
|
120
|
-
border:
|
|
122
|
+
border: none;
|
|
121
123
|
}
|
|
122
124
|
|
|
123
125
|
.success-message {
|
|
@@ -128,7 +130,7 @@ class ChatView(VerticalScroll):
|
|
|
128
130
|
padding: 0;
|
|
129
131
|
padding-top: 0;
|
|
130
132
|
text-wrap: wrap;
|
|
131
|
-
border:
|
|
133
|
+
border: none;
|
|
132
134
|
}
|
|
133
135
|
|
|
134
136
|
.warning-message {
|
|
@@ -139,7 +141,7 @@ class ChatView(VerticalScroll):
|
|
|
139
141
|
padding: 0;
|
|
140
142
|
padding-top: 0;
|
|
141
143
|
text-wrap: wrap;
|
|
142
|
-
border:
|
|
144
|
+
border: none;
|
|
143
145
|
}
|
|
144
146
|
|
|
145
147
|
.tool_output-message {
|
|
@@ -150,7 +152,7 @@ class ChatView(VerticalScroll):
|
|
|
150
152
|
padding: 0;
|
|
151
153
|
padding-top: 0;
|
|
152
154
|
text-wrap: wrap;
|
|
153
|
-
border:
|
|
155
|
+
border: none;
|
|
154
156
|
}
|
|
155
157
|
|
|
156
158
|
.command_output-message {
|
|
@@ -161,7 +163,7 @@ class ChatView(VerticalScroll):
|
|
|
161
163
|
padding: 0;
|
|
162
164
|
padding-top: 0;
|
|
163
165
|
text-wrap: wrap;
|
|
164
|
-
border:
|
|
166
|
+
border: none;
|
|
165
167
|
}
|
|
166
168
|
|
|
167
169
|
.message-container {
|
|
@@ -339,8 +341,21 @@ class ChatView(VerticalScroll):
|
|
|
339
341
|
css_class = f"{message.type.value}-message"
|
|
340
342
|
|
|
341
343
|
if message.type == MessageType.USER:
|
|
342
|
-
|
|
343
|
-
|
|
344
|
+
# Add user indicator and make it stand out
|
|
345
|
+
content_lines = message.content.split('\n')
|
|
346
|
+
if len(content_lines) > 1:
|
|
347
|
+
# Multi-line user message
|
|
348
|
+
formatted_content = f"╔══ USER ══╗\n{message.content}\n╚══════════╝"
|
|
349
|
+
else:
|
|
350
|
+
# Single line user message
|
|
351
|
+
formatted_content = f"▶ USER: {message.content}"
|
|
352
|
+
|
|
353
|
+
message_widget = Static(Text(formatted_content), classes=css_class)
|
|
354
|
+
# User messages are not collapsible - mount directly
|
|
355
|
+
self.mount(message_widget)
|
|
356
|
+
# Auto-scroll to bottom
|
|
357
|
+
self._schedule_scroll()
|
|
358
|
+
return
|
|
344
359
|
elif message.type == MessageType.AGENT:
|
|
345
360
|
prefix = "AGENT: "
|
|
346
361
|
content = f"{message.content}"
|