ripperdoc 0.2.10__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. ripperdoc/__init__.py +1 -1
  2. ripperdoc/cli/cli.py +164 -57
  3. ripperdoc/cli/commands/__init__.py +4 -0
  4. ripperdoc/cli/commands/agents_cmd.py +3 -7
  5. ripperdoc/cli/commands/doctor_cmd.py +29 -0
  6. ripperdoc/cli/commands/memory_cmd.py +2 -1
  7. ripperdoc/cli/commands/models_cmd.py +61 -5
  8. ripperdoc/cli/commands/resume_cmd.py +1 -0
  9. ripperdoc/cli/commands/skills_cmd.py +103 -0
  10. ripperdoc/cli/commands/stats_cmd.py +4 -4
  11. ripperdoc/cli/commands/status_cmd.py +10 -0
  12. ripperdoc/cli/commands/tasks_cmd.py +6 -3
  13. ripperdoc/cli/commands/themes_cmd.py +139 -0
  14. ripperdoc/cli/ui/file_mention_completer.py +63 -13
  15. ripperdoc/cli/ui/helpers.py +6 -3
  16. ripperdoc/cli/ui/interrupt_handler.py +34 -0
  17. ripperdoc/cli/ui/panels.py +13 -8
  18. ripperdoc/cli/ui/rich_ui.py +451 -32
  19. ripperdoc/cli/ui/spinner.py +68 -5
  20. ripperdoc/cli/ui/tool_renderers.py +10 -9
  21. ripperdoc/cli/ui/wizard.py +18 -11
  22. ripperdoc/core/agents.py +4 -0
  23. ripperdoc/core/config.py +235 -0
  24. ripperdoc/core/default_tools.py +1 -0
  25. ripperdoc/core/hooks/llm_callback.py +0 -1
  26. ripperdoc/core/hooks/manager.py +6 -0
  27. ripperdoc/core/permissions.py +82 -5
  28. ripperdoc/core/providers/openai.py +55 -9
  29. ripperdoc/core/query.py +349 -108
  30. ripperdoc/core/query_utils.py +17 -14
  31. ripperdoc/core/skills.py +1 -0
  32. ripperdoc/core/theme.py +298 -0
  33. ripperdoc/core/tool.py +8 -3
  34. ripperdoc/protocol/__init__.py +14 -0
  35. ripperdoc/protocol/models.py +300 -0
  36. ripperdoc/protocol/stdio.py +1453 -0
  37. ripperdoc/tools/background_shell.py +49 -5
  38. ripperdoc/tools/bash_tool.py +75 -9
  39. ripperdoc/tools/file_edit_tool.py +98 -29
  40. ripperdoc/tools/file_read_tool.py +139 -8
  41. ripperdoc/tools/file_write_tool.py +46 -3
  42. ripperdoc/tools/grep_tool.py +98 -8
  43. ripperdoc/tools/lsp_tool.py +9 -15
  44. ripperdoc/tools/multi_edit_tool.py +26 -3
  45. ripperdoc/tools/skill_tool.py +52 -1
  46. ripperdoc/tools/task_tool.py +33 -8
  47. ripperdoc/utils/file_watch.py +12 -6
  48. ripperdoc/utils/image_utils.py +125 -0
  49. ripperdoc/utils/log.py +30 -3
  50. ripperdoc/utils/lsp.py +9 -3
  51. ripperdoc/utils/mcp.py +80 -18
  52. ripperdoc/utils/message_formatting.py +2 -2
  53. ripperdoc/utils/messages.py +177 -32
  54. ripperdoc/utils/pending_messages.py +50 -0
  55. ripperdoc/utils/permissions/shell_command_validation.py +3 -3
  56. ripperdoc/utils/permissions/tool_permission_utils.py +9 -3
  57. ripperdoc/utils/platform.py +198 -0
  58. ripperdoc/utils/session_heatmap.py +1 -3
  59. ripperdoc/utils/session_history.py +2 -2
  60. ripperdoc/utils/session_stats.py +1 -0
  61. ripperdoc/utils/shell_utils.py +8 -5
  62. ripperdoc/utils/todo.py +0 -6
  63. {ripperdoc-0.2.10.dist-info → ripperdoc-0.3.0.dist-info}/METADATA +49 -17
  64. {ripperdoc-0.2.10.dist-info → ripperdoc-0.3.0.dist-info}/RECORD +68 -61
  65. {ripperdoc-0.2.10.dist-info → ripperdoc-0.3.0.dist-info}/WHEEL +1 -1
  66. ripperdoc/sdk/__init__.py +0 -9
  67. ripperdoc/sdk/client.py +0 -408
  68. {ripperdoc-0.2.10.dist-info → ripperdoc-0.3.0.dist-info}/entry_points.txt +0 -0
  69. {ripperdoc-0.2.10.dist-info → ripperdoc-0.3.0.dist-info}/licenses/LICENSE +0 -0
  70. {ripperdoc-0.2.10.dist-info → ripperdoc-0.3.0.dist-info}/top_level.txt +0 -0
ripperdoc/tools/task_tool.py CHANGED
@@ -7,7 +7,7 @@ import os
  import threading
  import time
  from dataclasses import dataclass
- from typing import Any, AsyncGenerator, Callable, Dict, Iterable, List, Optional, Sequence
+ from typing import Any, AsyncGenerator, Callable, Dict, Iterable, List, Optional, Sequence, Union
  from uuid import uuid4

  from pydantic import BaseModel, Field
@@ -39,7 +39,7 @@ from ripperdoc.utils.log import get_logger
  logger = get_logger()


- MessageType = UserMessage | AssistantMessage
+ MessageType = Union[UserMessage, AssistantMessage]


  @dataclass
@@ -152,6 +152,7 @@ async def cancel_agent_run(agent_id: str) -> bool:
      record.task = None
      return True

+
  class TaskToolInput(BaseModel):
      """Input schema for delegating to a subagent."""

@@ -264,7 +265,7 @@ class TaskTool(Tool[TaskToolInput, TaskToolOutput]):
          f"- Fetch background results by calling {background_fetch_tool_name} with resume=<agent_id>. If the agent is still running, set wait=true to block or wait=false to get status only.\n"
          "- To continue a completed agent, call Task with resume=<agent_id> and a new prompt.\n"
          "- Provide clear, detailed prompts so the agent can work autonomously and return exactly the information you need.\n"
-         '- Agents can opt into parent context by setting fork_context: true in their frontmatter. When enabled, they receive the full conversation history before the tool call.\n'
+         "- Agents can opt into parent context by setting fork_context: true in their frontmatter. When enabled, they receive the full conversation history before the tool call.\n"
          "- The agent's outputs should generally be trusted\n"
          "- Clearly tell the agent whether you expect it to write code or just to do research (search, file reads, web fetches, etc.), since it is not aware of the user's intent\n"
          "- If the agent description mentions that it should be used proactively, then you should try your best to use it without the user having to ask for it first. Use your judgement.\n"
@@ -439,6 +440,9 @@ class TaskTool(Tool[TaskToolInput, TaskToolOutput]):

          yield ToolProgress(content=f"Resuming subagent '{record.agent_type}'")

+         # Get the Task tool's tool_use_id to set as parent_tool_use_id for subagent messages
+         parent_tool_use_id = context.message_id
+
          assistant_messages: List[AssistantMessage] = []
          tool_use_count = 0
          async for message in query(
@@ -448,8 +452,11 @@
              subagent_context,
              context.permission_checker,
          ):
-             if getattr(message, "type", "") == "progress":
+             msg_type = getattr(message, "type", "")
+             if msg_type == "progress":
                  continue
+
+             # Track the message for internal state
              tool_use_count, updates = self._track_subagent_message(
                  message,
                  record.history,
@@ -459,6 +466,12 @@ class TaskTool(Tool[TaskToolInput, TaskToolOutput]):
              for update in updates:
                  yield ToolProgress(content=update)

+             # CRITICAL: Also yield subagent messages to SDK for compatibility
+             if msg_type in ("assistant", "user"):
+                 # Set parent_tool_use_id to link subagent messages to the Task tool call
+                 message_with_parent = message.model_copy(update={"parent_tool_use_id": parent_tool_use_id})
+                 yield ToolProgress(content=message_with_parent, is_subagent_message=True)
+
          duration_ms = (time.time() - record.start_time) * 1000
          result_text = (
              self._extract_text(assistant_messages[-1])
@@ -563,6 +576,9 @@ class TaskTool(Tool[TaskToolInput, TaskToolOutput]):

          yield ToolProgress(content=f"Launching subagent '{target_agent.agent_type}'")

+         # Get the Task tool's tool_use_id to set as parent_tool_use_id for subagent messages
+         parent_tool_use_id = context.message_id
+
          assistant_messages = []
          tool_use_count = 0
          async for message in query(
@@ -572,8 +588,11 @@ class TaskTool(Tool[TaskToolInput, TaskToolOutput]):
              subagent_context,
              context.permission_checker,
          ):
-             if getattr(message, "type", "") == "progress":
+             msg_type = getattr(message, "type", "")
+             if msg_type == "progress":
                  continue
+
+             # Track the message for internal state
              tool_use_count, updates = self._track_subagent_message(
                  message,
                  record.history,
@@ -583,6 +602,14 @@ class TaskTool(Tool[TaskToolInput, TaskToolOutput]):
              for update in updates:
                  yield ToolProgress(content=update)

+             # CRITICAL: Also yield subagent messages to SDK for compatibility
+             # This allows SDK clients to see the full subagent conversation
+             if msg_type in ("assistant", "user"):
+                 # Set parent_tool_use_id to link subagent messages to the Task tool call
+                 # Use model_copy() to create a new message with the parent_tool_use_id set
+                 message_with_parent = message.model_copy(update={"parent_tool_use_id": parent_tool_use_id})
+                 yield ToolProgress(content=message_with_parent, is_subagent_message=True)
+
          duration_ms = (time.time() - record.start_time) * 1000
          result_text = (
              self._extract_text(assistant_messages[-1])
@@ -634,9 +661,7 @@ class TaskTool(Tool[TaskToolInput, TaskToolOutput]):
              error=error_override or record.error,
          )

-     def _coerce_parent_history(
-         self, messages: Optional[Sequence[object]]
-     ) -> List[MessageType]:
+     def _coerce_parent_history(self, messages: Optional[Sequence[object]]) -> List[MessageType]:
          if not messages:
              return []
          history: List[MessageType] = []
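
Two notes on this file: the PEP 604 union (`UserMessage | AssistantMessage`) becomes `typing.Union[...]`, which keeps the module importable on Python 3.9, and subagent messages are now re-yielded as `ToolProgress` items flagged `is_subagent_message=True`, each copy stamped with the parent Task call's `tool_use_id`. A minimal consumer sketch; `ToolProgress`, `is_subagent_message`, and `parent_tool_use_id` come from the hunks above, while the stream-handling loop itself is illustrative:

```python
from collections import defaultdict


async def collect_subagent_messages(progress_stream):
    """Bucket re-yielded subagent messages by the Task call that spawned them."""
    by_parent = defaultdict(list)
    async for progress in progress_stream:
        # Subagent traffic is marked by the new is_subagent_message flag.
        if getattr(progress, "is_subagent_message", False):
            message = progress.content  # an AssistantMessage/UserMessage copy
            by_parent[message.parent_tool_use_id].append(message)
    return by_parent
```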
ripperdoc/utils/file_watch.py CHANGED
@@ -8,7 +8,7 @@ import sys
  import threading
  from collections import OrderedDict
  from dataclasses import dataclass
- from typing import Dict, Iterator, List, Optional, Tuple
+ from typing import Dict, Iterator, List, Optional, Tuple, Union

  from ripperdoc.utils.log import get_logger

@@ -27,6 +27,7 @@ class FileSnapshot:
      timestamp: float
      offset: int = 0
      limit: Optional[int] = None
+     encoding: str = "utf-8"

      def memory_size(self) -> int:
          """Estimate memory usage of this snapshot in bytes."""
@@ -230,7 +231,7 @@ class ChangedFileNotice:


  # Type alias for cache - supports both Dict and BoundedFileCache
- FileCacheType = Dict[str, FileSnapshot] | BoundedFileCache
+ FileCacheType = Union[Dict[str, FileSnapshot], BoundedFileCache]


  def record_snapshot(
@@ -240,6 +241,7 @@ def record_snapshot(
      *,
      offset: int = 0,
      limit: Optional[int] = None,
+     encoding: str = "utf-8",
  ) -> None:
      """Store the current contents and mtime for a file."""
      try:
@@ -247,12 +249,14 @@
      except OSError:
          timestamp = 0.0
      cache[file_path] = FileSnapshot(
-         content=content, timestamp=timestamp, offset=offset, limit=limit
+         content=content, timestamp=timestamp, offset=offset, limit=limit, encoding=encoding
      )


- def _read_portion(file_path: str, offset: int, limit: Optional[int]) -> str:
-     with open(file_path, "r", encoding="utf-8", errors="replace") as handle:
+ def _read_portion(
+     file_path: str, offset: int, limit: Optional[int], encoding: str = "utf-8"
+ ) -> str:
+     with open(file_path, "r", encoding=encoding, errors="replace") as handle:
          lines = handle.readlines()
          start = max(offset, 0)
          if limit is None:
@@ -304,7 +308,9 @@ def detect_changed_files(
          continue

      try:
-         new_content = _read_portion(file_path, snapshot.offset, snapshot.limit)
+         new_content = _read_portion(
+             file_path, snapshot.offset, snapshot.limit, snapshot.encoding
+         )
      except (
          OSError,
          IOError,
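
`FileSnapshot` now remembers the encoding a file was originally read with, and `detect_changed_files` re-reads using that same codec instead of hardcoding utf-8, so snapshots of non-UTF-8 files no longer produce spurious diffs from replacement characters. A sketch of the caller side; only `offset`, `limit`, and `encoding` are visible in the hunks above, so the positional arguments are assumed:

```python
from ripperdoc.utils.file_watch import record_snapshot

cache = {}  # a plain Dict[str, FileSnapshot] is one valid FileCacheType

with open("notes.txt", "r", encoding="latin-1", errors="replace") as fh:
    content = fh.read()

# Assumed call shape: the snapshot stores encoding="latin-1", so the later
# re-read in detect_changed_files uses latin-1 rather than the utf-8 default.
record_snapshot(cache, "notes.txt", content, encoding="latin-1")
```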
ripperdoc/utils/image_utils.py ADDED
@@ -0,0 +1,125 @@
+ """Image processing utilities for Ripperdoc."""
+
+ import base64
+ import mimetypes
+ from pathlib import Path
+ from typing import Optional, Tuple
+
+ from ripperdoc.utils.log import get_logger
+
+ logger = get_logger()
+
+ # Supported image formats
+ SUPPORTED_IMAGE_EXTENSIONS = {".jpg", ".jpeg", ".png", ".gif", ".webp", ".bmp"}
+ SUPPORTED_IMAGE_MIME_TYPES = {"image/jpeg", "image/png", "image/gif", "image/webp", "image/bmp"}
+
+ MAX_IMAGE_SIZE_BYTES = 32 * 1024 * 1024  # 32MB
+
+
+ def is_image_file(file_path: Path) -> bool:
+     """Check if a file is a supported image format.
+
+     Args:
+         file_path: Path to the file
+
+     Returns:
+         True if the file has a supported image extension
+     """
+     return file_path.suffix.lower() in SUPPORTED_IMAGE_EXTENSIONS
+
+
+ def detect_mime_type(file_path: Path) -> str:
+     """Detect the MIME type of an image file.
+
+     Args:
+         file_path: Path to the image file
+
+     Returns:
+         MIME type string (e.g., "image/jpeg", "image/png")
+     """
+     mime_type, _ = mimetypes.guess_type(file_path)
+     if mime_type in SUPPORTED_IMAGE_MIME_TYPES:
+         return mime_type
+
+     # Fallback to extension-based detection
+     ext = file_path.suffix.lower()
+     if ext in {".jpg", ".jpeg"}:
+         return "image/jpeg"
+     if ext == ".png":
+         return "image/png"
+     if ext == ".gif":
+         return "image/gif"
+     if ext == ".webp":
+         return "image/webp"
+     if ext == ".bmp":
+         return "image/bmp"
+     return "image/jpeg"  # Default fallback
+
+
+ def read_image_as_base64(file_path: Path) -> Optional[Tuple[str, str]]:
+     """Read an image file and return its base64-encoded data and MIME type.
+
+     Args:
+         file_path: Absolute path to the image file
+
+     Returns:
+         (base64_data, mime_type) tuple or None if reading fails
+     """
+     if not file_path.exists():
+         logger.warning(
+             "[image_utils] Image file not found",
+             extra={"path": str(file_path)},
+         )
+         return None
+
+     if not file_path.is_file():
+         logger.warning(
+             "[image_utils] Not a file",
+             extra={"path": str(file_path)},
+         )
+         return None
+
+     # Check file size
+     file_size = file_path.stat().st_size
+     if file_size > MAX_IMAGE_SIZE_BYTES:
+         logger.warning(
+             "[image_utils] Image too large",
+             extra={
+                 "path": str(file_path),
+                 "size_bytes": file_size,
+                 "max_bytes": MAX_IMAGE_SIZE_BYTES,
+             },
+         )
+         return None
+
+     if not is_image_file(file_path):
+         logger.warning(
+             "[image_utils] Not a supported image format",
+             extra={"path": str(file_path)},
+         )
+         return None
+
+     try:
+         with open(file_path, "rb") as f:
+             image_bytes = f.read()
+
+         base64_data = base64.b64encode(image_bytes).decode("utf-8")
+         mime_type = detect_mime_type(file_path)
+
+         logger.debug(
+             "[image_utils] Loaded image",
+             extra={
+                 "path": str(file_path),
+                 "size_bytes": len(image_bytes),
+                 "mime_type": mime_type,
+             },
+         )
+
+         return (base64_data, mime_type)
+
+     except (OSError, IOError) as e:
+         logger.error(
+             "[image_utils] Failed to read image",
+             extra={"path": str(file_path), "error": str(e)},
+         )
+         return None
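
The new module validates existence, file type, size (32 MB cap), and extension before encoding, and reports failures by logging and returning `None` rather than raising, so callers branch on the return value. Typical use, with an illustrative path:

```python
from pathlib import Path

from ripperdoc.utils.image_utils import read_image_as_base64

result = read_image_as_base64(Path("/tmp/screenshot.png"))  # path is illustrative
if result is not None:
    base64_data, mime_type = result
    # e.g. embed the image as a data URL or an API image content block
    data_url = f"data:{mime_type};base64,{base64_data}"
```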
ripperdoc/utils/log.py CHANGED
@@ -4,13 +4,40 @@ import json
  import logging
  import sys
  import os
- from datetime import datetime
+ from datetime import datetime, timezone
  from pathlib import Path
  from typing import Any, Optional

  from ripperdoc.utils.path_utils import sanitize_project_path


+ class SpinnerSafeStreamHandler(logging.StreamHandler):
+     """StreamHandler that clears the current line before ERROR/WARNING messages.
+
+     This prevents log messages from appearing after a spinner's text,
+     which would cause formatting issues.
+     """
+
+     def emit(self, record: logging.LogRecord) -> None:
+         """Emit a log record, clearing the line first for ERROR/WARNING."""
+         try:
+             msg = self.format(record)
+             stream = self.stream
+
+             # Clear the current line before ERROR/WARNING to avoid spinner interference
+             if record.levelno >= logging.ERROR:
+                 # Use \r to return to start, then clear with spaces, then \r again
+                 stream.write("\r" + " " * 100 + "\r")
+             elif record.levelno >= logging.WARNING:
+                 # Also clear for WARNING
+                 stream.write("\r" + " " * 100 + "\r")
+
+             stream.write(msg + self.terminator)
+             self.flush()
+         except Exception:
+             self.handleError(record)
+
+
  _LOG_RECORD_FIELDS = {
      "name",
      "msg",
@@ -42,7 +69,7 @@ class StructuredFormatter(logging.Formatter):
      """Formatter with ISO timestamps and context."""

      def formatTime(self, record: logging.LogRecord, datefmt: Optional[str] = None) -> str:
-         timestamp = datetime.utcfromtimestamp(record.created)
+         timestamp = datetime.fromtimestamp(record.created, tz=timezone.utc)
          return timestamp.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z"

      def format(self, record: logging.LogRecord) -> str:
@@ -74,7 +101,7 @@ class RipperdocLogger:

          # Avoid adding duplicate handlers if an existing logger is reused.
          if not self.logger.handlers:
-             console_handler = logging.StreamHandler(sys.stderr)
+             console_handler = SpinnerSafeStreamHandler(sys.stderr)
              console_handler.setLevel(level)
              console_formatter = logging.Formatter("%(levelname)s: %(message)s")
              console_handler.setFormatter(console_formatter)
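
Note that the ERROR and WARNING branches in `emit` perform the same 100-column clear, so in effect any record at WARNING or above overwrites an in-progress spinner line before printing. A standalone sketch of wiring the handler up manually (the package already installs it on `sys.stderr` inside `RipperdocLogger`):

```python
import logging
import sys

from ripperdoc.utils.log import SpinnerSafeStreamHandler

handler = SpinnerSafeStreamHandler(sys.stderr)
handler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))

demo = logging.getLogger("demo")
demo.addHandler(handler)
demo.warning("clears any spinner text on the current line before printing")
```

One caveat of the fixed 100-space clear: it only fully erases spinner lines up to 100 columns wide.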
ripperdoc/utils/lsp.py CHANGED
@@ -15,6 +15,7 @@ from urllib.parse import unquote, urlparse

  from ripperdoc.utils.git_utils import get_git_root, is_git_repository
  from ripperdoc.utils.log import get_logger
+ from ripperdoc.utils.platform import is_windows


  logger = get_logger()
@@ -102,7 +103,7 @@ def uri_to_path(uri: str) -> Optional[Path]:
      if parsed.scheme and parsed.scheme != "file":
          return None
      path = unquote(parsed.path)
-     if os.name == "nt" and re.match(r"^/[A-Za-z]:", path):
+     if is_windows() and re.match(r"^/[A-Za-z]:", path):
          path = path[1:]
      return Path(path)

@@ -227,7 +228,9 @@ def _parse_server_config(name: str, raw: Dict[str, Any]) -> Optional[LspServerCo
      env = {str(k): str(v) for k, v in env.items()} if env else {}

      initialization_options = (
-         raw.get("initializationOptions") if isinstance(raw.get("initializationOptions"), dict) else {}
+         raw.get("initializationOptions")
+         if isinstance(raw.get("initializationOptions"), dict)
+         else {}
      )
      settings = raw.get("settings") if isinstance(raw.get("settings"), dict) else {}

@@ -432,7 +435,10 @@ class LspServer:
                  break
              logger.debug(
                  "[lsp] stderr",
-                 extra={"server": self.config.name, "line": line.decode(errors="replace").strip()},
+                 extra={
+                     "server": self.config.name,
+                     "line": line.decode(errors="replace").strip(),
+                 },
              )
          except (asyncio.CancelledError, RuntimeError):
              return
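
The direct `os.name == "nt"` check is replaced by the new `ripperdoc.utils.platform.is_windows()` helper. The branch it gates handles the drive-letter quirk of `file://` URIs; a small illustration with the platform check left out so it runs anywhere:

```python
import re
from pathlib import Path
from urllib.parse import unquote, urlparse

# urlparse keeps a leading "/" before the drive letter; uri_to_path strips it
# when is_windows() is true. Same regex as the source, applied unconditionally:
path = unquote(urlparse("file:///C:/proj/app.py").path)  # "/C:/proj/app.py"
if re.match(r"^/[A-Za-z]:", path):
    path = path[1:]
print(Path(path))  # C:/proj/app.py
```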
ripperdoc/utils/mcp.py CHANGED
@@ -17,12 +17,13 @@ from ripperdoc.utils.token_estimation import estimate_tokens

  logger = get_logger()

+
  try:
      import mcp.types as mcp_types  # type: ignore[import-not-found]
      from mcp.client.session import ClientSession  # type: ignore[import-not-found]
      from mcp.client.sse import sse_client  # type: ignore[import-not-found]
      from mcp.client.stdio import StdioServerParameters, stdio_client  # type: ignore[import-not-found]
-     from mcp.client.streamable_http import streamablehttp_client  # type: ignore[import-not-found]
+     from mcp.client.streamable_http import streamable_http_client  # type: ignore[import-not-found]

      MCP_AVAILABLE = True
  except (ImportError, ModuleNotFoundError):  # pragma: no cover - handled gracefully at runtime
@@ -217,6 +218,14 @@ class McpRuntime:
          self.sessions: Dict[str, ClientSession] = {}
          self.servers: List[McpServerInfo] = []
          self._closed = False
+         # Track MCP streams for proper cleanup ordering
+         # We need to close write streams BEFORE exiting the stdio_client context
+         # to allow the internal tasks to exit cleanly
+         self._mcp_write_streams: List[Any] = []
+         # Track the underlying async generators from @asynccontextmanager wrappers
+         # These need to be explicitly closed after exit stack cleanup to prevent
+         # shutdown_asyncgens() from trying to close them in a different task
+         self._raw_async_generators: List[Any] = []

      async def connect(self, configs: Dict[str, McpServerInfo]) -> List[McpServerInfo]:
          logger.info(
@@ -281,19 +290,24 @@
          if config.type in ("sse", "sse-ide"):
              if not config.url:
                  raise ValueError("SSE MCP server requires a 'url'.")
-             read_stream, write_stream = await self._exit_stack.enter_async_context(
-                 sse_client(config.url, headers=config.headers or None)
-             )
+             cm = sse_client(config.url, headers=config.headers or None)
+             # Track the underlying async generator for explicit cleanup
+             if hasattr(cm, "gen"):
+                 self._raw_async_generators.append(cm.gen)
+             read_stream, write_stream = await self._exit_stack.enter_async_context(cm)
+             self._mcp_write_streams.append(write_stream)
          elif config.type in ("http", "streamable-http"):
              if not config.url:
                  raise ValueError("HTTP MCP server requires a 'url'.")
-             read_stream, write_stream, _ = await self._exit_stack.enter_async_context(
-                 streamablehttp_client(
-                     url=config.url,
-                     headers=config.headers or None,
-                     terminate_on_close=True,
-                 )
+             cm = streamable_http_client(  # type: ignore[call-arg]
+                 url=config.url,
+                 terminate_on_close=True,
              )
+             # Track the underlying async generator for explicit cleanup
+             if hasattr(cm, "gen"):
+                 self._raw_async_generators.append(cm.gen)
+             read_stream, write_stream, _ = await self._exit_stack.enter_async_context(cm)
+             self._mcp_write_streams.append(write_stream)
          else:
              if not config.command:
                  raise ValueError("Stdio MCP server requires a 'command'.")
@@ -303,9 +317,12 @@
                  env=config.env or None,
                  cwd=self.project_path,
              )
-             read_stream, write_stream = await self._exit_stack.enter_async_context(
-                 stdio_client(stdio_params)
-             )
+             cm = stdio_client(stdio_params)
+             # Track the underlying async generator for explicit cleanup
+             if hasattr(cm, "gen"):
+                 self._raw_async_generators.append(cm.gen)
+             read_stream, write_stream = await self._exit_stack.enter_async_context(cm)
+             self._mcp_write_streams.append(write_stream)

          if read_stream is None or write_stream is None:
              raise ValueError("Failed to create read/write streams for MCP server")
@@ -392,17 +409,39 @@
              "[mcp] Shutting down MCP runtime",
              extra={"project_path": str(self.project_path), "session_count": len(self.sessions)},
          )
+
+         # CRITICAL: Close all MCP write streams FIRST to signal internal tasks to stop.
+         for write_stream in self._mcp_write_streams:
+             try:
+                 await write_stream.aclose()
+             except BaseException:  # pragma: no cover
+                 pass
+         self._mcp_write_streams.clear()
+
+         # Small delay to allow internal tasks to notice stream closure and exit
+         await asyncio.sleep(0.1)
+
+         # CRITICAL: Close the raw async generators BEFORE the exit stack cleanup.
+         # This prevents asyncio's shutdown_asyncgens() from trying to close them
+         # later, which would cause the "cancel scope in different task" error.
+         for gen in self._raw_async_generators:
+             try:
+                 await gen.aclose()
+             except BaseException:  # pragma: no cover
+                 pass
+         self._raw_async_generators.clear()
+
+         # Now close the exit stack
          try:
              await self._exit_stack.aclose()
          except BaseException as exc:  # pragma: no cover - defensive shutdown
-             # Swallow noisy ExceptionGroups from stdio_client cancel scopes during exit.
              logger.debug(
-                 "[mcp] Suppressed MCP shutdown error",
+                 "[mcp] Suppressed MCP shutdown error during exit_stack.aclose()",
                  extra={"error": str(exc), "project_path": str(self.project_path)},
              )
-         finally:
-             self.sessions.clear()
-             self.servers.clear()
+
+         self.sessions.clear()
+         self.servers.clear()


  _runtime_var: contextvars.ContextVar[Optional[McpRuntime]] = contextvars.ContextVar(
@@ -453,6 +492,29 @@ async def ensure_mcp_runtime(project_path: Optional[Path] = None) -> McpRuntime:
      # Keep a module-level reference so sync callers that hop event loops can reuse it.
      global _global_runtime
      _global_runtime = runtime
+
+     # Install custom exception handler to suppress MCP asyncgen cleanup errors.
+     # These errors occur due to anyio cancel scope issues when stdio_client async
+     # generators are finalized by Python's asyncgen hooks. The errors are harmless
+     # but noisy, so we suppress them here.
+     loop = asyncio.get_running_loop()
+     original_handler = loop.get_exception_handler()
+
+     def mcp_exception_handler(loop: asyncio.AbstractEventLoop, context: Dict[str, Any]) -> None:
+         asyncgen = context.get("asyncgen")
+         # Suppress MCP stdio_client asyncgen cleanup errors
+         if asyncgen and "stdio_client" in str(asyncgen):
+             logger.debug("[mcp] Suppressed asyncgen cleanup error for stdio_client")
+             return
+         # Call original handler for other errors
+         if original_handler:
+             original_handler(loop, context)
+         else:
+             loop.default_exception_handler(context)
+
+     loop.set_exception_handler(mcp_exception_handler)
+     logger.debug("[mcp] Installed custom exception handler for asyncgen cleanup")
+
      return runtime

ripperdoc/utils/message_formatting.py CHANGED
@@ -151,10 +151,10 @@ def format_reasoning_preview(reasoning: Any, show_full_thinking: bool = False) -
          text = "\n".join(p for p in parts if p)
      else:
          text = str(reasoning)
- 
+
      if show_full_thinking:
          return text
- 
+
      lines = text.strip().splitlines()
      if not lines:
          return ""