ripperdoc 0.1.0__py3-none-any.whl → 0.2.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. ripperdoc/__init__.py +1 -1
  2. ripperdoc/cli/cli.py +75 -15
  3. ripperdoc/cli/commands/__init__.py +4 -0
  4. ripperdoc/cli/commands/agents_cmd.py +23 -1
  5. ripperdoc/cli/commands/context_cmd.py +13 -3
  6. ripperdoc/cli/commands/cost_cmd.py +1 -1
  7. ripperdoc/cli/commands/doctor_cmd.py +200 -0
  8. ripperdoc/cli/commands/memory_cmd.py +209 -0
  9. ripperdoc/cli/commands/models_cmd.py +25 -0
  10. ripperdoc/cli/commands/resume_cmd.py +3 -3
  11. ripperdoc/cli/commands/status_cmd.py +5 -5
  12. ripperdoc/cli/commands/tasks_cmd.py +32 -5
  13. ripperdoc/cli/ui/context_display.py +4 -3
  14. ripperdoc/cli/ui/rich_ui.py +205 -43
  15. ripperdoc/cli/ui/spinner.py +3 -4
  16. ripperdoc/core/agents.py +10 -6
  17. ripperdoc/core/config.py +48 -3
  18. ripperdoc/core/default_tools.py +26 -6
  19. ripperdoc/core/permissions.py +19 -0
  20. ripperdoc/core/query.py +238 -302
  21. ripperdoc/core/query_utils.py +537 -0
  22. ripperdoc/core/system_prompt.py +2 -1
  23. ripperdoc/core/tool.py +14 -1
  24. ripperdoc/sdk/client.py +1 -1
  25. ripperdoc/tools/background_shell.py +9 -3
  26. ripperdoc/tools/bash_tool.py +19 -4
  27. ripperdoc/tools/file_edit_tool.py +9 -2
  28. ripperdoc/tools/file_read_tool.py +9 -2
  29. ripperdoc/tools/file_write_tool.py +15 -2
  30. ripperdoc/tools/glob_tool.py +57 -17
  31. ripperdoc/tools/grep_tool.py +9 -2
  32. ripperdoc/tools/ls_tool.py +244 -75
  33. ripperdoc/tools/mcp_tools.py +47 -19
  34. ripperdoc/tools/multi_edit_tool.py +13 -2
  35. ripperdoc/tools/notebook_edit_tool.py +9 -6
  36. ripperdoc/tools/task_tool.py +20 -5
  37. ripperdoc/tools/todo_tool.py +163 -29
  38. ripperdoc/tools/tool_search_tool.py +15 -4
  39. ripperdoc/utils/git_utils.py +276 -0
  40. ripperdoc/utils/json_utils.py +28 -0
  41. ripperdoc/utils/log.py +130 -29
  42. ripperdoc/utils/mcp.py +83 -10
  43. ripperdoc/utils/memory.py +14 -1
  44. ripperdoc/utils/message_compaction.py +51 -14
  45. ripperdoc/utils/messages.py +63 -4
  46. ripperdoc/utils/output_utils.py +36 -9
  47. ripperdoc/utils/permissions/path_validation_utils.py +6 -0
  48. ripperdoc/utils/safe_get_cwd.py +4 -0
  49. ripperdoc/utils/session_history.py +27 -9
  50. ripperdoc/utils/todo.py +2 -2
  51. {ripperdoc-0.1.0.dist-info → ripperdoc-0.2.2.dist-info}/METADATA +4 -2
  52. ripperdoc-0.2.2.dist-info/RECORD +86 -0
  53. ripperdoc-0.1.0.dist-info/RECORD +0 -81
  54. {ripperdoc-0.1.0.dist-info → ripperdoc-0.2.2.dist-info}/WHEEL +0 -0
  55. {ripperdoc-0.1.0.dist-info → ripperdoc-0.2.2.dist-info}/entry_points.txt +0 -0
  56. {ripperdoc-0.1.0.dist-info → ripperdoc-0.2.2.dist-info}/licenses/LICENSE +0 -0
  57. {ripperdoc-0.1.0.dist-info → ripperdoc-0.2.2.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,276 @@
1
+ """Git utilities for Ripperdoc."""
2
+
3
+ import subprocess
4
+ from pathlib import Path
5
+ from typing import Dict, List, Optional, Tuple
6
+ import fnmatch
7
+
8
+
9
def is_git_repository(path: Path) -> bool:
    """Return True when *path* lies inside a git working tree.

    Any subprocess failure (git missing, bad cwd, timeout) is treated as
    "not a repository" rather than raised.
    """
    try:
        proc = subprocess.run(
            ["git", "rev-parse", "--is-inside-work-tree"],
            cwd=path,
            capture_output=True,
            text=True,
            timeout=5,
        )
    except (subprocess.SubprocessError, FileNotFoundError):
        return False
    return proc.returncode == 0 and proc.stdout.strip() == "true"
22
+
23
+
24
def get_git_root(path: Path) -> Optional[Path]:
    """Return the top-level directory of the repository containing *path*.

    Returns None when *path* is not inside a repository or git cannot run.
    """
    try:
        proc = subprocess.run(
            ["git", "rev-parse", "--show-toplevel"],
            cwd=path,
            capture_output=True,
            text=True,
            timeout=5,
        )
    except (subprocess.SubprocessError, FileNotFoundError):
        return None
    # Non-zero exit (e.g. outside any repo) maps to None instead of raising.
    return Path(proc.stdout.strip()) if proc.returncode == 0 else None
39
+
40
+
41
def _read_pattern_lines(pattern_file: Path) -> List[str]:
    """Return non-blank, non-comment lines from a gitignore-style file.

    Missing or unreadable files yield an empty list (best effort).
    """
    lines: List[str] = []
    if pattern_file.exists():
        try:
            with open(pattern_file, "r", encoding="utf-8") as f:
                for raw in f:
                    stripped = raw.strip()
                    if stripped and not stripped.startswith("#"):
                        lines.append(stripped)
        except (IOError, UnicodeDecodeError):
            pass
    return lines


def read_gitignore_patterns(path: Path) -> List[str]:
    """Read .gitignore patterns from *path* and its parents, plus ~/.gitignore.

    Walks upward from *path* collecting each directory's ``.gitignore`` and
    ``.git/info/exclude``; the walk stops at the git root when one exists,
    otherwise at the filesystem root. The global ``~/.gitignore`` is appended
    last. Comment and blank lines are skipped.
    """
    patterns: List[str] = []
    git_root = get_git_root(path)
    current = path

    # Collect per-directory ignore files from `path` up to the git root.
    while current and (git_root is None or current.is_relative_to(git_root)):
        patterns.extend(_read_pattern_lines(current / ".gitignore"))
        patterns.extend(_read_pattern_lines(current / ".git" / "info" / "exclude"))
        if current.parent == current:  # Reached filesystem root
            break
        current = current.parent

    # ~/.gitignore acts as a global fallback.
    patterns.extend(_read_pattern_lines(Path.home() / ".gitignore"))

    return patterns
90
+
91
+
92
def parse_gitignore_pattern(pattern: str, root_path: Path) -> Tuple[str, Optional[Path]]:
    """Parse a gitignore pattern and return ``(relative_pattern, root)``.

    Args:
        pattern: Raw pattern text (surrounding whitespace is stripped).
        root_path: Repository root that ``/``-anchored patterns are relative to.

    Returns:
        The pattern with any anchoring prefix removed, paired with the
        directory it is anchored to: *root_path* for ``/...`` patterns,
        the home directory for ``~/...`` patterns, and ``None`` when the
        pattern is relative to the directory containing the .gitignore.
    """
    pattern = pattern.strip()

    # Leading "/" anchors the pattern to the repository root.
    # (The original code repeated this check a second time further down;
    # that branch was unreachable and has been removed.)
    if pattern.startswith("/"):
        return pattern[1:], root_path

    # "~/" anchors the pattern to the user's home directory.
    if pattern.startswith("~/"):
        return pattern[2:], Path.home()

    # Default: pattern is relative to the directory containing .gitignore.
    return pattern, None
111
+
112
+
113
def build_ignore_patterns_map(
    root_path: Path,
    user_ignore_patterns: Optional[List[str]] = None,
    include_gitignore: bool = True
) -> Dict[Optional[Path], List[str]]:
    """Build a map of ignore patterns keyed by the root they are anchored to.

    Args:
        root_path: Directory being scanned (used to anchor ``/`` patterns).
        user_ignore_patterns: Extra patterns supplied by the caller; added first.
        include_gitignore: When True and *root_path* is a git repository,
            patterns from its gitignore files are appended as well.

    Returns:
        Mapping of anchor directory -> list of relative patterns; a ``None``
        key means "relative to the directory containing the .gitignore".
    """
    ignore_map: Dict[Optional[Path], List[str]] = {}

    def _register(raw_pattern: str) -> None:
        # Single place to parse and bucket a pattern (was duplicated inline).
        relative_pattern, pattern_root = parse_gitignore_pattern(raw_pattern, root_path)
        ignore_map.setdefault(pattern_root, []).append(relative_pattern)

    for pattern in user_ignore_patterns or []:
        _register(pattern)

    if include_gitignore and is_git_repository(root_path):
        for pattern in read_gitignore_patterns(root_path):
            _register(pattern)

    return ignore_map
139
+
140
+
141
def should_ignore_path(
    path: Path,
    root_path: Path,
    ignore_map: Dict[Optional[Path], List[str]]
) -> bool:
    """Return True when *path* matches any pattern registered in *ignore_map*.

    Each map key is the directory its patterns are anchored to (``None``
    means *root_path*); paths outside a given anchor are simply skipped
    for that anchor's patterns.
    """
    for pattern_root, patterns in ignore_map.items():
        anchor = root_path if pattern_root is None else pattern_root

        try:
            rel = path.relative_to(anchor).as_posix()
        except ValueError:
            # Path does not live under this anchor; try the next one.
            continue

        # Directory paths are additionally matched with a trailing slash.
        rel_with_slash = f"{rel}/" if path.is_dir() else rel

        for pattern in patterns:
            if pattern.endswith("/"):
                # Trailing-slash patterns only ever match directories.
                if not path.is_dir():
                    continue
                if (fnmatch.fnmatch(rel, pattern[:-1])
                        or fnmatch.fnmatch(rel_with_slash, pattern)):
                    return True
            elif (fnmatch.fnmatch(rel, pattern)
                    or fnmatch.fnmatch(rel_with_slash, pattern)):
                return True

    return False
178
+
179
+
180
def get_git_status_files(root_path: Path) -> Tuple[List[str], List[str]]:
    """Return ``(tracked, untracked)`` file paths from ``git status --porcelain``.

    "Tracked" here means files with pending changes (modified, added, ...);
    untracked files are those git reports with the ``??`` status. Both lists
    are empty when *root_path* is not a repository or git fails.
    """
    tracked: List[str] = []
    untracked: List[str] = []

    if not is_git_repository(root_path):
        return tracked, untracked

    try:
        proc = subprocess.run(
            ["git", "status", "--porcelain"],
            cwd=root_path,
            capture_output=True,
            text=True,
            timeout=10,
        )
    except (subprocess.SubprocessError, FileNotFoundError):
        return tracked, untracked

    if proc.returncode != 0:
        return tracked, untracked

    for line in proc.stdout.strip().split("\n"):
        if not line:
            continue
        # Porcelain format: two status characters, a space, then the path.
        status = line[:2].strip()
        file_path = line[3:].strip()

        # git quotes paths containing special characters.
        if file_path.startswith('"') and file_path.endswith('"'):
            file_path = file_path[1:-1]

        (untracked if status == "??" else tracked).append(file_path)

    return tracked, untracked
217
+
218
+
219
def get_current_git_branch(root_path: Path) -> Optional[str]:
    """Get the current git branch name.

    Returns:
        The branch name, or None when *root_path* is not a repository, git
        fails, or the repository is in detached-HEAD state (in which case
        ``git branch --show-current`` prints nothing).
    """
    if not is_git_repository(root_path):
        return None

    try:
        result = subprocess.run(
            ["git", "branch", "--show-current"],
            cwd=root_path,
            capture_output=True,
            text=True,
            timeout=5,
        )
        if result.returncode == 0:
            branch = result.stdout.strip()
            # Detached HEAD yields an empty string; normalize that to None so
            # callers always see Optional[str] semantics, not "" vs None.
            return branch or None
    except (subprocess.SubprocessError, FileNotFoundError):
        pass

    return None
238
+
239
+
240
def get_git_commit_hash(root_path: Path) -> Optional[str]:
    """Return the abbreviated (8-character) hash of HEAD, or None on failure."""
    if not is_git_repository(root_path):
        return None

    try:
        proc = subprocess.run(
            ["git", "rev-parse", "HEAD"],
            cwd=root_path,
            capture_output=True,
            text=True,
            timeout=5,
        )
    except (subprocess.SubprocessError, FileNotFoundError):
        return None

    if proc.returncode != 0:
        return None
    # Truncate the full SHA to a short hash for display purposes.
    return proc.stdout.strip()[:8]
259
+
260
+
261
def is_working_directory_clean(root_path: Path) -> bool:
    """Return True when ``git status --porcelain`` reports no pending changes.

    Non-repositories and git failures are reported as "clean" (best effort).
    """
    if not is_git_repository(root_path):
        return True

    try:
        proc = subprocess.run(
            ["git", "status", "--porcelain"],
            cwd=root_path,
            capture_output=True,
            text=True,
            timeout=5,
        )
    except (subprocess.SubprocessError, FileNotFoundError):
        return True

    # Clean == command succeeded and produced no status lines at all.
    return proc.returncode == 0 and proc.stdout.strip() == ""
@@ -0,0 +1,28 @@
1
+ """JSON helper utilities for Ripperdoc."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import json
6
+ from typing import Any, Optional
7
+
8
+ from ripperdoc.utils.log import get_logger
9
+
10
+
11
+ logger = get_logger()
12
+
13
+
14
def safe_parse_json(json_text: Optional[str], log_error: bool = True) -> Optional[Any]:
    """Parse *json_text*, returning None instead of raising on any failure.

    Empty or None input short-circuits to None. Parse failures are logged at
    debug level (with the error and input length) unless *log_error* is False.
    """
    if not json_text:
        return None

    try:
        parsed = json.loads(json_text)
    except Exception as exc:  # deliberately broad: best-effort parsing
        if log_error:
            logger.debug(
                "[json_utils] Failed to parse JSON",
                extra={"error": str(exc), "length": len(json_text)},
                exc_info=True,
            )
        return None

    return parsed
ripperdoc/utils/log.py CHANGED
@@ -1,11 +1,66 @@
1
1
  """Logging utilities for Ripperdoc."""
2
2
 
3
+ import json
3
4
  import logging
4
5
  import sys
5
6
  import os
6
- from pathlib import Path
7
- from typing import Optional
8
7
  from datetime import datetime
8
+ from pathlib import Path
9
+ from typing import Any, Optional
10
+
11
+ from ripperdoc.utils.path_utils import sanitize_project_path
12
+
13
+
14
# Attribute names every logging.LogRecord carries by default; anything else
# on a record came in via `extra=...` and should be surfaced as structured data.
_LOG_RECORD_FIELDS = {
    "name",
    "msg",
    "args",
    "levelname",
    "levelno",
    "pathname",
    "filename",
    "module",
    "exc_info",
    "exc_text",
    "stack_info",
    "lineno",
    "funcName",
    "created",
    "msecs",
    "relativeCreated",
    "thread",
    "threadName",
    "processName",
    "process",
    "message",
    "asctime",
    "stacklevel",
    # LogRecord gained `taskName` in Python 3.12; exclude it so asyncio task
    # names don't leak into every extras payload.
    "taskName",
}


class StructuredFormatter(logging.Formatter):
    """Formatter with ISO-8601 UTC timestamps and JSON-serialized extras.

    Any non-standard LogRecord attribute (supplied via ``extra=...``) is
    appended to the formatted message as ``" | {json}"``.
    """

    def formatTime(self, record: logging.LogRecord, datefmt: Optional[str] = None) -> str:
        """Render the record time as e.g. ``2024-01-02T03:04:05.678Z`` (UTC)."""
        # Timezone-aware replacement for datetime.utcfromtimestamp(), which is
        # deprecated since Python 3.12; the rendered string is identical.
        from datetime import timezone

        timestamp = datetime.fromtimestamp(record.created, tz=timezone.utc)
        return timestamp.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z"

    def format(self, record: logging.LogRecord) -> str:
        """Format the record, appending JSON-encoded extras when present."""
        message = super().format(record)
        extras = {
            key: value
            for key, value in record.__dict__.items()
            if key not in _LOG_RECORD_FIELDS and not key.startswith("_")
        }
        if not extras:
            return message
        try:
            serialized = json.dumps(
                extras, sort_keys=True, ensure_ascii=True, default=str
            )
        except Exception:
            # default=str covers most values; fall back to repr-style text for
            # anything json.dumps still cannot handle.
            serialized = str(extras)
        return f"{message} | {serialized}"
9
64
 
10
65
 
11
66
  class RipperdocLogger:
@@ -15,46 +70,71 @@ class RipperdocLogger:
15
70
  self.logger = logging.getLogger(name)
16
71
  level_name = os.getenv("RIPPERDOC_LOG_LEVEL", "WARNING").upper()
17
72
  level = getattr(logging, level_name, logging.WARNING)
18
- self.logger.setLevel(level)
73
+ # Allow file handlers to capture debug logs while console respects the configured level.
74
+ self.logger.setLevel(logging.DEBUG)
75
+ self.logger.propagate = False
76
+
77
+ # Avoid adding duplicate handlers if an existing logger is reused.
78
+ if not self.logger.handlers:
79
+ console_handler = logging.StreamHandler(sys.stderr)
80
+ console_handler.setLevel(level)
81
+ console_formatter = logging.Formatter("%(levelname)s: %(message)s")
82
+ console_handler.setFormatter(console_formatter)
83
+ self.logger.addHandler(console_handler)
19
84
 
20
- # Console handler
21
- console_handler = logging.StreamHandler(sys.stderr)
22
- console_handler.setLevel(level)
23
- console_formatter = logging.Formatter("%(levelname)s: %(message)s")
24
- console_handler.setFormatter(console_formatter)
25
- self.logger.addHandler(console_handler)
85
+ self._file_handler: Optional[logging.Handler] = None
86
+ self._file_handler_path: Optional[Path] = None
26
87
 
27
- # File handler (optional)
28
88
  if log_dir:
29
- log_dir.mkdir(exist_ok=True)
89
+ log_dir.mkdir(parents=True, exist_ok=True)
30
90
  log_file = log_dir / f"ripperdoc_{datetime.now().strftime('%Y%m%d')}.log"
31
- file_handler = logging.FileHandler(log_file)
32
- file_handler.setLevel(logging.DEBUG)
33
- file_formatter = logging.Formatter(
34
- "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
35
- )
36
- file_handler.setFormatter(file_formatter)
37
- self.logger.addHandler(file_handler)
38
-
39
- def debug(self, message: str) -> None:
91
+ self.attach_file_handler(log_file)
92
+
93
def attach_file_handler(self, log_file: Path) -> Path:
    """Attach (or swap in) a DEBUG-level file handler writing to *log_file*.

    Re-attaching the same path is a no-op; a different path replaces the
    previous file handler. Returns *log_file* for convenience.
    """
    log_file.parent.mkdir(parents=True, exist_ok=True)

    # Already writing to this exact file — nothing to do.
    if self._file_handler and self._file_handler_path == log_file:
        return log_file

    if self._file_handler:
        try:
            self.logger.removeHandler(self._file_handler)
        except Exception:
            # Keep console logging alive even if handler rotation fails.
            self.logger.exception("[logging] Failed to remove existing file handler")

    handler = logging.FileHandler(log_file)
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(StructuredFormatter("%(asctime)s [%(levelname)s] %(message)s"))
    self.logger.addHandler(handler)

    self._file_handler = handler
    self._file_handler_path = log_file
    return log_file
114
+
115
def debug(self, message: str, *args: Any, **kwargs: Any) -> None:
    """Log a DEBUG-level message; *args/**kwargs pass through to logging.Logger.debug."""
    self.logger.debug(message, *args, **kwargs)

def info(self, message: str, *args: Any, **kwargs: Any) -> None:
    """Log an INFO-level message; extra arguments pass through unchanged."""
    self.logger.info(message, *args, **kwargs)

def warning(self, message: str, *args: Any, **kwargs: Any) -> None:
    """Log a WARNING-level message; extra arguments pass through unchanged."""
    self.logger.warning(message, *args, **kwargs)

def error(self, message: str, *args: Any, **kwargs: Any) -> None:
    """Log an ERROR-level message; extra arguments pass through unchanged."""
    self.logger.error(message, *args, **kwargs)

def critical(self, message: str, *args: Any, **kwargs: Any) -> None:
    """Log a CRITICAL-level message; extra arguments pass through unchanged."""
    self.logger.critical(message, *args, **kwargs)

def exception(self, message: str, *args: Any, **kwargs: Any) -> None:
    """Log an ERROR-level message with the active exception's traceback attached."""
    self.logger.exception(message, *args, **kwargs)
58
138
 
59
139
 
60
140
  # Global logger instance
@@ -69,8 +149,29 @@ def get_logger() -> RipperdocLogger:
69
149
  return _logger
70
150
 
71
151
 
152
def _normalize_path_for_logs(project_path: Path) -> Path:
    """Return the per-project log directory under ``~/.ripperdoc/logs``."""
    return Path.home() / ".ripperdoc" / "logs" / sanitize_project_path(project_path)
156
+
157
+
158
def session_log_path(project_path: Path, session_id: str, when: Optional[datetime] = None) -> Path:
    """Build the log file path for a project session.

    The filename combines a ``YYYYmmdd-HHMMSS`` stamp (*when*, defaulting to
    now) with the session id, inside the project's log directory.
    """
    stamp = (when if when is not None else datetime.now()).strftime("%Y%m%d-%H%M%S")
    return _normalize_path_for_logs(project_path) / f"{stamp}-{session_id}.log"
162
+
163
+
72
164
  def init_logger(log_dir: Optional[Path] = None) -> RipperdocLogger:
73
165
  """Initialize the global logger."""
74
166
  global _logger
75
167
  _logger = RipperdocLogger(log_dir=log_dir)
76
168
  return _logger
169
+
170
+
171
def enable_session_file_logging(project_path: Path, session_id: str) -> Path:
    """Ensure the global logger writes to the session-specific log file.

    Computes the session log path, attaches it to the global logger's file
    handler, and returns the path.
    """
    active_logger = get_logger()
    session_file = session_log_path(project_path, session_id)
    active_logger.attach_file_handler(session_file)
    active_logger.debug(f"[logging] File logging enabled at {session_file}")
    return session_file
ripperdoc/utils/mcp.py CHANGED
@@ -14,6 +14,8 @@ from ripperdoc import __version__
14
14
  from ripperdoc.utils.log import get_logger
15
15
  from ripperdoc.utils.message_compaction import estimate_tokens_from_text
16
16
 
17
+ logger = get_logger()
18
+
17
19
  try:
18
20
  import mcp.types as mcp_types
19
21
  from mcp.client.session import ClientSession
@@ -26,9 +28,7 @@ except Exception: # pragma: no cover - handled gracefully at runtime
26
28
  MCP_AVAILABLE = False
27
29
  ClientSession = object # type: ignore
28
30
  mcp_types = None # type: ignore
29
-
30
-
31
- logger = get_logger()
31
+ logger.exception("[mcp] MCP SDK not available at import time")
32
32
 
33
33
 
34
34
  @dataclass
@@ -72,9 +72,12 @@ def _load_json_file(path: Path) -> Dict[str, Any]:
72
72
  if not path.exists():
73
73
  return {}
74
74
  try:
75
- return json.loads(path.read_text())
76
- except (OSError, json.JSONDecodeError) as exc:
77
- logger.error(f"Failed to load JSON from {path}: {exc}")
75
+ data = json.loads(path.read_text())
76
+ if isinstance(data, dict):
77
+ return data
78
+ return {}
79
+ except (OSError, json.JSONDecodeError):
80
+ logger.exception("Failed to load JSON", extra={"path": str(path)})
78
81
  return {}
79
82
 
80
83
 
@@ -86,6 +89,10 @@ def _ensure_str_dict(raw: object) -> Dict[str, str]:
86
89
  try:
87
90
  result[str(key)] = str(value)
88
91
  except Exception:
92
+ logger.exception(
93
+ "[mcp] Failed to coerce env/header value to string",
94
+ extra={"key": key, "value": value},
95
+ )
89
96
  continue
90
97
  return result
91
98
 
@@ -151,6 +158,14 @@ def _load_server_configs(project_path: Optional[Path]) -> Dict[str, McpServerInf
151
158
  for path in candidates:
152
159
  data = _load_json_file(path)
153
160
  merged.update(_parse_servers(data))
161
+ logger.debug(
162
+ "[mcp] Loaded MCP server configs",
163
+ extra={
164
+ "project_path": str(project_path),
165
+ "server_count": len(merged),
166
+ "candidates": [str(path) for path in candidates],
167
+ },
168
+ )
154
169
  return merged
155
170
 
156
171
 
@@ -165,6 +180,14 @@ class McpRuntime:
165
180
  self._closed = False
166
181
 
167
182
  async def connect(self, configs: Dict[str, McpServerInfo]) -> List[McpServerInfo]:
183
+ logger.info(
184
+ "[mcp] Connecting to MCP servers",
185
+ extra={
186
+ "project_path": str(self.project_path),
187
+ "server_count": len(configs),
188
+ "servers": list(configs.keys()),
189
+ },
190
+ )
168
191
  await self._exit_stack.__aenter__()
169
192
  if not MCP_AVAILABLE:
170
193
  for config in configs.values():
@@ -179,13 +202,21 @@ class McpRuntime:
179
202
 
180
203
  for config in configs.values():
181
204
  self.servers.append(await self._connect_server(config))
205
+ logger.debug(
206
+ "[mcp] MCP connection summary",
207
+ extra={
208
+ "connected": [s.name for s in self.servers if s.status == "connected"],
209
+ "failed": [s.name for s in self.servers if s.status == "failed"],
210
+ "unavailable": [s.name for s in self.servers if s.status == "unavailable"],
211
+ },
212
+ )
182
213
  return self.servers
183
214
 
184
async def _list_roots_callback(self, *_: Any, **__: Any) -> Optional[Any]:
    """MCP roots callback: expose the project directory as the single root.

    Returns None when the MCP SDK types are unavailable.
    """
    if not mcp_types:
        return None
    project_uri = Path(self.project_path).resolve().as_uri()
    return mcp_types.ListRootsResult(
        roots=[mcp_types.Root(uri=project_uri)]  # type: ignore[arg-type]
    )
190
221
 
191
222
  async def _connect_server(self, config: McpServerInfo) -> McpServerInfo:
@@ -198,6 +229,15 @@ class McpRuntime:
198
229
  try:
199
230
  read_stream = None
200
231
  write_stream = None
232
+ logger.debug(
233
+ "[mcp] Connecting server",
234
+ extra={
235
+ "server": config.name,
236
+ "type": config.type,
237
+ "command": config.command,
238
+ "url": config.url,
239
+ },
240
+ )
201
241
 
202
242
  if config.type in ("sse", "sse-ide"):
203
243
  if not config.url:
@@ -228,17 +268,22 @@ class McpRuntime:
228
268
  stdio_client(stdio_params)
229
269
  )
230
270
 
271
+ if read_stream is None or write_stream is None:
272
+ raise ValueError("Failed to create read/write streams for MCP server")
273
+
231
274
  session = await self._exit_stack.enter_async_context(
232
275
  ClientSession(
233
276
  read_stream,
234
277
  write_stream,
235
- list_roots_callback=self._list_roots_callback,
278
+ list_roots_callback=self._list_roots_callback, # type: ignore[arg-type]
236
279
  client_info=mcp_types.Implementation(name="ripperdoc", version=__version__),
237
280
  )
238
281
  )
239
282
 
240
283
  init_result = await session.initialize()
241
284
  capabilities = session.get_server_capabilities()
285
+ if capabilities is None:
286
+ capabilities = mcp_types.ServerCapabilities()
242
287
 
243
288
  info.status = "connected"
244
289
  info.instructions = init_result.instructions or info.instructions
@@ -272,8 +317,21 @@ class McpRuntime:
272
317
  for resource in resources_result.resources
273
318
  ]
274
319
 
320
+ logger.info(
321
+ "[mcp] Connected to MCP server",
322
+ extra={
323
+ "server": config.name,
324
+ "status": info.status,
325
+ "tools": len(info.tools),
326
+ "resources": len(info.resources),
327
+ "capabilities": list(info.capabilities.keys()),
328
+ },
329
+ )
275
330
  except Exception as exc: # pragma: no cover - network/process errors
276
- logger.error(f"Failed to connect to MCP server '{config.name}': {exc}")
331
+ logger.exception(
332
+ "Failed to connect to MCP server",
333
+ extra={"server": config.name, "error": str(exc)},
334
+ )
277
335
  info.status = "failed"
278
336
  info.error = str(exc)
279
337
 
@@ -283,6 +341,10 @@ class McpRuntime:
283
341
  if self._closed:
284
342
  return
285
343
  self._closed = True
344
+ logger.debug(
345
+ "[mcp] Shutting down MCP runtime",
346
+ extra={"project_path": str(self.project_path), "session_count": len(self.sessions)},
347
+ )
286
348
  try:
287
349
  await self._exit_stack.aclose()
288
350
  finally:
@@ -308,12 +370,23 @@ async def ensure_mcp_runtime(project_path: Optional[Path] = None) -> McpRuntime:
308
370
  runtime = _get_runtime()
309
371
  project_path = project_path or Path.cwd()
310
372
  if runtime and not runtime._closed and runtime.project_path == project_path:
373
+ logger.debug(
374
+ "[mcp] Reusing existing MCP runtime",
375
+ extra={
376
+ "project_path": str(project_path),
377
+ "server_count": len(runtime.servers),
378
+ },
379
+ )
311
380
  return runtime
312
381
 
313
382
  if runtime:
314
383
  await runtime.aclose()
315
384
 
316
385
  runtime = McpRuntime(project_path)
386
+ logger.debug(
387
+ "[mcp] Creating MCP runtime",
388
+ extra={"project_path": str(project_path)},
389
+ )
317
390
  configs = _load_server_configs(project_path)
318
391
  await runtime.connect(configs)
319
392
  _runtime_var.set(runtime)