cicada-mcp 0.1.4 (cicada_mcp-0.1.4-py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of cicada-mcp might be problematic.

Files changed (48)
  1. cicada/__init__.py +30 -0
  2. cicada/clean.py +297 -0
  3. cicada/command_logger.py +293 -0
  4. cicada/dead_code_analyzer.py +282 -0
  5. cicada/extractors/__init__.py +36 -0
  6. cicada/extractors/base.py +66 -0
  7. cicada/extractors/call.py +176 -0
  8. cicada/extractors/dependency.py +361 -0
  9. cicada/extractors/doc.py +179 -0
  10. cicada/extractors/function.py +246 -0
  11. cicada/extractors/module.py +123 -0
  12. cicada/extractors/spec.py +151 -0
  13. cicada/find_dead_code.py +270 -0
  14. cicada/formatter.py +918 -0
  15. cicada/git_helper.py +646 -0
  16. cicada/indexer.py +629 -0
  17. cicada/install.py +724 -0
  18. cicada/keyword_extractor.py +364 -0
  19. cicada/keyword_search.py +553 -0
  20. cicada/lightweight_keyword_extractor.py +298 -0
  21. cicada/mcp_server.py +1559 -0
  22. cicada/mcp_tools.py +291 -0
  23. cicada/parser.py +124 -0
  24. cicada/pr_finder.py +435 -0
  25. cicada/pr_indexer/__init__.py +20 -0
  26. cicada/pr_indexer/cli.py +62 -0
  27. cicada/pr_indexer/github_api_client.py +431 -0
  28. cicada/pr_indexer/indexer.py +297 -0
  29. cicada/pr_indexer/line_mapper.py +209 -0
  30. cicada/pr_indexer/pr_index_builder.py +253 -0
  31. cicada/setup.py +339 -0
  32. cicada/utils/__init__.py +52 -0
  33. cicada/utils/call_site_formatter.py +95 -0
  34. cicada/utils/function_grouper.py +57 -0
  35. cicada/utils/hash_utils.py +173 -0
  36. cicada/utils/index_utils.py +290 -0
  37. cicada/utils/path_utils.py +240 -0
  38. cicada/utils/signature_builder.py +106 -0
  39. cicada/utils/storage.py +111 -0
  40. cicada/utils/subprocess_runner.py +182 -0
  41. cicada/utils/text_utils.py +90 -0
  42. cicada/version_check.py +116 -0
  43. cicada_mcp-0.1.4.dist-info/METADATA +619 -0
  44. cicada_mcp-0.1.4.dist-info/RECORD +48 -0
  45. cicada_mcp-0.1.4.dist-info/WHEEL +5 -0
  46. cicada_mcp-0.1.4.dist-info/entry_points.txt +8 -0
  47. cicada_mcp-0.1.4.dist-info/licenses/LICENSE +21 -0
  48. cicada_mcp-0.1.4.dist-info/top_level.txt +1 -0
cicada/__init__.py ADDED
@@ -0,0 +1,30 @@
+ """Cicada - An Elixir module search MCP server."""
+
+ import sys
+ from pathlib import Path
+
+ # Python 3.11+ has tomllib built-in, 3.10 needs tomli
+ if sys.version_info >= (3, 11):
+     import tomllib
+ else:
+     try:
+         import tomli as tomllib
+     except ImportError:
+         tomllib = None
+
+
+ def _get_version() -> str:
+     """Read version from pyproject.toml."""
+     if tomllib is None:
+         return "unknown"
+
+     try:
+         pyproject_path = Path(__file__).parent.parent / "pyproject.toml"
+         with open(pyproject_path, "rb") as f:
+             pyproject_data = tomllib.load(f)
+         return pyproject_data["project"]["version"]
+     except (FileNotFoundError, KeyError, Exception):
+         return "unknown"
+
+
+ __version__ = _get_version()
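
For illustration only (not part of the package): a minimal sketch of how the version lookup above behaves at import time. Since a built wheel does not ship pyproject.toml next to the installed package, _get_version() would be expected to fall back to "unknown" in that case; the snippet and its comments are an assumption based on the code shown, not project documentation.

    import cicada

    # In a source checkout, pyproject.toml sits two levels above cicada/__init__.py
    # and the real version string is returned; in an installed wheel the file is
    # absent, so the broad except clause returns the fallback value instead.
    print(cicada.__version__)  # e.g. "0.1.4" from source, "unknown" from a wheel
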
cicada/clean.py ADDED
@@ -0,0 +1,297 @@
+ #!/usr/bin/env python
+ """
+ Cicada Clean Command.
+
+ Removes all Cicada configuration and indexes for a repository.
+ """
+
+ import argparse
+ import json
+ import shutil
+ import sys
+ from pathlib import Path
+
+ from cicada.utils import get_storage_dir
+
+
+ def remove_mcp_config_entry(config_path: Path, server_key: str = "cicada") -> bool:
+     """
+     Remove Cicada entry from an MCP configuration file.
+
+     Args:
+         config_path: Path to the MCP config file
+         server_key: Server key to remove (default: "cicada")
+
+     Returns:
+         True if entry was removed, False if file doesn't exist or no entry found
+     """
+     if not config_path.exists():
+         return False
+
+     try:
+         with open(config_path, "r") as f:
+             config = json.load(f)
+
+         # Determine the config key based on editor type
+         if ".mcp.json" in str(config_path) or ".cursor" in str(config_path):
+             config_key = "mcpServers"
+         elif ".vscode" in str(config_path):
+             config_key = "mcp.servers"
+         else:
+             return False
+
+         # Check if the key exists and remove cicada entry
+         if config_key in config and server_key in config[config_key]:
+             del config[config_key][server_key]
+
+             # Write back the modified config
+             with open(config_path, "w") as f:
+                 json.dump(config, f, indent=2)
+
+             return True
+
+     except (json.JSONDecodeError, IOError) as e:
+         print(f"Warning: Could not process {config_path}: {e}")
+
+     return False
+
+
+ def clean_repository(repo_path: Path, force: bool = False) -> None:
+     """
+     Remove all Cicada configuration and indexes for a repository.
+
+     Args:
+         repo_path: Path to the repository
+         force: Skip confirmation prompt if True
+     """
+     repo_path = repo_path.resolve()
+
+     print("=" * 60)
+     print("Cicada Clean")
+     print("=" * 60)
+     print()
+     print(f"Repository: {repo_path}")
+     print()
+
+     # Collect items to remove
+     items_to_remove = []
+
+     # 1. Storage directory (~/.cicada/projects/<repo_hash>/)
+     storage_dir = get_storage_dir(repo_path)
+     if storage_dir.exists():
+         items_to_remove.append(("Storage directory", storage_dir))
+
+     # 2. Old .cicada directory (backward compatibility)
+     old_cicada_dir = repo_path / ".cicada"
+     if old_cicada_dir.exists():
+         items_to_remove.append(("Legacy .cicada directory", old_cicada_dir))
+
+     # 3. MCP config files
+     mcp_configs = [
+         (repo_path / ".mcp.json", "Claude Code config"),
+         (repo_path / ".cursor" / "mcp.json", "Cursor config"),
+         (repo_path / ".vscode" / "settings.json", "VS Code config"),
+     ]
+
+     for config_path, desc in mcp_configs:
+         if config_path.exists():
+             # Check if cicada entry exists
+             try:
+                 with open(config_path, "r") as f:
+                     config = json.load(f)
+
+                 config_key = (
+                     "mcpServers" if ".vscode" not in str(config_path) else "mcp.servers"
+                 )
+
+                 if config_key in config and "cicada" in config[config_key]:
+                     items_to_remove.append(
+                         (desc, config_path, True)
+                     )  # True = is MCP config
+             except (json.JSONDecodeError, IOError):
+                 pass
+
+     # Show what will be removed
+     if not items_to_remove:
+         print("✓ No Cicada configuration found for this repository.")
+         print()
+         return
+
+     print("The following items will be removed:")
+     print()
+     for item in items_to_remove:
+         if len(item) == 3 and item[2]:  # MCP config entry
+             print(f" • {item[0]}: Remove 'cicada' entry from {item[1]}")
+         else:
+             print(f" • {item[0]}: {item[1]}")
+     print()
+
+     # Confirmation prompt
+     if not force:
+         response = input("Are you sure you want to continue? [y/N]: ")
+         if response.lower() not in ["y", "yes"]:
+             print("Aborted.")
+             return
+
+     print()
+     print("Removing Cicada configuration...")
+     print()
+
+     # Remove items
+     removed_count = 0
+     for item in items_to_remove:
+         if len(item) == 3 and item[2]:  # MCP config entry
+             desc, config_path, _ = item
+             if remove_mcp_config_entry(config_path):
+                 print(f"✓ Removed 'cicada' entry from {desc}")
+                 removed_count += 1
+         else:
+             desc, path = item
+             try:
+                 if path.is_dir():
+                     shutil.rmtree(path)
+                 else:
+                     path.unlink()
+                 print(f"✓ Removed {desc}")
+                 removed_count += 1
+             except (OSError, PermissionError) as e:
+                 print(f"✗ Failed to remove {desc}: {e}")
+
+     print()
+     print("=" * 60)
+     print(f"✓ Cleanup Complete! ({removed_count} items removed)")
+     print("=" * 60)
+     print()
+     print("Next steps:")
+     print("1. Restart your editor if it's currently running")
+     print("2. Run 'uvx cicada <editor>' to set up Cicada again")
+     print()
+
+
+ def clean_all_projects(force: bool = False) -> None:
+     """
+     Remove all Cicada storage directories for all projects.
+
+     Args:
+         force: Skip confirmation prompt if True
+     """
+     from pathlib import Path
+
+     storage_base = Path.home() / ".cicada" / "projects"
+
+     if not storage_base.exists():
+         print("✓ No Cicada storage found (~/.cicada/projects/ does not exist).")
+         return
+
+     # Count project directories
+     project_dirs = [d for d in storage_base.iterdir() if d.is_dir()]
+
+     if not project_dirs:
+         print("✓ No Cicada projects found in ~/.cicada/projects/")
+         return
+
+     print("=" * 60)
+     print("Cicada Clean All Projects")
+     print("=" * 60)
+     print()
+     print(f"Found {len(project_dirs)} project(s) in: {storage_base}")
+     print()
+
+     # Show project directories
+     print("The following storage directories will be removed:")
+     print()
+     for proj_dir in sorted(project_dirs):
+         print(f" • {proj_dir.name}/")
+     print()
+
+     # Confirmation prompt
+     if not force:
+         response = input(
+             f"Are you sure you want to remove ALL {len(project_dirs)} project(s)? [y/N]: "
+         )
+         if response.lower() not in ["y", "yes"]:
+             print("Aborted.")
+             return
+
+     print()
+     print("Removing all Cicada storage directories...")
+     print()
+
+     # Remove all project directories
+     removed_count = 0
+     for proj_dir in project_dirs:
+         try:
+             shutil.rmtree(proj_dir)
+             print(f"✓ Removed {proj_dir.name}/")
+             removed_count += 1
+         except (OSError, PermissionError) as e:
+             print(f"✗ Failed to remove {proj_dir.name}/: {e}")
+
+     print()
+     print("=" * 60)
+     print(f"✓ Cleanup Complete! ({removed_count}/{len(project_dirs)} projects removed)")
+     print("=" * 60)
+     print()
+
+
+ def main():
+     """Main entry point for the clean command."""
+     parser = argparse.ArgumentParser(
+         description="Remove all Cicada configuration and indexes for a repository",
+         epilog="Examples:\n"
+         " cicada-clean -f # Clean current repository\n"
+         " cicada-clean --all -f # Remove ALL project storage\n",
+         formatter_class=argparse.RawDescriptionHelpFormatter,
+     )
+     parser.add_argument(
+         "repo",
+         nargs="?",
+         default=None,
+         help="Path to the repository (default: current directory)",
+     )
+     parser.add_argument(
+         "-f",
+         "--force",
+         action="store_true",
+         help="Skip confirmation prompt",
+     )
+     parser.add_argument(
+         "--all",
+         action="store_true",
+         help="Remove ALL Cicada storage for all projects (~/.cicada/projects/)",
+     )
+
+     args = parser.parse_args()
+
+     # Handle --all flag
+     if args.all:
+         try:
+             clean_all_projects(force=args.force)
+         except Exception as e:
+             print(f"\nError: Cleanup failed: {e}")
+             sys.exit(1)
+         return
+
+     # Determine repo path
+     repo_path = Path(args.repo) if args.repo else Path.cwd()
+
+     # Validate path exists
+     if not repo_path.exists():
+         print(f"Error: Path does not exist: {repo_path}")
+         sys.exit(1)
+
+     # Validate path is a directory
+     if not repo_path.is_dir():
+         print(f"Error: Path is not a directory: {repo_path}")
+         sys.exit(1)
+
+     # Run cleanup
+     try:
+         clean_repository(repo_path, force=args.force)
+     except Exception as e:
+         print(f"\nError: Cleanup failed: {e}")
+         sys.exit(1)
+
+
+ if __name__ == "__main__":
+     main()
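
As a rough usage sketch of the module above (an illustration, not taken from the package's documentation): remove_mcp_config_entry() can be called directly to strip only the "cicada" server entry from an MCP config while leaving any other configured servers in place. The repository path below is hypothetical.

    from pathlib import Path
    from cicada.clean import remove_mcp_config_entry

    # Hypothetical repository path; the function returns False if the file is
    # missing or no matching entry exists, True after rewriting the config.
    config = Path("/tmp/example-repo/.mcp.json")
    if remove_mcp_config_entry(config, server_key="cicada"):
        print("Removed 'cicada' entry from", config)
    else:
        print("Nothing to remove in", config)

The epilog in main() suggests the same cleanup is normally driven from the command line (cicada-clean -f for the current repository, or cicada-clean --all -f for every indexed project).
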
cicada/command_logger.py ADDED
@@ -0,0 +1,293 @@
+ """Command logging functionality for cicada-server MCP.
+
+ This module provides logging capabilities for all MCP tool executions,
+ storing logs in JSONL format organized by date.
+ """
+
+ import asyncio
+ import json
+ import tempfile
+ from datetime import datetime
+ from pathlib import Path
+ from typing import Any, Dict, Optional
+
+
+ class CommandLogger:
+     """Logger for MCP tool executions."""
+
+     def __init__(self, log_dir: Optional[str] = None):
+         """Initialize the command logger.
+
+         Args:
+             log_dir: Directory to store logs. If None, uses system temp directory.
+         """
+         if log_dir is None:
+             # Use system temp directory with a cicada subdirectory
+             self.log_dir = Path(tempfile.gettempdir()) / "cicada-logs"
+         else:
+             self.log_dir = Path(log_dir)
+
+         # Create log directory if it doesn't exist
+         self.log_dir.mkdir(parents=True, exist_ok=True)
+
+     def _get_log_file_path(self, timestamp: datetime) -> Path:
+         """Get the log file path for a given timestamp.
+
+         Logs are organized by date (YYYY-MM-DD.jsonl).
+
+         Args:
+             timestamp: The timestamp for the log entry.
+
+         Returns:
+             Path to the log file.
+         """
+         date_str = timestamp.strftime("%Y-%m-%d")
+         return self.log_dir / f"{date_str}.jsonl"
+
+     def log_command(
+         self,
+         tool_name: str,
+         arguments: Dict[str, Any],
+         response: Any,
+         execution_time_ms: float,
+         timestamp: Optional[datetime] = None,
+         error: Optional[str] = None,
+     ) -> None:
+         """Log a command execution.
+
+         Args:
+             tool_name: Name of the tool that was executed.
+             arguments: Arguments passed to the tool.
+             response: Response from the tool execution.
+             execution_time_ms: Execution time in milliseconds.
+             timestamp: Timestamp of the execution. If None, uses current time.
+             error: Error message if the command failed.
+         """
+         if timestamp is None:
+             timestamp = datetime.now()
+
+         # Prepare the log entry
+         log_entry = {
+             "timestamp": timestamp.isoformat(),
+             "tool_name": tool_name,
+             "arguments": arguments,
+             "execution_time_ms": round(execution_time_ms, 3),
+         }
+
+         # Add response or error
+         if error:
+             log_entry["error"] = error
+             log_entry["success"] = False
+         else:
+             log_entry["response"] = self._serialize_response(response)
+             log_entry["success"] = True
+
+         # Get the log file path for this date
+         log_file = self._get_log_file_path(timestamp)
+
+         # Append the log entry to the file
+         try:
+             with open(log_file, "a", encoding="utf-8") as f:
+                 json.dump(log_entry, f, ensure_ascii=False)
+                 f.write("\n")
+         except Exception as e:
+             # If logging fails, write to stderr but don't crash the server
+             import sys
+
+             print(
+                 f"Warning: Failed to write command log: {e}",
+                 file=sys.stderr,
+             )
+
+     async def log_command_async(
+         self,
+         tool_name: str,
+         arguments: Dict[str, Any],
+         response: Any,
+         execution_time_ms: float,
+         timestamp: Optional[datetime] = None,
+         error: Optional[str] = None,
+     ) -> None:
+         """Async version of log_command that runs file I/O in a thread pool.
+
+         This prevents blocking the event loop when logging commands.
+
+         Args:
+             tool_name: Name of the tool that was executed.
+             arguments: Arguments passed to the tool.
+             response: Response from the tool execution.
+             execution_time_ms: Execution time in milliseconds.
+             timestamp: Timestamp of the execution. If None, uses current time.
+             error: Error message if the command failed.
+         """
+         # Run the synchronous log_command in a thread pool
+         await asyncio.to_thread(
+             self.log_command,
+             tool_name,
+             arguments,
+             response,
+             execution_time_ms,
+             timestamp,
+             error,
+         )
+
+     def _serialize_response(self, response: Any) -> Any:
+         """Serialize the response for JSON storage.
+
+         Args:
+             response: The response object to serialize.
+
+         Returns:
+             JSON-serializable representation of the response.
+         """
+         # Handle common types first
+         if isinstance(response, list):
+             return [self._serialize_response(item) for item in response]
+         elif isinstance(response, dict):
+             return {k: self._serialize_response(v) for k, v in response.items()}
+         elif hasattr(response, "text"):
+             # MCP TextContent object
+             return {"type": "text", "text": response.text}
+         elif hasattr(response, "__dict__"):
+             # Generic object - convert to string
+             try:
+                 return str(response)
+             except Exception:
+                 return str(response)
+         else:
+             return response
+
+     def get_log_files(self) -> list[Path]:
+         """Get all log files, sorted by date (oldest first).
+
+         Returns:
+             List of log file paths.
+         """
+         log_files = sorted(self.log_dir.glob("*.jsonl"))
+         return log_files
+
+     def read_logs(
+         self, date: Optional[str] = None, limit: Optional[int] = None
+     ) -> list[Dict[str, Any]]:
+         """Read logs from file(s).
+
+         Args:
+             date: Date string in YYYY-MM-DD format. If None, reads all logs.
+             limit: Maximum number of log entries to return (most recent).
+
+         Returns:
+             List of log entries.
+         """
+         logs = []
+
+         if date:
+             # Validate date format to prevent path traversal
+             try:
+                 datetime.strptime(date, "%Y-%m-%d")
+             except ValueError:
+                 # Invalid date format - return empty list
+                 return []
+
+             # Read from specific date file
+             log_file = self.log_dir / f"{date}.jsonl"
+             if log_file.exists():
+                 logs.extend(self._read_log_file(log_file))
+         else:
+             # Read from all log files, sorted by date
+             for log_file in self.get_log_files():
+                 logs.extend(self._read_log_file(log_file))
+
+         # Sort by timestamp (most recent last)
+         logs.sort(key=lambda x: x.get("timestamp", ""))
+
+         # Apply limit if specified (return most recent)
+         if limit:
+             logs = logs[-limit:]
+
+         return logs
+
+     def _read_log_file(self, file_path: Path) -> list[Dict[str, Any]]:
+         """Read logs from a single JSONL file.
+
+         Args:
+             file_path: Path to the log file.
+
+         Returns:
+             List of log entries from the file.
+         """
+         logs = []
+         try:
+             with open(file_path, "r", encoding="utf-8") as f:
+                 for line in f:
+                     line = line.strip()
+                     if line:
+                         try:
+                             logs.append(json.loads(line))
+                         except json.JSONDecodeError:
+                             # Skip malformed lines
+                             continue
+         except Exception:
+             # If reading fails, return what we have
+             pass
+
+         return logs
+
+     def clear_logs(self, older_than_days: Optional[int] = None) -> int:
+         """Clear log files.
+
+         Args:
+             older_than_days: If specified, only delete logs older than this many days.
+                 If None, deletes all logs.
+
+         Returns:
+             Number of files deleted.
+         """
+         count = 0
+         now = datetime.now()
+
+         for log_file in self.get_log_files():
+             should_delete = False
+
+             if older_than_days is None:
+                 # Delete all logs
+                 should_delete = True
+             else:
+                 # Check if file is old enough
+                 try:
+                     # Extract date from filename (YYYY-MM-DD.jsonl)
+                     date_str = log_file.stem  # Gets filename without .jsonl
+                     file_date = datetime.strptime(date_str, "%Y-%m-%d")
+                     age_days = (now - file_date).days
+                     if age_days > older_than_days:
+                         should_delete = True
+                 except Exception:
+                     # If we can't parse the date, skip it
+                     continue
+
+             if should_delete:
+                 try:
+                     log_file.unlink()
+                     count += 1
+                 except Exception:
+                     pass
+
+         return count
+
+
+ # Global logger instance
+ _global_logger: Optional[CommandLogger] = None
+
+
+ def get_logger(log_dir: Optional[str] = None) -> CommandLogger:
+     """Get or create the global command logger instance.
+
+     Args:
+         log_dir: Directory to store logs. Only used on first call.
+
+     Returns:
+         CommandLogger instance.
+     """
+     global _global_logger
+     if _global_logger is None:
+         _global_logger = CommandLogger(log_dir)
+     return _global_logger
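
To show how the logger above is meant to be used (a sketch based only on the class definition shown; the tool name and arguments are made up): get_logger() returns a process-wide CommandLogger that appends one JSON object per call to a date-named .jsonl file under the system temp directory.

    from cicada.command_logger import get_logger

    logger = get_logger()  # defaults to <tempdir>/cicada-logs
    logger.log_command(
        tool_name="search_module",         # hypothetical MCP tool name
        arguments={"query": "MyApp.User"},
        response={"matches": []},
        execution_time_ms=12.5,
    )

    # Read back the most recent entry from the JSONL files.
    print(logger.read_logs(limit=1))

Inside an async MCP handler, log_command_async() wraps the same call in asyncio.to_thread so the file write does not block the event loop.
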