cicada-mcp 0.1.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of cicada-mcp might be problematic. Click here for more details.
- cicada/__init__.py +30 -0
- cicada/clean.py +297 -0
- cicada/command_logger.py +293 -0
- cicada/dead_code_analyzer.py +282 -0
- cicada/extractors/__init__.py +36 -0
- cicada/extractors/base.py +66 -0
- cicada/extractors/call.py +176 -0
- cicada/extractors/dependency.py +361 -0
- cicada/extractors/doc.py +179 -0
- cicada/extractors/function.py +246 -0
- cicada/extractors/module.py +123 -0
- cicada/extractors/spec.py +151 -0
- cicada/find_dead_code.py +270 -0
- cicada/formatter.py +918 -0
- cicada/git_helper.py +646 -0
- cicada/indexer.py +629 -0
- cicada/install.py +724 -0
- cicada/keyword_extractor.py +364 -0
- cicada/keyword_search.py +553 -0
- cicada/lightweight_keyword_extractor.py +298 -0
- cicada/mcp_server.py +1559 -0
- cicada/mcp_tools.py +291 -0
- cicada/parser.py +124 -0
- cicada/pr_finder.py +435 -0
- cicada/pr_indexer/__init__.py +20 -0
- cicada/pr_indexer/cli.py +62 -0
- cicada/pr_indexer/github_api_client.py +431 -0
- cicada/pr_indexer/indexer.py +297 -0
- cicada/pr_indexer/line_mapper.py +209 -0
- cicada/pr_indexer/pr_index_builder.py +253 -0
- cicada/setup.py +339 -0
- cicada/utils/__init__.py +52 -0
- cicada/utils/call_site_formatter.py +95 -0
- cicada/utils/function_grouper.py +57 -0
- cicada/utils/hash_utils.py +173 -0
- cicada/utils/index_utils.py +290 -0
- cicada/utils/path_utils.py +240 -0
- cicada/utils/signature_builder.py +106 -0
- cicada/utils/storage.py +111 -0
- cicada/utils/subprocess_runner.py +182 -0
- cicada/utils/text_utils.py +90 -0
- cicada/version_check.py +116 -0
- cicada_mcp-0.1.4.dist-info/METADATA +619 -0
- cicada_mcp-0.1.4.dist-info/RECORD +48 -0
- cicada_mcp-0.1.4.dist-info/WHEEL +5 -0
- cicada_mcp-0.1.4.dist-info/entry_points.txt +8 -0
- cicada_mcp-0.1.4.dist-info/licenses/LICENSE +21 -0
- cicada_mcp-0.1.4.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,253 @@
|
|
|
1
|
+
"""
|
|
2
|
+
PR Index Builder.
|
|
3
|
+
|
|
4
|
+
This module handles building and merging PR index structures,
|
|
5
|
+
separating index construction logic from API and mapping concerns.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from datetime import datetime
|
|
9
|
+
from typing import Dict, List, Any, Optional
|
|
10
|
+
|
|
11
|
+
from cicada.utils import load_index as load_index_util, save_index as save_index_util
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class PRIndexBuilder:
    """
    Builds and manages PR index structures.

    This class handles creating, merging, and saving PR indexes,
    keeping index manipulation separate from data fetching.
    """

    def __init__(self, repo_owner: str, repo_name: str):
        """
        Initialize the index builder.

        Args:
            repo_owner: GitHub repository owner
            repo_name: GitHub repository name
        """
        self.repo_owner = repo_owner
        self.repo_name = repo_name

    def build_index(
        self, prs: List[Dict[str, Any]], preserve_last_pr: Optional[int] = None
    ) -> Dict[str, Any]:
        """
        Build the index structure from PR data.

        Args:
            prs: List of PR dictionaries
            preserve_last_pr: If set, use this as last_pr_number instead of
                calculating. Used when building partial indexes from
                interrupted fetches.

        Returns:
            Index dictionary with metadata, prs, commit_to_pr mapping, and
            file_to_prs mapping
        """
        print("Building index...")

        commit_to_pr = self._build_commit_mapping(prs)
        file_to_prs = self._build_file_mapping(prs)
        total_comments = sum(len(pr.get("comments", [])) for pr in prs)

        index: Dict[str, Any] = {
            "metadata": {
                "repo_owner": self.repo_owner,
                "repo_name": self.repo_name,
                "last_indexed_at": datetime.now().isoformat(),
                "total_prs": len(prs),
                "total_commits_mapped": len(commit_to_pr),
                "total_comments": total_comments,
                "total_files": len(file_to_prs),
            },
            "prs": {str(pr["number"]): pr for pr in prs},
            "commit_to_pr": commit_to_pr,
            "file_to_prs": file_to_prs,
        }

        # Track last PR number for incremental updates. A preserved value
        # (from a partial/interrupted fetch) wins over the computed maximum.
        if preserve_last_pr is not None:
            index["metadata"]["last_pr_number"] = preserve_last_pr
        elif prs:
            index["metadata"]["last_pr_number"] = max(pr["number"] for pr in prs)

        print(
            f"Index built: {len(prs)} PRs, {len(commit_to_pr)} commits, "
            f"{len(file_to_prs)} files, {total_comments} comments"
        )
        return index

    def _build_commit_mapping(self, prs: List[Dict[str, Any]]) -> Dict[str, int]:
        """
        Build commit SHA -> PR number mapping.

        Args:
            prs: List of PR dictionaries

        Returns:
            Dictionary mapping commit SHAs to PR numbers
        """
        # Later PRs overwrite earlier ones for a shared SHA, matching the
        # iteration order of the input list.
        return {sha: pr["number"] for pr in prs for sha in pr["commits"]}

    def _build_file_mapping(self, prs: List[Dict[str, Any]]) -> Dict[str, List[int]]:
        """
        Build file path -> PR numbers mapping.

        Args:
            prs: List of PR dictionaries

        Returns:
            Dictionary mapping file paths to lists of PR numbers
            (sorted newest first)
        """
        mapping: Dict[str, List[int]] = {}
        for pr in prs:
            for path in pr.get("files_changed", []):
                mapping.setdefault(path, []).append(pr["number"])

        # Newest (highest-numbered) PRs first for every file.
        for numbers in mapping.values():
            numbers.sort(reverse=True)
        return mapping

    def merge_indexes(
        self, existing_index: Dict[str, Any], new_prs: List[Dict[str, Any]]
    ) -> Dict[str, Any]:
        """
        Merge new PRs into existing index.

        Args:
            existing_index: The existing index (updated in place)
            new_prs: List of new PR dictionaries

        Returns:
            Updated index dictionary
        """
        print("Merging new PRs into existing index...")

        prs_by_number = existing_index["prs"]
        commit_map = existing_index["commit_to_pr"]

        # Add/replace PR data and extend the commit -> PR mapping.
        for pr in new_prs:
            prs_by_number[str(pr["number"])] = pr
            for sha in pr["commits"]:
                commit_map[sha] = pr["number"]

        # Rebuild file -> PRs mapping from scratch (most reliable)
        file_to_prs = self._build_file_mapping(list(prs_by_number.values()))
        existing_index["file_to_prs"] = file_to_prs

        meta = existing_index["metadata"]
        meta["last_indexed_at"] = datetime.now().isoformat()
        meta["total_prs"] = len(prs_by_number)
        meta["total_commits_mapped"] = len(commit_map)
        meta["total_comments"] = sum(
            len(pr.get("comments", [])) for pr in prs_by_number.values()
        )
        meta["total_files"] = len(file_to_prs)

        # Update last_pr_number to the highest PR we have in the index.
        if prs_by_number:
            meta["last_pr_number"] = max(int(num) for num in prs_by_number)

        return existing_index

    def merge_partial_clean(
        self, existing_index: Dict[str, Any], partial_index: Dict[str, Any]
    ) -> Dict[str, Any]:
        """
        Merge a partial clean build with an existing index.

        This is used when a --clean rebuild is interrupted. We want to keep
        both:
        - PRs from the existing index (old data)
        - PRs from the partial new index (newly fetched data)

        Args:
            existing_index: The old complete/partial index
            partial_index: The new partial index from interrupted --clean

        Returns:
            Merged index with all PRs from both indexes
        """
        print("Merging partial index with existing index...")

        # Shallow copy: nested structures remain shared with existing_index.
        merged = existing_index.copy()

        # New PRs win over old ones that share a number.
        merged["prs"].update(partial_index["prs"])

        # Rebuild both derived mappings from the merged PR set.
        all_prs = list(merged["prs"].values())
        merged["commit_to_pr"] = self._build_commit_mapping(all_prs)
        merged["file_to_prs"] = self._build_file_mapping(all_prs)

        meta = merged["metadata"]
        meta["last_indexed_at"] = datetime.now().isoformat()
        meta["total_prs"] = len(merged["prs"])
        meta["total_commits_mapped"] = len(merged["commit_to_pr"])
        meta["total_comments"] = sum(len(pr.get("comments", [])) for pr in all_prs)
        meta["total_files"] = len(merged["file_to_prs"])
        # Use partial_index's last_pr_number, which was preserved across
        # the interruption.
        meta["last_pr_number"] = partial_index["metadata"].get("last_pr_number", 0)

        print(
            f"Merged: {len(merged['prs'])} total PRs "
            f"({len(partial_index['prs'])} new/updated)"
        )
        return merged

    def load_existing_index(self, index_path: str) -> Optional[Dict[str, Any]]:
        """
        Load existing index file if it exists.

        Args:
            index_path: Path to the index file

        Returns:
            Existing index dictionary or None if file doesn't exist
        """
        return load_index_util(index_path, verbose=True, raise_on_error=False)

    def save_index(self, index: Dict[str, Any], output_path: str) -> None:
        """
        Save index to file.

        Args:
            index: Index dictionary to save
            output_path: Path where the index will be saved
        """
        save_index_util(index, output_path, create_dirs=True, verbose=True)
|
cicada/setup.py
ADDED
|
@@ -0,0 +1,339 @@
|
|
|
1
|
+
#!/usr/bin/env python
|
|
2
|
+
"""
|
|
3
|
+
Cicada Simplified Setup Script.
|
|
4
|
+
|
|
5
|
+
One-command setup: uvx cicada [claude|cursor|vs]
|
|
6
|
+
- Indexes the repository with keyword extraction
|
|
7
|
+
- Stores all files in temp directory (~/.cicada/projects/<hash>/)
|
|
8
|
+
- Creates only MCP config file in user's repo
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
import argparse
|
|
12
|
+
import json
|
|
13
|
+
import sys
|
|
14
|
+
from pathlib import Path
|
|
15
|
+
from typing import Literal
|
|
16
|
+
|
|
17
|
+
from cicada.indexer import ElixirIndexer
|
|
18
|
+
from cicada.utils import (
|
|
19
|
+
create_storage_dir,
|
|
20
|
+
get_index_path,
|
|
21
|
+
get_config_path,
|
|
22
|
+
get_hashes_path,
|
|
23
|
+
get_storage_dir,
|
|
24
|
+
)
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
EditorType = Literal["claude", "cursor", "vs"]
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def _load_existing_config(config_path: Path) -> dict:
    """
    Load an existing JSON configuration file with error handling.

    Args:
        config_path: Path to the config file

    Returns:
        Loaded config dict, or empty dict if the file doesn't exist, is
        malformed, unreadable, or does not hold a JSON object at top level
    """
    if not config_path.exists():
        return {}

    try:
        with open(config_path, "r") as f:
            loaded = json.load(f)
    except json.JSONDecodeError as e:
        print(
            f"Warning: Existing config at {config_path} is malformed, creating new one: {e}"
        )
        return {}
    except OSError as e:
        # OSError is the canonical name; IOError is its legacy alias.
        print(f"Warning: Could not read config file {config_path}: {e}")
        return {}

    # Callers index into the result with string keys, so a top-level JSON
    # array/scalar would crash later. Treat it as "no usable config".
    if not isinstance(loaded, dict):
        print(
            f"Warning: Existing config at {config_path} is not a JSON object, creating new one"
        )
        return {}

    return loaded
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
def _build_server_config(
    command: str, args: list, cwd: str | None, repo_path: Path, storage_dir: Path
) -> dict:
    """
    Assemble the MCP server configuration entry.

    Args:
        command: Command to run the MCP server
        args: Command line arguments
        cwd: Working directory (optional)
        repo_path: Path to the repository
        storage_dir: Path to the storage directory

    Returns:
        Server configuration dict
    """
    entry: dict = {"command": command}

    # Only include the optional keys when they carry a value.
    for key, value in (("args", args), ("cwd", cwd)):
        if value:
            entry[key] = value

    # Environment tells the server which repo and storage dir to use.
    entry["env"] = {
        "CICADA_REPO_PATH": str(repo_path),
        "CICADA_CONFIG_DIR": str(storage_dir),
    }

    return entry
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
def get_mcp_config_for_editor(
    editor: EditorType, repo_path: Path, storage_dir: Path
) -> tuple[Path, dict]:
    """
    Get the MCP configuration file path and content for a specific editor.

    Args:
        editor: Editor type (claude, cursor, vs)
        repo_path: Path to the repository
        storage_dir: Path to the storage directory

    Returns:
        Tuple of (config_file_path, config_content)

    Raises:
        ValueError: If the editor is not a supported type
    """
    import shutil

    # Prefer the permanently-installed server binary when it is on PATH;
    # otherwise run the module with the current interpreter.
    if shutil.which("cicada-server") is not None:
        command, args, cwd = "cicada-server", [], None
    else:
        command, args, cwd = str(sys.executable), ["-m", "cicada.mcp_server"], None

    # Per-editor: where the config lives, which JSON key holds MCP servers,
    # and whether the parent directory must be created first.
    editor_specs = {
        "claude": {
            "config_path": repo_path / ".mcp.json",
            "config_key": "mcpServers",
            "needs_dir": False,
        },
        "cursor": {
            "config_path": repo_path / ".cursor" / "mcp.json",
            "config_key": "mcpServers",
            "needs_dir": True,
        },
        "vs": {
            "config_path": repo_path / ".vscode" / "settings.json",
            "config_key": "mcp.servers",
            "needs_dir": True,
        },
    }

    if editor not in editor_specs:
        raise ValueError(f"Unsupported editor: {editor}")

    spec = editor_specs[editor]
    config_path = spec["config_path"]

    if spec["needs_dir"]:
        config_path.parent.mkdir(exist_ok=True)

    # Merge into whatever config already exists rather than clobbering it.
    config = _load_existing_config(config_path)
    servers = config.setdefault(spec["config_key"], {})
    servers["cicada"] = _build_server_config(command, args, cwd, repo_path, storage_dir)

    return config_path, config
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
def create_config_yaml(repo_path: Path, storage_dir: Path) -> None:
    """
    Create config.yaml in storage directory.

    Args:
        repo_path: Path to the repository
        storage_dir: Path to the storage directory
    """
    cfg_file = get_config_path(repo_path)

    # Hand-rendered YAML keeps this dependency-free; paths are interpolated
    # as-is (assumes they contain no YAML-special characters — TODO confirm).
    yaml_body = (
        "repository:\n"
        f"  path: {repo_path}\n"
        "\n"
        "storage:\n"
        f"  index_path: {get_index_path(repo_path)}\n"
    )

    with open(cfg_file, "w") as fh:
        fh.write(yaml_body)

    print(f"✓ Config file created at {cfg_file}")
|
|
182
|
+
|
|
183
|
+
|
|
184
|
+
def index_repository(repo_path: Path) -> None:
    """
    Index the repository with keyword extraction enabled.

    Args:
        repo_path: Path to the repository

    Raises:
        Exception: If indexing fails (re-raised after printing guidance)
    """
    try:
        target = get_index_path(repo_path)

        # Keyword extraction is on by default. The 'small' spaCy model keeps
        # this compatible with uvx; a permanent install can rerun
        # cicada-index with --spacy-model medium/large for better accuracy.
        ElixirIndexer(verbose=True).index_repository(
            repo_path=str(repo_path),
            output_path=str(target),
            extract_keywords=True,
            spacy_model="small",
        )

        print(f"✓ Repository indexed at {target}")
    except Exception as e:
        print(f"Error: Failed to index repository: {e}")
        print("Please check that the repository contains valid Elixir files.")
        raise
|
|
213
|
+
|
|
214
|
+
|
|
215
|
+
def _print_banner(title: str) -> None:
    """Print *title* framed by 60-column rules, followed by a blank line."""
    print("=" * 60)
    print(title)
    print("=" * 60)
    print()


def _write_mcp_config(editor: EditorType, repo_path: Path, storage_dir: Path) -> Path:
    """Write the editor-specific MCP config file and return its path."""
    config_path, config_content = get_mcp_config_for_editor(
        editor, repo_path, storage_dir
    )
    # Explicit UTF-8 keeps the JSON portable regardless of platform locale.
    with open(config_path, "w", encoding="utf-8") as f:
        json.dump(config_content, f, indent=2)
    print(f"✓ MCP configuration created at {config_path}")
    print()
    return config_path


def _print_summary(editor: EditorType, storage_dir: Path, config_path: Path) -> None:
    """Print the completion banner, next steps, and storage location."""
    _print_banner("✓ Setup Complete!")
    print("Next steps:")
    print(f"1. Restart {editor.upper()}")
    print("2. Cicada MCP server will be available automatically")
    print()
    print("Storage location:")
    print(f"  {storage_dir}")
    print()
    print("All index files are stored outside your repository.")
    print(f"Only {config_path.name} was added to your repo.")
    print()


def _maybe_suggest_permanent_install() -> None:
    """If running via uvx (no cicada-server on PATH), suggest a permanent install."""
    import shutil

    from cicada import __version__

    if not shutil.which("cicada-server"):
        print("💡 Tip: For best experience, install Cicada permanently:")
        print(
            f"   uv tool install git+https://github.com/wende/cicada.git@v{__version__}"
        )
        print()
        print("   Benefits:")
        print("   • Faster MCP server startup")
        print("   • Access to cicada-index with medium/large spaCy models")
        print("   • PR indexing with cicada-index-pr")
        print()


def setup(editor: EditorType, repo_path: Path | None = None) -> None:
    """
    Run the complete setup for the specified editor.

    Steps: create the storage directory, index the repository, write
    config.yaml, write the editor's MCP config, and print follow-up
    instructions.

    Args:
        editor: Editor type (claude, cursor, vs)
        repo_path: Path to the repository (defaults to current directory)
    """
    # Determine repository path
    if repo_path is None:
        repo_path = Path.cwd()
    repo_path = repo_path.resolve()

    _print_banner(f"Cicada Setup for {editor.upper()}")

    # Create storage directory (outside the repo, under ~/.cicada)
    print(f"Repository: {repo_path}")
    storage_dir = create_storage_dir(repo_path)
    print(f"Storage: {storage_dir}")
    print()

    # Index repository
    index_repository(repo_path)
    print()

    # Create config.yaml
    create_config_yaml(repo_path, storage_dir)
    print()

    # Create MCP config for the editor (the only file added to the repo)
    config_path = _write_mcp_config(editor, repo_path, storage_dir)

    _print_summary(editor, storage_dir, config_path)
    _maybe_suggest_permanent_install()
|
|
289
|
+
|
|
290
|
+
|
|
291
|
+
def main():
    """Main entry point for the simplified setup script."""
    parser = argparse.ArgumentParser(
        description="Cicada One-Command Setup",
        epilog="Example: uvx cicada claude",
    )
    parser.add_argument(
        "editor",
        choices=["claude", "cursor", "vs"],
        help="Editor to configure (claude=Claude Code, cursor=Cursor, vs=VS Code)",
    )
    parser.add_argument(
        "repo",
        nargs="?",
        default=None,
        help="Path to the Elixir repository (default: current directory)",
    )
    args = parser.parse_args()

    repo_path = Path(args.repo) if args.repo else Path.cwd()

    def _fail(*messages: str) -> None:
        """Print each message and exit with status 1."""
        for message in messages:
            print(message)
        sys.exit(1)

    # Validate the target before doing any work: it must exist, be a
    # directory, and look like an Elixir project (mix.exs present).
    if not repo_path.exists():
        _fail(f"Error: Path does not exist: {repo_path}")
    elif not repo_path.is_dir():
        _fail(f"Error: Path is not a directory: {repo_path}")
    elif not (repo_path / "mix.exs").exists():
        _fail(
            f"Error: {repo_path} does not appear to be an Elixir project",
            "(mix.exs not found)",
        )

    # Run setup; any failure becomes a clean exit rather than a traceback.
    try:
        setup(args.editor, repo_path)
    except Exception as e:
        print(f"\nError: Setup failed: {e}")
        sys.exit(1)


if __name__ == "__main__":
    main()
|
cicada/utils/__init__.py
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Utility modules for CICADA.
|
|
3
|
+
|
|
4
|
+
This package contains shared utilities used across the codebase to reduce
|
|
5
|
+
code duplication and improve maintainability.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from .subprocess_runner import SubprocessRunner, run_git_command, run_gh_command
|
|
9
|
+
from .path_utils import normalize_file_path, resolve_to_repo_root
|
|
10
|
+
from .index_utils import (
|
|
11
|
+
load_index,
|
|
12
|
+
save_index,
|
|
13
|
+
merge_indexes_incremental,
|
|
14
|
+
validate_index_structure,
|
|
15
|
+
)
|
|
16
|
+
from .function_grouper import FunctionGrouper
|
|
17
|
+
from .call_site_formatter import CallSiteFormatter
|
|
18
|
+
from .signature_builder import SignatureBuilder
|
|
19
|
+
from .text_utils import split_identifier, split_camel_snake_case
|
|
20
|
+
from .storage import (
|
|
21
|
+
get_repo_hash,
|
|
22
|
+
get_storage_dir,
|
|
23
|
+
create_storage_dir,
|
|
24
|
+
get_index_path,
|
|
25
|
+
get_config_path,
|
|
26
|
+
get_hashes_path,
|
|
27
|
+
get_pr_index_path,
|
|
28
|
+
)
|
|
29
|
+
|
|
30
|
+
__all__ = [
|
|
31
|
+
"SubprocessRunner",
|
|
32
|
+
"run_git_command",
|
|
33
|
+
"run_gh_command",
|
|
34
|
+
"normalize_file_path",
|
|
35
|
+
"resolve_to_repo_root",
|
|
36
|
+
"load_index",
|
|
37
|
+
"save_index",
|
|
38
|
+
"merge_indexes_incremental",
|
|
39
|
+
"validate_index_structure",
|
|
40
|
+
"FunctionGrouper",
|
|
41
|
+
"CallSiteFormatter",
|
|
42
|
+
"SignatureBuilder",
|
|
43
|
+
"split_identifier",
|
|
44
|
+
"split_camel_snake_case",
|
|
45
|
+
"get_repo_hash",
|
|
46
|
+
"get_storage_dir",
|
|
47
|
+
"create_storage_dir",
|
|
48
|
+
"get_index_path",
|
|
49
|
+
"get_config_path",
|
|
50
|
+
"get_hashes_path",
|
|
51
|
+
"get_pr_index_path",
|
|
52
|
+
]
|