cicada-mcp 0.1.7__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff shows the changes between publicly available package versions as published to their respective public registries. It is provided for informational purposes only.
Files changed (53)
  1. cicada/ascii_art.py +60 -0
  2. cicada/clean.py +195 -60
  3. cicada/cli.py +757 -0
  4. cicada/colors.py +27 -0
  5. cicada/command_logger.py +14 -16
  6. cicada/dead_code_analyzer.py +12 -19
  7. cicada/extractors/__init__.py +6 -6
  8. cicada/extractors/base.py +3 -3
  9. cicada/extractors/call.py +11 -15
  10. cicada/extractors/dependency.py +39 -51
  11. cicada/extractors/doc.py +8 -9
  12. cicada/extractors/function.py +12 -24
  13. cicada/extractors/module.py +11 -15
  14. cicada/extractors/spec.py +8 -12
  15. cicada/find_dead_code.py +15 -39
  16. cicada/formatter.py +37 -91
  17. cicada/git_helper.py +22 -34
  18. cicada/indexer.py +122 -107
  19. cicada/interactive_setup.py +490 -0
  20. cicada/keybert_extractor.py +286 -0
  21. cicada/keyword_search.py +22 -30
  22. cicada/keyword_test.py +127 -0
  23. cicada/lightweight_keyword_extractor.py +5 -13
  24. cicada/mcp_entry.py +683 -0
  25. cicada/mcp_server.py +103 -209
  26. cicada/parser.py +9 -9
  27. cicada/pr_finder.py +15 -19
  28. cicada/pr_indexer/__init__.py +3 -3
  29. cicada/pr_indexer/cli.py +4 -9
  30. cicada/pr_indexer/github_api_client.py +22 -37
  31. cicada/pr_indexer/indexer.py +17 -29
  32. cicada/pr_indexer/line_mapper.py +8 -12
  33. cicada/pr_indexer/pr_index_builder.py +22 -34
  34. cicada/setup.py +189 -87
  35. cicada/utils/__init__.py +9 -9
  36. cicada/utils/call_site_formatter.py +4 -6
  37. cicada/utils/function_grouper.py +4 -4
  38. cicada/utils/hash_utils.py +12 -15
  39. cicada/utils/index_utils.py +15 -15
  40. cicada/utils/path_utils.py +24 -29
  41. cicada/utils/signature_builder.py +3 -3
  42. cicada/utils/subprocess_runner.py +17 -19
  43. cicada/utils/text_utils.py +1 -2
  44. cicada/version_check.py +2 -5
  45. {cicada_mcp-0.1.7.dist-info → cicada_mcp-0.2.0.dist-info}/METADATA +144 -55
  46. cicada_mcp-0.2.0.dist-info/RECORD +53 -0
  47. cicada_mcp-0.2.0.dist-info/entry_points.txt +4 -0
  48. cicada/install.py +0 -741
  49. cicada_mcp-0.1.7.dist-info/RECORD +0 -47
  50. cicada_mcp-0.1.7.dist-info/entry_points.txt +0 -9
  51. {cicada_mcp-0.1.7.dist-info → cicada_mcp-0.2.0.dist-info}/WHEEL +0 -0
  52. {cicada_mcp-0.1.7.dist-info → cicada_mcp-0.2.0.dist-info}/licenses/LICENSE +0 -0
  53. {cicada_mcp-0.1.7.dist-info → cicada_mcp-0.2.0.dist-info}/top_level.txt +0 -0
cicada/utils/hash_utils.py CHANGED
@@ -10,7 +10,6 @@ import json
  import os
  from datetime import datetime, timezone
  from pathlib import Path
- from typing import Dict, List, Tuple


  def compute_file_hash(file_path: str) -> str:
@@ -38,12 +37,12 @@ def compute_file_hash(file_path: str) -> str:
                  hash_md5.update(chunk)
          return hash_md5.hexdigest()
      except FileNotFoundError:
-         raise FileNotFoundError(f"File not found: {file_path}")
+         raise FileNotFoundError(f"File not found: {file_path}") from None
      except Exception as e:
-         raise IOError(f"Error reading file {file_path}: {e}")
+         raise OSError(f"Error reading file {file_path}: {e}") from e


- def load_file_hashes(cicada_dir: str) -> Dict[str, str]:
+ def load_file_hashes(cicada_dir: str) -> dict[str, str]:
      """
      Load file hashes from .cicada/hashes.json.

@@ -60,15 +59,15 @@ def load_file_hashes(cicada_dir: str) -> Dict[str, str]:
          return {}

      try:
-         with open(hashes_path, "r", encoding="utf-8") as f:
+         with open(hashes_path, encoding="utf-8") as f:
              data = json.load(f)
          return data.get("hashes", {})
-     except (json.JSONDecodeError, IOError) as e:
+     except (OSError, json.JSONDecodeError) as e:
          print(f"Warning: Could not load hashes.json: {e}")
          return {}


- def save_file_hashes(cicada_dir: str, hashes: Dict[str, str]) -> None:
+ def save_file_hashes(cicada_dir: str, hashes: dict[str, str]) -> None:
      """
      Save file hashes to .cicada/hashes.json.

@@ -90,13 +89,13 @@ def save_file_hashes(cicada_dir: str, hashes: Dict[str, str]) -> None:
      try:
          with open(hashes_path, "w", encoding="utf-8") as f:
              json.dump(data, f, indent=2)
-     except IOError as e:
+     except OSError as e:
          print(f"Warning: Could not save hashes.json: {e}")


  def detect_file_changes(
-     files: List[str], old_hashes: Dict[str, str], repo_path: str | None = None
- ) -> Tuple[List[str], List[str], List[str]]:
+     files: list[str], old_hashes: dict[str, str], repo_path: str | None = None
+ ) -> tuple[list[str], list[str], list[str]]:
      """
      Detect new, modified, and deleted files by comparing hashes.

@@ -137,7 +136,7 @@ def detect_file_changes(
              current_hash = compute_file_hash(full_path)
              if current_hash != old_hashes[file_path]:
                  modified_files.append(file_path)
-         except (FileNotFoundError, IOError) as e:
+         except (OSError, FileNotFoundError) as e:
              # File might have been deleted after listing
              print(f"Warning: Could not hash {file_path}: {e}")
              deleted_files.append(file_path)
@@ -145,9 +144,7 @@ def detect_file_changes(
      return new_files, modified_files, deleted_files


- def compute_hashes_for_files(
-     files: List[str], repo_path: str | None = None
- ) -> Dict[str, str]:
+ def compute_hashes_for_files(files: list[str], repo_path: str | None = None) -> dict[str, str]:
      """
      Compute MD5 hashes for a list of files.

@@ -167,7 +164,7 @@ def compute_hashes_for_files(

      try:
          hashes[file_path] = compute_file_hash(full_path)
-     except (FileNotFoundError, IOError) as e:
+     except (OSError, FileNotFoundError) as e:
          print(f"Warning: Could not hash {file_path}: {e}")

      return hashes
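The hash_utils hunks swap IOError for OSError (IOError has been an alias of OSError since Python 3.3, so behavior is unchanged) and add explicit exception chaining. A minimal standalone sketch of the chaining pattern, not taken from the package (read_bytes is a hypothetical helper):

    def read_bytes(path: str) -> bytes:
        try:
            with open(path, "rb") as f:
                return f.read()
        except FileNotFoundError:
            # "from None" suppresses the implicit "During handling of the above
            # exception, another exception occurred" chain in the traceback.
            raise FileNotFoundError(f"File not found: {path}") from None
        except Exception as e:
            # "from e" preserves the original error as __cause__ for debugging.
            raise OSError(f"Error reading file {path}: {e}") from e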
cicada/utils/index_utils.py CHANGED
@@ -8,14 +8,14 @@ JSON index files with consistent error handling.
  import json
  import sys
  from pathlib import Path
- from typing import Optional, Dict, Any, Union
+ from typing import Any


  def load_index(
-     index_path: Union[str, Path],
+     index_path: str | Path,
      verbose: bool = False,
      raise_on_error: bool = False,
- ) -> Optional[Dict[str, Any]]:
+ ) -> dict[str, Any] | None:
      """
      Load a JSON index file.

@@ -42,7 +42,7 @@ def load_index(
          return None

      try:
-         with open(index_file, "r") as f:
+         with open(index_file) as f:
              return json.load(f)
      except json.JSONDecodeError as e:
          if raise_on_error:
@@ -50,7 +50,7 @@
          if verbose:
              print(f"Warning: Could not parse index: {e}", file=sys.stderr)
          return None
-     except IOError as e:
+     except OSError as e:
          if raise_on_error:
              raise
          if verbose:
@@ -59,8 +59,8 @@


  def save_index(
-     index: Dict[str, Any],
-     output_path: Union[str, Path],
+     index: dict[str, Any],
+     output_path: str | Path,
      indent: int = 2,
      create_dirs: bool = True,
      verbose: bool = False,
@@ -93,8 +93,8 @@ def save_index(

  def validate_index_structure(
      index: Any,
-     required_keys: Optional[list[str]] = None,
- ) -> tuple[bool, Optional[str]]:
+     required_keys: list[str] | None = None,
+ ) -> tuple[bool, str | None]:
      """
      Validate the structure of an index dictionary.

@@ -133,9 +133,9 @@


  def merge_indexes(
-     *indexes: Dict[str, Any],
+     *indexes: dict[str, Any],
      strategy: str = "last_wins",
- ) -> Dict[str, Any]:
+ ) -> dict[str, Any]:
      """
      Merge multiple index dictionaries.

@@ -180,7 +180,7 @@ def merge_indexes(
      return merged


- def get_index_stats(index: Dict[str, Any]) -> Dict[str, Any]:
+ def get_index_stats(index: dict[str, Any]) -> dict[str, Any]:
      """
      Get statistics about an index.

@@ -226,10 +226,10 @@ def get_index_stats(index: Dict[str, Any]) -> Dict[str, Any]:


  def merge_indexes_incremental(
-     old_index: Dict[str, Any],
-     new_index: Dict[str, Any],
+     old_index: dict[str, Any],
+     new_index: dict[str, Any],
      deleted_files: list[str],
- ) -> Dict[str, Any]:
+ ) -> dict[str, Any]:
      """
      Merge old and new indexes for incremental reindexing.

cicada/utils/path_utils.py CHANGED
@@ -6,11 +6,10 @@ functions used throughout the codebase.
  """

  from pathlib import Path
- from typing import Optional, Union


  def normalize_file_path(
-     file_path: Union[str, Path],
+     file_path: str | Path,
      strip_leading_dot: bool = True,
      strip_trailing_whitespace: bool = True,
  ) -> str:
@@ -43,8 +42,8 @@


  def resolve_to_repo_root(
-     file_path: Union[str, Path],
-     repo_root: Union[str, Path],
+     file_path: str | Path,
+     repo_root: str | Path,
  ) -> Path:
      """
      Resolve a file path relative to repository root.
@@ -77,12 +76,12 @@ def resolve_to_repo_root(
      try:
          return file_path_obj.relative_to(repo_root_obj)
      except ValueError:
-         raise ValueError(f"File path {file_path} is not within repository {repo_root}")
+         raise ValueError(f"File path {file_path} is not within repository {repo_root}") from None


  def match_file_path(
-     candidate: Union[str, Path],
-     target: Union[str, Path],
+     candidate: str | Path,
+     target: str | Path,
      normalize: bool = True,
  ) -> bool:
      """
@@ -122,13 +121,10 @@ def match_file_path(
          return True

      # Target ends with candidate (partial path provided)
-     if target_str.endswith(candidate_str):
-         return True
-
-     return False
+     return bool(target_str.endswith(candidate_str))


- def find_repo_root(start_path: Optional[Union[str, Path]] = None) -> Optional[Path]:
+ def find_repo_root(start_path: str | Path | None = None) -> Path | None:
      """
      Find the git repository root starting from a given path.

@@ -142,10 +138,7 @@ def find_repo_root(start_path: Optional[Union[str, Path]] = None) -> Optional[Pa
          find_repo_root('/repo/lib/user') -> Path('/repo')
          find_repo_root('/not/a/repo') -> None
      """
-     if start_path is None:
-         current = Path.cwd()
-     else:
-         current = Path(start_path).resolve()
+     current = Path.cwd() if start_path is None else Path(start_path).resolve()

      # Walk up the directory tree looking for .git
      for parent in [current] + list(current.parents):
@@ -156,8 +149,8 @@ def find_repo_root(start_path: Optional[Union[str, Path]] = None) -> Optional[Pa


  def ensure_relative_to_repo(
-     file_path: Union[str, Path],
-     repo_root: Union[str, Path],
+     file_path: str | Path,
+     repo_root: str | Path,
  ) -> str:
      """
      Ensure a file path is relative to the repository root.
@@ -181,7 +174,7 @@ def ensure_relative_to_repo(
      return normalize_file_path(resolved)


- def ensure_gitignore_has_cicada(repo_root: Union[str, Path]) -> bool:
+ def ensure_gitignore_has_cicada(repo_root: str | Path) -> bool:
      """
      Ensure .gitignore contains .cicada/ directory entry.

@@ -207,7 +200,7 @@ def ensure_gitignore_has_cicada(repo_root: Union[str, Path]) -> bool:

      try:
          # Read existing .gitignore
-         with open(gitignore_path, "r") as f:
+         with open(gitignore_path) as f:
              content = f.read()

          # Check if .cicada/ is already present in actual gitignore patterns
@@ -215,15 +208,17 @@ def ensure_gitignore_has_cicada(repo_root: Union[str, Path]) -> bool:
          for line in content.splitlines():
              # Strip whitespace and skip empty lines and comments
              stripped = line.strip()
-             if stripped and not stripped.startswith("#"):
-                 # Check if this line contains .cicada as a gitignore pattern
-                 # Valid patterns: .cicada, .cicada/, /.cicada, /.cicada/, **/.cicada/, etc.
-                 if (
+             # Check if this line contains .cicada as a gitignore pattern
+             # Valid patterns: .cicada, .cicada/, /.cicada, /.cicada/, **/.cicada/, etc.
+             if (
+                 stripped
+                 and not stripped.startswith("#")
+                 and (
                      stripped in (".cicada", ".cicada/")
-                     or stripped.endswith("/.cicada")
-                     or stripped.endswith("/.cicada/")
-                 ):
-                     return False
+                     or stripped.endswith(("/.cicada", "/.cicada/"))
+                 )
+             ):
+                 return False

          # Add .cicada/ to .gitignore
          with open(gitignore_path, "a") as f:
@@ -235,6 +230,6 @@ def ensure_gitignore_has_cicada(repo_root: Union[str, Path]) -> bool:

          return True

-     except (IOError, OSError):
+     except OSError:
          # Fail silently if we can't read/write the file
          return False
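In ensure_gitignore_has_cicada, the nested conditions collapse into a single boolean expression, and the two endswith calls merge by passing a tuple of suffixes. A small self-contained sketch of that check (mentions_cicada is a hypothetical name used only for illustration):

    def mentions_cicada(stripped: str) -> bool:
        # str.endswith accepts a tuple of suffixes, replacing the two chained "or" clauses.
        return stripped in (".cicada", ".cicada/") or stripped.endswith(
            ("/.cicada", "/.cicada/")
        )

    assert mentions_cicada(".cicada/")
    assert mentions_cicada("**/.cicada/")
    assert not mentions_cicada("node_modules/")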
cicada/utils/signature_builder.py CHANGED
@@ -5,7 +5,7 @@ This module provides utilities for formatting function signatures,
  eliminating duplication across the formatter module.
  """

- from typing import Dict, List, Any
+ from typing import Any


  class SignatureBuilder:
@@ -17,7 +17,7 @@ class SignatureBuilder:
      """

      @staticmethod
-     def build(func: Dict[str, Any]) -> str:
+     def build(func: dict[str, Any]) -> str:
          """
          Build a formatted function signature.

@@ -78,7 +78,7 @@ class SignatureBuilder:
          return signature

      @staticmethod
-     def _format_args_with_types(args_with_types: List[Dict[str, str]]) -> str:
+     def _format_args_with_types(args_with_types: list[dict[str, str]]) -> str:
          """
          Format arguments with type annotations.

cicada/utils/subprocess_runner.py CHANGED
@@ -5,10 +5,10 @@ This module provides centralized subprocess execution with consistent
  error handling and logging patterns.
  """

+ import shlex
  import subprocess
  import sys
  from pathlib import Path
- from typing import Optional, List, Union


  class SubprocessRunner:
@@ -20,7 +20,7 @@ class SubprocessRunner:
      error handling.
      """

-     def __init__(self, cwd: Optional[Union[str, Path]] = None, verbose: bool = False):
+     def __init__(self, cwd: str | Path | None = None, verbose: bool = False):
          """
          Initialize the subprocess runner.

@@ -33,11 +33,11 @@ class SubprocessRunner:

      def run(
          self,
-         cmd: Union[str, List[str]],
+         cmd: str | list[str],
          capture_output: bool = True,
          text: bool = True,
          check: bool = True,
-         timeout: Optional[int] = None,
+         timeout: int | None = None,
      ) -> subprocess.CompletedProcess:
          """
          Run a subprocess command with error handling.
@@ -56,6 +56,10 @@ class SubprocessRunner:
              subprocess.CalledProcessError: If command fails and check=True
              subprocess.TimeoutExpired: If timeout is reached
          """
+         # Convert string commands to list for safety and compatibility
+         if isinstance(cmd, str):
+             cmd = shlex.split(cmd)
+
          try:
              result = subprocess.run(
                  cmd,
@@ -77,14 +81,14 @@ class SubprocessRunner:
                  if e.stderr:
                      print(f"Error: {e.stderr}", file=sys.stderr)
              raise
-         except subprocess.TimeoutExpired as e:
+         except subprocess.TimeoutExpired:
              if self.verbose:
                  print(f"Command timed out: {cmd}", file=sys.stderr)
              raise

      def run_git_command(
          self,
-         args: Union[str, List[str]],
+         args: str | list[str],
          check: bool = True,
      ) -> subprocess.CompletedProcess:
          """
@@ -101,16 +105,13 @@ class SubprocessRunner:
              runner.run_git_command(['status', '--short'])
              runner.run_git_command('log --oneline -n 5')
          """
-         if isinstance(args, str):
-             cmd = f"git {args}"
-         else:
-             cmd = ["git"] + args
+         cmd = f"git {args}" if isinstance(args, str) else ["git"] + args

          return self.run(cmd, check=check)

      def run_gh_command(
          self,
-         args: Union[str, List[str]],
+         args: str | list[str],
          check: bool = True,
      ) -> subprocess.CompletedProcess:
          """
@@ -127,10 +128,7 @@ class SubprocessRunner:
              runner.run_gh_command(['pr', 'list'])
              runner.run_gh_command('api repos/owner/repo/pulls')
          """
-         if isinstance(args, str):
-             cmd = f"gh {args}"
-         else:
-             cmd = ["gh"] + args
+         cmd = f"gh {args}" if isinstance(args, str) else ["gh"] + args

          return self.run(cmd, check=check)

@@ -139,8 +137,8 @@


  def run_git_command(
-     args: Union[str, List[str]],
-     cwd: Optional[Union[str, Path]] = None,
+     args: str | list[str],
+     cwd: str | Path | None = None,
      check: bool = True,
      verbose: bool = False,
  ) -> subprocess.CompletedProcess:
@@ -161,8 +159,8 @@ def run_git_command(


  def run_gh_command(
-     args: Union[str, List[str]],
-     cwd: Optional[Union[str, Path]] = None,
+     args: str | list[str],
+     cwd: str | Path | None = None,
      check: bool = True,
      verbose: bool = False,
  ) -> subprocess.CompletedProcess:
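SubprocessRunner.run now normalizes string commands with shlex.split before calling subprocess.run, per the new "safety and compatibility" comment. A minimal sketch of what that split buys (the command shown is arbitrary and assumes git is on PATH):

    import shlex
    import subprocess

    cmd = 'git log --format="%h %s" -n 1'
    argv = shlex.split(cmd)  # ['git', 'log', '--format=%h %s', '-n', '1']
    # Quoted arguments survive as single list items, so shell=True is never needed.
    result = subprocess.run(argv, capture_output=True, text=True, check=False)
    print(result.returncode)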
cicada/utils/text_utils.py CHANGED
@@ -6,10 +6,9 @@ including splitting camelCase, PascalCase, and snake_case identifiers.
  """

  import re
- from typing import List


- def split_identifier(identifier: str, lowercase: bool = True) -> List[str]:
+ def split_identifier(identifier: str, lowercase: bool = True) -> list[str]:
      """
      Split an identifier by camelCase, PascalCase, and snake_case.

cicada/version_check.py CHANGED
@@ -5,7 +5,6 @@ Checks if a newer version of cicada is available on GitHub.
  """

  import subprocess
- from typing import Optional


  def get_current_version() -> str:
@@ -20,7 +19,7 @@ def get_current_version() -> str:
      return __version__


- def get_latest_github_tag(repo: str = "wende/cicada") -> Optional[str]:
+ def get_latest_github_tag(repo: str = "wende/cicada") -> str | None:
      """
      Get the latest tag from GitHub repository.

@@ -106,9 +105,7 @@ def check_for_updates() -> None:
              f"{yellow}⚠️ A newer version of cicada is available: v{latest} (current: v{current}){reset}"
          )
          print(f"{yellow} To update, run:{reset}")
-         print(
-             f"{yellow} uv tool install git+https://github.com/wende/cicada.git{reset}"
-         )
+         print(f"{yellow} uv tool install git+https://github.com/wende/cicada.git{reset}")
          print()

      except Exception: