mcp-vector-search 0.0.3__py3-none-any.whl → 0.4.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mcp-vector-search has been flagged as potentially problematic; review the advisory details before upgrading.

Files changed (49)
  1. mcp_vector_search/__init__.py +3 -2
  2. mcp_vector_search/cli/commands/auto_index.py +397 -0
  3. mcp_vector_search/cli/commands/config.py +88 -40
  4. mcp_vector_search/cli/commands/index.py +198 -52
  5. mcp_vector_search/cli/commands/init.py +471 -58
  6. mcp_vector_search/cli/commands/install.py +284 -0
  7. mcp_vector_search/cli/commands/mcp.py +495 -0
  8. mcp_vector_search/cli/commands/search.py +241 -87
  9. mcp_vector_search/cli/commands/status.py +184 -58
  10. mcp_vector_search/cli/commands/watch.py +34 -35
  11. mcp_vector_search/cli/didyoumean.py +184 -0
  12. mcp_vector_search/cli/export.py +320 -0
  13. mcp_vector_search/cli/history.py +292 -0
  14. mcp_vector_search/cli/interactive.py +342 -0
  15. mcp_vector_search/cli/main.py +175 -27
  16. mcp_vector_search/cli/output.py +63 -45
  17. mcp_vector_search/config/defaults.py +50 -36
  18. mcp_vector_search/config/settings.py +49 -35
  19. mcp_vector_search/core/auto_indexer.py +298 -0
  20. mcp_vector_search/core/connection_pool.py +322 -0
  21. mcp_vector_search/core/database.py +335 -25
  22. mcp_vector_search/core/embeddings.py +73 -29
  23. mcp_vector_search/core/exceptions.py +19 -2
  24. mcp_vector_search/core/factory.py +310 -0
  25. mcp_vector_search/core/git_hooks.py +345 -0
  26. mcp_vector_search/core/indexer.py +237 -73
  27. mcp_vector_search/core/models.py +21 -19
  28. mcp_vector_search/core/project.py +73 -58
  29. mcp_vector_search/core/scheduler.py +330 -0
  30. mcp_vector_search/core/search.py +574 -86
  31. mcp_vector_search/core/watcher.py +48 -46
  32. mcp_vector_search/mcp/__init__.py +4 -0
  33. mcp_vector_search/mcp/__main__.py +25 -0
  34. mcp_vector_search/mcp/server.py +701 -0
  35. mcp_vector_search/parsers/base.py +30 -31
  36. mcp_vector_search/parsers/javascript.py +74 -48
  37. mcp_vector_search/parsers/python.py +57 -49
  38. mcp_vector_search/parsers/registry.py +47 -32
  39. mcp_vector_search/parsers/text.py +179 -0
  40. mcp_vector_search/utils/__init__.py +40 -0
  41. mcp_vector_search/utils/gitignore.py +229 -0
  42. mcp_vector_search/utils/timing.py +334 -0
  43. mcp_vector_search/utils/version.py +47 -0
  44. {mcp_vector_search-0.0.3.dist-info → mcp_vector_search-0.4.12.dist-info}/METADATA +173 -7
  45. mcp_vector_search-0.4.12.dist-info/RECORD +54 -0
  46. mcp_vector_search-0.0.3.dist-info/RECORD +0 -35
  47. {mcp_vector_search-0.0.3.dist-info → mcp_vector_search-0.4.12.dist-info}/WHEEL +0 -0
  48. {mcp_vector_search-0.0.3.dist-info → mcp_vector_search-0.4.12.dist-info}/entry_points.txt +0 -0
  49. {mcp_vector_search-0.0.3.dist-info → mcp_vector_search-0.4.12.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,184 @@
1
+ """Enhanced CLI with 'did you mean' functionality for better user experience."""
2
+
3
+ import click
4
+ import typer
5
+ from click_didyoumean import DYMGroup
6
+ from typing import Any, Dict, Optional
7
+
8
+
9
class DidYouMeanTyper(typer.Typer):
    """Typer subclass whose CLI group suggests similar commands on a typo."""

    # Keyword arguments consumed by ``typer.Typer.__init__`` itself; anything
    # else is held back and forwarded to the underlying Click group instead.
    _TYPER_ONLY = frozenset({
        'name', 'help', 'epilog', 'short_help', 'options_metavar',
        'add_completion', 'context_settings', 'callback', 'invoke_without_command',
        'no_args_is_help', 'subcommand_metavar', 'chain', 'result_callback',
        'deprecated', 'rich_markup_mode', 'rich_help_panel', 'pretty_exceptions_enable',
        'pretty_exceptions_show_locals', 'pretty_exceptions_short',
    })

    def __init__(self, *args, **kwargs):
        """Split keyword arguments between Typer and Click, then initialize.

        Typer-specific options go to ``typer.Typer.__init__``; everything
        else is stashed for the ``DYMGroup`` built later in :meth:`__call__`.
        """
        typer_kwargs = {k: v for k, v in kwargs.items() if k in self._TYPER_ONLY}
        click_kwargs = {k: v for k, v in kwargs.items() if k not in self._TYPER_ONLY}

        super().__init__(*args, **typer_kwargs)

        # Saved for the DYMGroup created in __call__.
        self._click_kwargs = click_kwargs

    def __call__(self, *args, **kwargs):
        """Build the Click group as usual, then rewrap it as a ``DYMGroup``."""
        base_group = super().__call__(*args, **kwargs)

        # Fresh DYMGroup carrying over the group's identity plus the Click
        # kwargs we held back at construction time.
        suggesting_group = DYMGroup(
            name=base_group.name,
            commands=base_group.commands,
            **self._click_kwargs,
        )

        # Mirror every public attribute of the original group onto the
        # wrapper, skipping read-only/computed attributes that reject setattr.
        for attr_name in dir(base_group):
            if attr_name.startswith('_') or attr_name in ['commands', 'name']:
                continue
            try:
                setattr(suggesting_group, attr_name, getattr(base_group, attr_name))
            except (AttributeError, TypeError):
                pass  # cannot be set on the wrapper — leave the default

        return suggesting_group
61
+
62
+
63
class DidYouMeanGroup(DYMGroup):
    """Click group that augments 'No such command' errors with the full
    list of available commands (on top of DYMGroup's fuzzy suggestions).
    """

    # NOTE: the previous no-op ``__init__`` (which only called
    # ``super().__init__``) has been removed — it added nothing.

    def resolve_command(self, ctx: click.Context, args: list) -> tuple:
        """Resolve a command, enriching unknown-command errors.

        Args:
            ctx: Active Click context.
            args: Remaining command-line arguments.

        Returns:
            The ``(name, command, remaining_args)`` tuple from Click's
            normal resolution.

        Raises:
            click.UsageError: If the command cannot be resolved; when the
                error is a "No such command" failure and this group has any
                registered commands, the message is extended with the sorted
                list of known commands.
        """
        try:
            return super().resolve_command(ctx, args)
        except click.UsageError as e:
            if "No such command" in str(e):
                available_commands = list(self.commands.keys())
                if available_commands:
                    commands_list = ", ".join(f"'{cmd}'" for cmd in sorted(available_commands))
                    enhanced_msg = f"{str(e)}\n\nAvailable commands: {commands_list}"
                    # Chain the original error so tracebacks stay informative.
                    raise click.UsageError(enhanced_msg, ctx=ctx) from e
            raise
83
+
84
+
85
def create_enhanced_typer(**kwargs) -> typer.Typer:
    """Build a :class:`DidYouMeanTyper` pre-configured with UX-friendly defaults.

    Defaults applied unless overridden by the caller:
    ``no_args_is_help=True``, ``add_completion=False``,
    ``rich_markup_mode='rich'``.

    Returns:
        The configured :class:`DidYouMeanTyper` instance.
    """
    options: Dict[str, Any] = {
        'no_args_is_help': True,
        'add_completion': False,
        'rich_markup_mode': 'rich',
    }
    # Caller-supplied kwargs win over the defaults.
    options.update(kwargs)

    return DidYouMeanTyper(**options)
101
+
102
+
103
def enhance_existing_typer(app: typer.Typer) -> typer.Typer:
    """Retrofit an existing Typer app with 'did you mean' functionality.

    The app's Click-group creation is intercepted so the group it produces is
    replaced by a :class:`DidYouMeanGroup` carrying the same configuration.

    Args:
        app: The Typer application to enhance (modified in place).

    Returns:
        The same ``app`` instance, now producing ``DidYouMeanGroup`` groups.
    """
    original_call = app.__call__  # bound method of the original class

    def enhanced_call(self, *args, **kwargs):
        """Create the Click group as before, then rewrap it."""
        click_group = original_call(*args, **kwargs)

        return DidYouMeanGroup(
            name=click_group.name,
            commands=click_group.commands,
            callback=click_group.callback,
            params=click_group.params,
            help=click_group.help,
            epilog=click_group.epilog,
            short_help=click_group.short_help,
            options_metavar=click_group.options_metavar,
            add_help_option=click_group.add_help_option,
            context_settings=click_group.context_settings,
            invoke_without_command=click_group.invoke_without_command,
            no_args_is_help=click_group.no_args_is_help,
            subcommand_metavar=click_group.subcommand_metavar,
            chain=click_group.chain,
            result_callback=click_group.result_callback,
            deprecated=click_group.deprecated,
        )

    # BUG FIX: dunder methods are looked up on the *type*, not the instance,
    # so the previous ``app.__call__ = enhanced_call`` was silently ignored
    # whenever the app was invoked as ``app()``.  Swap in a one-off subclass
    # of the app's class so the override is actually honoured, without
    # affecting any other Typer instance.
    app.__class__ = type(type(app).__name__, (type(app),), {"__call__": enhanced_call})
    return app
138
+
139
+
140
def add_common_suggestions(ctx: click.Context, command_name: str) -> None:
    """Echo a 'Did you mean' hint for well-known typos and synonyms.

    Args:
        ctx: Active Click context (currently unused; kept for call-site
            compatibility).
        command_name: The command name the user actually typed; matched
            case-insensitively.
    """
    # Maps common typos/synonyms to the canonical command (or option).
    # BUG FIX: the original dict literal contained a duplicate 'setup' key
    # ('setup': 'init' was dead code, silently shadowed by 'setup': 'install');
    # the dead entry has been removed.
    common_typos = {
        'serach': 'search',
        'seach': 'search',
        'searh': 'search',
        'find': 'search',
        'indx': 'index',
        'idx': 'index',
        'reindex': 'index --force',
        'stat': 'status',
        'stats': 'status',
        'info': 'status',
        'conf': 'config',
        'cfg': 'config',
        'setting': 'config',
        'settings': 'config',
        'init': 'init',
        'initialize': 'init',
        'start': 'init',
        'watch': 'watch',
        'monitor': 'watch',
        'auto': 'auto-index',
        'automatic': 'auto-index',
        'mcp': 'mcp',
        'claude': 'mcp',
        'server': 'mcp',
        'install': 'install',
        'setup': 'install',
        'demo': 'demo',
        'example': 'demo',
        'test': 'mcp test',
        'check': 'status',
        'doctor': 'doctor',
        'health': 'doctor',
        'version': 'version',
        'ver': 'version',
        'help': '--help',
        'h': '--help',
    }

    suggestion = common_typos.get(command_name.lower())
    if suggestion is not None:
        click.echo(f"\nDid you mean: mcp-vector-search {suggestion}?", err=True)
@@ -0,0 +1,320 @@
1
+ """Export functionality for search results."""
2
+
3
+ import csv
4
+ import json
5
+ from datetime import datetime
6
+ from pathlib import Path
7
+
8
+ from rich.console import Console
9
+
10
+ from ..core.models import SearchResult
11
+ from .output import print_error, print_success
12
+
13
+ console = Console()
14
+
15
+
16
class SearchResultExporter:
    """Export search results to JSON, CSV, Markdown, or a JSON summary.

    Every ``export_*`` method is best-effort: on failure it reports the
    problem via ``print_error`` and returns ``False`` instead of raising, so
    a broken export never aborts the surrounding CLI command.
    """

    # NOTE: the previous no-op ``__init__`` (body: ``pass``) was removed;
    # the implicit default constructor is equivalent.

    @staticmethod
    def _result_to_dict(result: SearchResult, include_metadata: bool) -> dict:
        """Convert one search result into a JSON-serializable dict.

        Optional fields (function/class name, content, location) are emitted
        only when present/requested so the output stays compact.
        """
        result_data = {
            "rank": result.rank,
            "file_path": str(result.file_path),
            "similarity_score": result.similarity_score,
            "start_line": result.start_line,
            "end_line": result.end_line,
            "language": result.language,
            "chunk_type": result.chunk_type,
        }

        if result.function_name:
            result_data["function_name"] = result.function_name
        if result.class_name:
            result_data["class_name"] = result.class_name
        if result.content:
            result_data["content"] = result.content

        if include_metadata:
            result_data["location"] = result.location

        return result_data

    def export_to_json(
        self,
        results: list[SearchResult],
        output_path: Path,
        query: str,
        include_metadata: bool = True,
    ) -> bool:
        """Export results to JSON format.

        Args:
            results: Search results to export
            output_path: Output file path
            query: Original search query
            include_metadata: Whether to include the ``location`` field

        Returns:
            True if successful
        """
        try:
            export_data = {
                "query": query,
                "timestamp": datetime.now().isoformat(),
                "total_results": len(results),
                "results": [
                    self._result_to_dict(result, include_metadata)
                    for result in results
                ],
            }

            with open(output_path, "w", encoding="utf-8") as f:
                json.dump(export_data, f, indent=2, ensure_ascii=False)

            print_success(f"Exported {len(results)} results to {output_path}")
            return True

        except Exception as e:
            # Deliberately broad: report any I/O/serialization error instead
            # of raising (best-effort contract).
            print_error(f"Failed to export to JSON: {e}")
            return False

    def export_to_csv(
        self, results: list[SearchResult], output_path: Path, query: str
    ) -> bool:
        """Export results to CSV format.

        Args:
            results: Search results to export
            output_path: Output file path
            query: Original search query

        Returns:
            True if successful
        """
        try:
            fieldnames = [
                "rank",
                "file_path",
                "similarity_score",
                "start_line",
                "end_line",
                "language",
                "chunk_type",
                "function_name",
                "class_name",
                "location",
            ]

            with open(output_path, "w", newline="", encoding="utf-8") as f:
                writer = csv.DictWriter(f, fieldnames=fieldnames)
                writer.writeheader()

                # First data row carries query/timestamp/count as '#'-prefixed
                # pseudo-comments (CSV has no real comment syntax).
                writer.writerow(
                    {
                        "rank": f"# Query: {query}",
                        "file_path": f"# Timestamp: {datetime.now().isoformat()}",
                        "similarity_score": f"# Total Results: {len(results)}",
                        "start_line": "",
                        "end_line": "",
                        "language": "",
                        "chunk_type": "",
                        "function_name": "",
                        "class_name": "",
                        "location": "",
                    }
                )

                for result in results:
                    writer.writerow(
                        {
                            "rank": result.rank,
                            "file_path": str(result.file_path),
                            # Fixed precision keeps the column readable/sortable.
                            "similarity_score": f"{result.similarity_score:.4f}",
                            "start_line": result.start_line,
                            "end_line": result.end_line,
                            "language": result.language,
                            "chunk_type": result.chunk_type,
                            "function_name": result.function_name or "",
                            "class_name": result.class_name or "",
                            "location": result.location,
                        }
                    )

            print_success(f"Exported {len(results)} results to {output_path}")
            return True

        except Exception as e:
            print_error(f"Failed to export to CSV: {e}")
            return False

    def export_to_markdown(
        self,
        results: list[SearchResult],
        output_path: Path,
        query: str,
        include_content: bool = True,
    ) -> bool:
        """Export results to Markdown format.

        Args:
            results: Search results to export
            output_path: Output file path
            query: Original search query
            include_content: Whether to include code content in fenced blocks

        Returns:
            True if successful
        """
        try:
            with open(output_path, "w", encoding="utf-8") as f:
                # Document header with query metadata.
                f.write("# Search Results\n\n")
                f.write(f"**Query:** `{query}`\n")
                f.write(
                    f"**Timestamp:** {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n"
                )
                f.write(f"**Total Results:** {len(results)}\n\n")

                # One section per result, separated by horizontal rules.
                for result in results:
                    f.write(f"## {result.rank}. {result.file_path.name}\n\n")

                    f.write(f"- **File:** `{result.file_path}`\n")
                    f.write(
                        f"- **Location:** Lines {result.start_line}-{result.end_line}\n"
                    )
                    f.write(f"- **Similarity:** {result.similarity_score:.2%}\n")
                    f.write(f"- **Language:** {result.language}\n")

                    if result.function_name:
                        f.write(f"- **Function:** `{result.function_name}()`\n")
                    if result.class_name:
                        f.write(f"- **Class:** `{result.class_name}`\n")

                    f.write("\n")

                    # Fenced code block tagged with the result's language.
                    if include_content and result.content:
                        f.write(f"```{result.language}\n")
                        f.write(result.content)
                        f.write("\n```\n\n")

                    f.write("---\n\n")

            print_success(f"Exported {len(results)} results to {output_path}")
            return True

        except Exception as e:
            print_error(f"Failed to export to Markdown: {e}")
            return False

    def export_summary_table(
        self, results: list[SearchResult], output_path: Path, query: str
    ) -> bool:
        """Export a JSON summary (statistics + distributions) of the results.

        Args:
            results: Search results to export
            output_path: Output file path
            query: Original search query

        Returns:
            True if successful
        """
        try:
            # Frequency tables over the result set.
            languages = {}
            files = {}
            functions = {}
            classes = {}

            for result in results:
                languages[result.language] = languages.get(result.language, 0) + 1
                files[result.file_path.name] = files.get(result.file_path.name, 0) + 1

                if result.function_name:
                    functions[result.function_name] = (
                        functions.get(result.function_name, 0) + 1
                    )
                if result.class_name:
                    classes[result.class_name] = classes.get(result.class_name, 0) + 1

            # Guard against division by zero for an empty result set.
            avg_similarity = (
                sum(r.similarity_score for r in results) / len(results)
                if results
                else 0
            )

            summary_data = {
                "query": query,
                "timestamp": datetime.now().isoformat(),
                "summary": {
                    "total_results": len(results),
                    "average_similarity": round(avg_similarity, 4),
                    "unique_files": len(files),
                    "unique_functions": len(functions),
                    "unique_classes": len(classes),
                },
                # Distributions sorted by descending count; top-10 caps keep
                # the summary small on large result sets.
                "distributions": {
                    "languages": dict(
                        sorted(languages.items(), key=lambda x: x[1], reverse=True)
                    ),
                    "top_files": dict(
                        sorted(files.items(), key=lambda x: x[1], reverse=True)[:10]
                    ),
                    "top_functions": dict(
                        sorted(functions.items(), key=lambda x: x[1], reverse=True)[:10]
                    ),
                    "top_classes": dict(
                        sorted(classes.items(), key=lambda x: x[1], reverse=True)[:10]
                    ),
                },
            }

            with open(output_path, "w", encoding="utf-8") as f:
                json.dump(summary_data, f, indent=2, ensure_ascii=False)

            print_success(
                f"Exported summary for {len(results)} results to {output_path}"
            )
            return True

        except Exception as e:
            print_error(f"Failed to export summary: {e}")
            return False
288
+
289
+
290
+ def get_export_path(format_type: str, query: str, base_dir: Path | None = None) -> Path:
291
+ """Generate export file path based on format and query.
292
+
293
+ Args:
294
+ format_type: Export format (json, csv, markdown, summary)
295
+ query: Search query
296
+ base_dir: Base directory for export
297
+
298
+ Returns:
299
+ Generated file path
300
+ """
301
+ if base_dir is None:
302
+ base_dir = Path.cwd()
303
+
304
+ # Sanitize query for filename
305
+ safe_query = "".join(
306
+ c for c in query if c.isalnum() or c in (" ", "-", "_")
307
+ ).rstrip()
308
+ safe_query = safe_query.replace(" ", "_")[:50] # Limit length
309
+
310
+ timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
311
+
312
+ extensions = {
313
+ "json": ".json",
314
+ "csv": ".csv",
315
+ "markdown": ".md",
316
+ "summary": "_summary.json",
317
+ }
318
+
319
+ filename = f"search_{safe_query}_{timestamp}{extensions.get(format_type, '.txt')}"
320
+ return base_dir / filename