mcp-vector-search 1.0.3__py3-none-any.whl → 1.1.22__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. mcp_vector_search/__init__.py +3 -3
  2. mcp_vector_search/analysis/__init__.py +48 -1
  3. mcp_vector_search/analysis/baseline/__init__.py +68 -0
  4. mcp_vector_search/analysis/baseline/comparator.py +462 -0
  5. mcp_vector_search/analysis/baseline/manager.py +621 -0
  6. mcp_vector_search/analysis/collectors/__init__.py +35 -0
  7. mcp_vector_search/analysis/collectors/cohesion.py +463 -0
  8. mcp_vector_search/analysis/collectors/coupling.py +1162 -0
  9. mcp_vector_search/analysis/collectors/halstead.py +514 -0
  10. mcp_vector_search/analysis/collectors/smells.py +325 -0
  11. mcp_vector_search/analysis/debt.py +516 -0
  12. mcp_vector_search/analysis/interpretation.py +685 -0
  13. mcp_vector_search/analysis/metrics.py +74 -1
  14. mcp_vector_search/analysis/reporters/__init__.py +3 -1
  15. mcp_vector_search/analysis/reporters/console.py +424 -0
  16. mcp_vector_search/analysis/reporters/markdown.py +480 -0
  17. mcp_vector_search/analysis/reporters/sarif.py +377 -0
  18. mcp_vector_search/analysis/storage/__init__.py +93 -0
  19. mcp_vector_search/analysis/storage/metrics_store.py +762 -0
  20. mcp_vector_search/analysis/storage/schema.py +245 -0
  21. mcp_vector_search/analysis/storage/trend_tracker.py +560 -0
  22. mcp_vector_search/analysis/trends.py +308 -0
  23. mcp_vector_search/analysis/visualizer/__init__.py +90 -0
  24. mcp_vector_search/analysis/visualizer/d3_data.py +534 -0
  25. mcp_vector_search/analysis/visualizer/exporter.py +484 -0
  26. mcp_vector_search/analysis/visualizer/html_report.py +2895 -0
  27. mcp_vector_search/analysis/visualizer/schemas.py +525 -0
  28. mcp_vector_search/cli/commands/analyze.py +665 -11
  29. mcp_vector_search/cli/commands/chat.py +193 -0
  30. mcp_vector_search/cli/commands/index.py +600 -2
  31. mcp_vector_search/cli/commands/index_background.py +467 -0
  32. mcp_vector_search/cli/commands/search.py +194 -1
  33. mcp_vector_search/cli/commands/setup.py +64 -13
  34. mcp_vector_search/cli/commands/status.py +302 -3
  35. mcp_vector_search/cli/commands/visualize/cli.py +26 -10
  36. mcp_vector_search/cli/commands/visualize/exporters/json_exporter.py +8 -4
  37. mcp_vector_search/cli/commands/visualize/graph_builder.py +167 -234
  38. mcp_vector_search/cli/commands/visualize/server.py +304 -15
  39. mcp_vector_search/cli/commands/visualize/templates/base.py +60 -6
  40. mcp_vector_search/cli/commands/visualize/templates/scripts.py +2100 -65
  41. mcp_vector_search/cli/commands/visualize/templates/styles.py +1297 -88
  42. mcp_vector_search/cli/didyoumean.py +5 -0
  43. mcp_vector_search/cli/main.py +16 -5
  44. mcp_vector_search/cli/output.py +134 -5
  45. mcp_vector_search/config/thresholds.py +89 -1
  46. mcp_vector_search/core/__init__.py +16 -0
  47. mcp_vector_search/core/database.py +39 -2
  48. mcp_vector_search/core/embeddings.py +24 -0
  49. mcp_vector_search/core/git.py +380 -0
  50. mcp_vector_search/core/indexer.py +445 -84
  51. mcp_vector_search/core/llm_client.py +9 -4
  52. mcp_vector_search/core/models.py +88 -1
  53. mcp_vector_search/core/relationships.py +473 -0
  54. mcp_vector_search/core/search.py +1 -1
  55. mcp_vector_search/mcp/server.py +795 -4
  56. mcp_vector_search/parsers/python.py +285 -5
  57. mcp_vector_search/utils/gitignore.py +0 -3
  58. {mcp_vector_search-1.0.3.dist-info → mcp_vector_search-1.1.22.dist-info}/METADATA +3 -2
  59. {mcp_vector_search-1.0.3.dist-info → mcp_vector_search-1.1.22.dist-info}/RECORD +62 -39
  60. mcp_vector_search/cli/commands/visualize.py.original +0 -2536
  61. {mcp_vector_search-1.0.3.dist-info → mcp_vector_search-1.1.22.dist-info}/WHEEL +0 -0
  62. {mcp_vector_search-1.0.3.dist-info → mcp_vector_search-1.1.22.dist-info}/entry_points.txt +0 -0
  63. {mcp_vector_search-1.0.3.dist-info → mcp_vector_search-1.1.22.dist-info}/licenses/LICENSE +0 -0
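The hunks below are from mcp_vector_search/cli/commands/index.py (+600 -2). They add a --background flag that detaches indexing into a separate process, index status and index cancel subcommands for managing that process, an --analyze/--no-analyze toggle that runs code analysis automatically after a --force reindex, and an index relationships subcommand together with --skip-relationships/--compute-relationships options that defer semantic-relationship computation to the visualizer by default. A small usage sketch for the shared progress file follows the diff.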
@@ -1,6 +1,11 @@
  """Index command for MCP Vector Search CLI."""

  import asyncio
+ import json
+ import os
+ import signal
+ import subprocess
+ import sys
  from pathlib import Path

  import typer
@@ -19,6 +24,7 @@ from ..output import (
  print_next_steps,
  print_success,
  print_tip,
+ print_warning,
  )

  # Create index subcommand app with callback for direct usage
@@ -38,6 +44,13 @@ def main(
  help="Watch for file changes and update index incrementally",
  rich_help_panel="⚙️ Advanced Options",
  ),
+ background: bool = typer.Option(
+ False,
+ "--background",
+ "-bg",
+ help="Run indexing in background (detached process)",
+ rich_help_panel="⚙️ Advanced Options",
+ ),
  incremental: bool = typer.Option(
  True,
  "--incremental/--full",
@@ -58,6 +71,12 @@ def main(
  help="Force reindexing of all files",
  rich_help_panel="📊 Indexing Options",
  ),
+ auto_analyze: bool = typer.Option(
+ True,
+ "--analyze/--no-analyze",
+ help="Automatically run analysis after force reindex",
+ rich_help_panel="📊 Indexing Options",
+ ),
  batch_size: int = typer.Option(
  32,
  "--batch-size",
@@ -74,12 +93,21 @@ def main(
  help="Enable debug output (shows hierarchy building details)",
  rich_help_panel="🔍 Debugging",
  ),
+ skip_relationships: bool = typer.Option(
+ True,
+ "--skip-relationships/--compute-relationships",
+ help="Skip relationship computation during indexing (default: skip). Relationships are computed lazily by the visualizer when needed.",
+ rich_help_panel="⚡ Performance",
+ ),
  ) -> None:
  """📑 Index your codebase for semantic search.

  Parses code files, generates semantic embeddings, and stores them in ChromaDB.
  Supports incremental indexing to skip unchanged files for faster updates.

+ When using --force, automatically runs code analysis after indexing completes
+ (can be disabled with --no-analyze).
+
  [bold cyan]Basic Examples:[/bold cyan]

  [green]Index entire project:[/green]
@@ -88,6 +116,9 @@ def main(
  [green]Force full reindex:[/green]
  $ mcp-vector-search index --force

+ [green]Force reindex without analysis:[/green]
+ $ mcp-vector-search index --force --no-analyze
+
  [green]Custom file extensions:[/green]
  $ mcp-vector-search index --extensions .py,.js,.ts,.md
 
@@ -102,7 +133,10 @@ def main(
  [green]Optimize for large projects:[/green]
  $ mcp-vector-search index --batch-size 64

- [dim]💡 Tip: Use incremental indexing (default) for faster updates on subsequent runs.[/dim]
+ [green]Pre-compute relationships (slower indexing, instant visualization):[/green]
+ $ mcp-vector-search index --compute-relationships
+
+ [dim]💡 Tip: Relationships are computed lazily by the visualizer for instant indexing.[/dim]
  """
  # If a subcommand was invoked, don't run the indexing logic
  if ctx.invoked_subcommand is not None:
@@ -111,6 +145,11 @@ def main(
  try:
  project_root = (ctx.obj.get("project_root") if ctx.obj else None) or Path.cwd()

+ # Handle background mode
+ if background:
+ _spawn_background_indexer(project_root, force, extensions)
+ return
+
  # Run async indexing
  asyncio.run(
  run_indexing(
@@ -122,9 +161,23 @@ def main(
  batch_size=batch_size,
  show_progress=True,
  debug=debug,
+ skip_relationships=skip_relationships,
  )
  )

+ # Auto-analyze after force reindex
+ if force and auto_analyze:
+ from .analyze import run_analysis
+
+ print_info("\n📊 Running analysis after reindex...")
+ asyncio.run(
+ run_analysis(
+ project_root=project_root,
+ quick_mode=True, # Use quick mode for speed
+ show_smells=True,
+ )
+ )
+
  except KeyboardInterrupt:
  print_info("Indexing interrupted by user")
  raise typer.Exit(0)
@@ -134,6 +187,120 @@ def main(
  raise typer.Exit(1)


+ def _spawn_background_indexer(
+ project_root: Path, force: bool = False, extensions: str | None = None
+ ) -> None:
+ """Spawn background indexing process.
+
+ Args:
+ project_root: Project root directory
+ force: Force reindexing of all files
+ extensions: Override file extensions (comma-separated)
+ """
+ # Check for existing background process
+ progress_file = project_root / ".mcp-vector-search" / "indexing_progress.json"
+ if progress_file.exists():
+ try:
+ with open(progress_file) as f:
+ progress = json.load(f)
+ pid = progress.get("pid")
+ if pid and _is_process_alive(pid):
+ print_warning(f"Background indexing already in progress (PID: {pid})")
+ print_info("Use 'mcp-vector-search index status' to check progress")
+ print_info("Use 'mcp-vector-search index cancel' to cancel")
+ return
+ else:
+ # Stale progress file, remove it
+ progress_file.unlink()
+ except Exception as e:
+ logger.warning(f"Failed to read progress file: {e}")
+ progress_file.unlink()
+
+ # Build command
+ python_exe = sys.executable
+ cmd = [
+ python_exe,
+ "-m",
+ "mcp_vector_search.cli.commands.index_background",
+ "--project-root",
+ str(project_root),
+ ]
+
+ if force:
+ cmd.append("--force")
+
+ if extensions:
+ cmd.extend(["--extensions", extensions])
+
+ # Spawn detached process
+ try:
+ if sys.platform == "win32":
+ # Windows detachment flags
+ detached_process = 0x00000008
+ create_new_process_group = 0x00000200
+
+ process = subprocess.Popen(
+ cmd,
+ creationflags=detached_process | create_new_process_group,
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL,
+ stdin=subprocess.DEVNULL,
+ )
+ else:
+ # Unix detachment (fork + setsid)
+ process = subprocess.Popen(
+ cmd,
+ start_new_session=True, # Creates new process group
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL,
+ stdin=subprocess.DEVNULL,
+ )
+
+ pid = process.pid
+ print_success(f"Started background indexing (PID: {pid})")
+ print_info(f"Progress file: {progress_file}")
+ print_info(
+ f"Log file: {project_root / '.mcp-vector-search' / 'indexing_background.log'}"
+ )
+ print_info("")
+ print_info("Use [cyan]mcp-vector-search index status[/cyan] to check progress")
+ print_info("Use [cyan]mcp-vector-search index cancel[/cyan] to cancel")
+
+ except Exception as e:
+ logger.error(f"Failed to spawn background process: {e}")
+ print_error(f"Failed to start background indexing: {e}")
+ raise typer.Exit(1)
+
+
+ def _is_process_alive(pid: int) -> bool:
+ """Check if process with given PID is alive.
+
+ Args:
+ pid: Process ID to check
+
+ Returns:
+ True if process is alive, False otherwise
+ """
+ try:
+ if sys.platform == "win32":
+ # Windows: try to open process
+ import ctypes
+
+ kernel32 = ctypes.windll.kernel32
+ process_query_information = 0x0400
+ handle = kernel32.OpenProcess(process_query_information, False, pid)
+ if handle:
+ kernel32.CloseHandle(handle)
+ return True
+ return False
+ else:
+ # Unix: send signal 0 (no-op, just checks if process exists)
+ os.kill(pid, 0)
+ return True
+ except (OSError, ProcessLookupError, AttributeError):
+ return False
+
+
  async def run_indexing(
  project_root: Path,
  watch: bool = False,
@@ -143,6 +310,7 @@ async def run_indexing(
  batch_size: int = 32,
  show_progress: bool = True,
  debug: bool = False,
+ skip_relationships: bool = False,
  ) -> None:
  """Run the indexing process."""
  # Load project configuration
@@ -197,7 +365,9 @@ async def run_indexing(
  if watch:
  await _run_watch_mode(indexer, show_progress)
  else:
- await _run_batch_indexing(indexer, force_reindex, show_progress)
+ await _run_batch_indexing(
+ indexer, force_reindex, show_progress, skip_relationships
+ )

  except Exception as e:
  logger.error(f"Indexing error: {e}")
@@ -208,6 +378,7 @@ async def _run_batch_indexing(
  indexer: SemanticIndexer,
  force_reindex: bool,
  show_progress: bool,
+ skip_relationships: bool = False,
  ) -> None:
  """Run batch indexing of all files."""
  if show_progress:
@@ -357,6 +528,30 @@ async def _run_batch_indexing(
  except Exception as e:
  logger.error(f"Failed to update directory index: {e}")

+ # Mark relationships for background computation (unless skipped)
+ if not skip_relationships and indexed_count > 0:
+ try:
+ console.print(
+ "\n[cyan]Marking relationships for background computation...[/cyan]"
+ )
+ all_chunks = await indexer.database.get_all_chunks()
+
+ if len(all_chunks) > 0:
+ await indexer.relationship_store.compute_and_store(
+ all_chunks, indexer.database, background=True
+ )
+ console.print(
+ "[green]✓[/green] Relationships marked for background computation"
+ )
+ console.print(
+ "[dim] → Use 'mcp-vector-search index relationships' to compute now[/dim]"
+ )
+ except Exception as e:
+ logger.warning(f"Failed to mark relationships: {e}")
+ console.print(
+ "[yellow]⚠ Relationships not marked (visualization will compute on demand)[/yellow]"
+ )
+
  # Final progress summary
  console.print()
  if failed_count > 0:
@@ -375,6 +570,7 @@ async def _run_batch_indexing(
  indexed_count = await indexer.index_project(
  force_reindex=force_reindex,
  show_progress=show_progress,
+ skip_relationships=skip_relationships,
  )

  # Show statistics
@@ -733,6 +929,237 @@ def health_cmd(
  health_main(project_root=project_root, repair=repair)


+ @index_app.command("status")
+ def status_cmd(
+ ctx: typer.Context,
+ ) -> None:
+ """📊 Show background indexing status.
+
+ Displays the current progress of any background indexing process.
+
+ Examples:
+ mcp-vector-search index status
+ """
+ try:
+ project_root = ctx.obj.get("project_root") or Path.cwd()
+ _show_background_status(project_root)
+ except Exception as e:
+ logger.error(f"Status check failed: {e}")
+ print_error(f"Status check failed: {e}")
+ raise typer.Exit(1)
+
+
+ @index_app.command("cancel")
+ def cancel_cmd(
+ ctx: typer.Context,
+ force: bool = typer.Option(
+ False,
+ "--force",
+ "-f",
+ help="Force termination without confirmation",
+ ),
+ ) -> None:
+ """🛑 Cancel background indexing process.
+
+ Terminates any running background indexing process and cleans up.
+
+ Examples:
+ mcp-vector-search index cancel
+ mcp-vector-search index cancel --force
+ """
+ try:
+ project_root = ctx.obj.get("project_root") or Path.cwd()
+ _cancel_background_indexer(project_root, force)
+ except Exception as e:
+ logger.error(f"Cancel failed: {e}")
+ print_error(f"Cancel failed: {e}")
+ raise typer.Exit(1)
+
+
+ def _show_background_status(project_root: Path) -> None:
+ """Show background indexing status.
+
+ Args:
+ project_root: Project root directory
+ """
+ from rich.table import Table
+
+ from ..output import console
+
+ progress_file = project_root / ".mcp-vector-search" / "indexing_progress.json"
+
+ if not progress_file.exists():
+ print_info("No background indexing in progress")
+ return
+
+ # Read progress
+ try:
+ with open(progress_file) as f:
+ progress = json.load(f)
+ except Exception as e:
+ print_error(f"Failed to read progress file: {e}")
+ return
+
+ # Check if process is alive
+ pid = progress.get("pid")
+ is_alive = _is_process_alive(pid) if pid else False
+
+ if not is_alive:
+ print_warning(f"Process {pid} is no longer running")
+ print_info("The background indexing process has stopped")
+ print_info("Run [cyan]mcp-vector-search index --background[/cyan] to restart")
+ # Optionally clean up stale file
+ return
+
+ # Display progress with Rich table
+ table = Table(title="Background Indexing Status", show_header=True)
+ table.add_column("Metric", style="cyan", width=20)
+ table.add_column("Value", style="green")
+
+ # Format status with color
+ status = progress.get("status", "unknown")
+ status_colors = {
+ "initializing": "yellow",
+ "scanning": "cyan",
+ "running": "green",
+ "computing_relationships": "cyan",
+ "completed": "green",
+ "failed": "red",
+ "cancelled": "yellow",
+ }
+ status_color = status_colors.get(status, "white")
+
+ table.add_row("PID", str(pid))
+ table.add_row("Status", f"[{status_color}]{status}[/{status_color}]")
+
+ # Progress percentage
+ total = progress.get("total_files", 0)
+ processed = progress.get("processed_files", 0)
+ if total > 0:
+ percentage = (processed / total) * 100
+ table.add_row(
+ "Progress",
+ f"{processed}/{total} files ({percentage:.1f}%)",
+ )
+ else:
+ table.add_row("Progress", f"{processed} files")
+
+ current_file = progress.get("current_file")
+ if current_file:
+ table.add_row("Current File", current_file)
+
+ table.add_row("Chunks Created", str(progress.get("chunks_created", 0)))
+ table.add_row("Errors", str(progress.get("errors", 0)))
+
+ # ETA
+ eta_seconds = progress.get("eta_seconds", 0)
+ if eta_seconds > 0:
+ eta_minutes = eta_seconds / 60
+ if eta_minutes < 1:
+ table.add_row("ETA", f"{eta_seconds} seconds")
+ else:
+ table.add_row("ETA", f"{eta_minutes:.1f} minutes")
+
+ # Last updated
+ last_updated = progress.get("last_updated")
+ if last_updated:
+ table.add_row("Last Updated", last_updated)
+
+ console.print(table)
+
+ # Show log file location
+ log_file = project_root / ".mcp-vector-search" / "indexing_background.log"
+ if log_file.exists():
+ print_info(f"\nLog file: {log_file}")
+
+
+ def _cancel_background_indexer(project_root: Path, force: bool = False) -> None:
+ """Cancel background indexing process.
+
+ Args:
+ project_root: Project root directory
+ force: Skip confirmation prompt
+ """
+ progress_file = project_root / ".mcp-vector-search" / "indexing_progress.json"
+
+ if not progress_file.exists():
+ print_info("No background indexing in progress")
+ return
+
+ # Read progress
+ try:
+ with open(progress_file) as f:
+ progress = json.load(f)
+ except Exception as e:
+ print_error(f"Failed to read progress file: {e}")
+ return
+
+ pid = progress.get("pid")
+ if not pid:
+ print_error("No PID found in progress file")
+ return
+
+ # Check if process is alive
+ if not _is_process_alive(pid):
+ print_warning(f"Process {pid} is not running (already completed?)")
+ # Clean up stale progress file
+ try:
+ progress_file.unlink()
+ print_info("Cleaned up stale progress file")
+ except Exception as e:
+ logger.error(f"Failed to clean up progress file: {e}")
+ return
+
+ # Confirm cancellation
+ if not force:
+ from ..output import confirm_action
+
+ if not confirm_action(
+ f"Cancel background indexing process (PID: {pid})?", default=False
+ ):
+ print_info("Cancellation aborted")
+ return
+
+ # Send termination signal
+ try:
+ if sys.platform == "win32":
+ # Windows: terminate process
+ import ctypes
+
+ kernel32 = ctypes.windll.kernel32
+ process_terminate = 0x0001
+ handle = kernel32.OpenProcess(process_terminate, False, pid)
+ if handle:
+ kernel32.TerminateProcess(handle, 0)
+ kernel32.CloseHandle(handle)
+ print_success(f"Cancelled indexing process {pid}")
+ else:
+ print_error(f"Failed to open process {pid}")
+ return
+ else:
+ # Unix: send SIGTERM
+ os.kill(pid, signal.SIGTERM)
+ print_success(f"Cancelled indexing process {pid}")
+
+ # Clean up progress file after a brief delay
+ import time
+
+ time.sleep(0.5)
+ if progress_file.exists():
+ progress_file.unlink()
+ print_info("Cleaned up progress file")
+
+ except ProcessLookupError:
+ print_warning(f"Process {pid} not found (already completed?)")
+ if progress_file.exists():
+ progress_file.unlink()
+ except PermissionError:
+ print_error(f"Permission denied to cancel process {pid}")
+ except Exception as e:
+ logger.error(f"Failed to cancel process: {e}")
+ print_error(f"Failed to cancel process: {e}")
+
+
  def _prune_error_log(log_path: Path, max_lines: int = 1000) -> None:
  """Prune error log to keep only the most recent N lines.
 
@@ -758,5 +1185,176 @@ def _prune_error_log(log_path: Path, max_lines: int = 1000) -> None:
  logger.warning(f"Failed to prune error log: {e}")


+ @index_app.command("relationships")
+ def compute_relationships_cmd(
+ ctx: typer.Context,
+ background: bool = typer.Option(
+ False,
+ "--background",
+ "-bg",
+ help="Run relationship computation in background (non-blocking)",
+ ),
+ ) -> None:
+ """🔗 Compute semantic relationships for visualization.
+
+ By default, indexing marks relationships for background computation.
+ This command lets you compute them immediately or spawn a background task.
+
+ Examples:
+ # Compute relationships now (blocks until complete)
+ mcp-vector-search index relationships
+
+ # Compute in background (returns immediately)
+ mcp-vector-search index relationships --background
+ """
+ try:
+ project_root = ctx.obj.get("project_root") or Path.cwd()
+
+ if background:
+ # Spawn background relationship computation
+ print_info("Starting background relationship computation...")
+ _spawn_background_relationships(project_root)
+ else:
+ # Compute synchronously
+ asyncio.run(_compute_relationships_sync(project_root))
+
+ except Exception as e:
+ logger.error(f"Relationship computation failed: {e}")
+ print_error(f"Relationship computation failed: {e}")
+ raise typer.Exit(1)
+
+
+ def _spawn_background_relationships(project_root: Path) -> None:
+ """Spawn background relationship computation process.
+
+ Args:
+ project_root: Project root directory
+ """
+ # Build command
+ python_exe = sys.executable
+ cmd = [
+ python_exe,
+ "-m",
+ "mcp_vector_search.cli.commands.index_background",
+ "--project-root",
+ str(project_root),
+ "--relationships-only", # New flag for relationship-only mode
+ ]
+
+ # Spawn detached process (reuse existing background infrastructure)
+ try:
+ if sys.platform == "win32":
+ detached_process = 0x00000008
+ create_new_process_group = 0x00000200
+
+ process = subprocess.Popen(
+ cmd,
+ creationflags=detached_process | create_new_process_group,
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL,
+ stdin=subprocess.DEVNULL,
+ )
+ else:
+ process = subprocess.Popen(
+ cmd,
+ start_new_session=True,
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL,
+ stdin=subprocess.DEVNULL,
+ )
+
+ pid = process.pid
+ print_success(f"Started background relationship computation (PID: {pid})")
+ print_info(
+ f"Log file: {project_root / '.mcp-vector-search' / 'relationships_background.log'}"
+ )
+
+ except Exception as e:
+ logger.error(f"Failed to spawn background process: {e}")
+ print_error(f"Failed to start background computation: {e}")
+ raise typer.Exit(1)
+
+
+ async def _compute_relationships_sync(project_root: Path) -> None:
+ """Compute relationships synchronously (blocking).
+
+ Args:
+ project_root: Project root directory
+ """
+ from rich.progress import (
+ BarColumn,
+ Progress,
+ SpinnerColumn,
+ TextColumn,
+ TimeRemainingColumn,
+ )
+
+ from ..output import console
+
+ # Load project configuration
+ project_manager = ProjectManager(project_root)
+
+ if not project_manager.is_initialized():
+ raise ProjectNotFoundError(
+ f"Project not initialized at {project_root}. Run 'mcp-vector-search init' first."
+ )
+
+ config = project_manager.load_config()
+
+ console.print(f"[cyan]Project:[/cyan] {project_root}")
+ console.print(f"[cyan]Embedding model:[/cyan] {config.embedding_model}")
+
+ # Setup database
+ embedding_function, _ = create_embedding_function(config.embedding_model)
+ database = ChromaVectorDatabase(
+ persist_directory=config.index_path,
+ embedding_function=embedding_function,
+ )
+
+ async with database:
+ # Get all chunks
+ console.print("[cyan]Fetching chunks from database...[/cyan]")
+ all_chunks = await database.get_all_chunks()
+
+ if len(all_chunks) == 0:
+ console.print(
+ "[yellow]No chunks found in index. Run 'mcp-vector-search index' first.[/yellow]"
+ )
+ raise typer.Exit(1)
+
+ console.print(f"[green]✓[/green] Retrieved {len(all_chunks)} chunks\n")
+
+ # Initialize relationship store
+ from ...core.relationships import RelationshipStore
+
+ relationship_store = RelationshipStore(project_root)
+
+ # Compute relationships with progress
+ with Progress(
+ SpinnerColumn(),
+ TextColumn("[progress.description]{task.description}"),
+ BarColumn(bar_width=40),
+ TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
+ TimeRemainingColumn(),
+ console=console,
+ ) as progress:
+ task = progress.add_task("Computing semantic relationships...", total=100)
+
+ # Compute and store (non-background mode)
+ rel_stats = await relationship_store.compute_and_store(
+ all_chunks, database, background=False
+ )
+
+ progress.update(task, completed=100)
+
+ # Show results
+ console.print()
+ console.print(
+ f"[green]✓[/green] Computed {rel_stats['semantic_links']} semantic links "
+ f"in {rel_stats['computation_time']:.1f}s"
+ )
+ print_success("Relationships ready for visualization")
+
+
  if __name__ == "__main__":
  index_app()
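For orientation, a minimal Python sketch of how the indexing_progress.json file written by the new --background mode could be polled from outside the CLI. The file path and keys (pid, status, processed_files, total_files) and the POSIX os.kill(pid, 0) liveness check are taken from the diff above; the script itself (is_process_alive, summarize_progress) is illustrative and not part of the package, and exact semantics in the released wheel may differ.

"""Sketch: read the background-indexing progress file (assumptions noted above)."""

import json
import os
import sys
from pathlib import Path


def is_process_alive(pid: int) -> bool:
    # POSIX-only check: signal 0 does nothing but raises if the PID is gone.
    try:
        os.kill(pid, 0)
        return True
    except (OSError, ProcessLookupError):
        return False


def summarize_progress(project_root: Path) -> str:
    # Same location the diff uses for the shared progress file.
    progress_file = project_root / ".mcp-vector-search" / "indexing_progress.json"
    if not progress_file.exists():
        return "no background indexing in progress"

    progress = json.loads(progress_file.read_text())
    pid = progress.get("pid")
    status = progress.get("status", "unknown")
    processed = progress.get("processed_files", 0)
    total = progress.get("total_files", 0)

    liveness = "running" if pid and is_process_alive(pid) else "not running"
    pct = f" ({processed / total * 100:.1f}%)" if total else ""
    return f"pid={pid} [{liveness}] status={status} {processed}/{total} files{pct}"


if __name__ == "__main__":
    root = Path(sys.argv[1]) if len(sys.argv) > 1 else Path.cwd()
    print(summarize_progress(root))

This is the same file that the new index status subcommand renders as a Rich table and that index cancel reads to locate the PID to terminate.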