stravinsky 0.2.67__py3-none-any.whl → 0.4.18__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of stravinsky might be problematic.

@@ -0,0 +1,151 @@
+"""
+Desktop Notifications Manager for Stravinsky.
+
+Provides cross-platform desktop notifications (macOS, Linux, Windows)
+for long-running operations like codebase indexing.
+
+Supports:
+- Non-blocking async notifications
+- Platform-specific backends
+- Notification queuing
+"""
+
+import logging
+import platform
+import subprocess
+from pathlib import Path
+from typing import Dict, Optional
+
+logger = logging.getLogger(__name__)
+
+
+class NotificationManager:
+    """
+    Cross-platform desktop notification manager.
+
+    Provides non-blocking notifications with automatic platform detection.
+    """
+
+    def __init__(self, app_name: str = "Stravinsky"):
+        self.app_name = app_name
+        self.system = platform.system()
+
+    def _get_notification_command(
+        self,
+        title: str,
+        message: str,
+        sound: bool = True
+    ) -> Optional[list]:
+        """Get platform-specific notification command."""
+        if self.system == "Darwin":  # macOS
+            script = f'display notification "{message}" with title "{title}"'
+            if sound:
+                script += ' sound name "Glass"'
+            return ["osascript", "-e", script]
+
+        elif self.system == "Linux":
+            cmd = ["notify-send", "--app-name", self.app_name, title, message]
+            if sound:
+                cmd.extend(["--urgency=normal"])
+            return cmd
+
+        elif self.system == "Windows":
+            ps_script = f"""
+            [Windows.UI.Notifications.ToastNotificationManager, Windows.UI.Notifications, ContentType = WindowsRuntime] | Out-Null
+            [Windows.UI.Notifications.ToastNotification, Windows.UI.Notifications, ContentType = WindowsRuntime] | Out-Null
+            [Windows.Data.Xml.Dom.XmlDocument, Windows.Data.Xml.Dom.XmlDocument, ContentType = WindowsRuntime] | Out-Null
+
+            $template = @"
+            <toast>
+                <visual>
+                    <binding template="ToastGeneric">
+                        <text>{title}</text>
+                        <text>{message}</text>
+                    </binding>
+                </visual>
+            </toast>
+            "@
+
+            $xml = New-Object Windows.Data.Xml.Dom.XmlDocument
+            $xml.LoadXml($template)
+            $toast = New-Object Windows.UI.Notifications.ToastNotification $xml
+            [Windows.UI.Notifications.ToastNotificationManager]::CreateToastNotifier("{self.app_name}").Show($toast)
+            """
+            return ["powershell", "-Command", ps_script]
+
+        return None
+
+    def _send_notification_sync(
+        self,
+        title: str,
+        message: str,
+        sound: bool = True
+    ) -> bool:
+        """Send notification synchronously (blocking)."""
+        cmd = self._get_notification_command(title, message, sound)
+
+        if not cmd:
+            logger.warning(
+                f"[Notifications] Desktop notifications not supported on {self.system}"
+            )
+            return False
+
+        try:
+            subprocess.Popen(
+                cmd,
+                stdout=subprocess.DEVNULL,
+                stderr=subprocess.DEVNULL,
+                start_new_session=True
+            )
+            logger.debug(f"[Notifications] Sent: {title}")
+            return True
+        except FileNotFoundError:
+            logger.warning(f"[Notifications] Command not found: {cmd[0]}")
+            return False
+        except Exception as e:
+            logger.error(f"[Notifications] Failed to send notification: {e}")
+            return False
+
+    async def notify_reindex_start(self, project_path: str) -> bool:
+        """Notify that codebase reindexing has started."""
+        path = Path(project_path).name or Path(project_path).parent.name
+        title = "Codebase Indexing Started"
+        message = f"Indexing {path}..."
+        return self._send_notification_sync(title, message, sound=True)
+
+    async def notify_reindex_complete(self, stats: Dict) -> bool:
+        """Notify that codebase reindexing is complete."""
+        indexed = stats.get("indexed", 0)
+        pruned = stats.get("pruned", 0)
+        time_taken = stats.get("time_taken", 0)
+
+        title = "Codebase Indexing Complete"
+        message = f"Indexed {indexed} chunks, pruned {pruned} stale entries in {time_taken}s"
+
+        return self._send_notification_sync(title, message, sound=True)
+
+    async def notify_reindex_error(self, error_message: str) -> bool:
+        """Notify that codebase reindexing failed."""
+        title = "Codebase Indexing Failed"
+        # Truncate long error messages
+        message = error_message[:100] + "..." if len(error_message) > 100 else error_message
+
+        return self._send_notification_sync(title, message, sound=True)
+
+
+# Global singleton instance
+_notification_manager: Optional[NotificationManager] = None
+
+
+def get_notification_manager() -> NotificationManager:
+    """Get or create the global notification manager instance."""
+    global _notification_manager
+    if _notification_manager is None:
+        _notification_manager = NotificationManager()
+    return _notification_manager
+
+
+def reset_notification_manager() -> None:
+    """Reset the global notification manager (for testing)."""
+    global _notification_manager
+    _notification_manager = None
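
For orientation, a minimal usage sketch of the module added above, based only on the code in this hunk; the import path mcp_bridge.notifications and the project path are assumptions, since the hunk does not name the new file.

    import asyncio

    # Assumed module path; the hunk above does not show the file name.
    from mcp_bridge.notifications import get_notification_manager


    async def demo() -> None:
        manager = get_notification_manager()  # lazily creates the global singleton
        await manager.notify_reindex_start("/path/to/project")  # illustrative path
        await manager.notify_reindex_complete({"indexed": 120, "pruned": 4, "time_taken": 8})


    if __name__ == "__main__":
        asyncio.run(demo())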
mcp_bridge/server.py CHANGED
@@ -95,56 +95,9 @@ async def list_tools() -> list[Tool]:
     return get_tool_definitions()
 
 
-def _format_tool_log(name: str, arguments: dict[str, Any]) -> str:
-    """Format a concise log message for tool calls."""
-    # LSP tools - show file:line
-    if name.startswith("lsp_"):
-        file_path = arguments.get("file_path", "")
-        if file_path:
-            # Shorten path to last 2 components
-            parts = file_path.split("/")
-            short_path = "/".join(parts[-2:]) if len(parts) > 2 else file_path
-            line = arguments.get("line", "")
-            if line:
-                return f"→ {name}: {short_path}:{line}"
-            return f"→ {name}: {short_path}"
-        query = arguments.get("query", "")
-        if query:
-            return f"→ {name}: query='{query[:40]}'"
-        return f"→ {name}"
-
-    # Model invocation - show agent context if present
-    if name in ("invoke_gemini", "invoke_openai"):
-        agent_ctx = arguments.get("agent_context", {})
-        agent_type = agent_ctx.get("agent_type", "direct") if agent_ctx else "direct"
-        model = arguments.get("model", "default")
-        prompt = arguments.get("prompt", "")
-        # Summarize prompt
-        summary = " ".join(prompt.split())[:80] + "..." if len(prompt) > 80 else prompt
-        return f"[{agent_type}] → {model}: {summary}"
-
-    # Search tools - show pattern
-    if name in ("grep_search", "ast_grep_search", "ast_grep_replace"):
-        pattern = arguments.get("pattern", "")[:50]
-        return f"→ {name}: pattern='{pattern}'"
-
-    # Agent tools - show agent type/task_id
-    if name == "agent_spawn":
-        agent_type = arguments.get("agent_type", "explore")
-        desc = arguments.get("description", "")[:40]
-        return f"→ {name}: [{agent_type}] {desc}"
-    if name in ("agent_output", "agent_cancel", "agent_progress"):
-        task_id = arguments.get("task_id", "")
-        return f"→ {name}: {task_id}"
-
-    # Default - just tool name
-    return f"→ {name}"
-
-
 @server.call_tool()
 async def call_tool(name: str, arguments: dict[str, Any]) -> list[TextContent]:
     """Handle tool calls with deep lazy loading of implementations."""
-    logger.info(_format_tool_log(name, arguments))
     hook_manager = get_hook_manager_lazy()
     token_store = get_token_store()
 
@@ -190,6 +143,19 @@ async def call_tool(name: str, arguments: dict[str, Any]) -> list[TextContent]:
 
         result_content = await get_system_health()
 
+    elif name == "semantic_health":
+        from .tools.semantic_search import semantic_health
+
+        result_content = await semantic_health(
+            project_path=arguments.get("project_path", "."),
+            provider=arguments.get("provider", "ollama"),
+        )
+
+    elif name == "lsp_health":
+        from .tools.lsp.tools import lsp_health
+
+        result_content = await lsp_health()
+
     # --- SEARCH DISPATCH ---
     elif name == "grep_search":
         from .tools.code_search import grep_search
@@ -423,6 +389,28 @@ async def call_tool(name: str, arguments: dict[str, Any]) -> list[TextContent]:
             character=arguments["character"],
         )
 
+    elif name == "lsp_code_action_resolve":
+        from .tools.lsp import lsp_code_action_resolve
+
+        result_content = await lsp_code_action_resolve(
+            file_path=arguments["file_path"],
+            action_code=arguments["action_code"],
+            line=arguments.get("line"),
+        )
+
+    elif name == "lsp_extract_refactor":
+        from .tools.lsp import lsp_extract_refactor
+
+        result_content = await lsp_extract_refactor(
+            file_path=arguments["file_path"],
+            start_line=arguments["start_line"],
+            start_char=arguments["start_char"],
+            end_line=arguments["end_line"],
+            end_char=arguments["end_char"],
+            new_name=arguments["new_name"],
+            kind=arguments.get("kind", "function"),
+        )
+
     elif name == "lsp_servers":
         from .tools.lsp import lsp_servers
 
@@ -436,6 +424,156 @@ async def call_tool(name: str, arguments: dict[str, Any]) -> list[TextContent]:
             severity=arguments.get("severity", "all"),
         )
 
+    elif name == "semantic_search":
+        from .tools.semantic_search import semantic_search
+
+        result_content = await semantic_search(
+            query=arguments["query"],
+            project_path=arguments.get("project_path", "."),
+            n_results=arguments.get("n_results", 10),
+            language=arguments.get("language"),
+            node_type=arguments.get("node_type"),
+            provider=arguments.get("provider", "ollama"),
+        )
+
+    elif name == "hybrid_search":
+        from .tools.semantic_search import hybrid_search
+
+        result_content = await hybrid_search(
+            query=arguments["query"],
+            pattern=arguments.get("pattern"),
+            project_path=arguments.get("project_path", "."),
+            n_results=arguments.get("n_results", 10),
+            language=arguments.get("language"),
+            provider=arguments.get("provider", "ollama"),
+        )
+
+    elif name == "semantic_index":
+        from .tools.semantic_search import index_codebase
+
+        result_content = await index_codebase(
+            project_path=arguments.get("project_path", "."),
+            force=arguments.get("force", False),
+            provider=arguments.get("provider", "ollama"),
+        )
+
+    elif name == "semantic_stats":
+        from .tools.semantic_search import semantic_stats
+
+        result_content = await semantic_stats(
+            project_path=arguments.get("project_path", "."),
+            provider=arguments.get("provider", "ollama"),
+        )
+
+    elif name == "start_file_watcher":
+        from .tools.semantic_search import start_file_watcher
+        import json
+
+        try:
+            watcher = start_file_watcher(
+                project_path=arguments.get("project_path", "."),
+                provider=arguments.get("provider", "ollama"),
+                debounce_seconds=arguments.get("debounce_seconds", 2.0),
+            )
+
+            result_content = json.dumps({
+                "status": "started",
+                "project_path": str(watcher.project_path),
+                "debounce_seconds": watcher.debounce_seconds,
+                "provider": watcher.store.provider_name,
+                "is_running": watcher.is_running()
+            }, indent=2)
+        except ValueError as e:
+            # No index exists
+            result_content = json.dumps({
+                "error": str(e),
+                "hint": "Run semantic_index() before starting file watcher"
+            }, indent=2)
+            print(f"⚠️ start_file_watcher ValueError: {e}", file=sys.stderr)
+        except Exception as e:
+            # Unexpected error
+            import traceback
+            result_content = json.dumps({
+                "error": f"{type(e).__name__}: {str(e)}",
+                "hint": "Check MCP server logs for details"
+            }, indent=2)
+            print(f"❌ start_file_watcher error: {e}", file=sys.stderr)
+            traceback.print_exc(file=sys.stderr)
+
+    elif name == "stop_file_watcher":
+        from .tools.semantic_search import stop_file_watcher
+        import json
+
+        stopped = stop_file_watcher(
+            project_path=arguments.get("project_path", "."),
+        )
+
+        result_content = json.dumps({
+            "stopped": stopped,
+            "project_path": arguments.get("project_path", ".")
+        }, indent=2)
+
+    elif name == "cancel_indexing":
+        from .tools.semantic_search import cancel_indexing
+
+        result_content = cancel_indexing(
+            project_path=arguments.get("project_path", "."),
+            provider=arguments.get("provider", "ollama"),
+        )
+
+    elif name == "delete_index":
+        from .tools.semantic_search import delete_index
+
+        result_content = delete_index(
+            project_path=arguments.get("project_path", "."),
+            provider=arguments.get("provider"),  # None if not specified
+            delete_all=arguments.get("delete_all", False),
+        )
+
+    elif name == "list_file_watchers":
+        from .tools.semantic_search import list_file_watchers
+        import json
+
+        result_content = json.dumps(list_file_watchers(), indent=2)
+
+    elif name == "multi_query_search":
+        from .tools.semantic_search import multi_query_search
+
+        result_content = await multi_query_search(
+            query=arguments["query"],
+            project_path=arguments.get("project_path", "."),
+            n_results=arguments.get("n_results", 10),
+            num_expansions=arguments.get("num_expansions", 3),
+            language=arguments.get("language"),
+            node_type=arguments.get("node_type"),
+            provider=arguments.get("provider", "ollama"),
+        )
+
+    elif name == "decomposed_search":
+        from .tools.semantic_search import decomposed_search
+
+        result_content = await decomposed_search(
+            query=arguments["query"],
+            project_path=arguments.get("project_path", "."),
+            n_results=arguments.get("n_results", 10),
+            language=arguments.get("language"),
+            node_type=arguments.get("node_type"),
+            provider=arguments.get("provider", "ollama"),
+        )
+
+    elif name == "enhanced_search":
+        from .tools.semantic_search import enhanced_search
+
+        result_content = await enhanced_search(
+            query=arguments["query"],
+            project_path=arguments.get("project_path", "."),
+            n_results=arguments.get("n_results", 10),
+            mode=arguments.get("mode", "auto"),
+            language=arguments.get("language"),
+            node_type=arguments.get("node_type"),
+            provider=arguments.get("provider", "ollama"),
+        )
+
     else:
         result_content = f"Unknown tool: {name}"
 
@@ -521,6 +659,16 @@ async def async_main():
     except Exception as e:
         logger.error(f"Failed to initialize hooks: {e}")
 
+    # Clean up stale ChromaDB locks on startup
+    try:
+        from .tools.semantic_search import cleanup_stale_chromadb_locks
+
+        removed_count = cleanup_stale_chromadb_locks()
+        if removed_count > 0:
+            logger.info(f"Cleaned up {removed_count} stale ChromaDB lock(s)")
+    except Exception as e:
+        logger.warning(f"Failed to cleanup ChromaDB locks: {e}")
+
     # Start background token refresh scheduler
     try:
         from .auth.token_refresh import background_token_refresh
@@ -540,6 +688,12 @@ async def async_main():
     except Exception as e:
         logger.critical("Server process crashed in async_main", exc_info=True)
         sys.exit(1)
+    finally:
+        logger.info("Initiating shutdown sequence...")
+        from .tools.lsp.manager import get_lsp_manager
+
+        lsp_manager = get_lsp_manager()
+        await lsp_manager.shutdown()
 
 
 def main():
@@ -752,4 +906,4 @@ def main():
 
 
 if __name__ == "__main__":
-    main()
+    sys.exit(main())