claude-mpm 4.2.9__py3-none-any.whl → 4.2.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. claude_mpm/VERSION +1 -1
  2. claude_mpm/cli/commands/dashboard.py +59 -126
  3. claude_mpm/cli/commands/monitor.py +82 -212
  4. claude_mpm/cli/commands/run.py +33 -33
  5. claude_mpm/cli/parsers/monitor_parser.py +12 -2
  6. claude_mpm/dashboard/static/css/code-tree.css +8 -16
  7. claude_mpm/dashboard/static/dist/components/code-tree.js +1 -1
  8. claude_mpm/dashboard/static/dist/components/file-viewer.js +2 -0
  9. claude_mpm/dashboard/static/dist/components/module-viewer.js +1 -1
  10. claude_mpm/dashboard/static/dist/components/unified-data-viewer.js +1 -1
  11. claude_mpm/dashboard/static/dist/dashboard.js +1 -1
  12. claude_mpm/dashboard/static/dist/socket-client.js +1 -1
  13. claude_mpm/dashboard/static/js/components/code-tree.js +692 -114
  14. claude_mpm/dashboard/static/js/components/file-viewer.js +538 -0
  15. claude_mpm/dashboard/static/js/components/module-viewer.js +26 -0
  16. claude_mpm/dashboard/static/js/components/unified-data-viewer.js +166 -14
  17. claude_mpm/dashboard/static/js/dashboard.js +108 -91
  18. claude_mpm/dashboard/static/js/socket-client.js +9 -7
  19. claude_mpm/dashboard/templates/index.html +2 -7
  20. claude_mpm/hooks/claude_hooks/hook_handler.py +1 -11
  21. claude_mpm/hooks/claude_hooks/services/connection_manager.py +54 -59
  22. claude_mpm/hooks/claude_hooks/services/connection_manager_http.py +112 -72
  23. claude_mpm/services/agents/deployment/agent_template_builder.py +0 -1
  24. claude_mpm/services/cli/unified_dashboard_manager.py +354 -0
  25. claude_mpm/services/monitor/__init__.py +20 -0
  26. claude_mpm/services/monitor/daemon.py +378 -0
  27. claude_mpm/services/monitor/event_emitter.py +342 -0
  28. claude_mpm/services/monitor/handlers/__init__.py +20 -0
  29. claude_mpm/services/monitor/handlers/code_analysis.py +334 -0
  30. claude_mpm/services/monitor/handlers/dashboard.py +298 -0
  31. claude_mpm/services/monitor/handlers/hooks.py +491 -0
  32. claude_mpm/services/monitor/management/__init__.py +18 -0
  33. claude_mpm/services/monitor/management/health.py +124 -0
  34. claude_mpm/services/monitor/management/lifecycle.py +338 -0
  35. claude_mpm/services/monitor/server.py +596 -0
  36. claude_mpm/tools/code_tree_analyzer.py +33 -17
  37. {claude_mpm-4.2.9.dist-info → claude_mpm-4.2.12.dist-info}/METADATA +1 -1
  38. {claude_mpm-4.2.9.dist-info → claude_mpm-4.2.12.dist-info}/RECORD +42 -37
  39. claude_mpm/cli/commands/socketio_monitor.py +0 -233
  40. claude_mpm/scripts/socketio_daemon.py +0 -571
  41. claude_mpm/scripts/socketio_daemon_hardened.py +0 -937
  42. claude_mpm/scripts/socketio_daemon_wrapper.py +0 -78
  43. claude_mpm/scripts/socketio_server_manager.py +0 -349
  44. claude_mpm/services/cli/dashboard_launcher.py +0 -423
  45. claude_mpm/services/cli/socketio_manager.py +0 -595
  46. claude_mpm/services/dashboard/stable_server.py +0 -1020
  47. claude_mpm/services/socketio/monitor_server.py +0 -505
  48. {claude_mpm-4.2.9.dist-info → claude_mpm-4.2.12.dist-info}/WHEEL +0 -0
  49. {claude_mpm-4.2.9.dist-info → claude_mpm-4.2.12.dist-info}/entry_points.txt +0 -0
  50. {claude_mpm-4.2.9.dist-info → claude_mpm-4.2.12.dist-info}/licenses/LICENSE +0 -0
  51. {claude_mpm-4.2.9.dist-info → claude_mpm-4.2.12.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,342 @@
1
+ """
2
+ High-Performance Async Event Emitter
3
+ ====================================
4
+
5
+ Provides ultra-low latency event emission with direct function calls for in-process
6
+ events and connection pooling for external HTTP requests.
7
+
8
+ WHY: Eliminates HTTP overhead for in-process events while maintaining external API support.
9
+ """
10
+
11
+ import asyncio
12
+ import weakref
13
+ from datetime import datetime
14
+ from typing import Any, Dict, Optional, Set
15
+
16
+ import aiohttp
17
+
18
+ from ...core.logging_config import get_logger
19
+
20
+
21
class AsyncEventEmitter:
    """High-performance async event emitter with direct calls and connection pooling.

    Events are delivered to in-process Socket.IO servers (held via weak
    references) when any are registered; otherwise they are POSTed to an
    HTTP endpoint through a pooled aiohttp session.
    """

    # Singleton storage; reset again in close().
    _instance: Optional["AsyncEventEmitter"] = None
    # Guards singleton creation in get_instance().
    # NOTE(review): created at import time, outside any running event loop.
    # Fine on Python 3.10+ where asyncio.Lock no longer binds a loop at
    # construction — confirm the project's minimum supported Python version.
    _lock = asyncio.Lock()

    def __init__(self):
        """Initialize the event emitter (sync part; async setup is in _initialize)."""
        self.logger = get_logger(__name__)

        # Direct emission targets (in-process Socket.IO servers), stored as
        # weak references so this emitter never keeps a server alive.
        self._socketio_servers: Set[weakref.ref] = set()

        # HTTP connection pool for external requests; built in _initialize().
        self._http_session: Optional[aiohttp.ClientSession] = None
        self._http_connector: Optional[aiohttp.TCPConnector] = None

        # Performance metrics, exposed via get_stats().
        self._direct_events = 0
        self._http_events = 0
        self._failed_events = 0

        # Event queue for batching (if needed).
        # NOTE(review): the queue and task are declared, but nothing in this
        # class starts a batch processor — the task is only cancelled in close().
        self._event_queue = asyncio.Queue(maxsize=10000)
        self._batch_processor_task: Optional[asyncio.Task] = None

    @classmethod
    async def get_instance(cls) -> "AsyncEventEmitter":
        """Get singleton instance with async initialization.

        Double-checked locking around the class-level lock ensures concurrent
        callers create (and initialize) at most one instance.
        """
        if cls._instance is None:
            async with cls._lock:
                if cls._instance is None:
                    cls._instance = cls()
                    await cls._instance._initialize()
        return cls._instance

    async def _initialize(self):
        """Initialize async components (the aiohttp connector and session).

        Raises:
            Exception: re-raised after logging if pool/session creation fails.
        """
        try:
            # Create HTTP connection pool with optimized settings.
            self._http_connector = aiohttp.TCPConnector(
                limit=100,  # Total connection pool size
                limit_per_host=20,  # Connections per host
                ttl_dns_cache=300,  # DNS cache TTL (seconds)
                use_dns_cache=True,
                keepalive_timeout=30,  # Keep idle connections alive (seconds)
                enable_cleanup_closed=True,
                force_close=False,  # Reuse connections across requests
            )

            # Create session with timeout and connection pooling.
            timeout = aiohttp.ClientTimeout(
                total=5.0,  # Total timeout per request
                connect=1.0,  # Connection timeout
                sock_read=2.0,  # Socket read timeout
            )

            self._http_session = aiohttp.ClientSession(
                connector=self._http_connector,
                timeout=timeout,
                headers={
                    "Content-Type": "application/json",
                    "User-Agent": "Claude-MPM-EventEmitter/1.0",
                },
            )

            self.logger.info("AsyncEventEmitter initialized with connection pooling")

        except Exception as e:
            self.logger.error(f"Error initializing AsyncEventEmitter: {e}")
            raise

    def register_socketio_server(self, sio_server):
        """Register a Socket.IO server for direct (in-process) event emission.

        Args:
            sio_server: object exposing an async ``emit(event, data)`` method.
        """
        # Use weak reference to avoid circular references / keeping the
        # server alive past its own shutdown.
        weak_ref = weakref.ref(sio_server)
        self._socketio_servers.add(weak_ref)
        self.logger.debug(f"Registered Socket.IO server: {id(sio_server)}")

    def unregister_socketio_server(self, sio_server):
        """Unregister a Socket.IO server (also sweeps dead weak references).

        Args:
            sio_server: the server previously passed to register_socketio_server.
        """
        to_remove = []
        for weak_ref in self._socketio_servers:
            # Remove both the target server and any already-dead references.
            if weak_ref() is sio_server or weak_ref() is None:
                to_remove.append(weak_ref)

        for weak_ref in to_remove:
            self._socketio_servers.discard(weak_ref)

        self.logger.debug(f"Unregistered Socket.IO server: {id(sio_server)}")

    async def emit_event(
        self,
        namespace: str,
        event: str,
        data: Dict[str, Any],
        force_http: bool = False,
        endpoint: Optional[str] = None,
    ) -> bool:
        """
        Emit event with optimal routing (direct calls vs HTTP).

        Args:
            namespace: Event namespace (e.g., 'hook')
            event: Event name (e.g., 'claude_event')
            data: Event data
            force_http: Force HTTP emission even if direct emission available
            endpoint: HTTP endpoint URL (defaults to localhost:8765)

        Returns:
            True if event was emitted successfully
        """
        try:
            # Clean up dead weak references before deciding on a route.
            self._cleanup_dead_references()

            # Try direct emission first (unless forced to use HTTP).
            if not force_http and self._socketio_servers:
                success = await self._emit_direct(event, data)
                if success:
                    self._direct_events += 1
                    return True

            # Fallback to HTTP emission.
            # NOTE(review): when servers ARE registered, direct emission fails,
            # and no explicit endpoint was given, this condition is False and
            # the event is simply counted as failed — confirm that skipping
            # the default HTTP endpoint here is intended.
            if endpoint or not self._socketio_servers:
                success = await self._emit_http(namespace, event, data, endpoint)
                if success:
                    self._http_events += 1
                    return True

            self._failed_events += 1
            return False

        except Exception as e:
            self.logger.error(f"Error emitting event {event}: {e}")
            self._failed_events += 1
            return False

    async def _emit_direct(self, event: str, data: Dict[str, Any]) -> bool:
        """Emit event directly to registered Socket.IO servers.

        Returns:
            True if at least one registered server accepted the event.
        """
        success_count = 0

        for weak_ref in list(
            self._socketio_servers
        ):  # Copy to avoid modification during iteration
            sio_server = weak_ref()
            if sio_server is None:
                continue  # Dead reference; will be cleaned up later

            try:
                # Direct async call to the Socket.IO server.
                await sio_server.emit(event, data)
                success_count += 1

            except Exception as e:
                # Per-server failure is non-fatal; other servers may still succeed.
                self.logger.warning(
                    f"Direct emission failed for server {id(sio_server)}: {e}"
                )

        if success_count > 0:
            self.logger.debug(
                f"Direct emission successful to {success_count} servers: {event}"
            )
            return True

        return False

    async def _emit_http(
        self, namespace: str, event: str, data: Dict[str, Any], endpoint: Optional[str] = None
    ) -> bool:
        """Emit event via HTTP POST using the pooled session.

        Args:
            namespace: Event namespace, forwarded in the JSON payload.
            event: Event name.
            data: Event data.
            endpoint: Override URL; defaults to the local monitor daemon.

        Returns:
            True on HTTP 200/204, False on any error or timeout.
        """
        if not self._http_session:
            self.logger.warning("HTTP session not initialized")
            return False

        url = endpoint or "http://localhost:8765/api/events"

        payload = {
            "namespace": namespace,
            "event": event,
            "data": data,
            "timestamp": datetime.now().isoformat(),
            "source": "async_emitter",
        }

        try:
            async with self._http_session.post(url, json=payload) as response:
                if response.status in [200, 204]:
                    self.logger.debug(f"HTTP emission successful: {event}")
                    return True
                self.logger.warning(
                    f"HTTP emission failed with status {response.status}: {event}"
                )
                return False

        except asyncio.TimeoutError:
            self.logger.warning(f"HTTP emission timeout: {event}")
            return False
        except aiohttp.ClientError as e:
            self.logger.warning(f"HTTP emission client error: {e}")
            return False
        except Exception as e:
            self.logger.error(f"HTTP emission unexpected error: {e}")
            return False

    def _cleanup_dead_references(self):
        """Drop weak references whose target server has been garbage-collected."""
        to_remove = []
        for weak_ref in self._socketio_servers:
            if weak_ref() is None:
                to_remove.append(weak_ref)

        for weak_ref in to_remove:
            self._socketio_servers.discard(weak_ref)

    def get_stats(self) -> Dict[str, Any]:
        """Get performance statistics.

        Returns:
            Counters for direct/HTTP/failed emissions plus pool information.
        """
        return {
            "direct_events": self._direct_events,
            "http_events": self._http_events,
            "failed_events": self._failed_events,
            "registered_servers": len(
                [ref for ref in self._socketio_servers if ref() is not None]
            ),
            "connection_pool_size": (
                self._http_connector.limit if self._http_connector else 0
            ),
            "active_connections": (
                # NOTE(review): _conns is a private aiohttp attribute and may
                # change between aiohttp versions — verify before upgrading.
                len(self._http_connector._conns) if self._http_connector else 0
            ),
        }

    async def close(self):
        """Clean up resources with proper order and timing.

        Order matters: stop new emissions (clear server refs), close the
        session, then the connector. The sleeps are deliberate grace periods;
        see the inline comments.
        """
        try:
            # Cancel batch processor if running.
            if self._batch_processor_task and not self._batch_processor_task.done():
                self._batch_processor_task.cancel()
                try:
                    await self._batch_processor_task
                except asyncio.CancelledError:
                    pass
                except Exception as e:
                    self.logger.debug(f"Error cancelling batch processor: {e}")
                finally:
                    self._batch_processor_task = None

            # Clear Socket.IO server references first.
            # This prevents any new events from being emitted.
            self._socketio_servers.clear()

            # Close HTTP session (must be done before connector).
            if self._http_session:
                try:
                    # Cancel any pending requests.
                    if (
                        hasattr(self._http_session, "_connector")
                        and self._http_session._connector
                    ):
                        # Give ongoing requests a moment to complete.
                        await asyncio.sleep(0.1)

                    # Close the session.
                    await self._http_session.close()

                    # CRITICAL: Wait for session to fully close.
                    # This prevents the "I/O operation on closed kqueue" error.
                    # NOTE(review): the 0.25s/0.5s delays below are empirical,
                    # not an aiohttp-documented contract.
                    await asyncio.sleep(0.25)

                except Exception as e:
                    self.logger.debug(f"Error closing HTTP session: {e}")
                finally:
                    self._http_session = None

            # Then close the connector (after session is fully closed).
            if self._http_connector:
                try:
                    # Close the connector.
                    await self._http_connector.close()

                    # Give the connector adequate time to close all connections.
                    # This is critical for preventing kqueue errors.
                    await asyncio.sleep(0.5)

                except Exception as e:
                    self.logger.debug(f"Error closing HTTP connector: {e}")
                finally:
                    self._http_connector = None

            # Reset singleton instance so get_instance() builds a fresh one.
            AsyncEventEmitter._instance = None

            self.logger.info("AsyncEventEmitter closed successfully")

        except Exception as e:
            self.logger.error(f"Error closing AsyncEventEmitter: {e}")
        finally:
            # Ensure references are cleared even if errors occur.
            self._http_session = None
            self._http_connector = None
            self._socketio_servers.clear()
            AsyncEventEmitter._instance = None
323
+
324
+
325
# Global instance for easy access.
# NOTE(review): duplicates AsyncEventEmitter._instance; the two handles are
# kept in sync by get_event_emitter()/cleanup_event_emitter() below.
_global_emitter: Optional[AsyncEventEmitter] = None
327
+
328
+
329
async def get_event_emitter() -> AsyncEventEmitter:
    """Return the module-wide event emitter, creating it lazily on first use."""
    global _global_emitter
    if _global_emitter is not None:
        return _global_emitter
    _global_emitter = await AsyncEventEmitter.get_instance()
    return _global_emitter
335
+
336
+
337
async def cleanup_event_emitter():
    """Close and discard the module-wide event emitter, if one exists."""
    global _global_emitter
    if _global_emitter is None:
        return
    await _global_emitter.close()
    _global_emitter = None
@@ -0,0 +1,20 @@
1
+ """
2
+ Event Handlers for Unified Monitor
3
+ ==================================
4
+
5
+ WHY: These handlers provide the event-driven functionality for the unified
6
+ monitor daemon. They handle real AST analysis, dashboard events, and Claude
7
+ Code hook ingestion.
8
+
9
+ DESIGN DECISIONS:
10
+ - Real AST analysis using CodeTreeAnalyzer instead of mock data
11
+ - Event-driven architecture with Socket.IO
12
+ - Modular handler design for maintainability
13
+ - Integration with existing Claude MPM tools
14
+ """
15
+
16
+ from .code_analysis import CodeAnalysisHandler
17
+ from .dashboard import DashboardHandler
18
+ from .hooks import HookHandler
19
+
# Public, stable surface of the handlers subpackage.
__all__ = ["CodeAnalysisHandler", "DashboardHandler", "HookHandler"]
@@ -0,0 +1,334 @@
1
+ """
2
+ Code Analysis Event Handler for Unified Monitor
3
+ ===============================================
4
+
5
+ WHY: This handler provides real AST analysis functionality for the Code Tree
6
+ viewer. It uses the actual CodeTreeAnalyzer instead of mock data to generate
7
+ proper hierarchical data structures for 3D visualization.
8
+
9
+ DESIGN DECISIONS:
10
+ - Uses real CodeTreeAnalyzer for AST parsing
11
+ - Generates proper data structures for D3.js tree visualization
12
+ - Handles file analysis requests via Socket.IO events
13
+ - Provides real-time code analysis updates
14
+ """
15
+
16
+ import asyncio
17
+ from typing import Dict, Optional
18
+
19
+ import socketio
20
+
21
+ from ....core.logging_config import get_logger
22
+
23
+ try:
24
+ from ....tools.code_tree_analyzer import CodeTreeAnalyzer
25
+ from ....tools.code_tree_builder import CodeTreeBuilder
26
+ except ImportError:
27
+ # Fallback if tools are not available
28
+ CodeTreeAnalyzer = None
29
+ CodeTreeBuilder = None
30
+
31
+
32
class CodeAnalysisHandler:
    """Event handler for real code analysis functionality.

    WHY: Provides real AST analysis for the Code Tree viewer instead of mock
    data. Integrates the existing CodeTreeAnalyzer with the unified monitor
    daemon.

    Results are cached in-process; clients may clear the cache via the
    ``code:clear:cache`` event.
    """

    def __init__(self, sio: socketio.AsyncServer):
        """Initialize the code analysis handler.

        Args:
            sio: Socket.IO server instance used to receive requests and emit
                results back to clients.
        """
        self.sio = sio
        self.logger = get_logger(__name__)

        # Analysis tools; None when the optional tools package failed to import.
        self.analyzer = CodeTreeAnalyzer() if CodeTreeAnalyzer else None
        self.builder = CodeTreeBuilder() if CodeTreeBuilder else None

        # Cache of analysis results keyed by "file:<path>" or
        # "dir:<path>:<depth>".  NOTE(review): unbounded — long-running
        # daemons rely on explicit code:clear:cache requests for eviction.
        self.analysis_cache = {}

    def register(self):
        """Register Socket.IO event handlers.

        Raises:
            Exception: re-raised after logging if registration fails.
        """
        try:
            # File/directory analysis events.
            self.sio.on("code:analyze:file", self.handle_analyze_file)
            self.sio.on("code:analyze:directory", self.handle_analyze_directory)
            self.sio.on("code:get:tree", self.handle_get_tree)

            # Cache management.
            self.sio.on("code:clear:cache", self.handle_clear_cache)

            self.logger.info("Code analysis event handlers registered")

        except Exception as e:
            self.logger.error(f"Error registering code analysis handlers: {e}")
            raise

    async def handle_analyze_file(self, sid: str, data: Dict):
        """Handle a single-file analysis request.

        Emits ``code:file:analyzed`` with the analysis spread at the top level
        (legacy server format), or ``code:error`` on failure.

        Args:
            sid: Socket.IO session ID
            data: Request data containing the file ``path``
        """
        try:
            file_path = data.get("path")
            if not file_path:
                await self.sio.emit(
                    "code:error", {"error": "No file path provided"}, room=sid
                )
                return

            self.logger.info(f"Analyzing file: {file_path}")

            # Serve from cache when possible; otherwise run a fresh analysis.
            # (Only truthy results are ever cached, so a cache hit is valid.)
            cache_key = f"file:{file_path}"
            cached = cache_key in self.analysis_cache
            if cached:
                self.logger.debug(f"Using cached analysis for {file_path}")
                analysis_result = self.analysis_cache[cache_key]
            else:
                analysis_result = await self._analyze_file_async(file_path)

            if analysis_result:
                if not cached:
                    self.analysis_cache[cache_key] = analysis_result

                # Frontend expects analysis data at top level, not wrapped in
                # an "analysis" field.
                response_data = {
                    "path": file_path,
                    "cached": cached,
                    **analysis_result,
                }
                await self.sio.emit("code:file:analyzed", response_data, room=sid)

                if not cached:
                    self.logger.info(f"File analysis completed: {file_path}")
            else:
                await self.sio.emit(
                    "code:error",
                    {"error": f"Failed to analyze file: {file_path}"},
                    room=sid,
                )

        except Exception as e:
            self.logger.error(f"Error analyzing file: {e}")
            await self.sio.emit(
                "code:error", {"error": f"Analysis error: {e!s}"}, room=sid
            )

    async def handle_analyze_directory(self, sid: str, data: Dict):
        """Handle a directory analysis request.

        Emits ``code:directory:analyzed`` with the tree under a ``tree`` key,
        or ``code:error`` on failure.

        Args:
            sid: Socket.IO session ID
            data: Request data containing ``path`` and optional ``max_depth``
        """
        try:
            dir_path = data.get("path", ".")
            max_depth = data.get("max_depth", 3)

            self.logger.info(f"Analyzing directory: {dir_path}")

            # Cache key includes depth so different depths are kept apart.
            cache_key = f"dir:{dir_path}:{max_depth}"
            cached = cache_key in self.analysis_cache
            if cached:
                self.logger.debug(f"Using cached analysis for {dir_path}")
                tree_result = self.analysis_cache[cache_key]
            else:
                tree_result = await self._build_directory_tree_async(
                    dir_path, max_depth
                )

            if tree_result:
                if not cached:
                    self.analysis_cache[cache_key] = tree_result

                await self.sio.emit(
                    "code:directory:analyzed",
                    {"path": dir_path, "tree": tree_result, "cached": cached},
                    room=sid,
                )

                if not cached:
                    self.logger.info(f"Directory analysis completed: {dir_path}")
            else:
                await self.sio.emit(
                    "code:error",
                    {"error": f"Failed to analyze directory: {dir_path}"},
                    room=sid,
                )

        except Exception as e:
            self.logger.error(f"Error analyzing directory: {e}")
            await self.sio.emit(
                "code:error", {"error": f"Directory analysis error: {e!s}"}, room=sid
            )

    async def handle_get_tree(self, sid: str, data: Dict):
        """Handle request for code tree visualization data.

        Emits ``code:tree:data`` on success or ``code:error`` on failure.

        Args:
            sid: Socket.IO session ID
            data: Request data containing ``path`` and optional ``format``
        """
        try:
            path = data.get("path", ".")
            format_type = data.get("format", "d3")  # d3, json, etc.

            self.logger.info(f"Getting tree for: {path}")

            # Get or build tree data.
            tree_data = await self._get_tree_data_async(path, format_type)

            if tree_data:
                await self.sio.emit(
                    "code:tree:data",
                    {"path": path, "format": format_type, "tree": tree_data},
                    room=sid,
                )

                self.logger.info(f"Tree data sent for: {path}")
            else:
                await self.sio.emit(
                    "code:error",
                    {"error": f"Failed to get tree data for: {path}"},
                    room=sid,
                )

        except Exception as e:
            self.logger.error(f"Error getting tree data: {e}")
            await self.sio.emit(
                "code:error", {"error": f"Tree data error: {e!s}"}, room=sid
            )

    async def handle_clear_cache(self, sid: str, data: Dict):
        """Handle cache clearing request.

        Emits ``code:cache:cleared`` with the requested type; unknown types
        clear nothing but still acknowledge.

        Args:
            sid: Socket.IO session ID
            data: Request data with optional ``type`` (all, file, directory)
        """
        try:
            cache_type = data.get("type", "all")  # all, file, directory

            if cache_type == "all":
                self.analysis_cache.clear()
                self.logger.info("All analysis cache cleared")
            elif cache_type in ("file", "directory"):
                # File entries use the "file:" prefix, directory entries "dir:".
                prefix = "file:" if cache_type == "file" else "dir:"
                stale_keys = [
                    k for k in self.analysis_cache if k.startswith(prefix)
                ]
                for key in stale_keys:
                    del self.analysis_cache[key]
                self.logger.info(f"{cache_type.capitalize()} analysis cache cleared")

            await self.sio.emit("code:cache:cleared", {"type": cache_type}, room=sid)

        except Exception as e:
            self.logger.error(f"Error clearing cache: {e}")
            await self.sio.emit(
                "code:error", {"error": f"Cache clear error: {e!s}"}, room=sid
            )

    async def _analyze_file_async(self, file_path: str) -> Optional[Dict]:
        """Perform file analysis asynchronously.

        Args:
            file_path: Path to file to analyze

        Returns:
            Analysis result, or None if the analyzer is unavailable or fails.
        """
        # Guard the import-time fallback explicitly: without this, the call
        # below would raise AttributeError on None and be logged as a generic
        # analysis error, hiding the real cause.
        if self.analyzer is None:
            self.logger.warning(
                "CodeTreeAnalyzer unavailable; cannot analyze file"
            )
            return None

        try:
            # Run analysis in a worker thread to avoid blocking the event loop.
            loop = asyncio.get_running_loop()
            return await loop.run_in_executor(
                None, self.analyzer.analyze_file, file_path
            )

        except Exception as e:
            self.logger.error(f"Error in async file analysis: {e}")
            return None

    async def _build_directory_tree_async(
        self, dir_path: str, max_depth: int
    ) -> Optional[Dict]:
        """Build directory tree asynchronously.

        Args:
            dir_path: Path to directory
            max_depth: Maximum depth to analyze

        Returns:
            Tree result, or None if the builder is unavailable or fails.
        """
        # Same guard as _analyze_file_async: surface the missing-tools case
        # with a clear message instead of an AttributeError on None.
        if self.builder is None:
            self.logger.warning(
                "CodeTreeBuilder unavailable; cannot build directory tree"
            )
            return None

        try:
            # Run tree building in a worker thread.
            loop = asyncio.get_running_loop()
            return await loop.run_in_executor(
                None, self.builder.build_tree, dir_path, max_depth
            )

        except Exception as e:
            self.logger.error(f"Error in async directory tree building: {e}")
            return None

    async def _get_tree_data_async(self, path: str, format_type: str) -> Optional[Dict]:
        """Get tree data in specified format asynchronously.

        Args:
            path: Path to analyze
            format_type: Format for tree data (d3, json, etc.) — currently
                ignored; all formats fall through to directory analysis.

        Returns:
            Tree data or None if failed
        """
        try:
            # For now, use directory analysis at a fixed depth of 3.
            # TODO: Add format-specific tree generation
            return await self._build_directory_tree_async(path, 3)

        except Exception as e:
            self.logger.error(f"Error getting tree data: {e}")
            return None