claude-mpm 4.1.7__py3-none-any.whl → 4.1.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- claude_mpm/VERSION +1 -1
- claude_mpm/agents/INSTRUCTIONS.md +26 -1
- claude_mpm/agents/OUTPUT_STYLE.md +73 -0
- claude_mpm/agents/agents_metadata.py +57 -0
- claude_mpm/agents/templates/.claude-mpm/memories/README.md +17 -0
- claude_mpm/agents/templates/.claude-mpm/memories/engineer_memories.md +3 -0
- claude_mpm/agents/templates/agent-manager.json +263 -17
- claude_mpm/agents/templates/agent-manager.md +248 -10
- claude_mpm/agents/templates/agentic_coder_optimizer.json +222 -0
- claude_mpm/agents/templates/code_analyzer.json +18 -8
- claude_mpm/agents/templates/engineer.json +1 -1
- claude_mpm/agents/templates/logs/prompts/agent_engineer_20250826_014258_728.md +39 -0
- claude_mpm/agents/templates/qa.json +1 -1
- claude_mpm/agents/templates/research.json +1 -1
- claude_mpm/cli/__init__.py +4 -0
- claude_mpm/cli/commands/__init__.py +6 -0
- claude_mpm/cli/commands/analyze.py +547 -0
- claude_mpm/cli/commands/analyze_code.py +524 -0
- claude_mpm/cli/commands/configure.py +223 -25
- claude_mpm/cli/commands/configure_tui.py +65 -61
- claude_mpm/cli/commands/debug.py +1387 -0
- claude_mpm/cli/parsers/analyze_code_parser.py +170 -0
- claude_mpm/cli/parsers/analyze_parser.py +135 -0
- claude_mpm/cli/parsers/base_parser.py +29 -0
- claude_mpm/cli/parsers/configure_parser.py +23 -0
- claude_mpm/cli/parsers/debug_parser.py +319 -0
- claude_mpm/config/socketio_config.py +21 -21
- claude_mpm/constants.py +3 -1
- claude_mpm/core/framework_loader.py +148 -6
- claude_mpm/core/log_manager.py +16 -13
- claude_mpm/core/logger.py +1 -1
- claude_mpm/core/unified_agent_registry.py +1 -1
- claude_mpm/dashboard/.claude-mpm/socketio-instances.json +1 -0
- claude_mpm/dashboard/analysis_runner.py +428 -0
- claude_mpm/dashboard/static/built/components/activity-tree.js +2 -0
- claude_mpm/dashboard/static/built/components/agent-inference.js +1 -1
- claude_mpm/dashboard/static/built/components/event-viewer.js +1 -1
- claude_mpm/dashboard/static/built/components/file-tool-tracker.js +1 -1
- claude_mpm/dashboard/static/built/components/module-viewer.js +1 -1
- claude_mpm/dashboard/static/built/components/session-manager.js +1 -1
- claude_mpm/dashboard/static/built/components/working-directory.js +1 -1
- claude_mpm/dashboard/static/built/dashboard.js +1 -1
- claude_mpm/dashboard/static/built/socket-client.js +1 -1
- claude_mpm/dashboard/static/css/activity.css +549 -0
- claude_mpm/dashboard/static/css/code-tree.css +846 -0
- claude_mpm/dashboard/static/css/dashboard.css +245 -0
- claude_mpm/dashboard/static/dist/components/activity-tree.js +2 -0
- claude_mpm/dashboard/static/dist/components/code-tree.js +2 -0
- claude_mpm/dashboard/static/dist/components/code-viewer.js +2 -0
- claude_mpm/dashboard/static/dist/components/event-viewer.js +1 -1
- claude_mpm/dashboard/static/dist/components/session-manager.js +1 -1
- claude_mpm/dashboard/static/dist/components/working-directory.js +1 -1
- claude_mpm/dashboard/static/dist/dashboard.js +1 -1
- claude_mpm/dashboard/static/dist/socket-client.js +1 -1
- claude_mpm/dashboard/static/js/components/activity-tree.js +1139 -0
- claude_mpm/dashboard/static/js/components/code-tree.js +1357 -0
- claude_mpm/dashboard/static/js/components/code-viewer.js +480 -0
- claude_mpm/dashboard/static/js/components/event-viewer.js +11 -0
- claude_mpm/dashboard/static/js/components/session-manager.js +40 -4
- claude_mpm/dashboard/static/js/components/socket-manager.js +12 -0
- claude_mpm/dashboard/static/js/components/ui-state-manager.js +4 -0
- claude_mpm/dashboard/static/js/components/working-directory.js +17 -1
- claude_mpm/dashboard/static/js/dashboard.js +39 -0
- claude_mpm/dashboard/static/js/socket-client.js +414 -20
- claude_mpm/dashboard/templates/index.html +184 -4
- claude_mpm/hooks/claude_hooks/hook_handler.py +182 -5
- claude_mpm/hooks/claude_hooks/installer.py +728 -0
- claude_mpm/scripts/claude-hook-handler.sh +161 -0
- claude_mpm/scripts/socketio_daemon.py +121 -8
- claude_mpm/services/agents/deployment/agent_config_provider.py +127 -27
- claude_mpm/services/agents/deployment/agent_lifecycle_manager_refactored.py +2 -2
- claude_mpm/services/agents/deployment/agent_record_service.py +1 -2
- claude_mpm/services/agents/memory/memory_format_service.py +1 -5
- claude_mpm/services/cli/agent_cleanup_service.py +1 -2
- claude_mpm/services/cli/agent_dependency_service.py +1 -1
- claude_mpm/services/cli/agent_validation_service.py +3 -4
- claude_mpm/services/cli/dashboard_launcher.py +2 -3
- claude_mpm/services/cli/startup_checker.py +0 -10
- claude_mpm/services/core/cache_manager.py +1 -2
- claude_mpm/services/core/path_resolver.py +1 -4
- claude_mpm/services/core/service_container.py +2 -2
- claude_mpm/services/diagnostics/checks/instructions_check.py +2 -5
- claude_mpm/services/event_bus/direct_relay.py +98 -20
- claude_mpm/services/infrastructure/monitoring/__init__.py +11 -11
- claude_mpm/services/infrastructure/monitoring.py +11 -11
- claude_mpm/services/project/architecture_analyzer.py +1 -1
- claude_mpm/services/project/dependency_analyzer.py +4 -4
- claude_mpm/services/project/language_analyzer.py +3 -3
- claude_mpm/services/project/metrics_collector.py +3 -6
- claude_mpm/services/socketio/handlers/__init__.py +2 -0
- claude_mpm/services/socketio/handlers/code_analysis.py +170 -0
- claude_mpm/services/socketio/handlers/registry.py +2 -0
- claude_mpm/services/socketio/server/connection_manager.py +95 -65
- claude_mpm/services/socketio/server/core.py +125 -17
- claude_mpm/services/socketio/server/main.py +44 -5
- claude_mpm/services/visualization/__init__.py +19 -0
- claude_mpm/services/visualization/mermaid_generator.py +938 -0
- claude_mpm/tools/__main__.py +208 -0
- claude_mpm/tools/code_tree_analyzer.py +778 -0
- claude_mpm/tools/code_tree_builder.py +632 -0
- claude_mpm/tools/code_tree_events.py +318 -0
- claude_mpm/tools/socketio_debug.py +671 -0
- {claude_mpm-4.1.7.dist-info → claude_mpm-4.1.10.dist-info}/METADATA +1 -1
- {claude_mpm-4.1.7.dist-info → claude_mpm-4.1.10.dist-info}/RECORD +108 -77
- claude_mpm/agents/schema/agent_schema.json +0 -314
- {claude_mpm-4.1.7.dist-info → claude_mpm-4.1.10.dist-info}/WHEEL +0 -0
- {claude_mpm-4.1.7.dist-info → claude_mpm-4.1.10.dist-info}/entry_points.txt +0 -0
- {claude_mpm-4.1.7.dist-info → claude_mpm-4.1.10.dist-info}/licenses/LICENSE +0 -0
- {claude_mpm-4.1.7.dist-info → claude_mpm-4.1.10.dist-info}/top_level.txt +0 -0
claude_mpm/services/cli/dashboard_launcher.py:

```diff
@@ -134,9 +134,8 @@ class DashboardLauncher(IDashboardLauncher):
         """
         try:
             # Verify dependencies for Socket.IO dashboard
-            if monitor_mode:
-                return False, False
+            if monitor_mode and not self._verify_socketio_dependencies():
+                return False, False
 
             self.logger.info(
                 f"Launching dashboard (port: {port}, monitor: {monitor_mode})"
```
claude_mpm/services/cli/startup_checker.py:

```diff
@@ -176,17 +176,7 @@ class StartupCheckerService(IStartupChecker):
 
         try:
             # Check Python version
-            import sys
 
-            if sys.version_info < (3, 8):
-                warnings.append(
-                    StartupWarning(
-                        category="environment",
-                        message=f"Python {sys.version_info.major}.{sys.version_info.minor} detected",
-                        suggestion="Python 3.8+ is recommended for optimal performance",
-                        severity="info",
-                    )
-                )
 
             # Check for common missing directories
             warnings.extend(self._check_required_directories())
```
claude_mpm/services/core/cache_manager.py:

```diff
@@ -253,7 +253,7 @@ class CacheManager(ICacheManager):
         with self._lock:
             current_time = time.time()
 
-            stats = {
+            return {
                 "capabilities": {
                     "cached": self._capabilities_cache is not None,
                     "age": (
@@ -308,4 +308,3 @@ class CacheManager(ICacheManager):
                 "fs_cache": self._fs_cache.get_stats() if self._fs_cache else {},
             }
 
-            return stats
```
claude_mpm/services/core/path_resolver.py:

```diff
@@ -91,10 +91,7 @@ class PathResolver(IPathResolver):
             resolved = path.resolve()
 
             # Check if path exists if required
-            if must_exist and not resolved.exists():
-                return False
-
-            return True
+            return not (must_exist and not resolved.exists())
         except (OSError, ValueError):
             return False
 
```
claude_mpm/services/core/service_container.py:

```diff
@@ -244,7 +244,7 @@ class ServiceContainer:
 
         # Check for circular dependencies
         if service_type in self._resolution_stack.stack:
-            raise CircularDependencyError(self._resolution_stack.stack + [service_type])
+            raise CircularDependencyError([*self._resolution_stack.stack, service_type])
 
         try:
             # Add to resolution stack
@@ -287,7 +287,7 @@ class ServiceContainer:
         results = []
 
         with self._lock:
-            for registered_type, descriptor in self._services.items():
+            for registered_type, _descriptor in self._services.items():
                 # Check if registered type is subclass of requested type
                 if self._is_assignable(registered_type, service_type):
                     try:
```
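The ServiceContainer hunk above detects circular dependencies by checking whether the requested type is already on the resolution stack, and now reports the full chain including the repeated type. A minimal, self-contained sketch of that idea; the `Resolver` class and factories below are hypothetical, only the `[*stack, service_type]` reporting pattern mirrors the diff:

```python
# Hypothetical, simplified illustration of resolution-stack cycle detection.
class CircularDependencyError(Exception):
    def __init__(self, chain):
        super().__init__(" -> ".join(t.__name__ for t in chain))


class Resolver:
    def __init__(self, factories):
        self._factories = factories  # type -> factory(resolver)
        self._stack = []             # types currently being resolved

    def resolve(self, service_type):
        if service_type in self._stack:
            # Include the repeated type so the error shows the full cycle,
            # like [*stack, service_type] in the hunk above.
            raise CircularDependencyError([*self._stack, service_type])
        self._stack.append(service_type)
        try:
            return self._factories[service_type](self)
        finally:
            self._stack.pop()


class A: pass
class B: pass

factories = {
    A: lambda r: (A(), r.resolve(B))[0],  # A depends on B
    B: lambda r: (B(), r.resolve(A))[0],  # B depends on A -> cycle
}

try:
    Resolver(factories).resolve(A)
except CircularDependencyError as exc:
    print("cycle:", exc)  # cycle: A -> B -> A
```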
claude_mpm/services/diagnostics/checks/instructions_check.py:

```diff
@@ -197,7 +197,6 @@ class InstructionsCheck(BaseDiagnosticCheck):
             )
 
         # Calculate content hashes
-        content_hashes = {}
         content_snippets = defaultdict(list)
 
         for path in files:
@@ -265,7 +264,7 @@ class InstructionsCheck(BaseDiagnosticCheck):
         for description, occurrences in pattern_occurrences.items():
             if len(occurrences) > 1:
                 files_info = []
-                for path, count, snippet in occurrences:
+                for path, count, _snippet in occurrences:
                     rel_path = (
                         path.relative_to(Path.cwd())
                         if Path.cwd() in path.parents or path.parent == Path.cwd()
@@ -374,9 +373,7 @@ class InstructionsCheck(BaseDiagnosticCheck):
                 continue
 
         # Check for Claude Code specific content in INSTRUCTIONS.md
-        instructions_files = [
-            path for path in files if path.name == "INSTRUCTIONS.md"
-        ]
+        instructions_files = [path for path in files if path.name == "INSTRUCTIONS.md"]
         for path in instructions_files:
             try:
                 content = path.read_text(encoding="utf-8")
```
claude_mpm/services/event_bus/direct_relay.py:

```diff
@@ -2,6 +2,16 @@
 
 This module provides a relay that connects EventBus directly to the
 Socket.IO server's broadcaster, avoiding the client loopback issue.
+
+IMPORTANT - Claude Event Format:
+Claude sends hook events with these REQUIRED fields:
+- hook_event_name: The event type (UserPromptSubmit, PreToolUse, PostToolUse, etc.)
+- hook_event_type: Usually same as hook_event_name
+- hook_input_data: Contains the actual event data
+- sessionId: Session identifier
+- timestamp: ISO format timestamp
+
+DO NOT use "event" or "type" fields - use "hook_event_name" instead!
 """
 
 import logging
```
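The new docstring above documents the hook event contract the relay expects. A sketch of a well-formed event built from those field names; the payload under `hook_input_data` and the session id are illustrative, not taken from the package:

```python
# A well-formed hook event per the docstring above (payload values are illustrative).
import json
from datetime import datetime, timezone

event = {
    "hook_event_name": "PreToolUse",          # required: the event type
    "hook_event_type": "PreToolUse",          # usually identical to hook_event_name
    "hook_input_data": {                      # the actual event payload
        "tool_name": "Bash",
        "tool_input": {"command": "ls -la"},
    },
    "sessionId": "3f6c2a1e-...",              # session identifier (placeholder)
    "timestamp": datetime.now(timezone.utc).isoformat(),  # ISO format timestamp
}

# Note: no top-level "event" or "type" keys -- the relay keys off "hook_event_name".
print(json.dumps(event, indent=2))
```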
```diff
@@ -37,9 +47,12 @@ class DirectSocketIORelay:
             "last_relay_time": None,
         }
         self.debug = logger.isEnabledFor(logging.DEBUG)
+        self.connection_retries = 0
+        self.max_retries = 10
+        self.retry_delay = 1.0  # Start with 1 second
 
     def start(self) -> None:
-        """Start the relay by subscribing to EventBus events."""
+        """Start the relay by subscribing to EventBus events with retry logic."""
        if not self.enabled:
            logger.warning("DirectSocketIORelay is disabled")
            return
```
```diff
@@ -60,12 +73,9 @@ class DirectSocketIORelay:
         # Add debug logging for verification
         logger.info("[DirectRelay] Subscribed to hook.* events on EventBus")
 
-        # Check and log broadcaster availability
-        broadcaster_available = (
-            self.server
-            and hasattr(self.server, "broadcaster")
-            and self.server.broadcaster is not None
-        )
+        # Check and log broadcaster availability with retry logic
+        broadcaster_available = self._check_broadcaster_with_retry()
+
         logger.info(
             f"[DirectRelay] Server broadcaster available: {broadcaster_available}"
         )
```
```diff
@@ -80,14 +90,49 @@ class DirectSocketIORelay:
             )
         else:
             logger.warning(
-                "[DirectRelay] Server broadcaster is None - events will not be relayed!"
+                "[DirectRelay] Server broadcaster is None after retries - events will not be relayed!"
             )
 
         logger.info(f"[DirectRelay] EventBus instance: {self.event_bus is not None}")
 
         # Mark as connected after successful subscription
-        self.connected = True
-        logger.info("[DirectRelay] Started")
+        self.connected = broadcaster_available
+        logger.info(f"[DirectRelay] Started with connection status: {self.connected}")
+
+    def _check_broadcaster_with_retry(self) -> bool:
+        """Check broadcaster availability with exponential backoff retry.
+
+        Returns:
+            True if broadcaster is available, False after max retries
+        """
+        import time
+
+        retry_delay = self.retry_delay
+
+        for attempt in range(self.max_retries):
+            broadcaster_available = (
+                self.server
+                and hasattr(self.server, "broadcaster")
+                and self.server.broadcaster is not None
+            )
+
+            if broadcaster_available:
+                self.connection_retries = 0  # Reset counter on success
+                return True
+
+            if attempt < self.max_retries - 1:
+                logger.info(
+                    f"[DirectRelay] Broadcaster not ready, retry {attempt + 1}/{self.max_retries} "
+                    f"in {retry_delay:.1f}s"
+                )
+                time.sleep(retry_delay)
+                retry_delay = min(retry_delay * 2, 30.0)  # Exponential backoff, max 30s
+            else:
+                logger.error(
+                    f"[DirectRelay] Broadcaster not available after {self.max_retries} attempts"
+                )
+
+        return False
 
     def _handle_hook_event(self, event_type: str, data: Any):
         """Internal method to handle hook events and broadcast them.
```
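The `_check_broadcaster_with_retry` method added above starts at a 1-second delay, doubles it after each failed attempt, and caps it at 30 seconds, for up to 10 attempts. A standalone snippet that simply reproduces that schedule (no claude_mpm imports):

```python
# Reproduces the backoff schedule used by _check_broadcaster_with_retry above:
# start at 1.0 s, double after each failed attempt, cap at 30 s, 10 attempts max.
def backoff_delays(initial=1.0, max_retries=10, cap=30.0):
    delay = initial
    delays = []
    for _ in range(max_retries - 1):  # no sleep after the final attempt
        delays.append(delay)
        delay = min(delay * 2, cap)
    return delays

print(backoff_delays())
# [1.0, 2.0, 4.0, 8.0, 16.0, 30.0, 30.0, 30.0, 30.0]
```

Worst case, the relay therefore waits about 1 + 2 + 4 + 8 + 16 seconds plus four 30-second sleeps (roughly 151 seconds) before giving up.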
```diff
@@ -173,15 +218,48 @@ class DirectSocketIORelay:
 
                 # Use the full event_type (e.g., "hook.pre_tool") as the event name
                 # The normalizer handles dotted names and will extract type and subtype correctly
+                try:
+                    self.server.broadcaster.broadcast_event(
+                        event_type, broadcast_data
+                    )
+                    self.stats["events_relayed"] += 1
+                    self.stats["last_relay_time"] = datetime.now().isoformat()
+
+                    # Reset retry counter on successful broadcast
+                    if self.connection_retries > 0:
+                        self.connection_retries = 0
+                        self.connected = True
+                        logger.info("[DirectRelay] Connection restored")
+
+                    if self.debug:
+                        logger.debug(
+                            f"[DirectRelay] Broadcasted hook event: {event_type}"
+                        )
+                except Exception as broadcast_error:
+                    logger.error(
+                        f"[DirectRelay] Broadcast failed for {event_type}: {broadcast_error}"
                     )
+                    self.stats["events_failed"] += 1
+
+                    # Try to reconnect if broadcast fails
+                    if self.connection_retries < self.max_retries:
+                        self.connection_retries += 1
+                        self.connected = self._check_broadcaster_with_retry()
+                        if self.connected:
+                            # Retry the broadcast
+                            try:
+                                self.server.broadcaster.broadcast_event(
+                                    event_type, broadcast_data
+                                )
+                                self.stats["events_relayed"] += 1
+                                self.stats[
+                                    "events_failed"
+                                ] -= 1  # Undo the failure count
+                                logger.info(
+                                    f"[DirectRelay] Retry successful for {event_type}"
+                                )
+                            except:
+                                pass  # Already counted as failed
             else:
                 # Enhanced logging when broadcaster is not available
                 logger.warning(
```
```diff
@@ -189,7 +267,7 @@ class DirectSocketIORelay:
                 )
                 if self.server:
                     logger.warning(
+                        "[DirectRelay] Server exists but broadcaster is None"
                     )
                     logger.warning(
                         f"[DirectRelay] Server type: {type(self.server).__name__}"
```
```diff
@@ -202,7 +280,7 @@ class DirectSocketIORelay:
                         f"[DirectRelay] Broadcaster value: {self.server.broadcaster}"
                     )
                 else:
-                    logger.warning(
+                    logger.warning("[DirectRelay] Server is None")
                 self.stats["events_failed"] += 1
 
         except Exception as e:
```
claude_mpm/services/infrastructure/monitoring/__init__.py:

```diff
@@ -24,20 +24,20 @@ from .resources import ResourceMonitorService
 from .service import ServiceHealthService
 
 __all__ = [
-    "ResourceMonitorService",
-    "ProcessHealthService",
-    "ServiceHealthService",
-    "NetworkHealthService",
-    "MonitoringAggregatorService",
-    # Base components
-    "HealthStatus",
-    "HealthMetric",
+    "AdvancedHealthMonitor",
     "HealthCheckResult",
     "HealthChecker",
+    "HealthMetric",
+    # Base components
+    "HealthStatus",
+    "MonitoringAggregatorService",
+    "NetworkConnectivityChecker",
+    "NetworkHealthService",
+    "ProcessHealthService",
     # Legacy compatibility
     "ProcessResourceChecker",
+    # New service-based API
+    "ResourceMonitorService",
     "ServiceHealthChecker",
+    "ServiceHealthService",
 ]
```

claude_mpm/services/infrastructure/monitoring.py:

```diff
@@ -47,22 +47,22 @@ from .monitoring import ( # noqa: F401; New service-based API; Base components;
 )
 
 __all__ = [
-    "ResourceMonitorService",
-    "ProcessHealthService",
-    "ServiceHealthService",
-    "NetworkHealthService",
-    "MonitoringAggregatorService",
-    # Base components
-    "HealthStatus",
-    "HealthMetric",
+    "AdvancedHealthMonitor",
     "HealthCheckResult",
     "HealthChecker",
+    "HealthMetric",
+    # Base components
+    "HealthStatus",
+    "MonitoringAggregatorService",
+    "NetworkConnectivityChecker",
+    "NetworkHealthService",
+    "ProcessHealthService",
     # Legacy compatibility
     "ProcessResourceChecker",
+    # New service-based API
+    "ResourceMonitorService",
     "ServiceHealthChecker",
+    "ServiceHealthService",
 ]
 
 # Module metadata
```
claude_mpm/services/project/architecture_analyzer.py:

```diff
@@ -224,7 +224,7 @@ class ArchitectureAnalyzerService:
         existing_dirs = set()
 
         # Collect all directory names
-        for dirpath, dirnames, _ in self.working_directory.walk():
+        for _dirpath, dirnames, _ in self.working_directory.walk():
             for dirname in dirnames:
                 if not dirname.startswith("."):
                     existing_dirs.add(dirname.lower())
```
|
|
|
171
171
|
if any(pkg in dep_lower for pkg in db_packages):
|
|
172
172
|
databases.add(db_name)
|
|
173
173
|
|
|
174
|
-
return sorted(
|
|
174
|
+
return sorted(databases)
|
|
175
175
|
|
|
176
176
|
def detect_testing_frameworks(
|
|
177
177
|
self, dependencies: Optional[List[str]] = None
|
|
@@ -201,7 +201,7 @@ class DependencyAnalyzerService:
|
|
|
201
201
|
testing_frameworks.add(dep)
|
|
202
202
|
break
|
|
203
203
|
|
|
204
|
-
return sorted(
|
|
204
|
+
return sorted(testing_frameworks)
|
|
205
205
|
|
|
206
206
|
def detect_web_frameworks(self, dependencies: List[str]) -> List[str]:
|
|
207
207
|
"""Detect web frameworks from dependencies.
|
|
@@ -343,7 +343,7 @@ class DependencyAnalyzerService:
|
|
|
343
343
|
for dep in all_deps:
|
|
344
344
|
dep_lower = dep.lower()
|
|
345
345
|
# Check for database packages
|
|
346
|
-
for
|
|
346
|
+
for _db_name, db_packages in self.DATABASE_PACKAGES.items():
|
|
347
347
|
if any(pkg in dep_lower for pkg in db_packages):
|
|
348
348
|
dependencies["databases"].append(dep)
|
|
349
349
|
|
|
@@ -407,7 +407,7 @@ class DependencyAnalyzerService:
|
|
|
407
407
|
dep_lower = dep.lower()
|
|
408
408
|
|
|
409
409
|
# Check databases
|
|
410
|
-
for
|
|
410
|
+
for _db_name, db_packages in self.DATABASE_PACKAGES.items():
|
|
411
411
|
if any(pkg in dep_lower for pkg in db_packages):
|
|
412
412
|
dependencies["databases"].append(dep)
|
|
413
413
|
|
|
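The dependency_analyzer hunks above all share one shape: each dependency name is lower-cased and matched against the package substrings listed under a database name in `DATABASE_PACKAGES`. A miniature of that matching with a made-up two-entry mapping (the real mapping in the module is larger and may differ):

```python
# Hypothetical miniature of the DATABASE_PACKAGES substring matching shown above.
DATABASE_PACKAGES = {
    "postgresql": ["psycopg2", "asyncpg"],
    "redis": ["redis", "aioredis"],
}

def detect_databases(dependencies):
    databases = set()
    for dep in dependencies:
        dep_lower = dep.lower()
        for db_name, db_packages in DATABASE_PACKAGES.items():
            if any(pkg in dep_lower for pkg in db_packages):
                databases.add(db_name)
    return sorted(databases)

print(detect_databases(["Psycopg2-binary==2.9", "fastapi", "aioredis>=2.0"]))
# ['postgresql', 'redis']
```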
claude_mpm/services/project/language_analyzer.py:

```diff
@@ -109,7 +109,7 @@ class LanguageAnalyzerService:
             if files:
                 languages.add(lang)
 
-        return sorted(list(languages))
+        return sorted(languages)
 
     def detect_primary_language(
         self, file_counts: Optional[Dict[str, int]] = None
@@ -229,7 +229,7 @@ class LanguageAnalyzerService:
         """Count files by extension in the project."""
         counts = Counter()
 
-        for ext in self.FILE_EXTENSIONS.keys():
+        for ext in self.FILE_EXTENSIONS:
             files = list(self.working_directory.rglob(f"*{ext}"))
             # Filter out vendor directories
             files = [
@@ -249,7 +249,7 @@ class LanguageAnalyzerService:
         """Get a sample of source files for analysis."""
         source_files = []
 
-        for ext in self.FILE_EXTENSIONS.keys():
+        for ext in self.FILE_EXTENSIONS:
             files = list(self.working_directory.rglob(f"*{ext}"))
             # Filter out vendor directories
             files = [
```
claude_mpm/services/project/metrics_collector.py:

```diff
@@ -166,7 +166,7 @@ class MetricsCollectorService:
 
         for file_path in self._iter_code_files():
             try:
-                size = file_path.stat().st_size
+                file_path.stat().st_size
                 lines = len(
                     file_path.read_text(encoding="utf-8", errors="ignore").splitlines()
                 )
@@ -377,7 +377,7 @@ class MetricsCollectorService:
 
         # Count directories
         dir_count = 0
-        for dirpath, dirnames, _ in self.working_directory.walk():
+        for _dirpath, dirnames, _ in self.working_directory.walk():
             dirnames[:] = [d for d in dirnames if d not in self.EXCLUDE_DIRS]
             dir_count += len(dirnames)
 
@@ -404,7 +404,4 @@ class MetricsCollectorService:
     def _should_analyze_file(self, file_path: Path) -> bool:
         """Check if a file should be analyzed."""
         # Skip files in excluded directories
-        for part in file_path.parts:
-            if part in self.EXCLUDE_DIRS:
-                return False
-        return True
+        return all(part not in self.EXCLUDE_DIRS for part in file_path.parts)
```
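The metrics_collector hunks above rely on `pathlib.Path.walk()` (Python 3.12+) and prune excluded directories in place with `dirnames[:] = ...` so the walk never descends into them. A minimal standalone version of that idiom; the `EXCLUDE_DIRS` values here are an assumption, not the module's actual set:

```python
# Minimal sketch of the Path.walk() pruning idiom used above.
# Requires Python 3.12+ for pathlib.Path.walk(); EXCLUDE_DIRS is an assumption.
from pathlib import Path

EXCLUDE_DIRS = {".git", "node_modules", "__pycache__", ".venv"}

def count_dirs(root: Path) -> int:
    dir_count = 0
    for _dirpath, dirnames, _filenames in root.walk():
        # In-place assignment prunes the walk: excluded trees are never entered.
        dirnames[:] = [d for d in dirnames if d not in EXCLUDE_DIRS]
        dir_count += len(dirnames)
    return dir_count

print(count_dirs(Path(".")))
```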
claude_mpm/services/socketio/handlers/__init__.py:

```diff
@@ -7,6 +7,7 @@ and maintainability.
 """
 
 from .base import BaseEventHandler
+from .code_analysis import CodeAnalysisEventHandler
 from .connection import ConnectionEventHandler
 from .file import FileEventHandler
 from .git import GitEventHandler
@@ -16,6 +17,7 @@ from .registry import EventHandlerRegistry
 
 __all__ = [
     "BaseEventHandler",
+    "CodeAnalysisEventHandler",
     "ConnectionEventHandler",
     "EventHandlerRegistry",
     "FileEventHandler",
```
claude_mpm/services/socketio/handlers/code_analysis.py (new file):

```diff
@@ -0,0 +1,170 @@
+"""
+Code Analysis Event Handler for Socket.IO
+==========================================
+
+WHY: Handles code analysis requests from the dashboard, managing the analysis
+runner subprocess and streaming results back to connected clients.
+
+DESIGN DECISIONS:
+- Single analysis runner instance per server
+- Queue multiple requests for sequential processing
+- Support cancellation of running analysis
+- Stream events in real-time to all connected clients
+"""
+
+import uuid
+from typing import Any, Dict
+
+from ....core.logging_config import get_logger
+from ....dashboard.analysis_runner import CodeAnalysisRunner
+from .base import BaseEventHandler
+
+
+class CodeAnalysisEventHandler(BaseEventHandler):
+    """Handles code analysis events from dashboard clients.
+
+    WHY: Provides a clean interface between the dashboard UI and the
+    code analysis subprocess, managing requests and responses.
+    """
+
+    def __init__(self, server):
+        """Initialize the code analysis event handler.
+
+        Args:
+            server: The SocketIOServer instance
+        """
+        super().__init__(server)
+        self.logger = get_logger(__name__)
+        self.analysis_runner = None
+
+    def initialize(self):
+        """Initialize the analysis runner."""
+        if not self.analysis_runner:
+            self.analysis_runner = CodeAnalysisRunner(self.server)
+            self.analysis_runner.start()
+            self.logger.info("Code analysis runner initialized")
+
+    def cleanup(self):
+        """Cleanup the analysis runner on shutdown."""
+        if self.analysis_runner:
+            self.analysis_runner.stop()
+            self.analysis_runner = None
+            self.logger.info("Code analysis runner stopped")
+
+    def get_events(self) -> Dict[str, Any]:
+        """Get the events this handler manages.
+
+        Returns:
+            Dictionary mapping event names to handler methods
+        """
+        return {
+            "code:analyze:request": self.handle_analyze_request,
+            "code:analyze:cancel": self.handle_cancel_request,
+            "code:analyze:status": self.handle_status_request,
+        }
+
+    def register_events(self) -> None:
+        """Register Socket.IO event handlers.
+
+        WHY: Required by BaseEventHandler to register events with the Socket.IO server.
+        """
+        events = self.get_events()
+        for event_name, handler_method in events.items():
+            self.server.core.sio.on(event_name, handler_method)
+            self.logger.info(f"Registered event handler: {event_name}")
+
+    async def handle_analyze_request(self, sid: str, data: Dict[str, Any]):
+        """Handle code analysis request from client.
+
+        Args:
+            sid: Socket ID of the requesting client
+            data: Request data containing path and options
+        """
+        self.logger.info(f"Code analysis requested from {sid}: {data}")
+
+        # Initialize runner if needed
+        if not self.analysis_runner:
+            self.initialize()
+
+        # Validate request
+        path = data.get("path")
+        if not path:
+            await self.server.sio.emit(
+                "code:analysis:error",
+                {
+                    "message": "Path is required for analysis",
+                    "request_id": data.get("request_id"),
+                },
+                room=sid,
+            )
+            return
+
+        # Generate request ID if not provided
+        request_id = data.get("request_id") or str(uuid.uuid4())
+
+        # Extract options
+        languages = data.get("languages")
+        max_depth = data.get("max_depth")
+        ignore_patterns = data.get("ignore_patterns")
+
+        # Queue analysis request
+        success = self.analysis_runner.request_analysis(
+            request_id=request_id,
+            path=path,
+            languages=languages,
+            max_depth=max_depth,
+            ignore_patterns=ignore_patterns,
+        )
+
+        if success:
+            # Send acknowledgment to requesting client
+            await self.server.sio.emit(
+                "code:analysis:accepted",
+                {
+                    "request_id": request_id,
+                    "path": path,
+                    "message": "Analysis request queued",
+                },
+                room=sid,
+            )
+        else:
+            # Send error if request failed
+            await self.server.sio.emit(
+                "code:analysis:error",
+                {
+                    "request_id": request_id,
+                    "message": "Failed to queue analysis request",
+                },
+                room=sid,
+            )
+
+    async def handle_cancel_request(self, sid: str, data: Dict[str, Any]):
+        """Handle analysis cancellation request.
+
+        Args:
+            sid: Socket ID of the requesting client
+            data: Request data (may contain request_id)
+        """
+        self.logger.info(f"Analysis cancellation requested from {sid}")
+
+        # Cancel current analysis
+        self.analysis_runner.cancel_current()
+
+        # Send confirmation
+        await self.server.sio.emit(
+            "code:analysis:cancelled",
+            {"message": "Analysis cancelled", "request_id": data.get("request_id")},
+            room=sid,
+        )
+
+    async def handle_status_request(self, sid: str, data: Dict[str, Any]):
+        """Handle status request from client.
+
+        Args:
+            sid: Socket ID of the requesting client
+            data: Request data (unused)
+        """
+        status = self.analysis_runner.get_status()
+
+        # Send status to requesting client
+        await self.server.sio.emit("code:analysis:status", status, room=sid)
```
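The new handler above defines a small Socket.IO protocol: clients emit `code:analyze:request`, `code:analyze:cancel`, or `code:analyze:status`, and receive `code:analysis:accepted`, `code:analysis:error`, `code:analysis:cancelled`, or `code:analysis:status` in reply. A hedged client-side sketch using python-socketio's AsyncClient; the URL/port is an assumption, and the request keys simply mirror the handler's `data.get()` calls:

```python
# Hedged client-side sketch of the code-analysis events handled above,
# using python-socketio's AsyncClient. The URL/port is an assumption; the
# payload keys mirror the data.get() calls in CodeAnalysisEventHandler.
import asyncio
import socketio

async def main():
    sio = socketio.AsyncClient()

    @sio.on("code:analysis:accepted")
    async def on_accepted(data):
        print("accepted:", data)

    @sio.on("code:analysis:error")
    async def on_error(data):
        print("error:", data)

    await sio.connect("http://localhost:8765")  # assumed dashboard port
    await sio.emit("code:analyze:request", {
        "request_id": "demo-1",
        "path": "/path/to/project",
        "languages": ["python"],
        "max_depth": None,
        "ignore_patterns": ["node_modules/**"],
    })
    await asyncio.sleep(5)  # wait briefly for replies
    await sio.disconnect()

asyncio.run(main())
```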
claude_mpm/services/socketio/handlers/registry.py:

```diff
@@ -15,6 +15,7 @@ if TYPE_CHECKING:
 
     from ..server import SocketIOServer
 
+from .code_analysis import CodeAnalysisEventHandler
 from .connection import ConnectionEventHandler
 from .file import FileEventHandler
 from .git import GitEventHandler
@@ -37,6 +38,7 @@ class EventHandlerRegistry:
         HookEventHandler,  # Hook events for session tracking
         GitEventHandler,  # Git operations
         FileEventHandler,  # File operations
+        CodeAnalysisEventHandler,  # Code analysis for dashboard
         ProjectEventHandler,  # Project management (future)
         MemoryEventHandler,  # Memory management (future)
     ]
```