claude-mpm 4.0.34__py3-none-any.whl → 4.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- claude_mpm/VERSION +1 -1
- claude_mpm/agents/INSTRUCTIONS.md +70 -2
- claude_mpm/agents/OUTPUT_STYLE.md +0 -11
- claude_mpm/agents/WORKFLOW.md +14 -2
- claude_mpm/agents/templates/web_qa.json +85 -58
- claude_mpm/agents/templates/web_ui.json +3 -3
- claude_mpm/cli/__init__.py +48 -7
- claude_mpm/cli/commands/agents.py +82 -0
- claude_mpm/cli/commands/cleanup_orphaned_agents.py +150 -0
- claude_mpm/cli/commands/mcp_pipx_config.py +199 -0
- claude_mpm/cli/parsers/agents_parser.py +27 -0
- claude_mpm/cli/parsers/base_parser.py +6 -0
- claude_mpm/cli/startup_logging.py +75 -0
- claude_mpm/dashboard/static/js/components/build-tracker.js +35 -1
- claude_mpm/dashboard/static/js/socket-client.js +7 -5
- claude_mpm/hooks/claude_hooks/connection_pool.py +13 -2
- claude_mpm/hooks/claude_hooks/hook_handler.py +67 -167
- claude_mpm/services/agents/deployment/agent_discovery_service.py +4 -1
- claude_mpm/services/agents/deployment/agent_template_builder.py +2 -1
- claude_mpm/services/agents/deployment/agent_version_manager.py +4 -1
- claude_mpm/services/agents/deployment/multi_source_deployment_service.py +207 -10
- claude_mpm/services/event_bus/config.py +165 -0
- claude_mpm/services/event_bus/event_bus.py +35 -20
- claude_mpm/services/event_bus/relay.py +8 -12
- claude_mpm/services/mcp_gateway/auto_configure.py +372 -0
- claude_mpm/services/socketio/handlers/connection.py +3 -3
- claude_mpm/services/socketio/server/core.py +25 -2
- claude_mpm/services/socketio/server/eventbus_integration.py +189 -0
- claude_mpm/services/socketio/server/main.py +25 -0
- {claude_mpm-4.0.34.dist-info → claude_mpm-4.1.1.dist-info}/METADATA +25 -7
- {claude_mpm-4.0.34.dist-info → claude_mpm-4.1.1.dist-info}/RECORD +35 -30
- {claude_mpm-4.0.34.dist-info → claude_mpm-4.1.1.dist-info}/WHEEL +0 -0
- {claude_mpm-4.0.34.dist-info → claude_mpm-4.1.1.dist-info}/entry_points.txt +0 -0
- {claude_mpm-4.0.34.dist-info → claude_mpm-4.1.1.dist-info}/licenses/LICENSE +0 -0
- {claude_mpm-4.0.34.dist-info → claude_mpm-4.1.1.dist-info}/top_level.txt +0 -0
claude_mpm/dashboard/static/js/components/build-tracker.js

@@ -35,12 +35,46 @@ export class BuildTracker {
     /**
      * Initialize the build tracker component
      */
-    init() {
+    async init() {
        console.log('Initializing BuildTracker component');
+
+        // Try to load version.json for dashboard version
+        await this.loadDashboardVersion();
+
        this.createElements();
        this.setupEventListeners();
    }

+    /**
+     * Load dashboard version from version.json if available
+     *
+     * WHY: Attempts to load the actual dashboard version from the
+     * version.json file created by the version management script.
+     * Falls back to defaults if file is not available.
+     */
+    async loadDashboardVersion() {
+        try {
+            // Try to fetch version.json from the dashboard root
+            const response = await fetch('/version.json');
+            if (response.ok) {
+                const versionData = await response.json();
+
+                // Update monitor build info with loaded data
+                this.buildInfo.monitor = {
+                    version: versionData.version || "1.0.0",
+                    build: versionData.build || 1,
+                    formatted_build: versionData.formatted_build || "0001",
+                    full_version: versionData.full_version || "v1.0.0-0001"
+                };
+
+                console.log('Loaded dashboard version:', this.buildInfo.monitor);
+            }
+        } catch (error) {
+            // Silently fall back to defaults if version.json not available
+            console.debug('Dashboard version.json not available, using defaults');
+        }
+    }
+
    /**
     * Create the DOM elements for version display
     *
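The defaults in `loadDashboardVersion()` above imply the shape of `version.json`. The version management script itself is not part of this diff, so the following is only a rough sketch of a writer that would produce a compatible file; the filename and key names are taken from the fetch call and fallback values above, everything else is assumed.

```python
import json

# Assumed writer for the version.json that loadDashboardVersion() fetches.
# Key names mirror the fallback defaults in the dashboard code above.
version_info = {
    "version": "1.0.0",
    "build": 1,
    "formatted_build": "0001",
    "full_version": "v1.0.0-0001",
}

with open("version.json", "w") as fh:
    json.dump(version_info, fh, indent=2)
```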
claude_mpm/dashboard/static/js/socket-client.js

@@ -47,7 +47,7 @@ class SocketClient {
        // Health monitoring
        this.lastPingTime = null;
        this.lastPongTime = null;
-        this.pingTimeout =
+        this.pingTimeout = 90000; // 90 seconds for health check (more lenient than Socket.IO timeout)
        this.healthCheckInterval = null;

        // Start periodic status check as fallback mechanism

@@ -97,11 +97,13 @@ class SocketClient {
            autoConnect: true,
            reconnection: true,
            reconnectionDelay: 1000,
-            reconnectionDelayMax:
-
-            timeout:
+            reconnectionDelayMax: 5000,
+            reconnectionAttempts: Infinity, // Keep trying indefinitely
+            timeout: 20000, // Increase connection timeout
            forceNew: true,
-            transports: ['websocket', 'polling']
+            transports: ['websocket', 'polling'],
+            pingInterval: 25000, // Match server setting
+            pingTimeout: 60000 // Match server setting
        });

        this.setupSocketHandlers();
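The `pingInterval`/`pingTimeout` comments claim parity with the server. The server code is not shown in this diff, so the following is only a hedged sketch of what matching settings would look like on a python-socketio server (`ping_interval` and `ping_timeout` are real python-socketio parameters, expressed in seconds); the async mode and the values' source are assumptions inferred from the client comments above.

```python
import socketio

# Assumption: the monitor server uses python-socketio; 25s/60s mirror the
# client-side pingInterval (25000 ms) and pingTimeout (60000 ms) comments.
sio = socketio.AsyncServer(
    async_mode="aiohttp",  # assumed async mode, not taken from this diff
    ping_interval=25,      # seconds
    ping_timeout=60,       # seconds
)
```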
claude_mpm/hooks/claude_hooks/connection_pool.py

@@ -1,7 +1,18 @@
 #!/usr/bin/env python3
-"""Socket.IO connection pool for Claude Code hook handler.
+"""[DEPRECATED] Socket.IO connection pool for Claude Code hook handler.

-
+DEPRECATION NOTICE: As of v4.0.35, this module is deprecated.
+All event emission now goes through the EventBus, which handles
+Socket.IO connections via its relay component. This provides:
+- Single event path (no duplicates)
+- Better separation of concerns
+- Centralized connection management
+- More resilient architecture
+
+This module is kept for backward compatibility but will be removed in v5.0.0.
+Please use EventBus.publish() instead of direct Socket.IO emission.
+
+Original purpose: Provided connection pooling for Socket.IO clients to reduce
 connection overhead and implement circuit breaker patterns.
 """
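The deprecation notice points callers at `EventBus.publish()`. A minimal sketch of that replacement path, using only the calls that appear elsewhere in this diff (`EventBus.get_instance()` and `publish(topic, data)`); the example topic and payload values are illustrative.

```python
# Sketch of the EventBus publishing path recommended by the deprecation notice.
try:
    from claude_mpm.services.event_bus import EventBus

    event_bus = EventBus.get_instance()
    # Topic format "hook.<event>" matches the hook handler changes below.
    event_bus.publish("hook.pre_tool", {"tool_name": "Bash", "sessionId": "abc123"})
except ImportError:
    # EventBus unavailable; the deprecated connection pool remains a fallback
    # until its removal in v5.0.0.
    pass
```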
claude_mpm/hooks/claude_hooks/hook_handler.py

@@ -60,6 +60,14 @@ except ImportError:
        }
    })

+# Import EventBus for decoupled event distribution
+try:
+    from claude_mpm.services.event_bus import EventBus
+    EVENTBUS_AVAILABLE = True
+except ImportError:
+    EVENTBUS_AVAILABLE = False
+    EventBus = None
+
 # Import constants for configuration
 try:
     from claude_mpm.core.constants import NetworkConfig, RetryConfig, TimeoutConfig
@@ -111,13 +119,23 @@ class ClaudeHookHandler:
    """

    def __init__(self):
-        # Socket.IO client (persistent if possible)
-        self.connection_pool = SocketIOConnectionPool(max_connections=3)
        # Track events for periodic cleanup
        self.events_processed = 0
        self.last_cleanup = time.time()
        # Event normalizer for consistent event schema
        self.event_normalizer = EventNormalizer()
+
+        # Initialize EventBus for decoupled event distribution
+        self.event_bus = None
+        if EVENTBUS_AVAILABLE:
+            try:
+                self.event_bus = EventBus.get_instance()
+                if DEBUG:
+                    print("✅ EventBus initialized for hook handler", file=sys.stderr)
+            except Exception as e:
+                if DEBUG:
+                    print(f"⚠️ Failed to initialize EventBus: {e}", file=sys.stderr)
+                self.event_bus = None

        # Maximum sizes for tracking
        self.MAX_DELEGATION_TRACKING = 200
@@ -511,176 +529,61 @@ class ClaudeHookHandler:
        """
        print(json.dumps({"action": "continue"}))

-    def _discover_socketio_port(self) -> int:
-        """Discover the port of the running SocketIO server."""
-        try:
-            # Try to import port manager
-            from claude_mpm.services.port_manager import PortManager
-
-            port_manager = PortManager()
-            instances = port_manager.list_active_instances()
-
-            if instances:
-                # Prefer port 8765 if available
-                for instance in instances:
-                    if instance.get("port") == 8765:
-                        return 8765
-                # Otherwise use the first active instance
-                return instances[0].get("port", 8765)
-            else:
-                # No active instances, use default
-                return 8765
-        except Exception:
-            # Fallback to environment variable or default
-            return int(os.environ.get("CLAUDE_MPM_SOCKETIO_PORT", "8765"))

    def _emit_socketio_event(self, namespace: str, event: str, data: dict):
-        """Emit
-
-        WHY
-        -
-        -
-        - Better
-        -
-        -
-        - All events normalized to standard schema before emission
+        """Emit event through EventBus for Socket.IO relay.
+
+        WHY EventBus-only approach:
+        - Single event path prevents duplicates
+        - EventBus relay handles Socket.IO connection management
+        - Better separation of concerns
+        - More resilient with centralized failure handling
+        - Cleaner architecture and easier testing
        """
-        #
-
-
-
-
-
+        # Create event data for normalization
+        raw_event = {
+            "type": "hook",
+            "subtype": event,  # e.g., "user_prompt", "pre_tool", "subagent_stop"
+            "timestamp": datetime.now().isoformat(),
+            "data": data,
+            "source": "claude_hooks",  # Identify the source
+            "session_id": data.get("sessionId"),  # Include session if available
+        }

-        #
-
-
+        # Normalize the event using EventNormalizer for consistent schema
+        normalized_event = self.event_normalizer.normalize(raw_event, source="hook")
+        claude_event_data = normalized_event.to_dict()
+
+        # Log important events for debugging
+        if DEBUG and event in ["subagent_stop", "pre_tool"]:
+            if event == "subagent_stop":
+                agent_type = data.get("agent_type", "unknown")
                print(
-                    f"Hook handler:
+                    f"Hook handler: Publishing SubagentStop for agent '{agent_type}'",
                    file=sys.stderr,
                )
-
-
-
-
-
-
+            elif event == "pre_tool" and data.get("tool_name") == "Task":
+                delegation = data.get("delegation_details", {})
+                agent_type = delegation.get("agent_type", "unknown")
+                print(
+                    f"Hook handler: Publishing Task delegation to agent '{agent_type}'",
+                    file=sys.stderr,
                )
-
+
+        # Publish to EventBus for distribution through relay
+        if self.event_bus and EVENTBUS_AVAILABLE:
+            try:
+                # Publish to EventBus with topic format: hook.{event}
+                topic = f"hook.{event}"
+                self.event_bus.publish(topic, claude_event_data)
                if DEBUG:
-                    print(
-
-                        file=sys.stderr,
-                    )
-                return
-
-        try:
-            # Verify connection is alive before emitting
-            if not client.connected:
+                    print(f"✅ Published to EventBus: {topic}", file=sys.stderr)
+            except Exception as e:
                if DEBUG:
-                    print(
-
-                        file=sys.stderr,
-                    )
-                    # Try to reconnect
-                    try:
-                        client.connect(
-                            f"http://localhost:{port}",
-                            wait=True,
-                            wait_timeout=1.0,
-                            transports=['websocket', 'polling'],
-                        )
-                    except:
-                        # If reconnection fails, get a fresh client
-                        client = self.connection_pool._create_connection(port)
-                        if not client:
-                            if DEBUG:
-                                print(
-                                    f"Hook handler: Reconnection failed for event: hook.{event}",
-                                    file=sys.stderr,
-                                )
-                            return
-
-            # Create event data for normalization
-            raw_event = {
-                "type": "hook",
-                "subtype": event,  # e.g., "user_prompt", "pre_tool", "subagent_stop"
-                "timestamp": datetime.now().isoformat(),
-                "data": data,
-                "source": "claude_hooks",  # Identify the source
-                "session_id": data.get("sessionId"),  # Include session if available
-            }
-
-            # Normalize the event using EventNormalizer for consistent schema
-            # Pass source explicitly to ensure it's set correctly
-            normalized_event = self.event_normalizer.normalize(raw_event, source="hook")
-            claude_event_data = normalized_event.to_dict()
-
-            # Log important events for debugging
-            if DEBUG and event in ["subagent_stop", "pre_tool"]:
-                if event == "subagent_stop":
-                    agent_type = data.get("agent_type", "unknown")
-                    print(
-                        f"Hook handler: Emitting SubagentStop for agent '{agent_type}'",
-                        file=sys.stderr,
-                    )
-                elif event == "pre_tool" and data.get("tool_name") == "Task":
-                    delegation = data.get("delegation_details", {})
-                    agent_type = delegation.get("agent_type", "unknown")
-                    print(
-                        f"Hook handler: Emitting Task delegation to agent '{agent_type}'",
-                        file=sys.stderr,
-                    )
-
-            # Emit synchronously
-            client.emit("claude_event", claude_event_data)
-
-            # For critical events, wait a moment to ensure delivery
-            if event in ["subagent_stop", "pre_tool"]:
-                time.sleep(0.01)  # Small delay to ensure event is sent
-
-            # Verify emission for critical events
-            if event in ["subagent_stop", "pre_tool"] and DEBUG:
-                if client.connected:
-                    print(
-                        f"✅ Successfully emitted Socket.IO event: hook.{event} (connection still active)",
-                        file=sys.stderr,
-                    )
-                else:
-                    print(
-                        f"⚠️ Event emitted but connection closed after: hook.{event}",
-                        file=sys.stderr,
-                    )
-
-        except Exception as e:
+                    print(f"⚠️ Failed to publish to EventBus: {e}", file=sys.stderr)
+        else:
            if DEBUG:
-                print(f"
-
-            # Try to reconnect immediately for critical events
-            if event in ["subagent_stop", "pre_tool"]:
-                if DEBUG:
-                    print(
-                        f"Hook handler: Attempting immediate reconnection for critical event: hook.{event}",
-                        file=sys.stderr,
-                    )
-                # Force get a new client and emit again
-                self.connection_pool._cleanup_dead_connections()
-                retry_client = self.connection_pool._create_connection(port)
-                if retry_client:
-                    try:
-                        retry_client.emit("claude_event", claude_event_data)
-                        # Add to pool for future use
-                        self.connection_pool.connections.append(
-                            {"port": port, "client": retry_client, "created": time.time()}
-                        )
-                        if DEBUG:
-                            print(
-                                f"✅ Successfully re-emitted event after reconnection: hook.{event}",
-                                file=sys.stderr,
-                            )
-                    except Exception as retry_e:
-                        if DEBUG:
-                            print(f"❌ Re-emission failed: {retry_e}", file=sys.stderr)
+                print(f"⚠️ EventBus not available for event: hook.{event}", file=sys.stderr)

    def handle_subagent_stop(self, event: dict):
        """Handle subagent stop events with improved agent type detection.
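A small worked illustration of the topic convention used above: each hook event is published on the EventBus as `hook.<event>`, carrying the payload normalized by `EventNormalizer`. The concrete values below are made up for illustration only.

```python
from datetime import datetime

# Illustrative only: the shape of a raw hook event before normalization and
# the "hook.<event>" topic it is published under.
event = "pre_tool"
raw_event = {
    "type": "hook",
    "subtype": event,
    "timestamp": datetime.now().isoformat(),
    "data": {"tool_name": "Task", "sessionId": "abc123"},
    "source": "claude_hooks",
    "session_id": "abc123",
}
topic = f"hook.{event}"  # -> "hook.pre_tool"
```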
@@ -1012,12 +915,9 @@ class ClaudeHookHandler:
        self._emit_socketio_event("/hook", "subagent_stop", subagent_stop_data)

    def __del__(self):
-        """Cleanup
-
-
-            self.connection_pool.close_all()
-        except:
-            pass
+        """Cleanup on handler destruction."""
+        # Connection pool no longer used - EventBus handles cleanup
+        pass


 def main():
claude_mpm/services/agents/deployment/agent_discovery_service.py

@@ -198,7 +198,10 @@ class AgentDiscoveryService:
            "name": metadata.get("name", template_file.stem),
            "description": metadata.get("description", "No description available"),
            "version": template_data.get(
-                "agent_version",
+                "agent_version",
+                template_data.get("version",
+                    metadata.get("version", "1.0.0")
+                )
            ),
            "tools": capabilities.get("tools", []),
            "specializations": metadata.get(
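The discovery service, template builder, and version manager hunks in this release all apply the same version-resolution order. A hypothetical helper, not part of the package, summarizing that fallback chain:

```python
# Hypothetical helper capturing the shared fallback order used by this change:
# agent_version -> version -> metadata.version -> default.
def resolve_agent_version(template_data: dict, default: str = "1.0.0") -> str:
    metadata = template_data.get("metadata", {})
    return (
        template_data.get("agent_version")
        or template_data.get("version")
        or metadata.get("version", default)
    )
```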
claude_mpm/services/agents/deployment/agent_template_builder.py

@@ -136,7 +136,8 @@ class AgentTemplateBuilder:
        )

        # Extract custom metadata fields
-
+        metadata = template_data.get("metadata", {})
+        agent_version = template_data.get("agent_version") or template_data.get("version") or metadata.get("version", "1.0.0")
        agent_type = template_data.get("agent_type", "general")
        # Use the capabilities_model we already extracted earlier
        model_type = capabilities_model or "sonnet"
claude_mpm/services/agents/deployment/agent_version_manager.py

@@ -254,8 +254,11 @@ class AgentVersionManager:
        template_data = json.loads(template_file.read_text())

        # Extract agent version from template
+        metadata = template_data.get("metadata", {})
        current_agent_version = self.parse_version(
-            template_data.get("agent_version") or
+            template_data.get("agent_version") or
+            template_data.get("version") or
+            metadata.get("version", 0)
        )

        # If old format detected, always trigger update for migration
claude_mpm/services/agents/deployment/multi_source_deployment_service.py

@@ -507,6 +507,7 @@ class MultiSourceAgentDeploymentService:
            "needs_update": [],
            "up_to_date": [],
            "new_agents": [],
+            "orphaned_agents": [],  # Agents without templates
            "version_upgrades": [],
            "version_downgrades": [],
            "source_changes": []
@@ -527,9 +528,11 @@ class MultiSourceAgentDeploymentService:
                # Read template version
                try:
                    template_data = json.loads(template_path.read_text())
+                    metadata = template_data.get("metadata", {})
                    template_version = self.version_manager.parse_version(
                        template_data.get("agent_version") or
-                        template_data.get("version"
+                        template_data.get("version") or
+                        metadata.get("version", "0.0.0")
                    )
                except Exception as e:
                    self.logger.warning(f"Error reading template for '{agent_name}': {e}")
@@ -597,13 +600,20 @@ class MultiSourceAgentDeploymentService:
                    "source": agent_sources[agent_name]
                })

+        # Check for orphaned agents (deployed but no template)
+        orphaned = self._detect_orphaned_agents_simple(deployed_agents_dir, agents_to_deploy)
+        comparison_results["orphaned_agents"] = orphaned
+
        # Log summary
-
-            f"
-            f"{len(comparison_results['
-            f"{len(comparison_results['up_to_date'])} up to date, "
+        summary_parts = [
+            f"{len(comparison_results['needs_update'])} need updates",
+            f"{len(comparison_results['up_to_date'])} up to date",
            f"{len(comparison_results['new_agents'])} new agents"
-
+        ]
+        if comparison_results["orphaned_agents"]:
+            summary_parts.append(f"{len(comparison_results['orphaned_agents'])} orphaned")
+
+        self.logger.info(f"Version comparison complete: {', '.join(summary_parts)}")

        if comparison_results["version_upgrades"]:
            for upgrade in comparison_results["version_upgrades"]:
@@ -622,10 +632,24 @@ class MultiSourceAgentDeploymentService:

        if comparison_results["version_downgrades"]:
            for downgrade in comparison_results["version_downgrades"]:
-
-
+                # Changed from warning to debug - deployed versions higher than templates
+                # are not errors, just informational
+                self.logger.debug(
+                    f" Note: {downgrade['name']} deployed version "
                    f"{downgrade['deployed_version']} is higher than template "
-                    f"{downgrade['template_version']}"
+                    f"{downgrade['template_version']} (keeping deployed version)"
+                )
+
+        # Log orphaned agents if found
+        if comparison_results["orphaned_agents"]:
+            self.logger.info(
+                f"Found {len(comparison_results['orphaned_agents'])} orphaned agent(s) "
+                f"(deployed without templates):"
+            )
+            for orphan in comparison_results["orphaned_agents"]:
+                self.logger.info(
+                    f" - {orphan['name']} v{orphan['version']} "
+                    f"(consider removing or creating a template)"
                )

        return comparison_results
@@ -692,4 +716,177 @@ class MultiSourceAgentDeploymentService:
            return "system"

        # Complex names are more likely to be user/project agents
-        return "user"
+        return "user"
+
+    def detect_orphaned_agents(
+        self,
+        deployed_agents_dir: Path,
+        available_agents: Dict[str, Any]
+    ) -> List[Dict[str, Any]]:
+        """Detect deployed agents that don't have corresponding templates.
+
+        WHY: Orphaned agents can cause confusion with version warnings.
+        This method identifies them so they can be handled appropriately.
+
+        Args:
+            deployed_agents_dir: Directory containing deployed agents
+            available_agents: Dictionary of available agents from all sources
+
+        Returns:
+            List of orphaned agent information
+        """
+        orphaned = []
+
+        if not deployed_agents_dir.exists():
+            return orphaned
+
+        # Build a mapping of file stems to agent names for comparison
+        # Since available_agents uses display names like "Code Analysis Agent"
+        # but deployed files use stems like "code_analyzer"
+        available_stems = set()
+        stem_to_name = {}
+
+        for agent_name, agent_sources in available_agents.items():
+            # Get the file path from the first source to extract the stem
+            if agent_sources and isinstance(agent_sources, list) and len(agent_sources) > 0:
+                first_source = agent_sources[0]
+                if 'file_path' in first_source:
+                    file_path = Path(first_source['file_path'])
+                    stem = file_path.stem
+                    available_stems.add(stem)
+                    stem_to_name[stem] = agent_name
+
+        for deployed_file in deployed_agents_dir.glob("*.md"):
+            agent_stem = deployed_file.stem
+
+            # Skip if this agent has a template (check by stem, not display name)
+            if agent_stem in available_stems:
+                continue
+
+            # This is an orphaned agent
+            try:
+                deployed_content = deployed_file.read_text()
+                deployed_version, _, _ = self.version_manager.extract_version_from_frontmatter(
+                    deployed_content
+                )
+                version_str = self.version_manager.format_version_display(deployed_version)
+            except Exception:
+                version_str = "unknown"
+
+            orphaned.append({
+                "name": agent_stem,
+                "file": str(deployed_file),
+                "version": version_str
+            })
+
+        return orphaned
+
+    def _detect_orphaned_agents_simple(
+        self,
+        deployed_agents_dir: Path,
+        agents_to_deploy: Dict[str, Path]
+    ) -> List[Dict[str, Any]]:
+        """Simple orphan detection that works with agents_to_deploy structure.
+
+        Args:
+            deployed_agents_dir: Directory containing deployed agents
+            agents_to_deploy: Dictionary mapping file stems to template paths
+
+        Returns:
+            List of orphaned agent information
+        """
+        orphaned = []
+
+        if not deployed_agents_dir.exists():
+            return orphaned
+
+        # agents_to_deploy already contains file stems as keys
+        available_stems = set(agents_to_deploy.keys())
+
+        for deployed_file in deployed_agents_dir.glob("*.md"):
+            agent_stem = deployed_file.stem
+
+            # Skip if this agent has a template (check by stem)
+            if agent_stem in available_stems:
+                continue
+
+            # This is an orphaned agent
+            try:
+                deployed_content = deployed_file.read_text()
+                deployed_version, _, _ = self.version_manager.extract_version_from_frontmatter(
+                    deployed_content
+                )
+                version_str = self.version_manager.format_version_display(deployed_version)
+            except Exception:
+                version_str = "unknown"
+
+            orphaned.append({
+                "name": agent_stem,
+                "file": str(deployed_file),
+                "version": version_str
+            })
+
+        return orphaned
+
+    def cleanup_orphaned_agents(
+        self,
+        deployed_agents_dir: Path,
+        dry_run: bool = True
+    ) -> Dict[str, Any]:
+        """Clean up orphaned agents that don't have templates.
+
+        WHY: Orphaned agents can accumulate over time and cause confusion.
+        This method provides a way to clean them up systematically.
+
+        Args:
+            deployed_agents_dir: Directory containing deployed agents
+            dry_run: If True, only report what would be removed
+
+        Returns:
+            Dictionary with cleanup results
+        """
+        results = {
+            "orphaned": [],
+            "removed": [],
+            "errors": []
+        }
+
+        # First, discover all available agents from all sources
+        all_agents = self.discover_agents_from_all_sources()
+        available_names = set(all_agents.keys())
+
+        # Detect orphaned agents
+        orphaned = self.detect_orphaned_agents(deployed_agents_dir, all_agents)
+        results["orphaned"] = orphaned
+
+        if not orphaned:
+            self.logger.info("No orphaned agents found")
+            return results
+
+        self.logger.info(f"Found {len(orphaned)} orphaned agent(s)")
+
+        for orphan in orphaned:
+            agent_file = Path(orphan["file"])
+
+            if dry_run:
+                self.logger.info(
+                    f" Would remove: {orphan['name']} v{orphan['version']}"
+                )
+            else:
+                try:
+                    agent_file.unlink()
+                    results["removed"].append(orphan["name"])
+                    self.logger.info(
+                        f" Removed: {orphan['name']} v{orphan['version']}"
+                    )
+                except Exception as e:
+                    error_msg = f"Failed to remove {orphan['name']}: {e}"
+                    results["errors"].append(error_msg)
+                    self.logger.error(f" {error_msg}")
+
+        if dry_run and orphaned:
+            self.logger.info(
+                "Run with dry_run=False to actually remove orphaned agents"
+            )
+
+        return results