claude-mpm 4.1.7__py3-none-any.whl → 4.1.8__py3-none-any.whl
This diff shows the differences between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- claude_mpm/VERSION +1 -1
- claude_mpm/agents/OUTPUT_STYLE.md +73 -0
- claude_mpm/agents/templates/agent-manager.json +1 -1
- claude_mpm/agents/templates/agent-manager.md +248 -10
- claude_mpm/cli/commands/configure.py +151 -2
- claude_mpm/cli/commands/configure_tui.py +5 -1
- claude_mpm/cli/parsers/configure_parser.py +23 -0
- claude_mpm/config/socketio_config.py +21 -21
- claude_mpm/hooks/claude_hooks/installer.py +455 -0
- claude_mpm/services/agents/deployment/agent_config_provider.py +127 -27
- claude_mpm/services/diagnostics/checks/instructions_check.py +1 -3
- claude_mpm/services/event_bus/direct_relay.py +98 -20
- claude_mpm/services/socketio/server/connection_manager.py +91 -61
- claude_mpm/services/socketio/server/core.py +25 -6
- claude_mpm/services/socketio/server/main.py +36 -3
- {claude_mpm-4.1.7.dist-info → claude_mpm-4.1.8.dist-info}/METADATA +1 -1
- {claude_mpm-4.1.7.dist-info → claude_mpm-4.1.8.dist-info}/RECORD +21 -19
- {claude_mpm-4.1.7.dist-info → claude_mpm-4.1.8.dist-info}/WHEEL +0 -0
- {claude_mpm-4.1.7.dist-info → claude_mpm-4.1.8.dist-info}/entry_points.txt +0 -0
- {claude_mpm-4.1.7.dist-info → claude_mpm-4.1.8.dist-info}/licenses/LICENSE +0 -0
- {claude_mpm-4.1.7.dist-info → claude_mpm-4.1.8.dist-info}/top_level.txt +0 -0
claude_mpm/services/agents/deployment/agent_config_provider.py

@@ -90,8 +90,19 @@ class AgentConfigProvider:
             "engineer": {
                 **base_config,
                 "description": "Code implementation, development, and inline documentation",
-                "tags":
-                "tools":
+                "tags": ["engineer", "development", "coding", "implementation"],
+                "tools": [
+                    "Read",
+                    "Write",
+                    "Edit",
+                    "MultiEdit",
+                    "Bash",
+                    "Grep",
+                    "Glob",
+                    "LS",
+                    "WebSearch",
+                    "TodoWrite",
+                ],
                 "temperature": 0.2,
                 "when_to_use": [
                     "Code implementation needed",
@@ -109,14 +120,28 @@ class AgentConfigProvider:
                     "Refactor codebases",
                 ],
                 "primary_role": "Code implementation and development",
-                "specializations":
+                "specializations": [
+                    "coding",
+                    "debugging",
+                    "refactoring",
+                    "optimization",
+                ],
                 "authority": "ALL code implementation decisions",
             },
             "qa": {
                 **base_config,
                 "description": "Quality assurance, testing, and validation",
-                "tags":
-                "tools":
+                "tags": ["qa", "testing", "quality", "validation"],
+                "tools": [
+                    "Read",
+                    "Write",
+                    "Edit",
+                    "Bash",
+                    "Grep",
+                    "Glob",
+                    "LS",
+                    "TodoWrite",
+                ],
                 "temperature": 0.1,
                 "when_to_use": [
                     "Testing needed",
@@ -134,14 +159,29 @@ class AgentConfigProvider:
                     "Validate quality",
                 ],
                 "primary_role": "Testing and quality assurance",
-                "specializations":
+                "specializations": [
+                    "testing",
+                    "validation",
+                    "quality-assurance",
+                    "coverage",
+                ],
                 "authority": "ALL testing and quality decisions",
             },
             "documentation": {
                 **base_config,
                 "description": "Documentation creation, maintenance, and changelog generation",
-                "tags":
-                "tools":
+                "tags": ["documentation", "writing", "changelog", "docs"],
+                "tools": [
+                    "Read",
+                    "Write",
+                    "Edit",
+                    "MultiEdit",
+                    "Grep",
+                    "Glob",
+                    "LS",
+                    "WebSearch",
+                    "TodoWrite",
+                ],
                 "temperature": 0.3,
                 "when_to_use": [
                     "Documentation updates needed",
@@ -159,14 +199,27 @@ class AgentConfigProvider:
                     "Maintain docs",
                 ],
                 "primary_role": "Documentation and technical writing",
-                "specializations":
+                "specializations": [
+                    "technical-writing",
+                    "changelog",
+                    "api-docs",
+                    "guides",
+                ],
                 "authority": "ALL documentation decisions",
             },
             "research": {
                 **base_config,
                 "description": "Technical research, analysis, and investigation",
-                "tags":
-                "tools":
+                "tags": ["research", "analysis", "investigation", "evaluation"],
+                "tools": [
+                    "Read",
+                    "Grep",
+                    "Glob",
+                    "LS",
+                    "WebSearch",
+                    "WebFetch",
+                    "TodoWrite",
+                ],
                 "temperature": 0.4,
                 "when_to_use": [
                     "Technical research needed",
@@ -184,14 +237,27 @@ class AgentConfigProvider:
                     "Evidence-based recommendations",
                 ],
                 "primary_role": "Research and technical analysis",
-                "specializations":
+                "specializations": [
+                    "investigation",
+                    "analysis",
+                    "evaluation",
+                    "recommendations",
+                ],
                 "authority": "ALL research decisions",
             },
             "security": {
                 **base_config,
                 "description": "Security analysis, vulnerability assessment, and protection",
-                "tags":
-                "tools":
+                "tags": ["security", "vulnerability", "protection", "audit"],
+                "tools": [
+                    "Read",
+                    "Grep",
+                    "Glob",
+                    "LS",
+                    "Bash",
+                    "WebSearch",
+                    "TodoWrite",
+                ],
                 "temperature": 0.1,
                 "when_to_use": [
                     "Security review needed",
@@ -209,14 +275,28 @@ class AgentConfigProvider:
                     "Threat modeling",
                 ],
                 "primary_role": "Security analysis and protection",
-                "specializations":
+                "specializations": [
+                    "vulnerability-assessment",
+                    "security-audit",
+                    "threat-modeling",
+                    "protection",
+                ],
                 "authority": "ALL security decisions",
             },
             "ops": {
                 **base_config,
                 "description": "Deployment, operations, and infrastructure management",
-                "tags":
-                "tools":
+                "tags": ["ops", "deployment", "infrastructure", "devops"],
+                "tools": [
+                    "Read",
+                    "Write",
+                    "Edit",
+                    "Bash",
+                    "Grep",
+                    "Glob",
+                    "LS",
+                    "TodoWrite",
+                ],
                 "temperature": 0.2,
                 "when_to_use": [
                     "Deployment configuration",
@@ -234,14 +314,29 @@ class AgentConfigProvider:
                     "Automate operations",
                 ],
                 "primary_role": "Operations and deployment management",
-                "specializations":
+                "specializations": [
+                    "deployment",
+                    "infrastructure",
+                    "automation",
+                    "monitoring",
+                ],
                 "authority": "ALL operations decisions",
             },
             "data_engineer": {
                 **base_config,
                 "description": "Data pipeline management and AI API integrations",
-                "tags":
-                "tools":
+                "tags": ["data", "pipeline", "etl", "ai-integration"],
+                "tools": [
+                    "Read",
+                    "Write",
+                    "Edit",
+                    "Bash",
+                    "Grep",
+                    "Glob",
+                    "LS",
+                    "WebSearch",
+                    "TodoWrite",
+                ],
                 "temperature": 0.2,
                 "when_to_use": [
                     "Data pipeline setup",
@@ -259,14 +354,19 @@ class AgentConfigProvider:
                     "Integrate AI services",
                 ],
                 "primary_role": "Data engineering and AI integration",
-                "specializations":
+                "specializations": [
+                    "data-pipelines",
+                    "etl",
+                    "database",
+                    "ai-integration",
+                ],
                 "authority": "ALL data engineering decisions",
             },
             "version_control": {
                 **base_config,
                 "description": "Git operations, version management, and release coordination",
-                "tags":
-                "tools":
+                "tags": ["git", "version-control", "release", "branching"],
+                "tools": ["Read", "Bash", "Grep", "Glob", "LS", "TodoWrite"],
                 "temperature": 0.1,
                 "network_access": False,  # Git operations are local
                 "when_to_use": [
@@ -285,7 +385,7 @@ class AgentConfigProvider:
                     "Release coordination",
                 ],
                 "primary_role": "Version control and release management",
-                "specializations":
+                "specializations": ["git", "versioning", "branching", "releases"],
                 "authority": "ALL version control decisions",
             },
         }
@@ -298,13 +398,13 @@ class AgentConfigProvider:
        return {
            **base_config,
            "description": f"{agent_name.title()} agent for specialized tasks",
-            "tags":
-            "tools":
+            "tags": [agent_name, "specialized", "mpm"],
+            "tools": ["Read", "Write", "Edit", "Grep", "Glob", "LS", "TodoWrite"],
            "temperature": 0.3,
            "when_to_use": [f"When {agent_name} expertise is needed"],
            "specialized_knowledge": [f"{agent_name.title()} domain knowledge"],
            "unique_capabilities": [f"{agent_name.title()} specialized operations"],
            "primary_role": f"{agent_name.title()} operations",
-            "specializations":
+            "specializations": [agent_name],
            "authority": f"ALL {agent_name} decisions",
        }
claude_mpm/services/diagnostics/checks/instructions_check.py

@@ -374,9 +374,7 @@ class InstructionsCheck(BaseDiagnosticCheck):
            continue

        # Check for Claude Code specific content in INSTRUCTIONS.md
-        instructions_files = [
-            path for path in files if path.name == "INSTRUCTIONS.md"
-        ]
+        instructions_files = [path for path in files if path.name == "INSTRUCTIONS.md"]
        for path in instructions_files:
            try:
                content = path.read_text(encoding="utf-8")
claude_mpm/services/event_bus/direct_relay.py

@@ -2,6 +2,16 @@

 This module provides a relay that connects EventBus directly to the
 Socket.IO server's broadcaster, avoiding the client loopback issue.
+
+IMPORTANT - Claude Event Format:
+Claude sends hook events with these REQUIRED fields:
+- hook_event_name: The event type (UserPromptSubmit, PreToolUse, PostToolUse, etc.)
+- hook_event_type: Usually same as hook_event_name
+- hook_input_data: Contains the actual event data
+- sessionId: Session identifier
+- timestamp: ISO format timestamp
+
+DO NOT use "event" or "type" fields - use "hook_event_name" instead!
 """

 import logging
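For illustration, a hook event payload following the format documented in that docstring might look like the sketch below. Only the field names come from the documentation; the values (and the keys inside hook_input_data) are hypothetical.

# Hypothetical example of the hook event shape described above; values are made up.
hook_event = {
    "hook_event_name": "PreToolUse",      # required: the event type
    "hook_event_type": "PreToolUse",      # usually same as hook_event_name
    "hook_input_data": {"tool_name": "Bash"},  # the actual event data (illustrative keys)
    "sessionId": "d3b07384-d9a0-4c8e-9b1a-000000000000",
    "timestamp": "2025-01-01T12:00:00Z",  # ISO format timestamp
}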
@@ -37,9 +47,12 @@ class DirectSocketIORelay:
             "last_relay_time": None,
         }
         self.debug = logger.isEnabledFor(logging.DEBUG)
+        self.connection_retries = 0
+        self.max_retries = 10
+        self.retry_delay = 1.0  # Start with 1 second

     def start(self) -> None:
-        """Start the relay by subscribing to EventBus events."""
+        """Start the relay by subscribing to EventBus events with retry logic."""
         if not self.enabled:
             logger.warning("DirectSocketIORelay is disabled")
             return
@@ -60,12 +73,9 @@ class DirectSocketIORelay:
         # Add debug logging for verification
         logger.info("[DirectRelay] Subscribed to hook.* events on EventBus")

-        # Check and log broadcaster availability
-        broadcaster_available = (
-
-            and hasattr(self.server, "broadcaster")
-            and self.server.broadcaster is not None
-        )
+        # Check and log broadcaster availability with retry logic
+        broadcaster_available = self._check_broadcaster_with_retry()
+
         logger.info(
             f"[DirectRelay] Server broadcaster available: {broadcaster_available}"
         )
@@ -80,14 +90,49 @@ class DirectSocketIORelay:
             )
         else:
             logger.warning(
-                "[DirectRelay] Server broadcaster is None - events will not be relayed!"
+                "[DirectRelay] Server broadcaster is None after retries - events will not be relayed!"
             )

         logger.info(f"[DirectRelay] EventBus instance: {self.event_bus is not None}")

         # Mark as connected after successful subscription
-        self.connected =
-        logger.info("[DirectRelay] Started
+        self.connected = broadcaster_available
+        logger.info(f"[DirectRelay] Started with connection status: {self.connected}")
+
+    def _check_broadcaster_with_retry(self) -> bool:
+        """Check broadcaster availability with exponential backoff retry.
+
+        Returns:
+            True if broadcaster is available, False after max retries
+        """
+        import time
+
+        retry_delay = self.retry_delay
+
+        for attempt in range(self.max_retries):
+            broadcaster_available = (
+                self.server
+                and hasattr(self.server, "broadcaster")
+                and self.server.broadcaster is not None
+            )
+
+            if broadcaster_available:
+                self.connection_retries = 0  # Reset counter on success
+                return True
+
+            if attempt < self.max_retries - 1:
+                logger.info(
+                    f"[DirectRelay] Broadcaster not ready, retry {attempt + 1}/{self.max_retries} "
+                    f"in {retry_delay:.1f}s"
+                )
+                time.sleep(retry_delay)
+                retry_delay = min(retry_delay * 2, 30.0)  # Exponential backoff, max 30s
+            else:
+                logger.error(
+                    f"[DirectRelay] Broadcaster not available after {self.max_retries} attempts"
+                )
+
+        return False

     def _handle_hook_event(self, event_type: str, data: Any):
         """Internal method to handle hook events and broadcast them.
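As a rough illustration of the backoff schedule added above (using the defaults from this diff, retry_delay=1.0 and max_retries=10, with a sleep only between attempts), the waits work out as follows:

# Minimal sketch of the delay schedule implied by _check_broadcaster_with_retry;
# max_retries - 1 = 9 sleeps occur, doubling from 1.0s and capped at 30.0s.
delay, delays = 1.0, []
for _ in range(9):
    delays.append(delay)
    delay = min(delay * 2, 30.0)  # exponential backoff, max 30s
print(delays)  # [1.0, 2.0, 4.0, 8.0, 16.0, 30.0, 30.0, 30.0, 30.0] -> ~151s worst case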
@@ -173,15 +218,48 @@ class DirectSocketIORelay:

                 # Use the full event_type (e.g., "hook.pre_tool") as the event name
                 # The normalizer handles dotted names and will extract type and subtype correctly
-
-
-
-
-
-
-
-
+                try:
+                    self.server.broadcaster.broadcast_event(
+                        event_type, broadcast_data
+                    )
+                    self.stats["events_relayed"] += 1
+                    self.stats["last_relay_time"] = datetime.now().isoformat()
+
+                    # Reset retry counter on successful broadcast
+                    if self.connection_retries > 0:
+                        self.connection_retries = 0
+                        self.connected = True
+                        logger.info("[DirectRelay] Connection restored")
+
+                    if self.debug:
+                        logger.debug(
+                            f"[DirectRelay] Broadcasted hook event: {event_type}"
+                        )
+                except Exception as broadcast_error:
+                    logger.error(
+                        f"[DirectRelay] Broadcast failed for {event_type}: {broadcast_error}"
                     )
+                    self.stats["events_failed"] += 1
+
+                    # Try to reconnect if broadcast fails
+                    if self.connection_retries < self.max_retries:
+                        self.connection_retries += 1
+                        self.connected = self._check_broadcaster_with_retry()
+                        if self.connected:
+                            # Retry the broadcast
+                            try:
+                                self.server.broadcaster.broadcast_event(
+                                    event_type, broadcast_data
+                                )
+                                self.stats["events_relayed"] += 1
+                                self.stats[
+                                    "events_failed"
+                                ] -= 1  # Undo the failure count
+                                logger.info(
+                                    f"[DirectRelay] Retry successful for {event_type}"
+                                )
+                            except:
+                                pass  # Already counted as failed
             else:
                 # Enhanced logging when broadcaster is not available
                 logger.warning(
@@ -189,7 +267,7 @@ class DirectSocketIORelay:
                 )
                 if self.server:
                     logger.warning(
-
+                        "[DirectRelay] Server exists but broadcaster is None"
                     )
                     logger.warning(
                         f"[DirectRelay] Server type: {type(self.server).__name__}"
@@ -202,7 +280,7 @@ class DirectSocketIORelay:
                         f"[DirectRelay] Broadcaster value: {self.server.broadcaster}"
                     )
                 else:
-                    logger.warning(
+                    logger.warning("[DirectRelay] Server is None")
                 self.stats["events_failed"] += 1

         except Exception as e: