claude-mpm 4.1.6__py3-none-any.whl → 4.1.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- claude_mpm/VERSION +1 -1
- claude_mpm/agents/OUTPUT_STYLE.md +73 -0
- claude_mpm/agents/templates/agent-manager.json +1 -1
- claude_mpm/agents/templates/agent-manager.md +349 -34
- claude_mpm/cli/commands/configure.py +151 -2
- claude_mpm/cli/commands/configure_tui.py +5 -1
- claude_mpm/cli/parsers/configure_parser.py +23 -0
- claude_mpm/config/socketio_config.py +33 -4
- claude_mpm/dashboard/static/js/socket-client.js +40 -16
- claude_mpm/hooks/claude_hooks/installer.py +455 -0
- claude_mpm/hooks/claude_hooks/services/connection_manager.py +17 -0
- claude_mpm/services/agents/deployment/agent_config_provider.py +127 -27
- claude_mpm/services/diagnostics/checks/instructions_check.py +1 -3
- claude_mpm/services/event_bus/direct_relay.py +146 -11
- claude_mpm/services/socketio/handlers/connection_handler.py +3 -18
- claude_mpm/services/socketio/server/connection_manager.py +124 -63
- claude_mpm/services/socketio/server/core.py +34 -7
- claude_mpm/services/socketio/server/main.py +83 -21
- {claude_mpm-4.1.6.dist-info → claude_mpm-4.1.8.dist-info}/METADATA +1 -1
- {claude_mpm-4.1.6.dist-info → claude_mpm-4.1.8.dist-info}/RECORD +24 -22
- {claude_mpm-4.1.6.dist-info → claude_mpm-4.1.8.dist-info}/WHEEL +0 -0
- {claude_mpm-4.1.6.dist-info → claude_mpm-4.1.8.dist-info}/entry_points.txt +0 -0
- {claude_mpm-4.1.6.dist-info → claude_mpm-4.1.8.dist-info}/licenses/LICENSE +0 -0
- {claude_mpm-4.1.6.dist-info → claude_mpm-4.1.8.dist-info}/top_level.txt +0 -0
claude_mpm/services/agents/deployment/agent_config_provider.py

```diff
@@ -90,8 +90,19 @@ class AgentConfigProvider:
             "engineer": {
                 **base_config,
                 "description": "Code implementation, development, and inline documentation",
-                "tags":
-                "tools":
+                "tags": ["engineer", "development", "coding", "implementation"],
+                "tools": [
+                    "Read",
+                    "Write",
+                    "Edit",
+                    "MultiEdit",
+                    "Bash",
+                    "Grep",
+                    "Glob",
+                    "LS",
+                    "WebSearch",
+                    "TodoWrite",
+                ],
                 "temperature": 0.2,
                 "when_to_use": [
                     "Code implementation needed",
@@ -109,14 +120,28 @@ class AgentConfigProvider:
                     "Refactor codebases",
                 ],
                 "primary_role": "Code implementation and development",
-                "specializations":
+                "specializations": [
+                    "coding",
+                    "debugging",
+                    "refactoring",
+                    "optimization",
+                ],
                 "authority": "ALL code implementation decisions",
             },
             "qa": {
                 **base_config,
                 "description": "Quality assurance, testing, and validation",
-                "tags":
-                "tools":
+                "tags": ["qa", "testing", "quality", "validation"],
+                "tools": [
+                    "Read",
+                    "Write",
+                    "Edit",
+                    "Bash",
+                    "Grep",
+                    "Glob",
+                    "LS",
+                    "TodoWrite",
+                ],
                 "temperature": 0.1,
                 "when_to_use": [
                     "Testing needed",
@@ -134,14 +159,29 @@ class AgentConfigProvider:
                     "Validate quality",
                 ],
                 "primary_role": "Testing and quality assurance",
-                "specializations":
+                "specializations": [
+                    "testing",
+                    "validation",
+                    "quality-assurance",
+                    "coverage",
+                ],
                 "authority": "ALL testing and quality decisions",
             },
             "documentation": {
                 **base_config,
                 "description": "Documentation creation, maintenance, and changelog generation",
-                "tags":
-                "tools":
+                "tags": ["documentation", "writing", "changelog", "docs"],
+                "tools": [
+                    "Read",
+                    "Write",
+                    "Edit",
+                    "MultiEdit",
+                    "Grep",
+                    "Glob",
+                    "LS",
+                    "WebSearch",
+                    "TodoWrite",
+                ],
                 "temperature": 0.3,
                 "when_to_use": [
                     "Documentation updates needed",
@@ -159,14 +199,27 @@ class AgentConfigProvider:
                     "Maintain docs",
                 ],
                 "primary_role": "Documentation and technical writing",
-                "specializations":
+                "specializations": [
+                    "technical-writing",
+                    "changelog",
+                    "api-docs",
+                    "guides",
+                ],
                 "authority": "ALL documentation decisions",
             },
             "research": {
                 **base_config,
                 "description": "Technical research, analysis, and investigation",
-                "tags":
-                "tools":
+                "tags": ["research", "analysis", "investigation", "evaluation"],
+                "tools": [
+                    "Read",
+                    "Grep",
+                    "Glob",
+                    "LS",
+                    "WebSearch",
+                    "WebFetch",
+                    "TodoWrite",
+                ],
                 "temperature": 0.4,
                 "when_to_use": [
                     "Technical research needed",
@@ -184,14 +237,27 @@ class AgentConfigProvider:
                     "Evidence-based recommendations",
                 ],
                 "primary_role": "Research and technical analysis",
-                "specializations":
+                "specializations": [
+                    "investigation",
+                    "analysis",
+                    "evaluation",
+                    "recommendations",
+                ],
                 "authority": "ALL research decisions",
             },
             "security": {
                 **base_config,
                 "description": "Security analysis, vulnerability assessment, and protection",
-                "tags":
-                "tools":
+                "tags": ["security", "vulnerability", "protection", "audit"],
+                "tools": [
+                    "Read",
+                    "Grep",
+                    "Glob",
+                    "LS",
+                    "Bash",
+                    "WebSearch",
+                    "TodoWrite",
+                ],
                 "temperature": 0.1,
                 "when_to_use": [
                     "Security review needed",
@@ -209,14 +275,28 @@ class AgentConfigProvider:
                     "Threat modeling",
                 ],
                 "primary_role": "Security analysis and protection",
-                "specializations":
+                "specializations": [
+                    "vulnerability-assessment",
+                    "security-audit",
+                    "threat-modeling",
+                    "protection",
+                ],
                 "authority": "ALL security decisions",
             },
             "ops": {
                 **base_config,
                 "description": "Deployment, operations, and infrastructure management",
-                "tags":
-                "tools":
+                "tags": ["ops", "deployment", "infrastructure", "devops"],
+                "tools": [
+                    "Read",
+                    "Write",
+                    "Edit",
+                    "Bash",
+                    "Grep",
+                    "Glob",
+                    "LS",
+                    "TodoWrite",
+                ],
                 "temperature": 0.2,
                 "when_to_use": [
                     "Deployment configuration",
@@ -234,14 +314,29 @@ class AgentConfigProvider:
                     "Automate operations",
                 ],
                 "primary_role": "Operations and deployment management",
-                "specializations":
+                "specializations": [
+                    "deployment",
+                    "infrastructure",
+                    "automation",
+                    "monitoring",
+                ],
                 "authority": "ALL operations decisions",
             },
             "data_engineer": {
                 **base_config,
                 "description": "Data pipeline management and AI API integrations",
-                "tags":
-                "tools":
+                "tags": ["data", "pipeline", "etl", "ai-integration"],
+                "tools": [
+                    "Read",
+                    "Write",
+                    "Edit",
+                    "Bash",
+                    "Grep",
+                    "Glob",
+                    "LS",
+                    "WebSearch",
+                    "TodoWrite",
+                ],
                 "temperature": 0.2,
                 "when_to_use": [
                     "Data pipeline setup",
@@ -259,14 +354,19 @@ class AgentConfigProvider:
                     "Integrate AI services",
                 ],
                 "primary_role": "Data engineering and AI integration",
-                "specializations":
+                "specializations": [
+                    "data-pipelines",
+                    "etl",
+                    "database",
+                    "ai-integration",
+                ],
                 "authority": "ALL data engineering decisions",
             },
             "version_control": {
                 **base_config,
                 "description": "Git operations, version management, and release coordination",
-                "tags":
-                "tools":
+                "tags": ["git", "version-control", "release", "branching"],
+                "tools": ["Read", "Bash", "Grep", "Glob", "LS", "TodoWrite"],
                 "temperature": 0.1,
                 "network_access": False,  # Git operations are local
                 "when_to_use": [
@@ -285,7 +385,7 @@ class AgentConfigProvider:
                     "Release coordination",
                 ],
                 "primary_role": "Version control and release management",
-                "specializations":
+                "specializations": ["git", "versioning", "branching", "releases"],
                 "authority": "ALL version control decisions",
             },
         }
@@ -298,13 +398,13 @@ class AgentConfigProvider:
         return {
             **base_config,
             "description": f"{agent_name.title()} agent for specialized tasks",
-            "tags":
-            "tools":
+            "tags": [agent_name, "specialized", "mpm"],
+            "tools": ["Read", "Write", "Edit", "Grep", "Glob", "LS", "TodoWrite"],
             "temperature": 0.3,
             "when_to_use": [f"When {agent_name} expertise is needed"],
             "specialized_knowledge": [f"{agent_name.title()} domain knowledge"],
             "unique_capabilities": [f"{agent_name.title()} specialized operations"],
             "primary_role": f"{agent_name.title()} operations",
-            "specializations":
+            "specializations": [agent_name],
            "authority": f"ALL {agent_name} decisions",
         }
```
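Each agent entry above is layered on a shared `base_config` via dict unpacking, and the 4.1.8 change fills in the previously empty `tags`, `tools`, and `specializations` lists. A minimal sketch of how that merge behaves, using the engineer values from the hunk and a hypothetical `base_config` (the real one lives elsewhere in the module and is not shown in this diff):

```python
# Sketch only: base_config here is invented for illustration; the engineer
# fields are copied from the diff above.
base_config = {"model": "sonnet", "network_access": True, "temperature": 0.5}

engineer = {
    **base_config,  # shared defaults come first
    "description": "Code implementation, development, and inline documentation",
    "tags": ["engineer", "development", "coding", "implementation"],
    "tools": ["Read", "Write", "Edit", "MultiEdit", "Bash",
              "Grep", "Glob", "LS", "WebSearch", "TodoWrite"],
    "temperature": 0.2,  # later keys win, so this overrides the base value
    "specializations": ["coding", "debugging", "refactoring", "optimization"],
    "authority": "ALL code implementation decisions",
}

assert engineer["temperature"] == 0.2      # per-agent override
assert engineer["network_access"] is True  # inherited from base_config
```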
claude_mpm/services/diagnostics/checks/instructions_check.py

```diff
@@ -374,9 +374,7 @@ class InstructionsCheck(BaseDiagnosticCheck):
                 continue
 
         # Check for Claude Code specific content in INSTRUCTIONS.md
-        instructions_files = [
-            path for path in files if path.name == "INSTRUCTIONS.md"
-        ]
+        instructions_files = [path for path in files if path.name == "INSTRUCTIONS.md"]
         for path in instructions_files:
             try:
                 content = path.read_text(encoding="utf-8")
```
claude_mpm/services/event_bus/direct_relay.py

```diff
@@ -2,6 +2,16 @@
 
 This module provides a relay that connects EventBus directly to the
 Socket.IO server's broadcaster, avoiding the client loopback issue.
+
+IMPORTANT - Claude Event Format:
+Claude sends hook events with these REQUIRED fields:
+- hook_event_name: The event type (UserPromptSubmit, PreToolUse, PostToolUse, etc.)
+- hook_event_type: Usually same as hook_event_name
+- hook_input_data: Contains the actual event data
+- sessionId: Session identifier
+- timestamp: ISO format timestamp
+
+DO NOT use "event" or "type" fields - use "hook_event_name" instead!
 """
 
 import logging
```
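The expanded module docstring pins down the hook-event field names the relay expects. As a hedged illustration, here is what such a payload could look like; only the key names come from the docstring, the values are invented:

```python
# Illustrative payload shaped after the documented field names; the concrete
# values (session id, tool name, timestamp) are made up for this example.
hook_event = {
    "hook_event_name": "PreToolUse",           # required event type
    "hook_event_type": "PreToolUse",           # usually mirrors hook_event_name
    "hook_input_data": {"tool_name": "Bash"},  # the actual event data
    "sessionId": "session-1234",
    "timestamp": "2025-01-01T12:00:00Z",       # ISO format
}
# Deliberately no "event" or "type" keys - the relay keys off "hook_event_name".
```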
```diff
@@ -37,9 +47,12 @@ class DirectSocketIORelay:
             "last_relay_time": None,
         }
         self.debug = logger.isEnabledFor(logging.DEBUG)
+        self.connection_retries = 0
+        self.max_retries = 10
+        self.retry_delay = 1.0  # Start with 1 second
 
     def start(self) -> None:
-        """Start the relay by subscribing to EventBus events."""
+        """Start the relay by subscribing to EventBus events with retry logic."""
         if not self.enabled:
             logger.warning("DirectSocketIORelay is disabled")
             return
@@ -59,14 +72,67 @@
 
         # Add debug logging for verification
         logger.info("[DirectRelay] Subscribed to hook.* events on EventBus")
+
+        # Check and log broadcaster availability with retry logic
+        broadcaster_available = self._check_broadcaster_with_retry()
+
         logger.info(
-            f"[DirectRelay] Server broadcaster available: {
+            f"[DirectRelay] Server broadcaster available: {broadcaster_available}"
         )
+        if not broadcaster_available:
+            if not self.server:
+                logger.warning(
+                    "[DirectRelay] No server instance provided - events will not be relayed!"
+                )
+            elif not hasattr(self.server, "broadcaster"):
+                logger.warning(
+                    "[DirectRelay] Server has no broadcaster attribute - events will not be relayed!"
+                )
+            else:
+                logger.warning(
+                    "[DirectRelay] Server broadcaster is None after retries - events will not be relayed!"
+                )
+
         logger.info(f"[DirectRelay] EventBus instance: {self.event_bus is not None}")
 
         # Mark as connected after successful subscription
-        self.connected =
-        logger.info("[DirectRelay] Started
+        self.connected = broadcaster_available
+        logger.info(f"[DirectRelay] Started with connection status: {self.connected}")
+
+    def _check_broadcaster_with_retry(self) -> bool:
+        """Check broadcaster availability with exponential backoff retry.
+
+        Returns:
+            True if broadcaster is available, False after max retries
+        """
+        import time
+
+        retry_delay = self.retry_delay
+
+        for attempt in range(self.max_retries):
+            broadcaster_available = (
+                self.server
+                and hasattr(self.server, "broadcaster")
+                and self.server.broadcaster is not None
+            )
+
+            if broadcaster_available:
+                self.connection_retries = 0  # Reset counter on success
+                return True
+
+            if attempt < self.max_retries - 1:
+                logger.info(
+                    f"[DirectRelay] Broadcaster not ready, retry {attempt + 1}/{self.max_retries} "
+                    f"in {retry_delay:.1f}s"
+                )
+                time.sleep(retry_delay)
+                retry_delay = min(retry_delay * 2, 30.0)  # Exponential backoff, max 30s
+            else:
+                logger.error(
+                    f"[DirectRelay] Broadcaster not available after {self.max_retries} attempts"
+                )
+
+        return False
 
     def _handle_hook_event(self, event_type: str, data: Any):
         """Internal method to handle hook events and broadcast them.
```
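With the new defaults (`max_retries = 10`, `retry_delay = 1.0`), the retry loop above doubles the delay after each failed attempt, caps it at 30 seconds, and does not sleep after the final attempt. A quick sketch of the resulting worst-case schedule:

```python
# Reproduces the delay arithmetic from _check_broadcaster_with_retry above.
max_retries, delay = 10, 1.0
delays = []
for attempt in range(max_retries):
    if attempt < max_retries - 1:  # the final attempt does not sleep
        delays.append(delay)
        delay = min(delay * 2, 30.0)  # exponential backoff, capped at 30 s

print(delays)       # [1.0, 2.0, 4.0, 8.0, 16.0, 30.0, 30.0, 30.0, 30.0]
print(sum(delays))  # 151.0 -> worst-case blocking time before giving up
```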
```diff
@@ -76,9 +142,28 @@
             data: The event data
         """
         try:
-            #
+            # Enhanced debug logging for troubleshooting
             if self.debug:
                 logger.debug(f"[DirectRelay] Received event: {event_type}")
+                logger.debug(f"[DirectRelay] Event data type: {type(data).__name__}")
+                logger.debug(
+                    f"[DirectRelay] Event data keys: {list(data.keys()) if isinstance(data, dict) else 'not-dict'}"
+                )
+                logger.debug(
+                    f"[DirectRelay] Relay state - enabled: {self.enabled}, connected: {self.connected}"
+                )
+                logger.debug(
+                    f"[DirectRelay] Server state - has_server: {self.server is not None}, has_broadcaster: {self.server and hasattr(self.server, 'broadcaster') and self.server.broadcaster is not None}"
+                )
+
+            # Always log reception of important events
+            if event_type in [
+                "hook.pre_tool",
+                "hook.post_tool",
+                "hook.user_prompt",
+                "hook.subagent_stop",
+            ]:
+                logger.info(f"[DirectRelay] Processing important event: {event_type}")
 
             # Only relay hook events
             if event_type.startswith("hook."):
@@ -133,19 +218,69 @@
 
                     # Use the full event_type (e.g., "hook.pre_tool") as the event name
                     # The normalizer handles dotted names and will extract type and subtype correctly
-
+                    try:
+                        self.server.broadcaster.broadcast_event(
+                            event_type, broadcast_data
+                        )
+                        self.stats["events_relayed"] += 1
+                        self.stats["last_relay_time"] = datetime.now().isoformat()
 
-
-
+                        # Reset retry counter on successful broadcast
+                        if self.connection_retries > 0:
+                            self.connection_retries = 0
+                            self.connected = True
+                            logger.info("[DirectRelay] Connection restored")
 
-
-
-
+                        if self.debug:
+                            logger.debug(
+                                f"[DirectRelay] Broadcasted hook event: {event_type}"
+                            )
+                    except Exception as broadcast_error:
+                        logger.error(
+                            f"[DirectRelay] Broadcast failed for {event_type}: {broadcast_error}"
                         )
+                        self.stats["events_failed"] += 1
+
+                        # Try to reconnect if broadcast fails
+                        if self.connection_retries < self.max_retries:
+                            self.connection_retries += 1
+                            self.connected = self._check_broadcaster_with_retry()
+                            if self.connected:
+                                # Retry the broadcast
+                                try:
+                                    self.server.broadcaster.broadcast_event(
+                                        event_type, broadcast_data
+                                    )
+                                    self.stats["events_relayed"] += 1
+                                    self.stats[
+                                        "events_failed"
+                                    ] -= 1  # Undo the failure count
+                                    logger.info(
+                                        f"[DirectRelay] Retry successful for {event_type}"
+                                    )
+                                except:
+                                    pass  # Already counted as failed
                 else:
+                    # Enhanced logging when broadcaster is not available
                     logger.warning(
                         f"[DirectRelay] Server broadcaster not available for {event_type}"
                     )
+                    if self.server:
+                        logger.warning(
+                            "[DirectRelay] Server exists but broadcaster is None"
+                        )
+                        logger.warning(
+                            f"[DirectRelay] Server type: {type(self.server).__name__}"
+                        )
+                        logger.warning(
+                            f"[DirectRelay] Server has broadcaster attr: {hasattr(self.server, 'broadcaster')}"
+                        )
+                        if hasattr(self.server, "broadcaster"):
+                            logger.warning(
+                                f"[DirectRelay] Broadcaster value: {self.server.broadcaster}"
+                            )
+                    else:
+                        logger.warning("[DirectRelay] Server is None")
                     self.stats["events_failed"] += 1
 
         except Exception as e:
```
claude_mpm/services/socketio/handlers/connection_handler.py

```diff
@@ -177,7 +177,7 @@ class EnhancedConnectionEventHandler(BaseEventHandler):
         async def ping(sid):
             """Handle ping from client for health monitoring."""
             try:
-                # Update activity in connection manager
+                # Update activity in connection manager - CRITICAL for preventing stale connections
                 if self.server.connection_manager:
                     await self.server.connection_manager.update_activity(sid, "ping")
 
@@ -293,23 +293,8 @@
             except Exception as e:
                 self.logger.error(f"Error getting connection stats for {sid}: {e}")
 
-
-
-            """Handle client heartbeat for connection monitoring."""
-            try:
-                # Update activity
-                if self.server.connection_manager:
-                    await self.server.connection_manager.update_activity(sid, "event")
-
-                # Send heartbeat response
-                await sio.emit(
-                    "heartbeat_response",
-                    {"timestamp": datetime.now().isoformat(), "status": "alive"},
-                    room=sid,
-                )
-
-            except Exception as e:
-                self.logger.error(f"Error handling heartbeat from {sid}: {e}")
+        # Heartbeat handler removed - Using Socket.IO's built-in ping/pong instead
+        # This prevents conflicting heartbeat systems that can cause disconnections
 
         self.logger.info("Enhanced connection event handlers registered")
 
```
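The removed handler duplicated what Engine.IO already provides: transport-level ping/pong, which the `ping` handler above feeds into `update_activity`. A minimal sketch of leaning on that built-in mechanism with python-socketio; the interval and timeout values below are illustrative, not necessarily what claude-mpm actually configures:

```python
# Sketch: rely on Engine.IO's built-in ping/pong instead of a custom
# "heartbeat" event. Values are illustrative defaults, not claude-mpm's.
import socketio

sio = socketio.AsyncServer(
    async_mode="aiohttp",
    ping_interval=25,  # seconds between server-initiated pings
    ping_timeout=20,   # drop the client if no pong arrives within this window
)
```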