claude-mpm 3.4.26__py3-none-any.whl → 3.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- claude_mpm/VERSION +1 -1
- claude_mpm/agents/INSTRUCTIONS.md +182 -299
- claude_mpm/agents/agent_loader.py +283 -57
- claude_mpm/agents/agent_loader_integration.py +6 -9
- claude_mpm/agents/base_agent.json +2 -1
- claude_mpm/agents/base_agent_loader.py +1 -1
- claude_mpm/cli/__init__.py +6 -10
- claude_mpm/cli/commands/__init__.py +0 -2
- claude_mpm/cli/commands/agents.py +1 -1
- claude_mpm/cli/commands/memory.py +1 -1
- claude_mpm/cli/commands/run.py +12 -0
- claude_mpm/cli/parser.py +0 -13
- claude_mpm/cli/utils.py +1 -1
- claude_mpm/config/__init__.py +44 -2
- claude_mpm/config/agent_config.py +348 -0
- claude_mpm/config/paths.py +322 -0
- claude_mpm/constants.py +0 -1
- claude_mpm/core/__init__.py +2 -5
- claude_mpm/core/agent_registry.py +63 -17
- claude_mpm/core/claude_runner.py +354 -43
- claude_mpm/core/config.py +7 -1
- claude_mpm/core/config_aliases.py +4 -3
- claude_mpm/core/config_paths.py +151 -0
- claude_mpm/core/factories.py +4 -50
- claude_mpm/core/logger.py +11 -13
- claude_mpm/core/service_registry.py +2 -2
- claude_mpm/dashboard/static/js/components/agent-inference.js +101 -25
- claude_mpm/dashboard/static/js/components/event-processor.js +3 -2
- claude_mpm/hooks/claude_hooks/hook_handler.py +343 -83
- claude_mpm/hooks/memory_integration_hook.py +1 -1
- claude_mpm/init.py +37 -6
- claude_mpm/scripts/socketio_daemon.py +6 -2
- claude_mpm/services/__init__.py +71 -3
- claude_mpm/services/agents/__init__.py +85 -0
- claude_mpm/services/agents/deployment/__init__.py +21 -0
- claude_mpm/services/{agent_deployment.py → agents/deployment/agent_deployment.py} +192 -41
- claude_mpm/services/{agent_lifecycle_manager.py → agents/deployment/agent_lifecycle_manager.py} +11 -10
- claude_mpm/services/agents/loading/__init__.py +11 -0
- claude_mpm/services/{agent_profile_loader.py → agents/loading/agent_profile_loader.py} +9 -8
- claude_mpm/services/{base_agent_manager.py → agents/loading/base_agent_manager.py} +2 -2
- claude_mpm/services/{framework_agent_loader.py → agents/loading/framework_agent_loader.py} +116 -40
- claude_mpm/services/agents/management/__init__.py +9 -0
- claude_mpm/services/{agent_management_service.py → agents/management/agent_management_service.py} +6 -5
- claude_mpm/services/agents/memory/__init__.py +21 -0
- claude_mpm/services/{agent_memory_manager.py → agents/memory/agent_memory_manager.py} +3 -3
- claude_mpm/services/agents/registry/__init__.py +29 -0
- claude_mpm/services/{agent_registry.py → agents/registry/agent_registry.py} +101 -16
- claude_mpm/services/{deployed_agent_discovery.py → agents/registry/deployed_agent_discovery.py} +12 -2
- claude_mpm/services/{agent_modification_tracker.py → agents/registry/modification_tracker.py} +6 -5
- claude_mpm/services/async_session_logger.py +584 -0
- claude_mpm/services/claude_session_logger.py +299 -0
- claude_mpm/services/framework_claude_md_generator/content_assembler.py +2 -2
- claude_mpm/services/framework_claude_md_generator/section_generators/agents.py +17 -17
- claude_mpm/services/framework_claude_md_generator/section_generators/claude_pm_init.py +3 -3
- claude_mpm/services/framework_claude_md_generator/section_generators/core_responsibilities.py +1 -1
- claude_mpm/services/framework_claude_md_generator/section_generators/orchestration_principles.py +1 -1
- claude_mpm/services/framework_claude_md_generator/section_generators/todo_task_tools.py +19 -24
- claude_mpm/services/framework_claude_md_generator/section_generators/troubleshooting.py +1 -1
- claude_mpm/services/framework_claude_md_generator.py +4 -2
- claude_mpm/services/memory/__init__.py +17 -0
- claude_mpm/services/{memory_builder.py → memory/builder.py} +3 -3
- claude_mpm/services/memory/cache/__init__.py +14 -0
- claude_mpm/services/{shared_prompt_cache.py → memory/cache/shared_prompt_cache.py} +1 -1
- claude_mpm/services/memory/cache/simple_cache.py +317 -0
- claude_mpm/services/{memory_optimizer.py → memory/optimizer.py} +1 -1
- claude_mpm/services/{memory_router.py → memory/router.py} +1 -1
- claude_mpm/services/optimized_hook_service.py +542 -0
- claude_mpm/services/project_registry.py +14 -8
- claude_mpm/services/response_tracker.py +237 -0
- claude_mpm/services/ticketing_service_original.py +4 -2
- claude_mpm/services/version_control/branch_strategy.py +3 -1
- claude_mpm/utils/paths.py +12 -10
- claude_mpm/utils/session_logging.py +114 -0
- claude_mpm/validation/agent_validator.py +2 -1
- {claude_mpm-3.4.26.dist-info → claude_mpm-3.5.0.dist-info}/METADATA +26 -20
- {claude_mpm-3.4.26.dist-info → claude_mpm-3.5.0.dist-info}/RECORD +83 -106
- claude_mpm/cli/commands/ui.py +0 -57
- claude_mpm/core/simple_runner.py +0 -1046
- claude_mpm/hooks/builtin/__init__.py +0 -1
- claude_mpm/hooks/builtin/logging_hook_example.py +0 -165
- claude_mpm/hooks/builtin/memory_hooks_example.py +0 -67
- claude_mpm/hooks/builtin/mpm_command_hook.py +0 -125
- claude_mpm/hooks/builtin/post_delegation_hook_example.py +0 -124
- claude_mpm/hooks/builtin/pre_delegation_hook_example.py +0 -125
- claude_mpm/hooks/builtin/submit_hook_example.py +0 -100
- claude_mpm/hooks/builtin/ticket_extraction_hook_example.py +0 -237
- claude_mpm/hooks/builtin/todo_agent_prefix_hook.py +0 -240
- claude_mpm/hooks/builtin/workflow_start_hook.py +0 -181
- claude_mpm/orchestration/__init__.py +0 -6
- claude_mpm/orchestration/archive/direct_orchestrator.py +0 -195
- claude_mpm/orchestration/archive/factory.py +0 -215
- claude_mpm/orchestration/archive/hook_enabled_orchestrator.py +0 -188
- claude_mpm/orchestration/archive/hook_integration_example.py +0 -178
- claude_mpm/orchestration/archive/interactive_subprocess_orchestrator.py +0 -826
- claude_mpm/orchestration/archive/orchestrator.py +0 -501
- claude_mpm/orchestration/archive/pexpect_orchestrator.py +0 -252
- claude_mpm/orchestration/archive/pty_orchestrator.py +0 -270
- claude_mpm/orchestration/archive/simple_orchestrator.py +0 -82
- claude_mpm/orchestration/archive/subprocess_orchestrator.py +0 -801
- claude_mpm/orchestration/archive/system_prompt_orchestrator.py +0 -278
- claude_mpm/orchestration/archive/wrapper_orchestrator.py +0 -187
- claude_mpm/schemas/workflow_validator.py +0 -411
- claude_mpm/services/parent_directory_manager/__init__.py +0 -577
- claude_mpm/services/parent_directory_manager/backup_manager.py +0 -258
- claude_mpm/services/parent_directory_manager/config_manager.py +0 -210
- claude_mpm/services/parent_directory_manager/deduplication_manager.py +0 -279
- claude_mpm/services/parent_directory_manager/framework_protector.py +0 -143
- claude_mpm/services/parent_directory_manager/operations.py +0 -186
- claude_mpm/services/parent_directory_manager/state_manager.py +0 -624
- claude_mpm/services/parent_directory_manager/template_deployer.py +0 -579
- claude_mpm/services/parent_directory_manager/validation_manager.py +0 -378
- claude_mpm/services/parent_directory_manager/version_control_helper.py +0 -339
- claude_mpm/services/parent_directory_manager/version_manager.py +0 -222
- claude_mpm/ui/__init__.py +0 -1
- claude_mpm/ui/rich_terminal_ui.py +0 -295
- claude_mpm/ui/terminal_ui.py +0 -328
- /claude_mpm/services/{agent_versioning.py → agents/deployment/agent_versioning.py} +0 -0
- /claude_mpm/services/{agent_capabilities_generator.py → agents/management/agent_capabilities_generator.py} +0 -0
- /claude_mpm/services/{agent_persistence_service.py → agents/memory/agent_persistence_service.py} +0 -0
- {claude_mpm-3.4.26.dist-info → claude_mpm-3.5.0.dist-info}/WHEEL +0 -0
- {claude_mpm-3.4.26.dist-info → claude_mpm-3.5.0.dist-info}/entry_points.txt +0 -0
- {claude_mpm-3.4.26.dist-info → claude_mpm-3.5.0.dist-info}/licenses/LICENSE +0 -0
- {claude_mpm-3.4.26.dist-info → claude_mpm-3.5.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,584 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Async Claude Session Response Logger with Optimized Performance
|
|
3
|
+
|
|
4
|
+
High-performance async logging system with timestamp-based filenames to eliminate
|
|
5
|
+
concurrency issues and achieve near-zero performance overhead.
|
|
6
|
+
|
|
7
|
+
Key Features:
|
|
8
|
+
- Timestamp-based filenames with microsecond precision
|
|
9
|
+
- Async I/O with fire-and-forget pattern
|
|
10
|
+
- Queue-based background writing
|
|
11
|
+
- Optional OS-native logging format
|
|
12
|
+
- Zero blocking on main thread
|
|
13
|
+
- Configuration via .claude-mpm/configuration.yaml
|
|
14
|
+
"""
|
|
15
|
+
|
|
16
|
+
import asyncio
|
|
17
|
+
import json
|
|
18
|
+
import os
|
|
19
|
+
import sys
|
|
20
|
+
import time
|
|
21
|
+
from datetime import datetime
|
|
22
|
+
from pathlib import Path
|
|
23
|
+
from typing import Dict, Any, Optional, Callable
|
|
24
|
+
from queue import Queue, Full
|
|
25
|
+
from threading import Thread, Lock
|
|
26
|
+
import logging
|
|
27
|
+
import logging.handlers
|
|
28
|
+
from dataclasses import dataclass, asdict
|
|
29
|
+
from enum import Enum
|
|
30
|
+
|
|
31
|
+
# Import configuration manager
|
|
32
|
+
from ..core.config import Config
|
|
33
|
+
|
|
34
|
+
logger = logging.getLogger(__name__)
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
class LogFormat(Enum):
    """Supported output formats for response log storage."""

    JSON = "json"          # one JSON file per response (default)
    SYSLOG = "syslog"      # forward to the local syslog daemon
    JOURNALD = "journald"  # forward to the systemd journal (Linux only)
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
@dataclass
class LogEntry:
    """Represents a log entry to be written."""
    # ISO-8601 timestamp of when the response was logged.
    timestamp: str
    # Agent that produced the response (standardized field name).
    agent: str
    # Claude session identifier used to group related entries.
    session_id: str
    # Brief summary of the originating request (standardized field name).
    request: str
    # Full response content (standardized field name).
    response: str
    # Caller-supplied extra context (model, agent, etc.).
    metadata: Dict[str, Any]
    # Microsecond component of the timestamp; internal only — stripped
    # from the JSON payload before it is written to disk.
    microseconds: int
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
class AsyncSessionLogger:
    """
    High-performance async logger with timestamp-based filenames.

    Features:
    - Non-blocking async writes with background queue processing
    - Timestamp-based filenames to eliminate lookup overhead
    - Configurable log formats (JSON, syslog, journald)
    - Fire-and-forget pattern for zero latency impact
    - Graceful degradation on errors
    """

    def __init__(
        self,
        base_dir: Optional[Path] = None,
        log_format: Optional[LogFormat] = None,
        max_queue_size: Optional[int] = None,
        enable_async: Optional[bool] = None,
        enable_compression: Optional[bool] = None,
        config: Optional[Config] = None
    ):
        """
        Initialize the async session logger.

        Args:
            base_dir: Base directory for responses (overrides config)
            log_format: Format to use for logging (overrides config)
            max_queue_size: Maximum queue size for async writes (overrides config)
            enable_async: Enable async writing (overrides config)
            enable_compression: Enable gzip compression for JSON logs (overrides config)
            config: Configuration instance to use (creates new if not provided)
        """
        # Load configuration from YAML file or use provided config.
        if config is None:
            config = Config()
        self.config = config

        # All response-logging settings live under this config section.
        response_config = self.config.get('response_logging', {})

        # Explicit constructor arguments always win over configuration values.
        self.base_dir = Path(base_dir or
                             response_config.get('session_directory', '.claude-mpm/responses'))

        # Map the configured format string onto the LogFormat enum,
        # defaulting to JSON for unknown values.
        format_str = response_config.get('format', 'json').lower()
        if log_format is not None:
            self.log_format = log_format
        elif format_str == 'syslog':
            self.log_format = LogFormat.SYSLOG
        elif format_str == 'journald':
            self.log_format = LogFormat.JOURNALD
        else:
            self.log_format = LogFormat.JSON

        self.max_queue_size = (max_queue_size if max_queue_size is not None
                               else response_config.get('max_queue_size', 10000))

        # Async mode: explicit parameter > CLAUDE_USE_ASYNC_LOG env var
        # (deprecated, kept for backward compatibility) > configuration.
        if enable_async is not None:
            self.enable_async = enable_async
        else:
            self.enable_async = response_config.get('use_async', True)
            if os.environ.get('CLAUDE_USE_ASYNC_LOG'):
                self.enable_async = os.environ.get('CLAUDE_USE_ASYNC_LOG', 'true').lower() == 'true'

        # Debug sync mode forces synchronous writes for easier debugging.
        if response_config.get('debug_sync', False) or os.environ.get('CLAUDE_LOG_SYNC', '').lower() == 'true':
            logger.info("Debug sync mode enabled - forcing synchronous logging")
            self.enable_async = False

        self.enable_compression = (enable_compression if enable_compression is not None
                                   else response_config.get('enable_compression', False))

        # Create the base directory up front so the first write cannot fail on it.
        self.base_dir.mkdir(parents=True, exist_ok=True)

        # Session management.
        self.session_id = self._get_claude_session_id()

        # Async infrastructure: a bounded queue drained by one daemon thread.
        self._queue: Queue = Queue(maxsize=self.max_queue_size)
        self._worker_thread: Optional[Thread] = None
        self._shutdown = False
        self._lock = Lock()  # guards self.stats and worker start-up

        # Statistics (all mutations happen under self._lock).
        self.stats = {
            "logged": 0,
            "queued": 0,
            "dropped": 0,
            "errors": 0,
            "avg_write_time_ms": 0.0
        }

        # Initialize format-specific handlers (may fall back to JSON on failure).
        self._init_format_handler()

        # Start background worker if async enabled.
        if self.enable_async:
            self._start_worker()

    def _get_claude_session_id(self) -> str:
        """Get or generate a Claude session ID.

        Checks well-known environment variables first; otherwise falls back
        to a timestamp-derived identifier.
        """
        # Check environment variables in order of preference.
        for env_var in ['CLAUDE_SESSION_ID', 'ANTHROPIC_SESSION_ID', 'SESSION_ID']:
            session_id = os.environ.get(env_var)
            if session_id:
                logger.info(f"Using session ID from {env_var}: {session_id}")
                return session_id

        # Generate timestamp-based session ID.
        session_id = datetime.now().strftime('%Y%m%d_%H%M%S')
        logger.info(f"Generated session ID: {session_id}")
        return session_id

    def _init_format_handler(self):
        """Initialize format-specific logging handlers.

        On any failure (no syslog socket, systemd bindings missing) the
        logger degrades gracefully to the JSON file format.
        """
        if self.log_format == LogFormat.SYSLOG:
            # Setup syslog handler for ultra-fast OS-level logging.
            try:
                if sys.platform == "darwin":
                    address = "/var/run/syslog"
                elif sys.platform.startswith("linux"):
                    address = "/dev/log"
                else:
                    # No local socket available; fall back to UDP syslog.
                    address = ("localhost", 514)

                self.syslog_handler = logging.handlers.SysLogHandler(address=address)
                self.syslog_handler.setFormatter(
                    logging.Formatter('claude-mpm[%(process)d]: %(message)s')
                )
                logger.info("Initialized syslog handler")
            except Exception as e:
                logger.warning(f"Failed to init syslog, falling back to JSON: {e}")
                self.log_format = LogFormat.JSON

        elif self.log_format == LogFormat.JOURNALD:
            # Use systemd journal for Linux systems.
            try:
                from systemd.journal import JournalHandler
                self.journal_handler = JournalHandler()
                self.journal_handler.setFormatter(
                    logging.Formatter('%(message)s')
                )
                logger.info("Initialized journald handler")
            except ImportError:
                logger.warning("systemd not available, falling back to JSON")
                self.log_format = LogFormat.JSON

    def _start_worker(self):
        """Start the background worker thread for async writes (idempotent)."""
        with self._lock:
            if self._worker_thread is None or not self._worker_thread.is_alive():
                self._shutdown = False
                # Daemon thread so a hung writer cannot block interpreter exit.
                self._worker_thread = Thread(
                    target=self._process_queue,
                    name="AsyncLoggerWorker",
                    daemon=True
                )
                self._worker_thread.start()
                logger.debug("Started async logger worker thread")

    def _process_queue(self):
        """Background worker loop: drain the queue and write entries.

        FIX: the original caught a bare ``Exception`` around ``Queue.get``
        and told the timeout apart from real errors by string-matching
        ``"Empty"`` against the exception type name. We now catch
        ``queue.Empty`` explicitly, which is both correct and cheaper.
        """
        from queue import Empty  # not imported at module level; local to the worker

        write_times = []  # rolling window of recent write durations (ms)

        while not self._shutdown:
            try:
                # Short timeout so shutdown requests are noticed promptly.
                entry = self._queue.get(timeout=0.1)
            except Empty:
                continue  # nothing queued; re-check the shutdown flag

            try:
                # Time the write so we can report an average latency.
                start_time = time.perf_counter()
                self._write_entry(entry)
                write_time = (time.perf_counter() - start_time) * 1000

                # Update statistics over the last 100 writes only.
                write_times.append(write_time)
                if len(write_times) > 100:
                    write_times = write_times[-100:]

                with self._lock:
                    self.stats["logged"] += 1
                    self.stats["avg_write_time_ms"] = sum(write_times) / len(write_times)
            except Exception as e:
                logger.error(f"Error in async worker: {e}", exc_info=True)
                with self._lock:
                    self.stats["errors"] += 1

    def _write_entry(self, entry: LogEntry):
        """Write a log entry to disk or system log.

        Errors are swallowed (logged and counted) by design: logging must
        never crash the caller.
        """
        try:
            if self.log_format == LogFormat.JSON:
                self._write_json_entry(entry)
            elif self.log_format == LogFormat.SYSLOG:
                self._write_syslog_entry(entry)
            elif self.log_format == LogFormat.JOURNALD:
                self._write_journald_entry(entry)
        except Exception as e:
            logger.error(f"Failed to write log entry: {e}", exc_info=True)
            with self._lock:
                self.stats["errors"] += 1

    def _generate_filename(self, entry: LogEntry) -> str:
        """
        Generate a flat filename with session ID, agent, and timestamp.

        Args:
            entry: Log entry with session, agent, and timestamp info

        Returns:
            Filename in format: [session_id]-[agent]-timestamp.json
            (with a ``.gz`` suffix appended when compression is enabled)
        """
        # Format timestamp for filename (strip chars unfriendly to filenames).
        timestamp_str = entry.timestamp.replace(':', '').replace('-', '').replace('.', '_')

        # Create filename: session_id-agent-timestamp.json
        filename = f"{entry.session_id}-{entry.agent}-{timestamp_str}.json"
        if self.enable_compression:
            filename += ".gz"
        return filename

    def _write_json_entry(self, entry: LogEntry):
        """Write entry as JSON file with timestamp-based filename."""
        # Ensure base directory exists (flat structure, no subdirs);
        # re-checked per write in case it was removed at runtime.
        self.base_dir.mkdir(parents=True, exist_ok=True)

        # Generate flat filename.
        filename = self._generate_filename(entry)
        file_path = self.base_dir / filename

        # Prepare data (exclude microseconds field which is internal only).
        data = asdict(entry)
        data.pop('microseconds', None)

        # Write file, optionally gzip-compressed.
        if self.enable_compression:
            import gzip
            with gzip.open(file_path, 'wt', encoding='utf-8') as f:
                json.dump(data, f, indent=2, ensure_ascii=False)
        else:
            with open(file_path, 'w', encoding='utf-8') as f:
                json.dump(data, f, indent=2, ensure_ascii=False)

        logger.debug(f"Wrote log entry to {file_path}")

    def _write_syslog_entry(self, entry: LogEntry):
        """Write entry to syslog for OS-level performance.

        Only the first 100 chars of the request and the response *length*
        are forwarded — syslog is not meant to carry full payloads.
        """
        if hasattr(self, 'syslog_handler'):
            # Format as structured log message with standardized field names.
            msg = (
                f"agent={entry.agent} "
                f"session={entry.session_id} "
                f"request=\"{entry.request[:100]}\" "
                f"response_len={len(entry.response)} "
                f"metadata={json.dumps(entry.metadata)}"
            )

            record = logging.LogRecord(
                name="claude-mpm",
                level=logging.INFO,
                pathname="",
                lineno=0,
                msg=msg,
                args=(),
                exc_info=None
            )

            self.syslog_handler.emit(record)

    def _write_journald_entry(self, entry: LogEntry):
        """Write entry to the systemd journal with structured fields."""
        if hasattr(self, 'journal_handler'):
            # Create structured journal entry with standardized field names.
            record = logging.LogRecord(
                name="claude-mpm",
                level=logging.INFO,
                pathname="",
                lineno=0,
                msg=f"Claude MPM Response: {entry.request[:100]}",
                args=(),
                exc_info=None
            )

            # Add structured fields with standardized names; journald
            # exposes these as queryable uppercase keys.
            record.__dict__.update({
                'AGENT': entry.agent,
                'SESSION_ID': entry.session_id,
                'REQUEST': entry.request,
                'RESPONSE_LENGTH': len(entry.response),
                'METADATA': json.dumps(entry.metadata)
            })

            self.journal_handler.emit(record)

    def log_response(
        self,
        request_summary: str,
        response_content: str,
        metadata: Optional[Dict[str, Any]] = None,
        agent: Optional[str] = None
    ) -> bool:
        """
        Log a response with fire-and-forget async pattern.

        Args:
            request_summary: Brief summary of the request
            response_content: The full response content
            metadata: Optional metadata (agent name, model, etc.)
            agent: Optional agent name (overrides metadata)

        Returns:
            True if queued successfully, False if dropped
        """
        # Extract agent name from parameter, metadata, or use default;
        # normalized (lowercase, underscores) so it is filename-safe.
        agent_name = "unknown"
        if agent:
            agent_name = agent.replace(" ", "_").lower()
        elif metadata and "agent" in metadata:
            agent_name = metadata["agent"].replace(" ", "_").lower()

        # Create timestamp with microsecond precision.
        now = datetime.now()
        timestamp = now.isoformat()
        microseconds = now.microsecond

        # Create log entry with standardized field names.
        entry = LogEntry(
            timestamp=timestamp,
            agent=agent_name,
            session_id=self.session_id,
            request=request_summary,
            response=response_content,
            metadata=metadata or {},
            microseconds=microseconds
        )

        # Queue for async processing or write directly.
        if self.enable_async:
            try:
                self._queue.put_nowait(entry)
                with self._lock:
                    self.stats["queued"] += 1
                return True
            except Full:
                # Queue is full, drop the entry (fire-and-forget).
                logger.warning("Log queue full, dropping entry")
                with self._lock:
                    self.stats["dropped"] += 1
                return False
        else:
            # Synchronous write for debugging.
            self._write_entry(entry)
            return True

    def flush(self, timeout: float = 5.0) -> bool:
        """
        Flush pending log entries with timeout.

        Note: this waits for the queue to drain; an entry the worker has
        just dequeued may still be mid-write when this returns.

        Args:
            timeout: Maximum time to wait for flush

        Returns:
            True if all entries flushed, False if timeout
        """
        if not self.enable_async:
            return True  # sync mode writes immediately; nothing pending

        start_time = time.time()
        while not self._queue.empty():
            if time.time() - start_time > timeout:
                logger.warning(f"Flush timeout with {self._queue.qsize()} entries remaining")
                return False
            time.sleep(0.01)

        return True

    def shutdown(self, timeout: float = 5.0):
        """
        Gracefully shutdown the logger.

        FIX: final statistics are now read via get_stats() (under the
        lock) instead of touching self.stats unsynchronized.

        Args:
            timeout: Maximum time to wait for shutdown
        """
        if self.enable_async:
            logger.info("Shutting down async logger")

            # Signal shutdown; the worker polls this flag every 100 ms.
            self._shutdown = True

            # Wait for worker to finish.
            if self._worker_thread and self._worker_thread.is_alive():
                self._worker_thread.join(timeout)

                if self._worker_thread.is_alive():
                    logger.warning("Worker thread did not shutdown cleanly")

            # Log final statistics (thread-safe snapshot).
            logger.info(f"Logger stats: {self.get_stats()}")

    def get_stats(self) -> Dict[str, Any]:
        """Get a thread-safe snapshot of logger statistics."""
        with self._lock:
            return self.stats.copy()

    def set_session_id(self, session_id: str):
        """Set a new session ID (used for subsequent filenames)."""
        self.session_id = session_id
        logger.info(f"Session ID updated to: {session_id}")

    def is_enabled(self) -> bool:
        """Check if logging is enabled."""
        return True  # Always enabled in this implementation
|
|
475
|
+
|
|
476
|
+
|
|
477
|
+
# Singleton instance with lazy initialization.
# Created on first call to get_async_logger(); later calls ignore their
# arguments and return the existing instance.
_logger_instance: Optional[AsyncSessionLogger] = None
# Guards creation of the singleton in get_async_logger().
_logger_lock = Lock()
|
|
480
|
+
|
|
481
|
+
|
|
482
|
+
def get_async_logger(
    log_format: Optional[LogFormat] = None,
    enable_async: Optional[bool] = None,
    config: Optional[Config] = None
) -> AsyncSessionLogger:
    """
    Get the singleton async logger instance.

    The instance is created lazily on first call; subsequent calls return
    the same object and ignore the arguments.

    Args:
        log_format: Optional log format override
        enable_async: Enable async mode override
        config: Optional configuration instance to use

    Returns:
        The shared AsyncSessionLogger instance
    """
    global _logger_instance

    with _logger_lock:
        if _logger_instance is not None:
            return _logger_instance

        # First call: resolve configuration and build the singleton.
        cfg = config if config is not None else Config()
        response_config = cfg.get('response_logging', {})

        if log_format is None:
            # Configuration value, unless the deprecated env var overrides it.
            chosen = response_config.get('format', 'json').lower()
            format_env = os.environ.get('CLAUDE_LOG_FORMAT', '').lower()
            if format_env:
                logger.info(f"Using CLAUDE_LOG_FORMAT environment variable (deprecated): {format_env}")
                chosen = format_env
            log_format = {
                'syslog': LogFormat.SYSLOG,
                'journald': LogFormat.JOURNALD,
            }.get(chosen, LogFormat.JSON)

        if enable_async is None:
            # Configuration takes precedence, then the deprecated env var.
            enable_async = response_config.get('use_async', True)
            if os.environ.get('CLAUDE_USE_ASYNC_LOG'):
                env_async = os.environ.get('CLAUDE_USE_ASYNC_LOG', 'true').lower() == 'true'
                logger.info(f"Using CLAUDE_USE_ASYNC_LOG environment variable (deprecated): {env_async}")
                enable_async = env_async

        # Debug sync mode overrides everything.
        if response_config.get('debug_sync', False) or os.environ.get('CLAUDE_LOG_SYNC', '').lower() == 'true':
            if os.environ.get('CLAUDE_LOG_SYNC'):
                logger.info("Using CLAUDE_LOG_SYNC environment variable (deprecated)")
            enable_async = False

        _logger_instance = AsyncSessionLogger(
            log_format=log_format,
            enable_async=enable_async,
            config=cfg
        )
        return _logger_instance
|
|
551
|
+
|
|
552
|
+
|
|
553
|
+
def log_response_async(
    request_summary: str,
    response_content: str,
    metadata: Optional[Dict[str, Any]] = None,
    agent: Optional[str] = None
) -> bool:
    """
    Convenience function for async response logging.

    Delegates to the shared singleton returned by get_async_logger().

    Args:
        request_summary: Brief summary of the request
        response_content: The full response content
        metadata: Optional metadata
        agent: Optional agent name

    Returns:
        True if logged/queued successfully
    """
    return get_async_logger().log_response(
        request_summary, response_content, metadata, agent
    )
|
|
573
|
+
|
|
574
|
+
|
|
575
|
+
# Cleanup on module unload.
import atexit


def _cleanup():
    """Shut down the singleton logger (if created) at interpreter exit."""
    global _logger_instance
    if _logger_instance is not None:
        _logger_instance.shutdown()


atexit.register(_cleanup)
|