claude-mpm 4.1.4__py3-none-any.whl → 4.1.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- claude_mpm/VERSION +1 -1
- claude_mpm/cli/commands/tickets.py +365 -784
- claude_mpm/core/output_style_manager.py +24 -0
- claude_mpm/core/unified_agent_registry.py +46 -15
- claude_mpm/services/agents/deployment/agent_discovery_service.py +12 -3
- claude_mpm/services/agents/deployment/agent_lifecycle_manager.py +172 -233
- claude_mpm/services/agents/deployment/agent_lifecycle_manager_refactored.py +575 -0
- claude_mpm/services/agents/deployment/agent_operation_service.py +573 -0
- claude_mpm/services/agents/deployment/agent_record_service.py +419 -0
- claude_mpm/services/agents/deployment/agent_state_service.py +381 -0
- claude_mpm/services/agents/deployment/multi_source_deployment_service.py +4 -2
- claude_mpm/services/infrastructure/__init__.py +31 -5
- claude_mpm/services/infrastructure/monitoring/__init__.py +43 -0
- claude_mpm/services/infrastructure/monitoring/aggregator.py +437 -0
- claude_mpm/services/infrastructure/monitoring/base.py +130 -0
- claude_mpm/services/infrastructure/monitoring/legacy.py +203 -0
- claude_mpm/services/infrastructure/monitoring/network.py +218 -0
- claude_mpm/services/infrastructure/monitoring/process.py +342 -0
- claude_mpm/services/infrastructure/monitoring/resources.py +243 -0
- claude_mpm/services/infrastructure/monitoring/service.py +367 -0
- claude_mpm/services/infrastructure/monitoring.py +67 -1030
- claude_mpm/services/project/analyzer.py +13 -4
- claude_mpm/services/project/analyzer_refactored.py +450 -0
- claude_mpm/services/project/analyzer_v2.py +566 -0
- claude_mpm/services/project/architecture_analyzer.py +461 -0
- claude_mpm/services/project/dependency_analyzer.py +462 -0
- claude_mpm/services/project/language_analyzer.py +265 -0
- claude_mpm/services/project/metrics_collector.py +410 -0
- claude_mpm/services/ticket_manager.py +5 -1
- claude_mpm/services/ticket_services/__init__.py +26 -0
- claude_mpm/services/ticket_services/crud_service.py +328 -0
- claude_mpm/services/ticket_services/formatter_service.py +290 -0
- claude_mpm/services/ticket_services/search_service.py +324 -0
- claude_mpm/services/ticket_services/validation_service.py +303 -0
- claude_mpm/services/ticket_services/workflow_service.py +244 -0
- {claude_mpm-4.1.4.dist-info → claude_mpm-4.1.5.dist-info}/METADATA +1 -1
- {claude_mpm-4.1.4.dist-info → claude_mpm-4.1.5.dist-info}/RECORD +41 -17
- {claude_mpm-4.1.4.dist-info → claude_mpm-4.1.5.dist-info}/WHEEL +0 -0
- {claude_mpm-4.1.4.dist-info → claude_mpm-4.1.5.dist-info}/entry_points.txt +0 -0
- {claude_mpm-4.1.4.dist-info → claude_mpm-4.1.5.dist-info}/licenses/LICENSE +0 -0
- {claude_mpm-4.1.4.dist-info → claude_mpm-4.1.5.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,419 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Agent Record Service - Persistence and History Management
|
|
4
|
+
=========================================================
|
|
5
|
+
|
|
6
|
+
Handles agent record persistence, history tracking, and data management.
|
|
7
|
+
Extracted from AgentLifecycleManager to follow Single Responsibility Principle.
|
|
8
|
+
|
|
9
|
+
Key Responsibilities:
|
|
10
|
+
- Save and load agent lifecycle records
|
|
11
|
+
- Manage operation history
|
|
12
|
+
- Handle data serialization
|
|
13
|
+
- Provide record queries and statistics
|
|
14
|
+
"""
|
|
15
|
+
|
|
16
|
+
import json
|
|
17
|
+
import time
|
|
18
|
+
from pathlib import Path
|
|
19
|
+
from typing import Any, Dict, List
|
|
20
|
+
|
|
21
|
+
from claude_mpm.core.base_service import BaseService
|
|
22
|
+
from claude_mpm.core.unified_paths import get_path_manager
|
|
23
|
+
from claude_mpm.services.agents.deployment.agent_state_service import (
|
|
24
|
+
AgentLifecycleRecord,
|
|
25
|
+
LifecycleState,
|
|
26
|
+
)
|
|
27
|
+
from claude_mpm.services.agents.registry.modification_tracker import ModificationTier
|
|
28
|
+
from claude_mpm.utils.config_manager import ConfigurationManager
|
|
29
|
+
from claude_mpm.utils.path_operations import path_ops
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class AgentRecordService(BaseService):
    """
    Service for managing agent lifecycle records and persistence.

    Responsibilities:
    - Persist agent records to disk
    - Load records on startup
    - Manage operation history
    - Provide record queries and statistics
    """

    def __init__(self):
        """Initialize the record service and its storage paths."""
        super().__init__("agent_record_service")

        # Configuration manager for JSON operations (cached reads)
        self.config_mgr = ConfigurationManager(cache_enabled=True)

        # Storage paths — resolve the tracking directory once instead of
        # calling get_path_manager().get_tracking_dir() per file.
        tracking_dir = get_path_manager().get_tracking_dir()
        self.records_file = tracking_dir / "lifecycle_records.json"
        self.history_file = tracking_dir / "operation_history.json"

        # Ensure tracking directory exists
        path_ops.ensure_dir(self.records_file.parent)

        self.logger.info("AgentRecordService initialized")

    def _write_json(self, path: Path, data: Any) -> None:
        """Serialize *data* to pretty-printed JSON and write it to *path*.

        Creates parent directories as needed. NOTE: ``default=str`` means any
        non-JSON-native value is silently stringified (matches prior behavior).

        Args:
            path: Destination file path.
            data: JSON-serializable payload.

        Raises:
            OSError / TypeError: propagated to the caller for handling.
        """
        json_str = json.dumps(data, indent=2, default=str)
        path.parent.mkdir(parents=True, exist_ok=True)
        with open(path, "w", encoding="utf-8") as f:
            f.write(json_str)

    async def save_records(self, records: Dict[str, AgentLifecycleRecord]) -> bool:
        """
        Save agent lifecycle records to disk.

        Args:
            records: Dictionary of agent records keyed by agent name

        Returns:
            True if successful, False otherwise
        """
        try:
            data = {
                agent_name: self._serialize_record(record)
                for agent_name, record in records.items()
            }
            self._write_json(self.records_file, data)
            self.logger.debug(f"Saved {len(records)} agent records")
            return True
        except Exception as e:
            self.logger.error(f"Failed to save agent records: {e}")
            return False

    async def load_records(self) -> Dict[str, AgentLifecycleRecord]:
        """
        Load agent lifecycle records from disk.

        Returns:
            Dictionary of agent records (empty if no file or on error)
        """
        records = {}
        try:
            if path_ops.validate_exists(self.records_file):
                data = self.config_mgr.load_json(self.records_file)
                for agent_name, record_data in data.items():
                    records[agent_name] = self._deserialize_record(record_data)
                self.logger.info(f"Loaded {len(records)} agent records")
            else:
                self.logger.debug("No existing records file found")
        except Exception as e:
            # Best-effort: a corrupt file yields whatever parsed so far.
            self.logger.warning(f"Failed to load agent records: {e}")
        return records

    async def save_history(self, history: List[Any]) -> bool:
        """
        Save operation history to disk.

        Args:
            history: List of operation results (duck-typed: must expose the
                attributes read below)

        Returns:
            True if successful, False otherwise
        """
        try:
            # Convert operation results to plain dictionaries for JSON.
            data = [
                {
                    "operation": result.operation.value,
                    "agent_name": result.agent_name,
                    "success": result.success,
                    "duration_ms": result.duration_ms,
                    "error_message": result.error_message,
                    "modification_id": result.modification_id,
                    "persistence_id": result.persistence_id,
                    "cache_invalidated": result.cache_invalidated,
                    "registry_updated": result.registry_updated,
                    "metadata": result.metadata,
                    # Timestamp of serialization, not of the operation itself.
                    "timestamp": time.time(),
                }
                for result in history
            ]
            self._write_json(self.history_file, data)
            self.logger.debug(f"Saved {len(history)} operation history entries")
            return True
        except Exception as e:
            self.logger.error(f"Failed to save operation history: {e}")
            return False

    async def load_history(self) -> List[Dict[str, Any]]:
        """
        Load operation history from disk.

        Returns:
            List of operation history entries (empty if no file or on error)
        """
        history = []
        try:
            if path_ops.validate_exists(self.history_file):
                history = self.config_mgr.load_json(self.history_file)
                self.logger.info(f"Loaded {len(history)} operation history entries")
            else:
                self.logger.debug("No existing history file found")
        except Exception as e:
            self.logger.warning(f"Failed to load operation history: {e}")
        return history

    def _serialize_record(self, record: AgentLifecycleRecord) -> Dict[str, Any]:
        """
        Serialize an AgentLifecycleRecord to a JSON-ready dictionary.

        Args:
            record: The record to serialize

        Returns:
            Serialized dictionary (enums flattened to their ``.value``)
        """
        return {
            "agent_name": record.agent_name,
            "current_state": record.current_state.value,
            "tier": record.tier.value,
            "file_path": record.file_path,
            "created_at": record.created_at,
            "last_modified": record.last_modified,
            "version": record.version,
            "modifications": record.modifications,
            "persistence_operations": record.persistence_operations,
            "backup_paths": record.backup_paths,
            "validation_status": record.validation_status,
            "validation_errors": record.validation_errors,
            "metadata": record.metadata,
        }

    def _deserialize_record(self, data: Dict[str, Any]) -> AgentLifecycleRecord:
        """
        Deserialize a dictionary into an AgentLifecycleRecord.

        Required keys (KeyError if missing): agent_name, current_state, tier,
        file_path, created_at, last_modified, version. All others default.

        Args:
            data: Dictionary to deserialize

        Returns:
            Deserialized AgentLifecycleRecord
        """
        return AgentLifecycleRecord(
            agent_name=data["agent_name"],
            current_state=LifecycleState(data["current_state"]),
            tier=ModificationTier(data["tier"]),
            file_path=data["file_path"],
            created_at=data["created_at"],
            last_modified=data["last_modified"],
            version=data["version"],
            modifications=data.get("modifications", []),
            persistence_operations=data.get("persistence_operations", []),
            backup_paths=data.get("backup_paths", []),
            validation_status=data.get("validation_status", "valid"),
            validation_errors=data.get("validation_errors", []),
            metadata=data.get("metadata", {}),
        )

    async def export_records(self, output_path: Path, format: str = "json") -> bool:
        """
        Export records to a file.

        Args:
            output_path: Path to export to
            format: Export format ("json" supported; "csv" not yet implemented)

        Returns:
            True if successful, False otherwise
        """
        try:
            if format == "json":
                # Load current records and dump their serialized form.
                records = await self.load_records()
                data = {
                    name: self._serialize_record(record)
                    for name, record in records.items()
                }
                json_str = json.dumps(data, indent=2, default=str)
                with open(output_path, "w", encoding="utf-8") as f:
                    f.write(json_str)
            elif format == "csv":
                # TODO: Implement CSV export
                self.logger.warning("CSV export not yet implemented")
                return False
            else:
                # BUGFIX: previously an unknown format fell through and was
                # reported as a successful export without writing anything.
                self.logger.error(f"Unsupported export format: {format}")
                return False

            self.logger.info(f"Exported records to {output_path}")
            return True

        except Exception as e:
            self.logger.error(f"Failed to export records: {e}")
            return False

    async def import_records(
        self, input_path: Path, merge: bool = False
    ) -> Dict[str, AgentLifecycleRecord]:
        """
        Import records from a file and persist them.

        Args:
            input_path: Path to import from
            merge: If True, merge into existing records (imported entries win);
                if False, replace the stored records entirely

        Returns:
            Imported records (empty on error)
        """
        imported = {}
        try:
            if not path_ops.validate_exists(input_path):
                self.logger.error(f"Import file not found: {input_path}")
                return imported

            # Load and deserialize records from file.
            data = self.config_mgr.load_json(input_path)
            for agent_name, record_data in data.items():
                imported[agent_name] = self._deserialize_record(record_data)

            # Persist: either merged with existing records or as replacement.
            if merge:
                existing = await self.load_records()
                existing.update(imported)
                await self.save_records(existing)
            else:
                await self.save_records(imported)

            self.logger.info(f"Imported {len(imported)} records from {input_path}")

        except Exception as e:
            self.logger.error(f"Failed to import records: {e}")

        return imported

    async def cleanup_old_records(self, days_threshold: int = 30) -> int:
        """
        Remove records older than the given age threshold.

        Args:
            days_threshold: Age threshold in days (compared against
                ``record.created_at``, assumed to be an epoch timestamp —
                consistent with ``time.time()`` used elsewhere here)

        Returns:
            Number of records removed (0 on error)
        """
        try:
            records = await self.load_records()
            original_count = len(records)

            # Keep only records younger than the threshold.
            current_time = time.time()
            threshold_seconds = days_threshold * 24 * 3600
            filtered = {
                name: record
                for name, record in records.items()
                if (current_time - record.created_at) < threshold_seconds
            }

            removed_count = original_count - len(filtered)
            if removed_count > 0:
                await self.save_records(filtered)
                self.logger.info(f"Removed {removed_count} old records")

            return removed_count

        except Exception as e:
            self.logger.error(f"Failed to cleanup old records: {e}")
            return 0

    async def get_statistics(self) -> Dict[str, Any]:
        """
        Get statistics about stored records and history.

        Returns:
            Statistics dictionary (empty dict on error)
        """
        try:
            records = await self.load_records()
            history = await self.load_history()

            stats = {
                "total_records": len(records),
                "total_history_entries": len(history),
                "records_by_state": {},
                "records_by_tier": {},
                "average_age_days": 0.0,
                "newest_record": None,
                "oldest_record": None,
            }

            if records:
                # Tally counts per state and per tier in one pass.
                for record in records.values():
                    state = record.current_state.value
                    stats["records_by_state"][state] = (
                        stats["records_by_state"].get(state, 0) + 1
                    )
                    tier = record.tier.value
                    stats["records_by_tier"][tier] = (
                        stats["records_by_tier"].get(tier, 0) + 1
                    )

                # Age statistics (records is non-empty, so no div-by-zero).
                ages = [record.age_days for record in records.values()]
                stats["average_age_days"] = sum(ages) / len(ages)

                # Oldest/newest by creation time.
                oldest = min(records.values(), key=lambda r: r.created_at)
                newest = max(records.values(), key=lambda r: r.created_at)
                stats["oldest_record"] = oldest.agent_name
                stats["newest_record"] = newest.agent_name

            return stats

        except Exception as e:
            self.logger.error(f"Failed to get statistics: {e}")
            return {}

    async def _initialize(self) -> None:
        """Initialize the record service (BaseService lifecycle hook)."""
        # Ensure storage directories exist
        path_ops.ensure_dir(self.records_file.parent)
        self.logger.info("AgentRecordService initialized")

    async def _cleanup(self) -> None:
        """Cleanup the record service (BaseService lifecycle hook)."""
        self.logger.info("AgentRecordService cleaned up")

    async def _health_check(self) -> Dict[str, bool]:
        """Perform health check on storage and configuration readiness."""
        return {
            "storage_accessible": path_ops.validate_exists(self.records_file.parent),
            "config_manager_ready": self.config_mgr is not None,
        }