claude-mpm 4.1.2__py3-none-any.whl → 4.1.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- claude_mpm/VERSION +1 -1
- claude_mpm/agents/BASE_AGENT_TEMPLATE.md +16 -19
- claude_mpm/agents/MEMORY.md +21 -49
- claude_mpm/agents/templates/OPTIMIZATION_REPORT.md +156 -0
- claude_mpm/agents/templates/api_qa.json +36 -116
- claude_mpm/agents/templates/backup/data_engineer_agent_20250726_234551.json +42 -9
- claude_mpm/agents/templates/backup/documentation_agent_20250726_234551.json +29 -6
- claude_mpm/agents/templates/backup/engineer_agent_20250726_234551.json +34 -6
- claude_mpm/agents/templates/backup/ops_agent_20250726_234551.json +41 -9
- claude_mpm/agents/templates/backup/qa_agent_20250726_234551.json +30 -8
- claude_mpm/agents/templates/backup/research_agent_2025011_234551.json +2 -2
- claude_mpm/agents/templates/backup/research_agent_20250726_234551.json +29 -6
- claude_mpm/agents/templates/backup/research_memory_efficient.json +2 -2
- claude_mpm/agents/templates/backup/security_agent_20250726_234551.json +41 -9
- claude_mpm/agents/templates/backup/version_control_agent_20250726_234551.json +23 -7
- claude_mpm/agents/templates/code_analyzer.json +18 -36
- claude_mpm/agents/templates/data_engineer.json +43 -14
- claude_mpm/agents/templates/documentation.json +55 -74
- claude_mpm/agents/templates/engineer.json +57 -40
- claude_mpm/agents/templates/imagemagick.json +7 -2
- claude_mpm/agents/templates/memory_manager.json +1 -1
- claude_mpm/agents/templates/ops.json +36 -4
- claude_mpm/agents/templates/project_organizer.json +23 -71
- claude_mpm/agents/templates/qa.json +34 -2
- claude_mpm/agents/templates/refactoring_engineer.json +9 -5
- claude_mpm/agents/templates/research.json +36 -4
- claude_mpm/agents/templates/security.json +29 -2
- claude_mpm/agents/templates/ticketing.json +3 -3
- claude_mpm/agents/templates/vercel_ops_agent.json +2 -2
- claude_mpm/agents/templates/version_control.json +28 -2
- claude_mpm/agents/templates/web_qa.json +38 -151
- claude_mpm/agents/templates/web_ui.json +2 -2
- claude_mpm/cli/commands/agent_manager.py +221 -1
- claude_mpm/cli/commands/agents.py +556 -1009
- claude_mpm/cli/commands/memory.py +248 -927
- claude_mpm/cli/commands/run.py +139 -484
- claude_mpm/cli/parsers/agent_manager_parser.py +34 -0
- claude_mpm/cli/startup_logging.py +76 -0
- claude_mpm/core/agent_registry.py +6 -10
- claude_mpm/core/framework_loader.py +205 -595
- claude_mpm/core/log_manager.py +49 -1
- claude_mpm/core/logging_config.py +2 -4
- claude_mpm/hooks/claude_hooks/event_handlers.py +7 -117
- claude_mpm/hooks/claude_hooks/hook_handler.py +91 -755
- claude_mpm/hooks/claude_hooks/hook_handler_original.py +1040 -0
- claude_mpm/hooks/claude_hooks/hook_handler_refactored.py +347 -0
- claude_mpm/hooks/claude_hooks/services/__init__.py +13 -0
- claude_mpm/hooks/claude_hooks/services/connection_manager.py +190 -0
- claude_mpm/hooks/claude_hooks/services/duplicate_detector.py +106 -0
- claude_mpm/hooks/claude_hooks/services/state_manager.py +282 -0
- claude_mpm/hooks/claude_hooks/services/subagent_processor.py +374 -0
- claude_mpm/services/agents/deployment/agent_deployment.py +42 -454
- claude_mpm/services/agents/deployment/base_agent_locator.py +132 -0
- claude_mpm/services/agents/deployment/deployment_results_manager.py +185 -0
- claude_mpm/services/agents/deployment/single_agent_deployer.py +315 -0
- claude_mpm/services/agents/memory/agent_memory_manager.py +42 -508
- claude_mpm/services/agents/memory/memory_categorization_service.py +165 -0
- claude_mpm/services/agents/memory/memory_file_service.py +103 -0
- claude_mpm/services/agents/memory/memory_format_service.py +201 -0
- claude_mpm/services/agents/memory/memory_limits_service.py +99 -0
- claude_mpm/services/agents/registry/__init__.py +1 -1
- claude_mpm/services/cli/__init__.py +18 -0
- claude_mpm/services/cli/agent_cleanup_service.py +407 -0
- claude_mpm/services/cli/agent_dependency_service.py +395 -0
- claude_mpm/services/cli/agent_listing_service.py +463 -0
- claude_mpm/services/cli/agent_output_formatter.py +605 -0
- claude_mpm/services/cli/agent_validation_service.py +589 -0
- claude_mpm/services/cli/dashboard_launcher.py +424 -0
- claude_mpm/services/cli/memory_crud_service.py +617 -0
- claude_mpm/services/cli/memory_output_formatter.py +604 -0
- claude_mpm/services/cli/session_manager.py +513 -0
- claude_mpm/services/cli/socketio_manager.py +498 -0
- claude_mpm/services/cli/startup_checker.py +370 -0
- claude_mpm/services/core/cache_manager.py +311 -0
- claude_mpm/services/core/memory_manager.py +637 -0
- claude_mpm/services/core/path_resolver.py +498 -0
- claude_mpm/services/core/service_container.py +520 -0
- claude_mpm/services/core/service_interfaces.py +436 -0
- claude_mpm/services/diagnostics/checks/agent_check.py +65 -19
- claude_mpm/services/memory/router.py +116 -10
- {claude_mpm-4.1.2.dist-info → claude_mpm-4.1.4.dist-info}/METADATA +1 -1
- {claude_mpm-4.1.2.dist-info → claude_mpm-4.1.4.dist-info}/RECORD +86 -55
- claude_mpm/cli/commands/run_config_checker.py +0 -159
- {claude_mpm-4.1.2.dist-info → claude_mpm-4.1.4.dist-info}/WHEEL +0 -0
- {claude_mpm-4.1.2.dist-info → claude_mpm-4.1.4.dist-info}/entry_points.txt +0 -0
- {claude_mpm-4.1.2.dist-info → claude_mpm-4.1.4.dist-info}/licenses/LICENSE +0 -0
- {claude_mpm-4.1.2.dist-info → claude_mpm-4.1.4.dist-info}/top_level.txt +0 -0
claude_mpm/core/framework_loader.py

@@ -4,7 +4,7 @@ import logging
 import time
 from datetime import datetime
 from pathlib import Path
-from typing import Any, Dict, Optional
+from typing import Any, Dict, Optional
 
 # Import resource handling for packaged installations
 try:
@@ -26,6 +26,32 @@ AgentRegistryAdapter = safe_import(
     "claude_mpm.core.agent_registry", "core.agent_registry", ["AgentRegistryAdapter"]
 )
 
+# Import the service container and interfaces
+try:
+    from claude_mpm.services.core.cache_manager import CacheManager
+    from claude_mpm.services.core.memory_manager import MemoryManager
+    from claude_mpm.services.core.path_resolver import PathResolver
+    from claude_mpm.services.core.service_container import (
+        ServiceContainer,
+        get_global_container,
+    )
+    from claude_mpm.services.core.service_interfaces import (
+        ICacheManager,
+        IMemoryManager,
+        IPathResolver,
+    )
+except ImportError:
+    # Fallback for development environments
+    from ..services.core.cache_manager import CacheManager
+    from ..services.core.memory_manager import MemoryManager
+    from ..services.core.path_resolver import PathResolver
+    from ..services.core.service_container import ServiceContainer, get_global_container
+    from ..services.core.service_interfaces import (
+        ICacheManager,
+        IMemoryManager,
+        IPathResolver,
+    )
+
 
 class FrameworkLoader:
     """
@@ -71,7 +97,10 @@ class FrameworkLoader:
     """
 
     def __init__(
-        self,
+        self,
+        framework_path: Optional[Path] = None,
+        agents_dir: Optional[Path] = None,
+        service_container: Optional[ServiceContainer] = None,
     ):
         """
         Initialize framework loader.
@@ -79,29 +108,58 @@ class FrameworkLoader:
         Args:
             framework_path: Explicit path to framework (auto-detected if None)
             agents_dir: Custom agents directory (overrides framework agents)
+            service_container: Optional service container for dependency injection
         """
         self.logger = get_logger("framework_loader")
-        self.framework_path = framework_path or self._detect_framework_path()
         self.agents_dir = agents_dir
         self.framework_version = None
         self.framework_last_modified = None
 
-        #
-        self.
-
-
-        self.
-
-
-
-
-
-
-
-
-        self.
-
-
+        # Use provided container or get global container
+        self.container = service_container or get_global_container()
+
+        # Register services if not already registered
+        if not self.container.is_registered(ICacheManager):
+            self.container.register(ICacheManager, CacheManager, True)  # singleton=True
+
+        if not self.container.is_registered(IPathResolver):
+            # PathResolver depends on CacheManager, so resolve it first
+            cache_manager = self.container.resolve(ICacheManager)
+            path_resolver = PathResolver(cache_manager=cache_manager)
+            self.container.register_instance(IPathResolver, path_resolver)
+
+        if not self.container.is_registered(IMemoryManager):
+            # MemoryManager depends on both CacheManager and PathResolver
+            cache_manager = self.container.resolve(ICacheManager)
+            path_resolver = self.container.resolve(IPathResolver)
+            memory_manager = MemoryManager(
+                cache_manager=cache_manager, path_resolver=path_resolver
+            )
+            self.container.register_instance(IMemoryManager, memory_manager)
+
+        # Resolve services from container
+        self._cache_manager = self.container.resolve(ICacheManager)
+        self._path_resolver = self.container.resolve(IPathResolver)
+        self._memory_manager = self.container.resolve(IMemoryManager)
+
+        # Initialize framework path using PathResolver
+        self.framework_path = (
+            framework_path or self._path_resolver.detect_framework_path()
+        )
+
+        # Keep TTL constants for backward compatibility
+        # These are implementation-specific, so we use defaults if not available
+        if hasattr(self._cache_manager, "capabilities_ttl"):
+            self.CAPABILITIES_CACHE_TTL = self._cache_manager.capabilities_ttl
+            self.DEPLOYED_AGENTS_CACHE_TTL = self._cache_manager.deployed_agents_ttl
+            self.METADATA_CACHE_TTL = self._cache_manager.metadata_ttl
+            self.MEMORIES_CACHE_TTL = self._cache_manager.memories_ttl
+        else:
+            # Default TTL values
+            self.CAPABILITIES_CACHE_TTL = 60
+            self.DEPLOYED_AGENTS_CACHE_TTL = 30
+            self.METADATA_CACHE_TTL = 60
+            self.MEMORIES_CACHE_TTL = 60
 
         self.framework_content = self._load_framework_content()
 
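The constructor above wires `FrameworkLoader` to a `ServiceContainer` instead of initializing its caches inline. A minimal usage sketch follows; it only uses names that appear in this diff (`ServiceContainer`, `ICacheManager`, `resolve`, `clear_all`, and the new `service_container` keyword), and it assumes claude-mpm 4.1.4 is installed and that `ServiceContainer()` can be constructed without arguments.

```python
from claude_mpm.core.framework_loader import FrameworkLoader
from claude_mpm.services.core.service_container import ServiceContainer
from claude_mpm.services.core.service_interfaces import ICacheManager

# Passing an explicit container keeps registrations (and caches) isolated,
# e.g. in tests; omitting it falls back to get_global_container().
container = ServiceContainer()
loader = FrameworkLoader(service_container=container)

# The loader registered CacheManager/PathResolver/MemoryManager on construction,
# so the same instances can be resolved from the container afterwards.
cache_manager = container.resolve(ICacheManager)
cache_manager.clear_all()  # force a reload on the next access
```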
@@ -114,29 +172,15 @@ class FrameworkLoader:
 
     def clear_all_caches(self) -> None:
         """Clear all caches to force reload on next access."""
-        self.
-        self._agent_capabilities_cache = None
-        self._agent_capabilities_cache_time = 0
-        self._deployed_agents_cache = None
-        self._deployed_agents_cache_time = 0
-        self._agent_metadata_cache.clear()
-        self._memories_cache = None
-        self._memories_cache_time = 0
+        self._cache_manager.clear_all()
 
     def clear_agent_caches(self) -> None:
         """Clear agent-related caches (capabilities, deployed agents, metadata)."""
-        self.
-        self._agent_capabilities_cache = None
-        self._agent_capabilities_cache_time = 0
-        self._deployed_agents_cache = None
-        self._deployed_agents_cache_time = 0
-        self._agent_metadata_cache.clear()
+        self._cache_manager.clear_agent_caches()
 
     def clear_memory_caches(self) -> None:
        """Clear memory-related caches."""
-        self.
-        self._memories_cache = None
-        self._memories_cache_time = 0
+        self._cache_manager.clear_memory_caches()
 
     def _initialize_output_style(self) -> None:
         """Initialize output style management and deploy if applicable."""
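The three `clear_*` methods now delegate to the cache manager resolved from the container. The shipped implementation is the new `claude_mpm/services/core/cache_manager.py` (+311 lines, not shown in this diff); the sketch below is only an illustration of a TTL-based facade exposing the attribute and method names the loader calls elsewhere in this diff, with invented internals.

```python
import time
from typing import Any, Dict, Optional, Set, Tuple


class TTLCacheSketch:
    """Illustrative stand-in for ICacheManager; not the packaged CacheManager."""

    def __init__(self) -> None:
        # TTL attributes the loader reads for backward-compatible constants.
        self.capabilities_ttl = 60
        self.deployed_agents_ttl = 30
        self.metadata_ttl = 60
        self.memories_ttl = 60
        self._store: Dict[str, Tuple[float, Any]] = {}
        self._agent_metadata: Dict[str, Tuple[Any, float]] = {}

    def _fresh(self, key: str, ttl: int) -> Optional[Any]:
        entry = self._store.get(key)
        if entry and time.time() - entry[0] < ttl:
            return entry[1]
        return None

    def get_deployed_agents(self) -> Optional[Set[str]]:
        return self._fresh("deployed_agents", self.deployed_agents_ttl)

    def set_deployed_agents(self, agents: Set[str]) -> None:
        self._store["deployed_agents"] = (time.time(), agents)

    def get_capabilities(self) -> Optional[str]:
        return self._fresh("capabilities", self.capabilities_ttl)

    def set_capabilities(self, section: str) -> None:
        self._store["capabilities"] = (time.time(), section)

    def get_agent_metadata(self, key: str) -> Optional[Tuple[Any, float]]:
        return self._agent_metadata.get(key)

    def set_agent_metadata(self, key: str, data: Any, mtime: float) -> None:
        self._agent_metadata[key] = (data, mtime)

    def clear_agent_caches(self) -> None:
        self._store.pop("deployed_agents", None)
        self._store.pop("capabilities", None)
        self._agent_metadata.clear()

    def clear_memory_caches(self) -> None:
        # Memory caching itself moved into MemoryManager; this key is assumed.
        self._store.pop("memories", None)

    def clear_all(self) -> None:
        self._store.clear()
        self._agent_metadata.clear()
```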
@@ -210,151 +254,6 @@ class FrameworkLoader:
                 "📝 Output style content will be injected into framework instructions as fallback"
             )
 
-    def _detect_framework_path(self) -> Optional[Path]:
-        """Auto-detect claude-mpm framework using unified path management."""
-        try:
-            # Use the unified path manager for consistent detection
-            from ..core.unified_paths import DeploymentContext, get_path_manager
-
-            path_manager = get_path_manager()
-            deployment_context = path_manager._deployment_context
-
-            # Check if we're in a packaged installation
-            if deployment_context in [
-                DeploymentContext.PIP_INSTALL,
-                DeploymentContext.PIPX_INSTALL,
-                DeploymentContext.SYSTEM_PACKAGE,
-            ]:
-                self.logger.info(
-                    f"Running from packaged installation (context: {deployment_context})"
-                )
-                # Return a marker path to indicate packaged installation
-                return Path("__PACKAGED__")
-            if deployment_context == DeploymentContext.DEVELOPMENT:
-                # Development mode - use framework root
-                framework_root = path_manager.framework_root
-                if (framework_root / "src" / "claude_mpm" / "agents").exists():
-                    self.logger.info(
-                        f"Using claude-mpm development installation at: {framework_root}"
-                    )
-                    return framework_root
-            elif deployment_context == DeploymentContext.EDITABLE_INSTALL:
-                # Editable install - similar to development
-                framework_root = path_manager.framework_root
-                if (framework_root / "src" / "claude_mpm" / "agents").exists():
-                    self.logger.info(
-                        f"Using claude-mpm editable installation at: {framework_root}"
-                    )
-                    return framework_root
-
-        except Exception as e:
-            self.logger.warning(
-                f"Failed to use unified path manager for framework detection: {e}"
-            )
-            # Fall back to original detection logic
-
-        # Fallback: Original detection logic for compatibility
-        try:
-            # Check if the package is installed
-            import claude_mpm
-
-            package_file = Path(claude_mpm.__file__)
-
-            # For packaged installations, we don't need a framework path
-            # since we'll use importlib.resources to load files
-            if "site-packages" in str(package_file) or "dist-packages" in str(
-                package_file
-            ):
-                self.logger.info(
-                    f"Running from packaged installation at: {package_file.parent}"
-                )
-                # Return a marker path to indicate packaged installation
-                return Path("__PACKAGED__")
-        except ImportError:
-            pass
-
-        # Then check if we're in claude-mpm project (development mode)
-        current_file = Path(__file__)
-        if "claude-mpm" in str(current_file):
-            # We're running from claude-mpm, use its agents
-            for parent in current_file.parents:
-                if parent.name == "claude-mpm":
-                    if (parent / "src" / "claude_mpm" / "agents").exists():
-                        self.logger.info(f"Using claude-mpm at: {parent}")
-                        return parent
-                    break
-
-        # Otherwise check common locations for claude-mpm
-        candidates = [
-            # Current directory (if we're already in claude-mpm)
-            Path.cwd(),
-            # Development location
-            Path.home() / "Projects" / "claude-mpm",
-            # Current directory subdirectory
-            Path.cwd() / "claude-mpm",
-        ]
-
-        for candidate in candidates:
-            if candidate and candidate.exists():
-                # Check for claude-mpm agents directory
-                if (candidate / "src" / "claude_mpm" / "agents").exists():
-                    self.logger.info(f"Found claude-mpm at: {candidate}")
-                    return candidate
-
-        self.logger.warning("Framework not found, will use minimal instructions")
-        return None
-
-    def _get_npm_global_path(self) -> Optional[Path]:
-        """Get npm global installation path."""
-        try:
-            import subprocess
-
-            result = subprocess.run(
-                ["npm", "root", "-g"],
-                capture_output=True,
-                text=True,
-                timeout=5,
-                check=False,
-            )
-            if result.returncode == 0:
-                npm_root = Path(result.stdout.strip())
-                return npm_root / "@bobmatnyc" / "claude-multiagent-pm"
-        except:
-            pass
-        return None
-
-    def _discover_framework_paths(
-        self,
-    ) -> tuple[Optional[Path], Optional[Path], Optional[Path]]:
-        """
-        Discover agent directories based on priority.
-
-        Returns:
-            Tuple of (agents_dir, templates_dir, main_dir)
-        """
-        agents_dir = None
-        templates_dir = None
-        main_dir = None
-
-        if self.agents_dir and self.agents_dir.exists():
-            agents_dir = self.agents_dir
-            self.logger.info(f"Using custom agents directory: {agents_dir}")
-        elif self.framework_path and self.framework_path != Path("__PACKAGED__"):
-            # Prioritize templates directory over main agents directory
-            templates_dir = (
-                self.framework_path / "src" / "claude_mpm" / "agents" / "templates"
-            )
-            main_dir = self.framework_path / "src" / "claude_mpm" / "agents"
-
-            if templates_dir.exists() and any(templates_dir.glob("*.md")):
-                agents_dir = templates_dir
-                self.logger.info(f"Using agents from templates directory: {agents_dir}")
-            elif main_dir.exists() and any(main_dir.glob("*.md")):
-                agents_dir = main_dir
-                self.logger.info(f"Using agents from main directory: {agents_dir}")
-
-        return agents_dir, templates_dir, main_dir
-
     def _try_load_file(self, file_path: Path, file_type: str) -> Optional[str]:
         """
         Try to load a file with error handling.
@@ -399,31 +298,6 @@ class FrameworkLoader:
             self.logger.error(f"Failed to load {file_type}: {e}")
             return None
 
-    def _migrate_memory_file(self, old_path: Path, new_path: Path) -> None:
-        """
-        Migrate memory file from old naming convention to new.
-
-        WHY: Supports backward compatibility by automatically migrating from
-        the old {agent_id}_agent.md and {agent_id}.md formats to the new {agent_id}_memories.md format.
-
-        Args:
-            old_path: Path to the old file
-            new_path: Path to the new file
-        """
-        if old_path.exists() and not new_path.exists():
-            try:
-                # Read content from old file
-                content = old_path.read_text(encoding="utf-8")
-                # Write to new file
-                new_path.write_text(content, encoding="utf-8")
-                # Remove old file
-                old_path.unlink()
-                self.logger.info(
-                    f"Migrated memory file from {old_path.name} to {new_path.name}"
-                )
-            except Exception as e:
-                self.logger.error(f"Failed to migrate memory file {old_path.name}: {e}")
-
     def _load_instructions_file(self, content: Dict[str, Any]) -> None:
         """
         Load custom INSTRUCTIONS.md from .claude-mpm directories.
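`_migrate_memory_file` disappears from the loader; per the new `_load_actual_memories` docstring later in this diff, legacy-format migration is now the MemoryManager's job. For reference, the renaming rules the removed code applied were `PM.md → PM_memories.md`, `{agent}_agent.md → {agent}_memories.md`, and `{agent}.md → {agent}_memories.md`. The standalone function below is a hedged illustration of that mapping, not code from the package.

```python
from typing import Optional


def legacy_memory_target(filename: str) -> Optional[str]:
    """Return the new-style filename for an old-style memory file, else None."""
    if not filename.endswith(".md") or filename.endswith("_memories.md"):
        return None
    if filename == "README.md":
        return None
    stem = filename[:-3]
    if stem == "PM":
        return "PM_memories.md"
    if stem.endswith("_agent"):
        return f"{stem[:-6]}_memories.md"
    return f"{stem}_memories.md"


assert legacy_memory_target("PM.md") == "PM_memories.md"
assert legacy_memory_target("engineer_agent.md") == "engineer_memories.md"
assert legacy_memory_target("qa.md") == "qa_memories.md"
assert legacy_memory_target("engineer_memories.md") is None
```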
@@ -586,17 +460,10 @@ class FrameworkLoader:
         Returns:
             Set of agent names (file stems) that are deployed
         """
-        #
-
-        if
-
-            and current_time - self._deployed_agents_cache_time
-            < self.DEPLOYED_AGENTS_CACHE_TTL
-        ):
-            self.logger.debug(
-                f"Using cached deployed agents (age: {current_time - self._deployed_agents_cache_time:.1f}s)"
-            )
-            return self._deployed_agents_cache
+        # Try to get from cache first
+        cached = self._cache_manager.get_deployed_agents()
+        if cached is not None:
+            return cached
 
         # Cache miss or expired - perform actual scan
         self.logger.debug("Scanning for deployed agents (cache miss or expired)")
@@ -621,368 +488,28 @@ class FrameworkLoader:
         self.logger.debug(f"Total deployed agents found: {len(deployed)}")
 
         # Update cache
-        self.
-        self._deployed_agents_cache_time = current_time
+        self._cache_manager.set_deployed_agents(deployed)
 
         return deployed
 
     def _load_actual_memories(self, content: Dict[str, Any]) -> None:
         """
-        Load actual memories
-        Uses caching to avoid repeated file I/O operations.
-
-        Loading order:
-        1. User-level memories from ~/.claude-mpm/memories/ (global defaults)
-        2. Project-level memories from ./.claude-mpm/memories/ (overrides user)
+        Load actual memories using the MemoryManager service.
 
-        This
-
-        2. Agent memories from <agent>_memories.md (only if agent is deployed)
+        This method delegates all memory loading operations to the MemoryManager,
+        which handles caching, aggregation, deduplication, and legacy format migration.
 
         Args:
             content: Dictionary to update with actual memories
         """
-        #
-
-        if (
-            self._memories_cache is not None
-            and current_time - self._memories_cache_time < self.MEMORIES_CACHE_TTL
-        ):
-            cache_age = current_time - self._memories_cache_time
-            self.logger.debug(f"Using cached memories (age: {cache_age:.1f}s)")
-
-            # Apply cached memories to content
-            if "actual_memories" in self._memories_cache:
-                content["actual_memories"] = self._memories_cache["actual_memories"]
-            if "agent_memories" in self._memories_cache:
-                content["agent_memories"] = self._memories_cache["agent_memories"]
-            return
-
-        # Cache miss or expired - perform actual loading
-        self.logger.debug("Loading memories from disk (cache miss or expired)")
-
-        # Define memory directories in priority order (user first, then project)
-        user_memories_dir = Path.home() / ".claude-mpm" / "memories"
-        project_memories_dir = Path.cwd() / ".claude-mpm" / "memories"
-
-        # Check for deployed agents
-        deployed_agents = self._get_deployed_agents()
-
-        # Track loading statistics
-        loaded_count = 0
-        skipped_count = 0
-
-        # Dictionary to store aggregated memories
-        pm_memories = []
-        agent_memories_dict = {}
-
-        # Load memories from user directory first
-        if user_memories_dir.exists():
-            self.logger.info(
-                f"Loading user-level memory files from: {user_memories_dir}"
-            )
-            loaded, skipped = self._load_memories_from_directory(
-                user_memories_dir,
-                deployed_agents,
-                pm_memories,
-                agent_memories_dict,
-                "user",
-            )
-            loaded_count += loaded
-            skipped_count += skipped
-        else:
-            self.logger.debug(
-                f"No user memories directory found at: {user_memories_dir}"
-            )
-
-        # Load memories from project directory (overrides user memories)
-        if project_memories_dir.exists():
-            self.logger.info(
-                f"Loading project-level memory files from: {project_memories_dir}"
-            )
-            loaded, skipped = self._load_memories_from_directory(
-                project_memories_dir,
-                deployed_agents,
-                pm_memories,
-                agent_memories_dict,
-                "project",
-            )
-            loaded_count += loaded
-            skipped_count += skipped
-        else:
-            self.logger.debug(
-                f"No project memories directory found at: {project_memories_dir}"
-            )
-
-        # Aggregate PM memories
-        if pm_memories:
-            aggregated_pm = self._aggregate_memories(pm_memories)
-            content["actual_memories"] = aggregated_pm
-            memory_size = len(aggregated_pm.encode("utf-8"))
-            self.logger.info(
-                f"Aggregated PM memory ({memory_size:,} bytes) from {len(pm_memories)} source(s)"
-            )
-
-        # Store agent memories (already aggregated per agent)
-        if agent_memories_dict:
-            content["agent_memories"] = agent_memories_dict
-            for agent_name, memory_content in agent_memories_dict.items():
-                memory_size = len(memory_content.encode("utf-8"))
-                self.logger.debug(
-                    f"Aggregated {agent_name} memory: {memory_size:,} bytes"
-                )
-
-        # Update cache with loaded memories
-        self._memories_cache = {}
-        if "actual_memories" in content:
-            self._memories_cache["actual_memories"] = content["actual_memories"]
-        if "agent_memories" in content:
-            self._memories_cache["agent_memories"] = content["agent_memories"]
-        self._memories_cache_time = current_time
-
-        # Log detailed summary
-        if loaded_count > 0 or skipped_count > 0:
-            # Count unique agents with memories
-            agent_count = len(agent_memories_dict) if agent_memories_dict else 0
-            pm_loaded = bool(content.get("actual_memories"))
-
-            summary_parts = []
-            if pm_loaded:
-                summary_parts.append("PM memory loaded")
-            if agent_count > 0:
-                summary_parts.append(f"{agent_count} agent memories loaded")
-            if skipped_count > 0:
-                summary_parts.append(
-                    f"{skipped_count} non-deployed agent memories skipped"
-                )
-
-            self.logger.info(f"Memory loading complete: {' | '.join(summary_parts)}")
-
-        # Log deployed agents for reference
-        if len(deployed_agents) > 0:
-            self.logger.debug(
-                f"Deployed agents available for memory loading: {', '.join(sorted(deployed_agents))}"
-            )
-
-    def _load_memories_from_directory(
-        self,
-        memories_dir: Path,
-        deployed_agents: set,
-        pm_memories: list,
-        agent_memories_dict: dict,
-        source: str,
-    ) -> tuple[int, int]:
-        """
-        Load memories from a specific directory.
-
-        Args:
-            memories_dir: Directory to load memories from
-            deployed_agents: Set of deployed agent names
-            pm_memories: List to append PM memories to
-            agent_memories_dict: Dict to store agent memories
-            source: Source label ("user" or "project")
-
-        Returns:
-            Tuple of (loaded_count, skipped_count)
-        """
-        loaded_count = 0
-        skipped_count = 0
-
-        # Load PM memories (always loaded)
-        # Support migration from both old formats
-        pm_memory_path = memories_dir / "PM_memories.md"
-        old_pm_path = memories_dir / "PM.md"
-
-        # Migrate from old PM.md if needed
-        if not pm_memory_path.exists() and old_pm_path.exists():
-            try:
-                old_pm_path.rename(pm_memory_path)
-                self.logger.info("Migrated PM.md to PM_memories.md")
-            except Exception as e:
-                self.logger.error(f"Failed to migrate PM.md: {e}")
-                pm_memory_path = old_pm_path  # Fall back to old path
-        if pm_memory_path.exists():
-            loaded_content = self._try_load_file(
-                pm_memory_path, f"PM memory ({source})"
-            )
-            if loaded_content:
-                pm_memories.append(
-                    {
-                        "source": source,
-                        "content": loaded_content,
-                        "path": pm_memory_path,
-                    }
-                )
-                memory_size = len(loaded_content.encode("utf-8"))
-                self.logger.info(
-                    f"Loaded {source} PM memory: {pm_memory_path} ({memory_size:,} bytes)"
-                )
-                loaded_count += 1
-
-        # First, migrate any old format memory files to new format
-        # This handles backward compatibility for existing installations
-        for old_file in memories_dir.glob("*.md"):
-            # Skip files already in correct format and special files
-            if old_file.name.endswith("_memories.md") or old_file.name in [
-                "PM.md",
-                "README.md",
-            ]:
-                continue
-
-            # Determine new name based on old format
-            if old_file.stem.endswith("_agent"):
-                # Old format: {agent_name}_agent.md -> {agent_name}_memories.md
-                agent_name = old_file.stem[:-6]  # Remove "_agent" suffix
-                new_path = memories_dir / f"{agent_name}_memories.md"
-                if not new_path.exists():
-                    self._migrate_memory_file(old_file, new_path)
-            else:
-                # Intermediate format: {agent_name}.md -> {agent_name}_memories.md
-                agent_name = old_file.stem
-                new_path = memories_dir / f"{agent_name}_memories.md"
-                if not new_path.exists():
-                    self._migrate_memory_file(old_file, new_path)
-
-        # Load agent memories (only for deployed agents)
-        # Only process *_memories.md files to avoid README.md and other docs
-        for memory_file in memories_dir.glob("*_memories.md"):
-            # Skip PM_memories.md as we already handled it
-            if memory_file.name == "PM_memories.md":
-                continue
-
-            # Extract agent name from file (remove "_memories" suffix)
-            agent_name = memory_file.stem[:-9]  # Remove "_memories" suffix
-
-            # Check if agent is deployed
-            if agent_name in deployed_agents:
-                loaded_content = self._try_load_file(
-                    memory_file, f"agent memory: {agent_name} ({source})"
-                )
-                if loaded_content:
-                    # Store or merge agent memories
-                    if agent_name not in agent_memories_dict:
-                        agent_memories_dict[agent_name] = []
-
-                    # If it's a list, append the new memory entry
-                    if isinstance(agent_memories_dict[agent_name], list):
-                        agent_memories_dict[agent_name].append(
-                            {
-                                "source": source,
-                                "content": loaded_content,
-                                "path": memory_file,
-                            }
-                        )
-
-                    memory_size = len(loaded_content.encode("utf-8"))
-                    self.logger.info(
-                        f"Loaded {source} memory for {agent_name}: {memory_file.name} ({memory_size:,} bytes)"
-                    )
-                    loaded_count += 1
-            else:
-                # Provide more detailed logging about why the memory was skipped
-                self.logger.info(
-                    f"Skipped {source} memory: {memory_file.name} (agent '{agent_name}' not deployed)"
-                )
-                # Also log a debug message with available agents for diagnostics
-                if (
-                    agent_name.replace("_", "-") in deployed_agents
-                    or agent_name.replace("-", "_") in deployed_agents
-                ):
-                    # Detect naming mismatches
-                    alt_name = (
-                        agent_name.replace("_", "-")
-                        if "_" in agent_name
-                        else agent_name.replace("-", "_")
-                    )
-                    if alt_name in deployed_agents:
-                        self.logger.warning(
-                            f"Naming mismatch detected: Memory file uses '{agent_name}' but deployed agent is '{alt_name}'. "
-                            f"Consider renaming {memory_file.name} to {alt_name}_memories.md"
-                        )
-                skipped_count += 1
-
-        # After loading all memories for this directory, aggregate agent memories
-        for agent_name in list(agent_memories_dict.keys()):
-            if (
-                isinstance(agent_memories_dict[agent_name], list)
-                and agent_memories_dict[agent_name]
-            ):
-                # Aggregate memories for this agent
-                aggregated = self._aggregate_memories(agent_memories_dict[agent_name])
-                agent_memories_dict[agent_name] = aggregated
-
-        return loaded_count, skipped_count
-
-    def _aggregate_memories(self, memory_entries: list) -> str:
-        """
-        Aggregate multiple memory entries into a single memory string.
-
-        Strategy:
-        - Simplified to support list-based memories only
-        - Preserve all unique bullet-point items (lines starting with -)
-        - Remove exact duplicates
-        - Project-level memories take precedence over user-level
+        # Use MemoryManager to load all memories
+        memories = self._memory_manager.load_memories()
 
-
-
-
-
-
-        """
-        if not memory_entries:
-            return ""
-
-        # If only one entry, return it as-is
-        if len(memory_entries) == 1:
-            return memory_entries[0]["content"]
-
-        # Parse all memories into a simple list
-        all_items = {}  # Dict to track items and their source
-        metadata_lines = []
-        agent_id = None
-
-        for entry in memory_entries:
-            content = entry["content"]
-            source = entry["source"]
-
-            for line in content.split("\n"):
-                # Check for header to extract agent_id
-                if line.startswith("# Agent Memory:"):
-                    agent_id = line.replace("# Agent Memory:", "").strip()
-                # Check for metadata lines
-                elif line.startswith("<!-- ") and line.endswith(" -->"):
-                    # Only keep metadata from project source or if not already present
-                    if source == "project" or line not in metadata_lines:
-                        metadata_lines.append(line)
-                # Check for list items
-                elif line.strip().startswith("-"):
-                    # Normalize the item for comparison
-                    item_text = line.strip()
-                    normalized = item_text.lstrip("- ").strip().lower()
-
-                    # Add item if new or if project source overrides user source
-                    if normalized not in all_items or source == "project":
-                        all_items[normalized] = (item_text, source)
-
-        # Build aggregated content as simple list
-        lines = []
-
-        # Add header
-        if agent_id:
-            lines.append(f"# Agent Memory: {agent_id}")
-        else:
-            lines.append("# Agent Memory")
-
-        # Add latest timestamp from metadata
-        lines.append(f"<!-- Last Updated: {datetime.now().isoformat()}Z -->")
-        lines.append("")
-
-        # Add all unique items (sorted for consistency)
-        for normalized_key in sorted(all_items.keys()):
-            item_text, source = all_items[normalized_key]
-            lines.append(item_text)
-
-        return "\n".join(lines)
+        # Apply loaded memories to content
+        if "actual_memories" in memories:
+            content["actual_memories"] = memories["actual_memories"]
+        if "agent_memories" in memories:
+            content["agent_memories"] = memories["agent_memories"]
 
     def _load_single_agent(
         self, agent_file: Path
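`_load_actual_memories` shrinks from roughly 360 lines to a thin wrapper around `MemoryManager.load_memories()`. Judging only from how the result is consumed above, the return value is a dict that may contain `actual_memories` (aggregated PM memory text) and `agent_memories` (a mapping of agent name to aggregated text). The snippet below fakes that shape to show the apply step; the sample values are invented for illustration.

```python
from typing import Any, Dict


def apply_memories(content: Dict[str, Any], memories: Dict[str, Any]) -> None:
    # Mirrors the new _load_actual_memories body: copy only the keys present.
    if "actual_memories" in memories:
        content["actual_memories"] = memories["actual_memories"]
    if "agent_memories" in memories:
        content["agent_memories"] = memories["agent_memories"]


content: Dict[str, Any] = {}
apply_memories(
    content,
    {
        "actual_memories": "# Agent Memory: PM\n- Prefer ticket-driven workflows",
        "agent_memories": {"engineer": "# Agent Memory: engineer\n- Use pathlib"},
    },
)
print(sorted(content))  # ['actual_memories', 'agent_memories']
```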
@@ -1133,8 +660,10 @@ class FrameworkLoader:
         # Load actual memories from .claude-mpm/memories/PM_memories.md
         self._load_actual_memories(content)
 
-        # Discover agent directories
-        agents_dir, templates_dir, main_dir = self.
+        # Discover agent directories using PathResolver
+        agents_dir, templates_dir, main_dir = self._path_resolver.discover_agent_paths(
+            agents_dir=self.agents_dir, framework_path=self.framework_path
+        )
 
         # Load agents from discovered directory
         self._load_agents_directory(content, agents_dir, templates_dir, main_dir)
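Agent-directory discovery moves behind `IPathResolver.discover_agent_paths()`, which takes the custom `agents_dir` and the detected `framework_path` and, per the docstring of the removed `_discover_framework_paths`, returns `(agents_dir, templates_dir, main_dir)`. A hedged usage sketch, assuming claude-mpm 4.1.4 is installed; the method names and keyword arguments come from this diff:

```python
from claude_mpm.core.framework_loader import FrameworkLoader
from claude_mpm.services.core.service_container import get_global_container
from claude_mpm.services.core.service_interfaces import IPathResolver

# Constructing the loader registers PathResolver on the global container.
loader = FrameworkLoader()
resolver = get_global_container().resolve(IPathResolver)

framework_path = resolver.detect_framework_path()
agents_dir, templates_dir, main_dir = resolver.discover_agent_paths(
    agents_dir=None, framework_path=framework_path
)
print(framework_path, agents_dir)
```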
@@ -1615,18 +1144,13 @@ Extract tickets from these patterns:
         """Generate dynamic agent capabilities section from deployed agents.
         Uses caching to avoid repeated file I/O and parsing operations."""
 
-        #
+        # Try to get from cache first
+        cached_capabilities = self._cache_manager.get_capabilities()
+        if cached_capabilities is not None:
+            return cached_capabilities
+
+        # Will be used for updating cache later
         current_time = time.time()
-        if (
-            self._agent_capabilities_cache is not None
-            and current_time - self._agent_capabilities_cache_time
-            < self.CAPABILITIES_CACHE_TTL
-        ):
-            cache_age = current_time - self._agent_capabilities_cache_time
-            self.logger.debug(
-                f"Using cached agent capabilities (age: {cache_age:.1f}s)"
-            )
-            return self._agent_capabilities_cache
 
         # Cache miss or expired - generate capabilities
         self.logger.debug("Generating agent capabilities (cache miss or expired)")
@@ -1674,8 +1198,7 @@ Extract tickets from these patterns:
             self.logger.warning(f"No agents found in any location: {agents_dirs}")
             result = self._get_fallback_capabilities()
             # Cache the fallback result too
-            self.
-            self._agent_capabilities_cache_time = current_time
+            self._cache_manager.set_capabilities(result)
             return result
 
         # Log agent collection summary
@@ -1700,8 +1223,7 @@ Extract tickets from these patterns:
         if not deployed_agents:
             result = self._get_fallback_capabilities()
             # Cache the fallback result
-            self.
-            self._agent_capabilities_cache_time = current_time
+            self._cache_manager.set_capabilities(result)
            return result
 
         # Sort agents alphabetically by ID
@@ -1761,6 +1283,14 @@ Extract tickets from these patterns:
             if agent.get("model") and agent["model"] != "opus":
                 section += f"- **Model**: {agent['model']}\n"
 
+            # Add memory routing information if available
+            if agent.get("memory_routing"):
+                memory_routing = agent["memory_routing"]
+                if memory_routing.get("description"):
+                    section += (
+                        f"- **Memory Routing**: {memory_routing['description']}\n"
+                    )
+
         # Add simple Context-Aware Agent Selection
         section += "\n## Context-Aware Agent Selection\n\n"
         section += (
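Each deployed agent's entry in the generated capabilities section can now carry a memory-routing line taken from the agent's `memory_routing.description` metadata. The snippet below reproduces just that rendering step with an invented agent record:

```python
# Only the "memory_routing" -> "description" lookup comes from the diff;
# the sample agent data is hypothetical.
agent = {
    "id": "engineer",
    "model": "sonnet",
    "memory_routing": {"description": "Implementation patterns and coding standards"},
}

section = ""
if agent.get("model") and agent["model"] != "opus":
    section += f"- **Model**: {agent['model']}\n"
if agent.get("memory_routing"):
    memory_routing = agent["memory_routing"]
    if memory_routing.get("description"):
        section += f"- **Memory Routing**: {memory_routing['description']}\n"

print(section)
# - **Model**: sonnet
# - **Memory Routing**: Implementation patterns and coding standards
```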
@@ -1777,8 +1307,7 @@ Extract tickets from these patterns:
         section += f"\n**Total Available Agents**: {len(deployed_agents)}\n"
 
         # Cache the generated capabilities
-        self.
-        self._agent_capabilities_cache_time = current_time
+        self._cache_manager.set_capabilities(section)
         self.logger.debug(
             f"Cached agent capabilities section ({len(section)} chars)"
         )
@@ -1806,14 +1335,12 @@ Extract tickets from these patterns:
         file_mtime = agent_file.stat().st_mtime
         current_time = time.time()
 
-        #
-
-
+        # Try to get from cache first
+        cached_result = self._cache_manager.get_agent_metadata(cache_key)
+        if cached_result is not None:
+            cached_data, cached_mtime = cached_result
             # Use cache if file hasn't been modified and cache isn't too old
-            if
-                cached_mtime == file_mtime
-                and current_time - cached_mtime < self.METADATA_CACHE_TTL
-            ):
+            if cached_mtime == file_mtime:
                 self.logger.debug(f"Using cached metadata for {agent_file.name}")
                 return cached_data
 
@@ -1865,8 +1392,16 @@ Extract tickets from these patterns:
             if routing_data:
                 agent_data["routing"] = routing_data
 
+            # Try to load memory routing metadata from JSON template if not in YAML frontmatter
+            if "memory_routing" not in agent_data:
+                memory_routing_data = self._load_memory_routing_from_template(
+                    agent_file.stem
+                )
+                if memory_routing_data:
+                    agent_data["memory_routing"] = memory_routing_data
+
             # Cache the parsed metadata
-            self.
+            self._cache_manager.set_agent_metadata(cache_key, agent_data, file_mtime)
 
             return agent_data
 
@@ -1874,6 +1409,81 @@ Extract tickets from these patterns:
             self.logger.debug(f"Could not parse metadata from {agent_file}: {e}")
             return None
 
+    def _load_memory_routing_from_template(
+        self, agent_name: str
+    ) -> Optional[Dict[str, Any]]:
+        """Load memory routing metadata from agent JSON template.
+
+        Args:
+            agent_name: Name of the agent (stem of the file)
+
+        Returns:
+            Dictionary with memory routing metadata or None if not found
+        """
+        try:
+            import json
+
+            # Check if we have a framework path
+            if not self.framework_path or self.framework_path == Path("__PACKAGED__"):
+                # For packaged installations, try to load from package resources
+                if files:
+                    try:
+                        templates_package = files("claude_mpm.agents.templates")
+                        template_file = templates_package / f"{agent_name}.json"
+
+                        if template_file.is_file():
+                            template_content = template_file.read_text()
+                            template_data = json.loads(template_content)
+                            return template_data.get("memory_routing")
+                    except Exception as e:
+                        self.logger.debug(
+                            f"Could not load memory routing from packaged template for {agent_name}: {e}"
+                        )
+                return None
+
+            # For development mode, load from filesystem
+            templates_dir = (
+                self.framework_path / "src" / "claude_mpm" / "agents" / "templates"
+            )
+            template_file = templates_dir / f"{agent_name}.json"
+
+            if template_file.exists():
+                with open(template_file) as f:
+                    template_data = json.load(f)
+                    return template_data.get("memory_routing")
+
+            # Also check for variations in naming (underscore vs dash)
+            # Handle common naming variations between deployed .md files and .json templates
+            # Remove duplicates by using a set
+            alternative_names = list(
+                {
+                    agent_name.replace("-", "_"),  # api-qa -> api_qa
+                    agent_name.replace("_", "-"),  # api_qa -> api-qa
+                    agent_name.replace("-", ""),  # api-qa -> apiqa
+                    agent_name.replace("_", ""),  # api_qa -> apiqa
+                    agent_name.replace("-agent", ""),  # research-agent -> research
+                    agent_name.replace("_agent", ""),  # research_agent -> research
+                    agent_name + "_agent",  # research -> research_agent
+                    agent_name + "-agent",  # research -> research-agent
+                }
+            )
+
+            for alt_name in alternative_names:
+                if alt_name != agent_name:  # Skip the original name we already tried
+                    alt_file = templates_dir / f"{alt_name}.json"
+                    if alt_file.exists():
+                        with open(alt_file) as f:
+                            template_data = json.load(f)
+                            return template_data.get("memory_routing")
+
+            return None
+
+        except Exception as e:
+            self.logger.debug(
+                f"Could not load memory routing from template for {agent_name}: {e}"
+            )
+            return None
+
     def _load_routing_from_template(self, agent_name: str) -> Optional[Dict[str, Any]]:
         """Load routing metadata from agent JSON template.
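The new `_load_memory_routing_from_template` tolerates naming drift between deployed `.md` agents and `.json` templates by probing a set of alternative names. The standalone snippet below reproduces only that candidate-generation logic to show which template files would be tried for an agent named `api-qa`; everything outside the set rules is just a demonstration harness.

```python
from typing import List


def candidate_template_names(agent_name: str) -> List[str]:
    """Return the alternative template filenames probed after the exact name."""
    alternatives = {
        agent_name.replace("-", "_"),
        agent_name.replace("_", "-"),
        agent_name.replace("-", ""),
        agent_name.replace("_", ""),
        agent_name.replace("-agent", ""),
        agent_name.replace("_agent", ""),
        agent_name + "_agent",
        agent_name + "-agent",
    }
    # The original name is tried first by the loader; the set removes duplicates.
    return [f"{name}.json" for name in alternatives if name != agent_name]


print(sorted(candidate_template_names("api-qa")))
# ['api-qa-agent.json', 'api-qa_agent.json', 'api_qa.json', 'apiqa.json']
```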