claude-mpm 4.1.4__py3-none-any.whl → 4.1.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. claude_mpm/VERSION +1 -1
  2. claude_mpm/cli/commands/tickets.py +365 -784
  3. claude_mpm/core/output_style_manager.py +24 -0
  4. claude_mpm/core/unified_agent_registry.py +46 -15
  5. claude_mpm/services/agents/deployment/agent_discovery_service.py +12 -3
  6. claude_mpm/services/agents/deployment/agent_lifecycle_manager.py +172 -233
  7. claude_mpm/services/agents/deployment/agent_lifecycle_manager_refactored.py +575 -0
  8. claude_mpm/services/agents/deployment/agent_operation_service.py +573 -0
  9. claude_mpm/services/agents/deployment/agent_record_service.py +419 -0
  10. claude_mpm/services/agents/deployment/agent_state_service.py +381 -0
  11. claude_mpm/services/agents/deployment/multi_source_deployment_service.py +4 -2
  12. claude_mpm/services/infrastructure/__init__.py +31 -5
  13. claude_mpm/services/infrastructure/monitoring/__init__.py +43 -0
  14. claude_mpm/services/infrastructure/monitoring/aggregator.py +437 -0
  15. claude_mpm/services/infrastructure/monitoring/base.py +130 -0
  16. claude_mpm/services/infrastructure/monitoring/legacy.py +203 -0
  17. claude_mpm/services/infrastructure/monitoring/network.py +218 -0
  18. claude_mpm/services/infrastructure/monitoring/process.py +342 -0
  19. claude_mpm/services/infrastructure/monitoring/resources.py +243 -0
  20. claude_mpm/services/infrastructure/monitoring/service.py +367 -0
  21. claude_mpm/services/infrastructure/monitoring.py +67 -1030
  22. claude_mpm/services/project/analyzer.py +13 -4
  23. claude_mpm/services/project/analyzer_refactored.py +450 -0
  24. claude_mpm/services/project/analyzer_v2.py +566 -0
  25. claude_mpm/services/project/architecture_analyzer.py +461 -0
  26. claude_mpm/services/project/dependency_analyzer.py +462 -0
  27. claude_mpm/services/project/language_analyzer.py +265 -0
  28. claude_mpm/services/project/metrics_collector.py +410 -0
  29. claude_mpm/services/ticket_manager.py +5 -1
  30. claude_mpm/services/ticket_services/__init__.py +26 -0
  31. claude_mpm/services/ticket_services/crud_service.py +328 -0
  32. claude_mpm/services/ticket_services/formatter_service.py +290 -0
  33. claude_mpm/services/ticket_services/search_service.py +324 -0
  34. claude_mpm/services/ticket_services/validation_service.py +303 -0
  35. claude_mpm/services/ticket_services/workflow_service.py +244 -0
  36. {claude_mpm-4.1.4.dist-info → claude_mpm-4.1.5.dist-info}/METADATA +1 -1
  37. {claude_mpm-4.1.4.dist-info → claude_mpm-4.1.5.dist-info}/RECORD +41 -17
  38. {claude_mpm-4.1.4.dist-info → claude_mpm-4.1.5.dist-info}/WHEEL +0 -0
  39. {claude_mpm-4.1.4.dist-info → claude_mpm-4.1.5.dist-info}/entry_points.txt +0 -0
  40. {claude_mpm-4.1.4.dist-info → claude_mpm-4.1.5.dist-info}/licenses/LICENSE +0 -0
  41. {claude_mpm-4.1.4.dist-info → claude_mpm-4.1.5.dist-info}/top_level.txt +0 -0
claude_mpm/services/project/analyzer_v2.py
@@ -0,0 +1,566 @@
+ #!/usr/bin/env python3
+ """
+ Project Analyzer Service V2 (Refactored with SOLID Principles)
+ ===============================================================
+
+ This is the refactored version that replaces the original god class.
+ It maintains 100% backward compatibility while delegating to specialized services.
+
+ WHY: The original analyzer.py (1,012 lines) violated the Single Responsibility
+ Principle. This refactored version delegates work to focused services:
+ - LanguageAnalyzerService: Language and framework detection
+ - DependencyAnalyzerService: Dependency and package management
+ - ArchitectureAnalyzerService: Architecture and structure analysis
+ - MetricsCollectorService: Code metrics collection
+
+ MIGRATION: To use this refactored version, simply replace imports:
+ FROM: from claude_mpm.services.project.analyzer import ProjectAnalyzer
+ TO: from claude_mpm.services.project.analyzer_v2 import ProjectAnalyzer
+ """
+
+ import logging
+ import time
+ from pathlib import Path
+ from typing import Any, Dict, List, Optional
+
+ from claude_mpm.core.config import Config
+ from claude_mpm.core.interfaces import ProjectAnalyzerInterface
+ from claude_mpm.core.unified_paths import get_path_manager
+
+ # Import the data class from original
+ from .analyzer import ProjectCharacteristics
+
+ # Import refactored services
+ from .architecture_analyzer import ArchitectureAnalyzerService
+ from .dependency_analyzer import DependencyAnalyzerService
+ from .language_analyzer import LanguageAnalyzerService
+ from .metrics_collector import MetricsCollectorService
+
+
+ class ProjectAnalyzer(ProjectAnalyzerInterface):
+     """Refactored ProjectAnalyzer using service composition.
+
+     WHY: This version maintains full backward compatibility while following
+     SOLID principles through delegation to specialized services.
+
+     BENEFITS:
+     - Reduced complexity: Each service has a single responsibility
+     - Better testability: Services can be mocked independently
+     - Easier maintenance: Changes are localized to specific services
+     - Improved performance: Services can be optimized independently
+
+     COMPATIBILITY: This class provides the exact same interface as the
+     original ProjectAnalyzer, ensuring zero breaking changes.
+     """
+
+     # Keep original constants for compatibility
+     CONFIG_FILE_PATTERNS = {
+         "package.json": "node_js",
+         "requirements.txt": "python",
+         "pyproject.toml": "python",
+         "setup.py": "python",
+         "Cargo.toml": "rust",
+         "pom.xml": "java",
+         "build.gradle": "java",
+         "composer.json": "php",
+         "Gemfile": "ruby",
+         "go.mod": "go",
+         "CMakeLists.txt": "cpp",
+         "Makefile": "c_cpp",
+     }
+
+     FRAMEWORK_PATTERNS = {
+         "flask": ["from flask", "Flask(", "app.route"],
+         "django": ["from django", "DJANGO_SETTINGS", "django.contrib"],
+         "fastapi": ["from fastapi", "FastAPI(", "@app."],
+         "express": ["express()", "app.get(", "app.post("],
+         "react": ["import React", "from react", "ReactDOM"],
+         "vue": ["Vue.createApp", "new Vue(", "vue-"],
+         "angular": ["@Component", "@Injectable", "Angular"],
+         "spring": ["@SpringBootApplication", "@RestController", "Spring"],
+         "rails": ["Rails.application", "ApplicationController"],
+     }
+
+     DATABASE_PATTERNS = {
+         "postgresql": ["psycopg2", "postgresql:", "postgres:", "pg_"],
+         "mysql": ["mysql-connector", "mysql:", "MySQLdb"],
+         "sqlite": ["sqlite3", "sqlite:", ".db", ".sqlite"],
+         "mongodb": ["pymongo", "mongodb:", "mongoose"],
+         "redis": ["redis:", "redis-py", "RedisClient"],
+         "elasticsearch": ["elasticsearch:", "elastic"],
+     }
+
+     def __init__(
+         self,
+         config: Optional[Config] = None,
+         working_directory: Optional[Path] = None,
+         # Dependency injection for services (optional)
+         language_analyzer: Optional[LanguageAnalyzerService] = None,
+         dependency_analyzer: Optional[DependencyAnalyzerService] = None,
+         architecture_analyzer: Optional[ArchitectureAnalyzerService] = None,
+         metrics_collector: Optional[MetricsCollectorService] = None,
+     ):
+         """Initialize the refactored project analyzer.
+
+         Args:
+             config: Optional Config object
+             working_directory: Optional working directory path
+             language_analyzer: Optional injected language analyzer service
+             dependency_analyzer: Optional injected dependency analyzer service
+             architecture_analyzer: Optional injected architecture analyzer service
+             metrics_collector: Optional injected metrics collector service
+         """
+         self.config = config or Config()
+         self.working_directory = (
+             working_directory or get_path_manager().get_project_root()
+         )
+         self.logger = logging.getLogger(f"{__name__}.{self.__class__.__name__}")
+
+         # Initialize services (use injected or create new)
+         self.language_analyzer = language_analyzer or LanguageAnalyzerService(
+             self.working_directory
+         )
+         self.dependency_analyzer = dependency_analyzer or DependencyAnalyzerService(
+             self.working_directory
+         )
+         self.architecture_analyzer = (
+             architecture_analyzer or ArchitectureAnalyzerService(self.working_directory)
+         )
+         self.metrics_collector = metrics_collector or MetricsCollectorService(
+             self.working_directory
+         )
+
+         # Cache for analysis results
+         self._analysis_cache: Optional[ProjectCharacteristics] = None
+         self._cache_timestamp: Optional[float] = None
+         self._cache_ttl = 300  # 5 minutes
+
+     def analyze_project(self, force_refresh: bool = False) -> ProjectCharacteristics:
+         """Analyze the current project and return characteristics.
+
+         WHY: Orchestrates multiple specialized services to provide comprehensive
+         project analysis while maintaining the original interface.
+
+         Args:
+             force_refresh: If True, ignores cache and performs fresh analysis
+
+         Returns:
+             ProjectCharacteristics: Structured project analysis results
+         """
+         try:
+             # Check cache first
+             if not force_refresh and self._is_cache_valid():
+                 self.logger.debug("Using cached project analysis")
+                 return self._analysis_cache
+
+             self.logger.info(f"Analyzing project at: {self.working_directory}")
+
+             # Initialize characteristics
+             characteristics = self._create_empty_characteristics()
+
+             # Delegate to specialized services
+             self._analyze_with_language_service(characteristics)
+             self._analyze_with_dependency_service(characteristics)
+             self._analyze_with_architecture_service(characteristics)
+             self._enrich_with_metrics(characteristics)
+
+             # Apply any compatibility adjustments
+             self._ensure_backward_compatibility(characteristics)
+
+             # Cache the results
+             self._update_cache(characteristics)
+
+             self.logger.info(
+                 f"Project analysis complete: {characteristics.primary_language} project "
+                 f"with {len(characteristics.frameworks)} frameworks"
+             )
+             return characteristics
+
+         except Exception as e:
+             self.logger.error(f"Error analyzing project: {e}")
+             return self._create_empty_characteristics()
+
+     # ================================================================================
+     # Service Delegation Methods
+     # ================================================================================
+
+     def _analyze_with_language_service(
+         self, characteristics: ProjectCharacteristics
+     ) -> None:
+         """Delegate language analysis to specialized service."""
+         try:
+             characteristics.languages = self.language_analyzer.detect_languages()
+             characteristics.primary_language = (
+                 self.language_analyzer.detect_primary_language()
+             )
+             characteristics.frameworks = self.language_analyzer.detect_frameworks()
+             characteristics.code_conventions = (
+                 self.language_analyzer.analyze_code_style()
+             )
+         except Exception as e:
+             self.logger.warning(f"Error in language analysis: {e}")
+
+     def _analyze_with_dependency_service(
+         self, characteristics: ProjectCharacteristics
+     ) -> None:
+         """Delegate dependency analysis to specialized service."""
+         try:
+             characteristics.package_manager = (
+                 self.dependency_analyzer.detect_package_manager()
+             )
+
+             deps = self.dependency_analyzer.analyze_dependencies()
+             characteristics.key_dependencies = deps["production"][:20]
+             characteristics.databases = self.dependency_analyzer.detect_databases(
+                 deps["production"] + deps["development"]
+             )
+             characteristics.web_frameworks = (
+                 self.dependency_analyzer.detect_web_frameworks(deps["production"])
+             )
+
+             testing_frameworks = self.dependency_analyzer.detect_testing_frameworks(
+                 deps["development"] + deps["testing"]
+             )
+             if testing_frameworks:
+                 characteristics.testing_framework = testing_frameworks[0]
+
+             characteristics.build_tools = self.dependency_analyzer.get_build_tools()
+         except Exception as e:
+             self.logger.warning(f"Error in dependency analysis: {e}")
+
+     def _analyze_with_architecture_service(
+         self, characteristics: ProjectCharacteristics
+     ) -> None:
+         """Delegate architecture analysis to specialized service."""
+         try:
+             arch_info = self.architecture_analyzer.analyze_architecture()
+
+             characteristics.architecture_type = arch_info.architecture_type
+             characteristics.main_modules = arch_info.main_modules
+             characteristics.key_directories = arch_info.key_directories
+             characteristics.entry_points = arch_info.entry_points
+             characteristics.api_patterns = arch_info.api_patterns
+             characteristics.configuration_patterns = arch_info.configuration_patterns
+             characteristics.project_terminology = arch_info.project_terminology
+
+             design_patterns = self.architecture_analyzer.detect_design_patterns()
+             for pattern in design_patterns[:3]:
+                 pattern_name = pattern.replace("_", " ").title() + " Pattern"
+                 if pattern_name not in characteristics.code_conventions:
+                     characteristics.code_conventions.append(pattern_name)
+         except Exception as e:
+             self.logger.warning(f"Error in architecture analysis: {e}")
+
+     def _enrich_with_metrics(self, characteristics: ProjectCharacteristics) -> None:
+         """Enrich characteristics with metrics data."""
+         try:
+             metrics = self.metrics_collector.collect_metrics()
+
+             if metrics.test_files > 0:
+                 characteristics.test_patterns.append(f"{metrics.test_files} test files")
+             if metrics.test_to_code_ratio > 0:
+                 ratio_pct = int(metrics.test_to_code_ratio * 100)
+                 characteristics.test_patterns.append(
+                     f"{ratio_pct}% test coverage ratio"
+                 )
+             if metrics.test_coverage_files > 0:
+                 characteristics.test_patterns.append("Test coverage tracking")
+
+             # Find documentation files
+             doc_patterns = ["README*", "CONTRIBUTING*", "CHANGELOG*", "docs/*"]
+             doc_files = []
+             for pattern in doc_patterns:
+                 matches = list(self.working_directory.glob(pattern))
+                 doc_files.extend(
+                     [
+                         str(f.relative_to(self.working_directory))
+                         for f in matches
+                         if f.is_file()
+                     ]
+                 )
+             characteristics.documentation_files = doc_files[:10]
+
+             # Find important configs
+             config_patterns = ["*.json", "*.yaml", "*.yml", "*.toml", "*.ini", ".env*"]
+             config_files = []
+             for pattern in config_patterns:
+                 matches = list(self.working_directory.glob(pattern))
+                 config_files.extend(
+                     [
+                         str(f.relative_to(self.working_directory))
+                         for f in matches
+                         if f.is_file()
+                     ]
+                 )
+             characteristics.important_configs = config_files[:10]
+         except Exception as e:
+             self.logger.warning(f"Error collecting metrics: {e}")
+
+     def _ensure_backward_compatibility(
+         self, characteristics: ProjectCharacteristics
+     ) -> None:
+         """Ensure compatibility with original analyzer behavior."""
+         # Ensure test patterns always has "Tests in /tests/ directory" if tests exist
+         if (self.working_directory / "tests").exists():
+             pattern = "Tests in /tests/ directory"
+             if pattern not in characteristics.test_patterns:
+                 characteristics.test_patterns.insert(0, pattern)
+         elif (self.working_directory / "test").exists():
+             pattern = "Tests in /test/ directory"
+             if pattern not in characteristics.test_patterns:
+                 characteristics.test_patterns.insert(0, pattern)
+
+     # ================================================================================
+     # Backward Compatibility Methods (Delegate to services)
+     # ================================================================================
+
+     def _analyze_config_files(self, characteristics: ProjectCharacteristics) -> None:
+         """Backward compatibility method - delegates to dependency service."""
+         self._analyze_with_dependency_service(characteristics)
+
+     def _analyze_directory_structure(
+         self, characteristics: ProjectCharacteristics
+     ) -> None:
+         """Backward compatibility method - delegates to architecture service."""
+         arch_info = self.architecture_analyzer.analyze_architecture()
+         characteristics.key_directories = arch_info.key_directories
+         characteristics.main_modules = arch_info.main_modules
+         characteristics.entry_points = arch_info.entry_points
+
+     def _analyze_source_code(self, characteristics: ProjectCharacteristics) -> None:
+         """Backward compatibility method - delegates to language service."""
+         self._analyze_with_language_service(characteristics)
+
+     def _analyze_dependencies(self, characteristics: ProjectCharacteristics) -> None:
+         """Backward compatibility method - delegates to dependency service."""
+         deps = self.dependency_analyzer.analyze_dependencies()
+         characteristics.key_dependencies = deps["production"][:20]
+
+     def _analyze_testing_patterns(
+         self, characteristics: ProjectCharacteristics
+     ) -> None:
+         """Backward compatibility method - analyzes testing patterns."""
+         self._ensure_backward_compatibility(characteristics)
+
+     def _analyze_documentation(self, characteristics: ProjectCharacteristics) -> None:
+         """Backward compatibility method - finds documentation files."""
+         doc_patterns = ["README*", "CONTRIBUTING*", "CHANGELOG*", "docs/*"]
+         doc_files = []
+         for pattern in doc_patterns:
+             matches = list(self.working_directory.glob(pattern))
+             doc_files.extend(
+                 [
+                     str(f.relative_to(self.working_directory))
+                     for f in matches
+                     if f.is_file()
+                 ]
+             )
+         characteristics.documentation_files = doc_files[:10]
+
+     def _infer_architecture_type(self, characteristics: ProjectCharacteristics) -> None:
+         """Backward compatibility method - already handled by architecture service."""
+         # Architecture type is set by architecture service
+
+     def _extract_project_terminology(
+         self, characteristics: ProjectCharacteristics
+     ) -> None:
+         """Backward compatibility method - already handled by architecture service."""
+         # Terminology is extracted by architecture service
+
+     def _parse_package_json(
+         self, package_path: Path, characteristics: ProjectCharacteristics
+     ) -> None:
+         """Backward compatibility method for package.json parsing."""
+         deps = self.dependency_analyzer.analyze_dependencies()
+         characteristics.key_dependencies.extend(deps["production"][:10])
+         characteristics.web_frameworks = self.dependency_analyzer.detect_web_frameworks(
+             deps["production"]
+         )
+         characteristics.build_tools = self.dependency_analyzer.get_build_tools()
+
+     def _parse_python_dependencies(
+         self, deps_path: Path, characteristics: ProjectCharacteristics
+     ) -> None:
+         """Backward compatibility method for Python dependency parsing."""
+         deps = self.dependency_analyzer.analyze_dependencies()
+         characteristics.key_dependencies.extend(deps["production"][:10])
+         characteristics.web_frameworks = self.dependency_analyzer.detect_web_frameworks(
+             deps["production"]
+         )
+
+     def _parse_cargo_toml(
+         self, cargo_path: Path, characteristics: ProjectCharacteristics
+     ) -> None:
+         """Backward compatibility method for Cargo.toml parsing."""
+         deps = self.dependency_analyzer.analyze_dependencies()
+         characteristics.key_dependencies.extend(deps["production"][:10])
+
+     def _get_subdirectories(self, path: Path, max_depth: int = 2) -> List[str]:
+         """Backward compatibility method - get subdirectory names."""
+         subdirs = []
+         try:
+             for item in path.iterdir():
+                 if item.is_dir() and not item.name.startswith("."):
+                     subdirs.append(item.name)
+                     if max_depth > 1:
+                         for subitem in item.iterdir():
+                             if subitem.is_dir() and not subitem.name.startswith("."):
+                                 subdirs.append(f"{item.name}/{subitem.name}")
+         except PermissionError:
+             pass
+         return subdirs[:10]
+
+     # ================================================================================
+     # Interface Methods
+     # ================================================================================
+
+     def detect_technology_stack(self) -> List[str]:
+         """Detect technologies used in the project."""
+         characteristics = self.analyze_project()
+
+         technologies = []
+         technologies.extend(characteristics.languages)
+         technologies.extend(characteristics.frameworks)
+         technologies.extend(characteristics.web_frameworks)
+         technologies.extend(characteristics.databases)
+
+         if characteristics.package_manager:
+             technologies.append(characteristics.package_manager)
+         technologies.extend(characteristics.build_tools)
+
+         return list(set(technologies))
+
+     def analyze_code_patterns(self) -> Dict[str, Any]:
+         """Analyze code patterns and conventions."""
+         characteristics = self.analyze_project()
+
+         return {
+             "code_conventions": characteristics.code_conventions,
+             "test_patterns": characteristics.test_patterns,
+             "api_patterns": characteristics.api_patterns,
+             "configuration_patterns": characteristics.configuration_patterns,
+             "architecture_type": characteristics.architecture_type,
+         }
+
+     def get_project_structure(self) -> Dict[str, Any]:
+         """Get project directory structure analysis."""
+         characteristics = self.analyze_project()
+
+         return {
+             "project_name": characteristics.project_name,
+             "main_modules": characteristics.main_modules,
+             "key_directories": characteristics.key_directories,
+             "entry_points": characteristics.entry_points,
+             "documentation_files": characteristics.documentation_files,
+             "important_configs": characteristics.important_configs,
+             "architecture_type": characteristics.architecture_type,
+         }
+
+     def identify_entry_points(self) -> List[Path]:
+         """Identify project entry points."""
+         characteristics = self.analyze_project()
+
+         entry_paths = []
+         for entry_point in characteristics.entry_points:
+             entry_path = self.working_directory / entry_point
+             if entry_path.exists():
+                 entry_paths.append(entry_path)
+         return entry_paths
+
+     def get_project_context_summary(self) -> str:
+         """Get a concise summary of project context for memory templates."""
+         characteristics = self.analyze_project()
+
+         summary_parts = []
+
+         lang_info = characteristics.primary_language or "mixed"
+         if characteristics.languages and len(characteristics.languages) > 1:
+             lang_info = (
+                 f"{lang_info} (with {', '.join(characteristics.languages[1:3])})"
+             )
+
+         summary_parts.append(
+             f"{characteristics.project_name}: {lang_info} {characteristics.architecture_type.lower()}"
+         )
+
+         if characteristics.main_modules:
+             modules_str = ", ".join(characteristics.main_modules[:4])
+             summary_parts.append(f"- Main modules: {modules_str}")
+
+         if characteristics.frameworks or characteristics.web_frameworks:
+             all_frameworks = characteristics.frameworks + characteristics.web_frameworks
+             frameworks_str = ", ".join(all_frameworks[:3])
+             summary_parts.append(f"- Uses: {frameworks_str}")
+
+         if characteristics.testing_framework:
+             summary_parts.append(f"- Testing: {characteristics.testing_framework}")
+         elif characteristics.test_patterns:
+             summary_parts.append(f"- Testing: {characteristics.test_patterns[0]}")
+
+         if characteristics.code_conventions:
+             patterns_str = ", ".join(characteristics.code_conventions[:2])
+             summary_parts.append(f"- Key patterns: {patterns_str}")
+
+         return "\n".join(summary_parts)
+
+     def get_important_files_for_context(self) -> List[str]:
+         """Get list of important files for memory context."""
+         characteristics = self.analyze_project()
+         important_files = []
+
+         standard_docs = ["README.md", "CONTRIBUTING.md", "CHANGELOG.md"]
+         for doc in standard_docs:
+             if (self.working_directory / doc).exists():
+                 important_files.append(doc)
+
+         important_files.extend(characteristics.important_configs)
+         important_files.extend(characteristics.documentation_files[:5])
+         important_files.extend(characteristics.entry_points)
+
+         arch_patterns = ["ARCHITECTURE.md", "docs/architecture.md", "docs/STRUCTURE.md"]
+         for pattern in arch_patterns:
+             if (self.working_directory / pattern).exists():
+                 important_files.append(pattern)
+
+         return list(set(important_files))
+
+     # ================================================================================
+     # Helper Methods
+     # ================================================================================
+
+     def _create_empty_characteristics(self) -> ProjectCharacteristics:
+         """Create empty ProjectCharacteristics with defaults."""
+         return ProjectCharacteristics(
+             project_name=self.working_directory.name,
+             primary_language=None,
+             languages=[],
+             frameworks=[],
+             architecture_type="unknown",
+             main_modules=[],
+             key_directories=[],
+             entry_points=[],
+             testing_framework=None,
+             test_patterns=[],
+             package_manager=None,
+             build_tools=[],
+             databases=[],
+             web_frameworks=[],
+             api_patterns=[],
+             key_dependencies=[],
+             code_conventions=[],
+             configuration_patterns=[],
+             project_terminology=[],
+             documentation_files=[],
+             important_configs=[],
+         )
+
+     def _is_cache_valid(self) -> bool:
+         """Check if cache is still valid."""
+         if self._analysis_cache is None or self._cache_timestamp is None:
+             return False
+         return time.time() - self._cache_timestamp < self._cache_ttl
+
+     def _update_cache(self, characteristics: ProjectCharacteristics) -> None:
+         """Update the cache with new results."""
+         self._analysis_cache = characteristics
+         self._cache_timestamp = time.time()
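
For orientation, here is a minimal usage sketch of the new analyzer_v2 module, following the MIGRATION note in its module docstring. The constructor arguments and methods are taken from the diff above; the project path and the choice to inject only the language analyzer service are illustrative assumptions, not part of the package.

    # Hypothetical usage sketch; class and method names come from the diff above,
    # the project path below is a placeholder.
    from pathlib import Path

    from claude_mpm.services.project.analyzer_v2 import ProjectAnalyzer
    from claude_mpm.services.project.language_analyzer import LanguageAnalyzerService

    project_root = Path("/path/to/project")  # placeholder project location
    analyzer = ProjectAnalyzer(
        working_directory=project_root,
        # Services not passed in are created internally by __init__.
        language_analyzer=LanguageAnalyzerService(project_root),
    )

    characteristics = analyzer.analyze_project()  # results are cached for 5 minutes
    print(characteristics.primary_language, characteristics.frameworks)
    print(analyzer.get_project_context_summary())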