claude-mpm 4.1.4__py3-none-any.whl → 4.1.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (81)
  1. claude_mpm/VERSION +1 -1
  2. claude_mpm/agents/templates/research.json +39 -13
  3. claude_mpm/cli/__init__.py +2 -0
  4. claude_mpm/cli/commands/__init__.py +2 -0
  5. claude_mpm/cli/commands/configure.py +1221 -0
  6. claude_mpm/cli/commands/configure_tui.py +1921 -0
  7. claude_mpm/cli/commands/tickets.py +365 -784
  8. claude_mpm/cli/parsers/base_parser.py +7 -0
  9. claude_mpm/cli/parsers/configure_parser.py +119 -0
  10. claude_mpm/cli/startup_logging.py +39 -12
  11. claude_mpm/constants.py +1 -0
  12. claude_mpm/core/output_style_manager.py +24 -0
  13. claude_mpm/core/socketio_pool.py +35 -3
  14. claude_mpm/core/unified_agent_registry.py +46 -15
  15. claude_mpm/dashboard/static/css/connection-status.css +370 -0
  16. claude_mpm/dashboard/static/js/components/connection-debug.js +654 -0
  17. claude_mpm/dashboard/static/js/connection-manager.js +536 -0
  18. claude_mpm/dashboard/templates/index.html +11 -0
  19. claude_mpm/hooks/claude_hooks/services/__init__.py +3 -1
  20. claude_mpm/hooks/claude_hooks/services/connection_manager_http.py +190 -0
  21. claude_mpm/services/agents/deployment/agent_discovery_service.py +12 -3
  22. claude_mpm/services/agents/deployment/agent_lifecycle_manager.py +172 -233
  23. claude_mpm/services/agents/deployment/agent_lifecycle_manager_refactored.py +575 -0
  24. claude_mpm/services/agents/deployment/agent_operation_service.py +573 -0
  25. claude_mpm/services/agents/deployment/agent_record_service.py +419 -0
  26. claude_mpm/services/agents/deployment/agent_state_service.py +381 -0
  27. claude_mpm/services/agents/deployment/multi_source_deployment_service.py +4 -2
  28. claude_mpm/services/diagnostics/checks/__init__.py +2 -0
  29. claude_mpm/services/diagnostics/checks/instructions_check.py +418 -0
  30. claude_mpm/services/diagnostics/diagnostic_runner.py +15 -2
  31. claude_mpm/services/event_bus/direct_relay.py +173 -0
  32. claude_mpm/services/infrastructure/__init__.py +31 -5
  33. claude_mpm/services/infrastructure/monitoring/__init__.py +43 -0
  34. claude_mpm/services/infrastructure/monitoring/aggregator.py +437 -0
  35. claude_mpm/services/infrastructure/monitoring/base.py +130 -0
  36. claude_mpm/services/infrastructure/monitoring/legacy.py +203 -0
  37. claude_mpm/services/infrastructure/monitoring/network.py +218 -0
  38. claude_mpm/services/infrastructure/monitoring/process.py +342 -0
  39. claude_mpm/services/infrastructure/monitoring/resources.py +243 -0
  40. claude_mpm/services/infrastructure/monitoring/service.py +367 -0
  41. claude_mpm/services/infrastructure/monitoring.py +67 -1030
  42. claude_mpm/services/project/analyzer.py +13 -4
  43. claude_mpm/services/project/analyzer_refactored.py +450 -0
  44. claude_mpm/services/project/analyzer_v2.py +566 -0
  45. claude_mpm/services/project/architecture_analyzer.py +461 -0
  46. claude_mpm/services/project/dependency_analyzer.py +462 -0
  47. claude_mpm/services/project/language_analyzer.py +265 -0
  48. claude_mpm/services/project/metrics_collector.py +410 -0
  49. claude_mpm/services/socketio/handlers/connection_handler.py +345 -0
  50. claude_mpm/services/socketio/server/broadcaster.py +32 -1
  51. claude_mpm/services/socketio/server/connection_manager.py +516 -0
  52. claude_mpm/services/socketio/server/core.py +63 -0
  53. claude_mpm/services/socketio/server/eventbus_integration.py +20 -9
  54. claude_mpm/services/socketio/server/main.py +27 -1
  55. claude_mpm/services/ticket_manager.py +5 -1
  56. claude_mpm/services/ticket_services/__init__.py +26 -0
  57. claude_mpm/services/ticket_services/crud_service.py +328 -0
  58. claude_mpm/services/ticket_services/formatter_service.py +290 -0
  59. claude_mpm/services/ticket_services/search_service.py +324 -0
  60. claude_mpm/services/ticket_services/validation_service.py +303 -0
  61. claude_mpm/services/ticket_services/workflow_service.py +244 -0
  62. {claude_mpm-4.1.4.dist-info → claude_mpm-4.1.6.dist-info}/METADATA +3 -1
  63. {claude_mpm-4.1.4.dist-info → claude_mpm-4.1.6.dist-info}/RECORD +67 -46
  64. claude_mpm/agents/OUTPUT_STYLE.md +0 -73
  65. claude_mpm/agents/backups/INSTRUCTIONS.md +0 -352
  66. claude_mpm/agents/templates/OPTIMIZATION_REPORT.md +0 -156
  67. claude_mpm/agents/templates/backup/data_engineer_agent_20250726_234551.json +0 -79
  68. claude_mpm/agents/templates/backup/documentation_agent_20250726_234551.json +0 -68
  69. claude_mpm/agents/templates/backup/engineer_agent_20250726_234551.json +0 -77
  70. claude_mpm/agents/templates/backup/ops_agent_20250726_234551.json +0 -78
  71. claude_mpm/agents/templates/backup/qa_agent_20250726_234551.json +0 -67
  72. claude_mpm/agents/templates/backup/research_agent_2025011_234551.json +0 -88
  73. claude_mpm/agents/templates/backup/research_agent_20250726_234551.json +0 -72
  74. claude_mpm/agents/templates/backup/research_memory_efficient.json +0 -88
  75. claude_mpm/agents/templates/backup/security_agent_20250726_234551.json +0 -78
  76. claude_mpm/agents/templates/backup/version_control_agent_20250726_234551.json +0 -62
  77. claude_mpm/agents/templates/vercel_ops_instructions.md +0 -582
  78. {claude_mpm-4.1.4.dist-info → claude_mpm-4.1.6.dist-info}/WHEEL +0 -0
  79. {claude_mpm-4.1.4.dist-info → claude_mpm-4.1.6.dist-info}/entry_points.txt +0 -0
  80. {claude_mpm-4.1.4.dist-info → claude_mpm-4.1.6.dist-info}/licenses/LICENSE +0 -0
  81. {claude_mpm-4.1.4.dist-info → claude_mpm-4.1.6.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,462 @@
1
+ #!/usr/bin/env python3
2
+ """
3
+ Dependency Analyzer Service
4
+ ===========================
5
+
6
+ WHY: Separates dependency analysis from the main analyzer to follow
7
+ single responsibility principle. Handles parsing and analysis of
8
+ project dependencies across different package managers.
9
+
10
+ DECISION: Create a focused service for dependency analysis that can
11
+ handle multiple package managers and provide detailed dependency insights.
12
+ """
13
+
14
import json
import logging
import re
from pathlib import Path
from typing import Any, Dict, List, Optional
18
+
19
+
20
class DependencyAnalyzerService:
    """Analyzes project dependencies and package management.

    WHY: Dependency analysis is complex and varies by ecosystem. This service
    provides a unified interface for analyzing dependencies across different
    package managers and languages.
    """

    # Configuration file to package manager mapping.
    # NOTE: declaration order matters -- detect_package_manager() returns the
    # first config file found, so package.json is preferred over the rest.
    PACKAGE_MANAGERS = {
        "package.json": "npm",
        "yarn.lock": "yarn",
        "pnpm-lock.yaml": "pnpm",
        "package-lock.json": "npm",
        "requirements.txt": "pip",
        "Pipfile": "pipenv",
        "poetry.lock": "poetry",
        "pyproject.toml": "pip",
        "Cargo.toml": "cargo",
        "go.mod": "go",
        "pom.xml": "maven",
        "build.gradle": "gradle",
        "composer.json": "composer",
        "Gemfile": "bundler",
        "mix.exs": "mix",
        "pubspec.yaml": "pub",
    }

    # Database-related dependencies, keyed by database system name.
    # Matching is substring-based against lowercased dependency names.
    DATABASE_PACKAGES = {
        "postgresql": ["psycopg2", "pg", "postgres", "postgresql", "node-postgres"],
        "mysql": ["mysql", "mysql2", "mysqlclient", "mysql-connector"],
        "sqlite": ["sqlite3", "better-sqlite3"],
        "mongodb": ["mongodb", "mongoose", "pymongo", "motor"],
        "redis": ["redis", "ioredis", "redis-py"],
        "elasticsearch": ["elasticsearch", "@elastic/elasticsearch"],
        "cassandra": ["cassandra-driver"],
        "neo4j": ["neo4j", "neo4j-driver"],
    }

    # Testing framework packages, keyed by language/ecosystem.
    TESTING_PACKAGES = {
        "python": ["pytest", "unittest", "nose", "nose2", "tox"],
        "javascript": ["jest", "mocha", "chai", "jasmine", "cypress", "playwright"],
        "java": ["junit", "testng", "mockito"],
        "ruby": ["rspec", "minitest"],
        "go": ["testify", "ginkgo"],
        "rust": ["test", "quickcheck", "proptest"],
    }

    def __init__(self, working_directory: Path):
        """Initialize the dependency analyzer service.

        Args:
            working_directory: Project root directory
        """
        self.working_directory = working_directory
        self.logger = logging.getLogger(f"{__name__}.{self.__class__.__name__}")

    def detect_package_manager(self) -> Optional[str]:
        """Detect the primary package manager used in the project.

        WHY: Knowing the package manager helps understand the project's
        ecosystem and how to install/manage dependencies.

        Returns:
            Package manager name or None
        """
        # Lock files first: they are more specific than config files
        # (yarn.lock disambiguates yarn from npm even though both projects
        # carry a package.json).
        lock_files = [
            "yarn.lock",
            "pnpm-lock.yaml",
            "package-lock.json",
            "poetry.lock",
            "Pipfile.lock",
            "Cargo.lock",
        ]

        for lock_file in lock_files:
            if (self.working_directory / lock_file).exists():
                # Pipfile.lock / Cargo.lock have no direct mapping entry and
                # intentionally fall through to the config-file scan below.
                if lock_file in self.PACKAGE_MANAGERS:
                    return self.PACKAGE_MANAGERS[lock_file]

        # Then check for config files; first match in declaration order wins.
        for config_file, manager in self.PACKAGE_MANAGERS.items():
            if (self.working_directory / config_file).exists():
                return manager

        return None

    def analyze_dependencies(self) -> Dict[str, List[str]]:
        """Analyze all project dependencies.

        WHY: Understanding dependencies helps identify the project's
        technology stack and integration points.

        Returns:
            Dictionary with categories of dependencies:
            "production", "development", "databases", "testing", "build_tools"
        """
        dependencies: Dict[str, List[str]] = {
            "production": [],
            "development": [],
            "databases": [],
            "testing": [],
            "build_tools": [],
        }

        # Analyze package.json
        package_json_path = self.working_directory / "package.json"
        if package_json_path.exists():
            self._analyze_package_json(package_json_path, dependencies)

        # Analyze Python dependencies
        for py_file in ("requirements.txt", "pyproject.toml", "Pipfile"):
            py_path = self.working_directory / py_file
            if py_path.exists():
                self._analyze_python_deps(py_path, dependencies)

        # Analyze Cargo.toml
        cargo_path = self.working_directory / "Cargo.toml"
        if cargo_path.exists():
            self._analyze_cargo_toml(cargo_path, dependencies)

        # Analyze go.mod
        go_mod_path = self.working_directory / "go.mod"
        if go_mod_path.exists():
            self._analyze_go_mod(go_mod_path, dependencies)

        # FIX: the per-file categorization passes re-scan everything collected
        # so far (e.g. when requirements.txt AND pyproject.toml both exist),
        # which produced duplicate entries. Deduplicate, preserving
        # first-seen order.
        return {key: list(dict.fromkeys(vals)) for key, vals in dependencies.items()}

    def detect_databases(self, dependencies: Optional[List[str]] = None) -> List[str]:
        """Detect database systems from dependencies.

        WHY: Database detection helps understand data persistence patterns
        and integration requirements.

        Args:
            dependencies: Optional list of dependencies to analyze; when None,
                production + development dependencies are analyzed.

        Returns:
            Sorted list of detected database names (keys of DATABASE_PACKAGES)
        """
        if dependencies is None:
            all_deps = self.analyze_dependencies()
            dependencies = all_deps["production"] + all_deps["development"]

        databases = set()

        for dep in dependencies:
            dep_lower = dep.lower()
            for db_name, db_packages in self.DATABASE_PACKAGES.items():
                if any(pkg in dep_lower for pkg in db_packages):
                    databases.add(db_name)

        return sorted(databases)

    def detect_testing_frameworks(
        self, dependencies: Optional[List[str]] = None
    ) -> List[str]:
        """Detect testing frameworks from dependencies.

        WHY: Understanding testing tools helps maintain and extend
        the project's test suite appropriately.

        Args:
            dependencies: Optional list of dependencies to analyze; when None,
                development + testing dependencies are analyzed.

        Returns:
            Sorted list of dependency names that look like testing frameworks
        """
        if dependencies is None:
            all_deps = self.analyze_dependencies()
            dependencies = all_deps["development"] + all_deps["testing"]

        testing_frameworks = set()

        for dep in dependencies:
            dep_lower = dep.lower()
            # Substring match against every known framework in any ecosystem.
            if any(
                test_pkg in dep_lower
                for test_packages in self.TESTING_PACKAGES.values()
                for test_pkg in test_packages
            ):
                testing_frameworks.add(dep)

        return sorted(testing_frameworks)

    def detect_web_frameworks(self, dependencies: List[str]) -> List[str]:
        """Detect web frameworks from dependencies.

        WHY: Web frameworks indicate the project type and determine
        specific patterns agents should understand.

        Args:
            dependencies: List of project dependencies

        Returns:
            List of detected web framework dependency names (deduplicated,
            sorted for deterministic output)
        """
        web_framework_names = [
            "flask",
            "django",
            "fastapi",
            "tornado",
            "bottle",
            "pyramid",  # Python
            "express",
            "koa",
            "hapi",
            "fastify",
            "nestjs",  # JavaScript
            "rails",
            "sinatra",
            "hanami",  # Ruby
            "laravel",
            "symfony",
            "slim",
            "lumen",  # PHP
            "spring",
            "springboot",
            "struts",
            "play",  # Java
            "actix-web",
            "warp",
            "rocket",  # Rust
            "gin",
            "echo",
            "fiber",  # Go
            "aspnet",
            "aspnetcore",  # C#
        ]

        web_frameworks = set()
        for dep in dependencies:
            dep_lower = dep.lower()
            if any(framework in dep_lower for framework in web_framework_names):
                web_frameworks.add(dep)

        return sorted(web_frameworks)

    def get_build_tools(
        self, package_data: Optional[Dict[str, Any]] = None
    ) -> List[str]:
        """Extract build tools from package configuration.

        WHY: Build tools determine how the project is compiled, bundled,
        and prepared for deployment.

        Args:
            package_data: Optional package.json data; read from disk when None.

        Returns:
            List of build tool names (deduplicated, sorted)
        """
        build_tools: List[str] = []

        # Check package.json scripts
        if package_data is None:
            package_json_path = self.working_directory / "package.json"
            if package_json_path.exists():
                try:
                    with open(package_json_path) as f:
                        package_data = json.load(f)
                except Exception as e:
                    # FIX: a malformed package.json previously aborted the
                    # whole method; it must not hide the filesystem-based
                    # checks below (Makefile/gradle/maven).
                    self.logger.warning(f"Error reading package.json: {e}")

        if package_data and "scripts" in package_data:
            # Common build tools that appear in npm script commands.
            build_tool_names = [
                "webpack",
                "rollup",
                "vite",
                "parcel",
                "esbuild",
                "browserify",
                "gulp",
                "grunt",
                "make",
                "cmake",
            ]

            for script_cmd in package_data["scripts"].values():
                script_lower = script_cmd.lower()
                build_tools.extend(
                    tool for tool in build_tool_names if tool in script_lower
                )

        # Check for Makefile
        if (self.working_directory / "Makefile").exists():
            build_tools.append("make")

        # Check for gradle/maven
        if (self.working_directory / "build.gradle").exists():
            build_tools.append("gradle")
        if (self.working_directory / "pom.xml").exists():
            build_tools.append("maven")

        return sorted(set(build_tools))

    def _analyze_package_json(
        self, path: Path, dependencies: Dict[str, List[str]]
    ) -> None:
        """Parse package.json for dependencies and categorize them in place."""
        try:
            with open(path) as f:
                data = json.load(f)

            # Production dependencies
            if "dependencies" in data:
                dependencies["production"].extend(data["dependencies"].keys())

            # Development dependencies
            if "devDependencies" in data:
                dependencies["development"].extend(data["devDependencies"].keys())

            all_deps = list(data.get("dependencies", {}).keys()) + list(
                data.get("devDependencies", {}).keys()
            )

            for dep in all_deps:
                dep_lower = dep.lower()
                # Check for database packages
                for db_packages in self.DATABASE_PACKAGES.values():
                    if any(pkg in dep_lower for pkg in db_packages):
                        dependencies["databases"].append(dep)

                # Check for testing packages
                for test_packages in self.TESTING_PACKAGES.values():
                    if any(pkg in dep_lower for pkg in test_packages):
                        dependencies["testing"].append(dep)

            # Extract build tools from the "scripts" section.
            dependencies["build_tools"].extend(self.get_build_tools(data))

        except Exception as e:
            self.logger.warning(f"Error parsing package.json: {e}")

    def _analyze_python_deps(
        self, path: Path, dependencies: Dict[str, List[str]]
    ) -> None:
        """Parse Python dependency files (requirements.txt/pyproject.toml/Pipfile)."""
        try:
            if path.name == "requirements.txt":
                for raw_line in path.read_text().splitlines():
                    line = raw_line.strip()
                    # Skip blanks, comments, and pip options (-r, -e, --hash, ...),
                    # which are not package names.
                    if not line or line.startswith(("#", "-")):
                        continue
                    name = self._strip_requirement(line)
                    if name:
                        dependencies["production"].append(name)

            elif path.name == "pyproject.toml":
                data = self._load_toml(path)
                if data is None:
                    return

                # PEP 621 dependencies are full requirement strings
                # ("requests>=2.0"); reduce them to bare names for
                # consistency with the other sources.
                project_deps = data.get("project", {}).get("dependencies", [])
                dependencies["production"].extend(
                    name
                    for name in (self._strip_requirement(d) for d in project_deps)
                    if name
                )

                # Poetry dependencies. "python" is the interpreter version
                # constraint, not an installable package -- exclude it.
                poetry_deps = (
                    data.get("tool", {}).get("poetry", {}).get("dependencies", {})
                )
                dependencies["production"].extend(
                    name for name in poetry_deps if name.lower() != "python"
                )

                # Poetry development dependencies (legacy dev-dependencies table).
                dev_deps = (
                    data.get("tool", {}).get("poetry", {}).get("dev-dependencies", {})
                )
                dependencies["development"].extend(dev_deps.keys())

            elif path.name == "Pipfile":
                # FIX: Pipfile was scanned by analyze_dependencies() but never
                # actually parsed here. It is TOML with [packages] and
                # [dev-packages] tables.
                data = self._load_toml(path)
                if data is None:
                    return
                dependencies["production"].extend(data.get("packages", {}).keys())
                dependencies["development"].extend(data.get("dev-packages", {}).keys())

            # Categorize everything collected so far (duplicates from repeated
            # passes are removed by analyze_dependencies()).
            for dep_list in (dependencies["production"], dependencies["development"]):
                for dep in dep_list:
                    dep_lower = dep.lower()

                    # Check databases
                    for db_packages in self.DATABASE_PACKAGES.values():
                        if any(pkg in dep_lower for pkg in db_packages):
                            dependencies["databases"].append(dep)

                    # Check testing
                    if any(
                        test in dep_lower for test in self.TESTING_PACKAGES["python"]
                    ):
                        dependencies["testing"].append(dep)

        except Exception as e:
            self.logger.warning(f"Error parsing Python dependencies: {e}")

    def _analyze_cargo_toml(
        self, path: Path, dependencies: Dict[str, List[str]]
    ) -> None:
        """Parse Cargo.toml for Rust dependencies."""
        try:
            data = self._load_toml(path)
            if data is None:
                return

            # Production dependencies
            dependencies["production"].extend(data.get("dependencies", {}).keys())

            # Dev dependencies
            dependencies["development"].extend(data.get("dev-dependencies", {}).keys())

        except Exception as e:
            self.logger.warning(f"Error parsing Cargo.toml: {e}")

    def _analyze_go_mod(self, path: Path, dependencies: Dict[str, List[str]]) -> None:
        """Parse go.mod for Go dependencies (single-line and block requires)."""
        try:
            content = path.read_text()

            requires: List[str] = []

            # Single-line form: `require example.com/mod v1.2.3`
            requires.extend(
                re.findall(r"^require\s+(\S+)\s+v", content, re.MULTILINE)
            )

            # FIX: the previous regex missed the common block form entirely:
            #   require (
            #       example.com/mod v1.2.3
            #   )
            for block in re.findall(r"require\s*\(([^)]*)\)", content, re.DOTALL):
                requires.extend(re.findall(r"^\s*(\S+)\s+v", block, re.MULTILINE))

            dependencies["production"].extend(requires)

        except Exception as e:
            self.logger.warning(f"Error parsing go.mod: {e}")

    @staticmethod
    def _strip_requirement(requirement: str) -> str:
        """Reduce a PEP 508 requirement string to its bare distribution name.

        Cuts at the first specifier, extras, environment-marker, or URL
        separator, e.g. "pkg[extra]~=1.0; python_version<'3.9'" -> "pkg".
        (The old split on "="/">"/"<" left residue such as "pkg~" for "pkg~=1.0".)
        """
        return re.split(r"[<>=!~;\[@ ]", requirement.strip(), maxsplit=1)[0].strip()

    @staticmethod
    def _load_toml(path: Path) -> Optional[Dict[str, Any]]:
        """Load a TOML file, or return None when no TOML parser is importable.

        Shared by the pyproject.toml, Pipfile, and Cargo.toml analyzers;
        parse errors propagate to the caller's exception handler.
        """
        try:
            import tomllib  # Python 3.11+ stdlib
        except ImportError:
            try:
                import tomli as tomllib  # pre-3.11 backport, if installed
            except ImportError:
                return None

        with open(path, "rb") as f:
            return tomllib.load(f)