claude-mpm 5.4.21__py3-none-any.whl → 5.4.36__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of claude-mpm might be problematic.
- claude_mpm/VERSION +1 -1
- claude_mpm/agents/BASE_AGENT.md +164 -0
- claude_mpm/agents/BASE_ENGINEER.md +658 -0
- claude_mpm/agents/MEMORY.md +1 -1
- claude_mpm/agents/PM_INSTRUCTIONS.md +320 -880
- claude_mpm/agents/WORKFLOW.md +5 -254
- claude_mpm/agents/agent_loader.py +1 -1
- claude_mpm/agents/base_agent.json +31 -0
- claude_mpm/cli/commands/agent_state_manager.py +10 -10
- claude_mpm/cli/commands/agents.py +9 -9
- claude_mpm/cli/commands/auto_configure.py +4 -4
- claude_mpm/cli/commands/configure.py +1 -1
- claude_mpm/cli/commands/postmortem.py +1 -1
- claude_mpm/cli/interactive/agent_wizard.py +2 -2
- claude_mpm/cli/startup.py +98 -58
- claude_mpm/core/config.py +2 -4
- claude_mpm/core/framework/loaders/agent_loader.py +1 -1
- claude_mpm/core/framework/loaders/instruction_loader.py +52 -11
- claude_mpm/core/unified_agent_registry.py +1 -1
- claude_mpm/dashboard/static/svelte-build/_app/env.js +1 -0
- claude_mpm/dashboard/static/svelte-build/_app/immutable/assets/0.B_FtCwCQ.css +1 -0
- claude_mpm/dashboard/static/svelte-build/_app/immutable/assets/2.Cl_eSA4x.css +1 -0
- claude_mpm/dashboard/static/svelte-build/_app/immutable/chunks/BgChzWQ1.js +1 -0
- claude_mpm/dashboard/static/svelte-build/_app/immutable/chunks/CIXEwuWe.js +1 -0
- claude_mpm/dashboard/static/svelte-build/_app/immutable/chunks/CWc5urbQ.js +1 -0
- claude_mpm/dashboard/static/svelte-build/_app/immutable/chunks/DMkZpdF2.js +2 -0
- claude_mpm/dashboard/static/svelte-build/_app/immutable/chunks/DjhvlsAc.js +1 -0
- claude_mpm/dashboard/static/svelte-build/_app/immutable/chunks/N4qtv3Hx.js +2 -0
- claude_mpm/dashboard/static/svelte-build/_app/immutable/chunks/uj46x2Wr.js +1 -0
- claude_mpm/dashboard/static/svelte-build/_app/immutable/entry/app.DTL5mJO-.js +2 -0
- claude_mpm/dashboard/static/svelte-build/_app/immutable/entry/start.DzuEhzqh.js +1 -0
- claude_mpm/dashboard/static/svelte-build/_app/immutable/nodes/0.CAGBuiOw.js +1 -0
- claude_mpm/dashboard/static/svelte-build/_app/immutable/nodes/1.DFLC8jdE.js +1 -0
- claude_mpm/dashboard/static/svelte-build/_app/immutable/nodes/2.DPvEihJJ.js +10 -0
- claude_mpm/dashboard/static/svelte-build/_app/version.json +1 -0
- claude_mpm/dashboard/static/svelte-build/favicon.svg +7 -0
- claude_mpm/dashboard/static/svelte-build/index.html +36 -0
- claude_mpm/hooks/claude_hooks/__pycache__/__init__.cpython-311.pyc +0 -0
- claude_mpm/hooks/claude_hooks/__pycache__/correlation_manager.cpython-311.pyc +0 -0
- claude_mpm/hooks/claude_hooks/__pycache__/event_handlers.cpython-311.pyc +0 -0
- claude_mpm/hooks/claude_hooks/__pycache__/hook_handler.cpython-311.pyc +0 -0
- claude_mpm/hooks/claude_hooks/__pycache__/installer.cpython-311.pyc +0 -0
- claude_mpm/hooks/claude_hooks/__pycache__/memory_integration.cpython-311.pyc +0 -0
- claude_mpm/hooks/claude_hooks/__pycache__/response_tracking.cpython-311.pyc +0 -0
- claude_mpm/hooks/claude_hooks/__pycache__/tool_analysis.cpython-311.pyc +0 -0
- claude_mpm/hooks/claude_hooks/hook_handler.py +149 -1
- claude_mpm/hooks/claude_hooks/services/__pycache__/__init__.cpython-311.pyc +0 -0
- claude_mpm/hooks/claude_hooks/services/__pycache__/connection_manager.cpython-311.pyc +0 -0
- claude_mpm/hooks/claude_hooks/services/__pycache__/connection_manager_http.cpython-311.pyc +0 -0
- claude_mpm/hooks/claude_hooks/services/__pycache__/duplicate_detector.cpython-311.pyc +0 -0
- claude_mpm/hooks/claude_hooks/services/__pycache__/state_manager.cpython-311.pyc +0 -0
- claude_mpm/hooks/claude_hooks/services/__pycache__/subagent_processor.cpython-311.pyc +0 -0
- claude_mpm/hooks/claude_hooks/services/connection_manager.py +26 -6
- claude_mpm/models/git_repository.py +3 -3
- claude_mpm/services/agents/cache_git_manager.py +6 -6
- claude_mpm/services/agents/deployment/agent_deployment.py +7 -7
- claude_mpm/services/agents/deployment/agent_discovery_service.py +2 -2
- claude_mpm/services/agents/deployment/agent_template_builder.py +2 -2
- claude_mpm/services/agents/deployment/agents_directory_resolver.py +2 -2
- claude_mpm/services/agents/deployment/multi_source_deployment_service.py +20 -22
- claude_mpm/services/agents/deployment/remote_agent_discovery_service.py +55 -53
- claude_mpm/services/agents/git_source_manager.py +2 -2
- claude_mpm/services/agents/recommender.py +5 -3
- claude_mpm/services/agents/single_tier_deployment_service.py +2 -2
- claude_mpm/services/agents/sources/git_source_sync_service.py +5 -5
- claude_mpm/services/agents/startup_sync.py +22 -2
- claude_mpm/services/diagnostics/checks/agent_check.py +2 -2
- claude_mpm/services/diagnostics/checks/agent_sources_check.py +1 -1
- claude_mpm/services/git/git_operations_service.py +8 -8
- claude_mpm/services/monitor/server.py +473 -3
- claude_mpm/services/socketio/dashboard_server.py +1 -0
- claude_mpm/services/socketio/event_normalizer.py +37 -6
- claude_mpm/services/socketio/server/core.py +262 -123
- claude_mpm/utils/agent_dependency_loader.py +14 -2
- claude_mpm/utils/agent_filters.py +1 -1
- claude_mpm/utils/migration.py +4 -4
- claude_mpm/utils/robust_installer.py +47 -3
- {claude_mpm-5.4.21.dist-info → claude_mpm-5.4.36.dist-info}/METADATA +5 -3
- {claude_mpm-5.4.21.dist-info → claude_mpm-5.4.36.dist-info}/RECORD +84 -49
- {claude_mpm-5.4.21.dist-info → claude_mpm-5.4.36.dist-info}/WHEEL +0 -0
- {claude_mpm-5.4.21.dist-info → claude_mpm-5.4.36.dist-info}/entry_points.txt +0 -0
- {claude_mpm-5.4.21.dist-info → claude_mpm-5.4.36.dist-info}/licenses/LICENSE +0 -0
- {claude_mpm-5.4.21.dist-info → claude_mpm-5.4.36.dist-info}/licenses/LICENSE-FAQ.md +0 -0
- {claude_mpm-5.4.21.dist-info → claude_mpm-5.4.36.dist-info}/top_level.txt +0 -0
claude_mpm/services/monitor/server.py

@@ -29,7 +29,6 @@ from watchdog.observers import Observer
 
 from ...core.enums import ServiceState
 from ...core.logging_config import get_logger
-from ...dashboard.api.simple_directory import list_directory
 from .event_emitter import get_event_emitter
 from .handlers.code_analysis import CodeAnalysisHandler
 from .handlers.dashboard import DashboardHandler

@@ -588,6 +587,243 @@ class UnifiedMonitorServer:
                     {"success": False, "error": str(e)}, status=500
                 )
 
+        # File listing endpoint for file browser
+        async def api_files_handler(request):
+            """List files in a directory for the file browser."""
+            try:
+                # Get path from query param, default to working directory
+                path = request.query.get("path", str(Path.cwd()))
+                dir_path = Path(path)
+
+                if not dir_path.exists():
+                    return web.json_response(
+                        {"success": False, "error": "Directory not found"},
+                        status=404,
+                    )
+
+                if not dir_path.is_dir():
+                    return web.json_response(
+                        {"success": False, "error": "Path is not a directory"},
+                        status=400,
+                    )
+
+                # Patterns to exclude
+                exclude_patterns = {
+                    ".git",
+                    "node_modules",
+                    "__pycache__",
+                    ".svelte-kit",
+                    "venv",
+                    ".venv",
+                    "dist",
+                    "build",
+                    ".next",
+                    ".cache",
+                    ".pytest_cache",
+                    ".mypy_cache",
+                    ".ruff_cache",
+                    "eggs",
+                    "*.egg-info",
+                    ".tox",
+                    ".nox",
+                    "htmlcov",
+                    ".coverage",
+                }
+
+                entries = []
+                try:
+                    for entry in sorted(
+                        dir_path.iterdir(),
+                        key=lambda x: (not x.is_dir(), x.name.lower()),
+                    ):
+                        # Skip hidden files and excluded patterns
+                        if entry.name.startswith(".") and entry.name not in {
+                            ".env",
+                            ".gitignore",
+                        }:
+                            if entry.name in {".git", ".svelte-kit", ".cache"}:
+                                continue
+                        if entry.name in exclude_patterns:
+                            continue
+                        if any(
+                            entry.name.endswith(p.replace("*", ""))
+                            for p in exclude_patterns
+                            if "*" in p
+                        ):
+                            continue
+
+                        try:
+                            stat = entry.stat()
+                            entries.append(
+                                {
+                                    "name": entry.name,
+                                    "path": str(entry),
+                                    "type": "directory"
+                                    if entry.is_dir()
+                                    else "file",
+                                    "size": stat.st_size if entry.is_file() else 0,
+                                    "modified": stat.st_mtime,
+                                    "extension": entry.suffix.lstrip(".")
+                                    if entry.is_file()
+                                    else None,
+                                }
+                            )
+                        except (PermissionError, OSError):
+                            continue
+
+                except PermissionError:
+                    return web.json_response(
+                        {"success": False, "error": "Permission denied"},
+                        status=403,
+                    )
+
+                # Separate directories and files
+                directories = [e for e in entries if e["type"] == "directory"]
+                files = [e for e in entries if e["type"] == "file"]
+
+                return web.json_response(
+                    {
+                        "success": True,
+                        "path": str(dir_path),
+                        "directories": directories,
+                        "files": files,
+                        "total_directories": len(directories),
+                        "total_files": len(files),
+                    }
+                )
+
+            except Exception as e:
+                self.logger.error(f"Error listing directory: {e}")
+                return web.json_response(
+                    {"success": False, "error": str(e)}, status=500
+                )
+
+        # File read endpoint (GET) for file browser
+        async def api_file_read_handler(request):
+            """Read file content via GET request."""
+            import base64
+
+            try:
+                file_path = request.query.get("path", "")
+
+                if not file_path:
+                    return web.json_response(
+                        {"success": False, "error": "Path parameter required"},
+                        status=400,
+                    )
+
+                path = Path(file_path)
+
+                if not path.exists():
+                    return web.json_response(
+                        {"success": False, "error": "File not found"},
+                        status=404,
+                    )
+
+                if not path.is_file():
+                    return web.json_response(
+                        {"success": False, "error": "Path is not a file"},
+                        status=400,
+                    )
+
+                # Get file info
+                file_size = path.stat().st_size
+                file_ext = path.suffix.lstrip(".").lower()
+
+                # Define image extensions
+                image_extensions = {
+                    "png",
+                    "jpg",
+                    "jpeg",
+                    "gif",
+                    "svg",
+                    "webp",
+                    "ico",
+                    "bmp",
+                }
+
+                # Check if file is an image
+                if file_ext in image_extensions:
+                    # Read as binary and encode to base64
+                    try:
+                        binary_content = path.read_bytes()
+                        base64_content = base64.b64encode(binary_content).decode(
+                            "utf-8"
+                        )
+
+                        # Map extension to MIME type
+                        mime_types = {
+                            "png": "image/png",
+                            "jpg": "image/jpeg",
+                            "jpeg": "image/jpeg",
+                            "gif": "image/gif",
+                            "svg": "image/svg+xml",
+                            "webp": "image/webp",
+                            "ico": "image/x-icon",
+                            "bmp": "image/bmp",
+                        }
+                        mime_type = mime_types.get(file_ext, "image/png")
+
+                        return web.json_response(
+                            {
+                                "success": True,
+                                "path": str(path),
+                                "content": base64_content,
+                                "size": file_size,
+                                "type": "image",
+                                "mime": mime_type,
+                                "extension": file_ext,
+                            }
+                        )
+                    except Exception as e:
+                        self.logger.error(f"Error reading image file: {e}")
+                        return web.json_response(
+                            {
+                                "success": False,
+                                "error": f"Failed to read image: {e!s}",
+                            },
+                            status=500,
+                        )
+
+                # Read text file content
+                try:
+                    content = path.read_text(encoding="utf-8")
+                    lines = content.count("\n") + 1
+                except UnicodeDecodeError:
+                    return web.json_response(
+                        {"success": False, "error": "File is not a text file"},
+                        status=415,
+                    )
+
+                return web.json_response(
+                    {
+                        "success": True,
+                        "path": str(path),
+                        "content": content,
+                        "lines": lines,
+                        "size": file_size,
+                        "type": file_ext or "text",
+                    }
+                )
+
+            except Exception as e:
+                self.logger.error(f"Error reading file: {e}")
+                return web.json_response(
+                    {"success": False, "error": str(e)}, status=500
+                )
+
+        # Favicon handler
+        async def favicon_handler(request):
+            """Serve favicon.svg from static directory."""
+            from aiohttp.web_fileresponse import FileResponse
+
+            favicon_path = static_dir / "svelte-build" / "favicon.svg"
+            if favicon_path.exists():
+                return FileResponse(
+                    favicon_path, headers={"Content-Type": "image/svg+xml"}
+                )
+            raise web.HTTPNotFound()
+
         # Version endpoint for dashboard build tracker
         async def version_handler(request):
             """Serve version information for dashboard build tracker."""

@@ -653,7 +889,7 @@ class UnifiedMonitorServer:
         async def working_directory_handler(request):
             """Return the current working directory."""
             return web.json_response(
-                {"working_directory": Path.cwd(), "success": True}
+                {"working_directory": str(Path.cwd()), "success": True}
             )
 
         # Monitor page routes

@@ -671,15 +907,249 @@ class UnifiedMonitorServer:
                 return web.Response(text=content, content_type="text/html")
             return web.Response(text="Page not found", status=404)
 
+        # Git history handler
+        async def git_history_handler(request: web.Request) -> web.Response:
+            """Get git history for a file."""
+            import subprocess
+
+            try:
+                data = await request.json()
+                file_path = data.get("path", "")
+                limit = data.get("limit", 10)
+
+                if not file_path:
+                    return web.json_response(
+                        {
+                            "success": False,
+                            "error": "No path provided",
+                            "commits": [],
+                        },
+                        status=400,
+                    )
+
+                path = Path(file_path)
+                if not path.exists():
+                    return web.json_response(
+                        {
+                            "success": False,
+                            "error": "File not found",
+                            "commits": [],
+                        },
+                        status=404,
+                    )
+
+                # Get git log for file
+                result = subprocess.run(
+                    [
+                        "git",
+                        "log",
+                        f"-{limit}",
+                        "--pretty=format:%H|%an|%ar|%s",
+                        "--",
+                        str(path),
+                    ],
+                    check=False,
+                    capture_output=True,
+                    text=True,
+                    cwd=str(path.parent),
+                )
+
+                commits = []
+                if result.returncode == 0 and result.stdout:
+                    for line in result.stdout.strip().split("\n"):
+                        if line:
+                            parts = line.split("|", 3)
+                            if len(parts) == 4:
+                                commits.append(
+                                    {
+                                        "hash": parts[0][:7],
+                                        "author": parts[1],
+                                        "date": parts[2],
+                                        "message": parts[3],
+                                    }
+                                )
+
+                return web.json_response({"success": True, "commits": commits})
+            except Exception as e:
+                return web.json_response(
+                    {"success": False, "error": str(e), "commits": []}, status=500
+                )
+
+        # Git diff handler
+        async def git_diff_handler(request: web.Request) -> web.Response:
+            """Get git diff for a file with optional commit selection."""
+            import subprocess
+
+            try:
+                file_path = request.query.get("path", "")
+                commit_hash = request.query.get(
+                    "commit", ""
+                )  # Optional commit hash
+
+                if not file_path:
+                    return web.json_response(
+                        {
+                            "success": False,
+                            "error": "No path provided",
+                            "diff": "",
+                            "has_changes": False,
+                        },
+                        status=400,
+                    )
+
+                path = Path(file_path)
+                if not path.exists():
+                    return web.json_response(
+                        {
+                            "success": False,
+                            "error": "File not found",
+                            "diff": "",
+                            "has_changes": False,
+                        },
+                        status=404,
+                    )
+
+                # Find git repository root
+                git_root_result = subprocess.run(
+                    ["git", "rev-parse", "--show-toplevel"],
+                    check=False,
+                    capture_output=True,
+                    text=True,
+                    cwd=str(path.parent),
+                )
+
+                if git_root_result.returncode != 0:
+                    # Not in a git repository
+                    return web.json_response(
+                        {
+                            "success": True,
+                            "diff": "",
+                            "has_changes": False,
+                            "tracked": False,
+                            "history": [],
+                            "has_uncommitted": False,
+                        }
+                    )
+
+                git_root = Path(git_root_result.stdout.strip())
+
+                # Check if file is tracked by git
+                ls_files_result = subprocess.run(
+                    ["git", "ls-files", "--error-unmatch", str(path)],
+                    check=False,
+                    capture_output=True,
+                    text=True,
+                    cwd=str(git_root),
+                )
+
+                if ls_files_result.returncode != 0:
+                    # File is not tracked by git
+                    return web.json_response(
+                        {
+                            "success": True,
+                            "diff": "",
+                            "has_changes": False,
+                            "tracked": False,
+                            "history": [],
+                            "has_uncommitted": False,
+                        }
+                    )
+
+                # Get commit history for this file (last 5 commits)
+                history_result = subprocess.run(
+                    [
+                        "git",
+                        "log",
+                        "-5",
+                        "--pretty=format:%H|%s|%ar",
+                        "--",
+                        str(path),
+                    ],
+                    check=False,
+                    capture_output=True,
+                    text=True,
+                    cwd=str(git_root),
+                )
+
+                history = []
+                if history_result.returncode == 0 and history_result.stdout:
+                    for line in history_result.stdout.strip().split("\n"):
+                        if line:
+                            parts = line.split("|", 2)
+                            if len(parts) == 3:
+                                history.append(
+                                    {
+                                        "hash": parts[0][:7],  # Short hash
+                                        "full_hash": parts[0],  # Full hash for API
+                                        "message": parts[1],
+                                        "time_ago": parts[2],
+                                    }
+                                )
+
+                # Check for uncommitted changes
+                uncommitted_result = subprocess.run(
+                    ["git", "diff", "HEAD", str(path)],
+                    check=False,
+                    capture_output=True,
+                    text=True,
+                    cwd=str(git_root),
+                )
+
+                has_uncommitted = bool(uncommitted_result.stdout.strip())
+
+                # Get diff based on commit parameter
+                if commit_hash:
+                    # Get diff for specific commit
+                    result = subprocess.run(
+                        ["git", "show", commit_hash, "--", str(path)],
+                        check=False,
+                        capture_output=True,
+                        text=True,
+                        cwd=str(git_root),
+                    )
+                    diff_output = result.stdout if result.returncode == 0 else ""
+                    has_changes = bool(diff_output.strip())
+                else:
+                    # Get uncommitted diff (default behavior)
+                    diff_output = uncommitted_result.stdout
+                    has_changes = has_uncommitted
+
+                return web.json_response(
+                    {
+                        "success": True,
+                        "diff": diff_output,
+                        "has_changes": has_changes,
+                        "tracked": True,
+                        "history": history,
+                        "has_uncommitted": has_uncommitted,
+                    }
+                )
+            except Exception as e:
+                return web.json_response(
+                    {
+                        "success": False,
+                        "error": str(e),
+                        "diff": "",
+                        "has_changes": False,
+                        "history": [],
+                        "has_uncommitted": False,
+                    },
+                    status=500,
+                )
+
         # Register routes
         self.app.router.add_get("/", dashboard_index)
+        self.app.router.add_get("/favicon.svg", favicon_handler)
         self.app.router.add_get("/health", health_check)
         self.app.router.add_get("/version.json", version_handler)
         self.app.router.add_get("/api/config", config_handler)
         self.app.router.add_get("/api/working-directory", working_directory_handler)
-        self.app.router.add_get("/api/
+        self.app.router.add_get("/api/files", api_files_handler)
+        self.app.router.add_get("/api/file/read", api_file_read_handler)
+        self.app.router.add_get("/api/file/diff", git_diff_handler)
         self.app.router.add_post("/api/events", api_events_handler)
         self.app.router.add_post("/api/file", api_file_handler)
+        self.app.router.add_post("/api/git-history", git_history_handler)
 
         # Monitor page routes
         self.app.router.add_get("/monitor", lambda r: monitor_page_handler(r))
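The handlers above are ordinary aiohttp JSON routes, so the new file-browser and git endpoints can be exercised directly over HTTP once the monitor server is running. A minimal sketch using only the standard library; the base URL (localhost:8765) and the file paths are illustrative assumptions, not values taken from this diff:

    import json
    from urllib import parse, request

    BASE = "http://localhost:8765"  # assumed monitor address; adjust to your setup

    def get_json(path, **params):
        """GET a JSON endpoint with optional query parameters."""
        url = f"{BASE}{path}"
        if params:
            url += "?" + parse.urlencode(params)
        with request.urlopen(url) as resp:
            return json.load(resp)

    def post_json(path, payload):
        """POST a JSON body and decode the JSON response."""
        req = request.Request(
            f"{BASE}{path}",
            data=json.dumps(payload).encode("utf-8"),
            headers={"Content-Type": "application/json"},
        )
        with request.urlopen(req) as resp:
            return json.load(resp)

    # List a directory (GET /api/files?path=...)
    listing = get_json("/api/files", path="/path/to/project")
    print(listing["total_directories"], listing["total_files"])

    # Read a file (GET /api/file/read?path=...); images come back base64-encoded
    content = get_json("/api/file/read", path="/path/to/project/README.md")

    # Diff of uncommitted changes, or a specific commit via the optional "commit" param
    diff = get_json("/api/file/diff", path="/path/to/project/README.md")

    # Commit history (POST /api/git-history with a JSON body, as in git_history_handler)
    history = post_json("/api/git-history", {"path": "/path/to/project/README.md", "limit": 5})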
claude_mpm/services/socketio/dashboard_server.py

@@ -152,6 +152,7 @@ class DashboardServer(SocketIOServiceInterface):
 
         # Register handlers for all events we want to relay from monitor to dashboard
         relay_events = [
+            "claude_event",  # Tool events from Claude Code hooks
             "session_started",
             "session_ended",
             "claude_status",
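With "claude_event" added to relay_events, tool events emitted by the Claude Code hooks are relayed from the monitor through to dashboard clients. A minimal Socket.IO listener sketch using python-socketio; the endpoint URL and the assumption that the relayed event is re-emitted under the same "claude_event" name are illustrative guesses, not confirmed by this diff:

    import socketio

    sio = socketio.Client()

    @sio.on("claude_event")
    def on_claude_event(event):
        # Relayed tool event from the Claude Code hooks (field names assumed
        # to follow the normalized schema described below).
        print(event.get("type"), event.get("subtype"), event.get("session_id"))

    sio.connect("http://localhost:8765")  # assumed dashboard address
    sio.wait()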
claude_mpm/services/socketio/event_normalizer.py

@@ -6,7 +6,7 @@ This normalizer ensures all events follow a consistent schema before broadcasting
 providing backward compatibility while establishing a standard format.
 
 DESIGN DECISION: Transform all events to a consistent schema:
-- event: Socket.IO event name (always "
+- event: Socket.IO event name (always "mpm_event")
 - type: Main category (hook, system, session, file, connection)
 - subtype: Specific event type (pre_tool, heartbeat, started, etc.)
 - timestamp: ISO format timestamp

@@ -72,7 +72,7 @@ class NormalizedEvent:
     structure explicit and self-documenting.
     """
 
-    event: str = "
+    event: str = "mpm_event"  # Socket.IO event name
     source: str = ""  # WHERE the event comes from
     type: str = ""  # WHAT category of event
     subtype: str = ""  # Specific event type

@@ -81,6 +81,8 @@ class NormalizedEvent:
     correlation_id: Optional[str] = (
         None  # For correlating related events (e.g., pre_tool/post_tool)
     )
+    session_id: Optional[str] = None  # Session identifier for stream grouping
+    cwd: Optional[str] = None  # Working directory for project identification
 
     def to_dict(self) -> Dict[str, Any]:
         """Convert to dictionary for emission."""

@@ -95,6 +97,12 @@ class NormalizedEvent:
         # Include correlation_id if present
         if self.correlation_id:
             result["correlation_id"] = self.correlation_id
+        # Include session_id if present
+        if self.session_id:
+            result["session_id"] = self.session_id
+        # Include cwd if present
+        if self.cwd:
+            result["cwd"] = self.cwd
         return result
 
 

@@ -113,6 +121,7 @@ class EventNormalizer:
         "pre_response": (EventType.HOOK, "pre_response"),
         "post_response": (EventType.HOOK, "post_response"),
         "hook_event": (EventType.HOOK, "generic"),
+        "hook_execution": (EventType.HOOK, "execution"),  # Hook execution metadata
         "UserPrompt": (EventType.HOOK, "user_prompt"),  # Legacy format
         # Test events (legacy format)
         "TestStart": (EventType.TEST, "start"),

@@ -225,20 +234,32 @@ class EventNormalizer:
         # Get or generate timestamp
         timestamp = self._extract_timestamp(event_data)
 
-        # Extract correlation_id if present
+        # Extract correlation_id, session_id, and cwd if present
         correlation_id = None
+        session_id = None
+        cwd = None
         if isinstance(event_data, dict):
             correlation_id = event_data.get("correlation_id")
+            # Try both naming conventions for session_id
+            session_id = event_data.get("session_id") or event_data.get("sessionId")
+            # Try multiple field names for working directory
+            cwd = (
+                event_data.get("cwd")
+                or event_data.get("working_directory")
+                or event_data.get("workingDirectory")
+            )
 
         # Create normalized event
         normalized = NormalizedEvent(
-            event="
+            event="mpm_event",
             source=event_source,
             type=event_type,
             subtype=subtype,
             timestamp=timestamp,
             data=data,
             correlation_id=correlation_id,
+            session_id=session_id,
+            cwd=cwd,
         )
 
         self.stats["normalized"] += 1

@@ -252,7 +273,7 @@ class EventNormalizer:
 
         # Return a generic event on error
         return NormalizedEvent(
-            event="
+            event="mpm_event",
             source="system",
             type="unknown",
             subtype="error",

@@ -285,8 +306,16 @@ class EventNormalizer:
             # If source is not a valid EventSource value, keep it as-is
             pass
 
+        # Extract session_id and cwd, trying multiple naming conventions
+        session_id = event_data.get("session_id") or event_data.get("sessionId")
+        cwd = (
+            event_data.get("cwd")
+            or event_data.get("working_directory")
+            or event_data.get("workingDirectory")
+        )
+
         return NormalizedEvent(
-            event="
+            event="mpm_event",  # Always use standard event name
             source=source,
             type=event_data.get("type", "unknown"),
             subtype=event_data.get("subtype", "generic"),

@@ -295,6 +324,8 @@ class EventNormalizer:
             ),
             data=event_data.get("data", {}),
             correlation_id=event_data.get("correlation_id"),
+            session_id=session_id,
+            cwd=cwd,
         )
 
     def _extract_event_info(self, event_data: Any) -> Tuple[str, str, Dict[str, Any]]:
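After these normalizer changes, every normalized payload emitted as "mpm_event" can carry session_id and cwd, resolved from several incoming field names, and to_dict() includes them only when they are set. A standalone sketch of that fallback logic, written as a re-implementation for illustration rather than the module's own helper:

    from typing import Any, Dict, Optional

    def extract_stream_fields(event_data: Dict[str, Any]) -> Dict[str, Optional[str]]:
        """Mirror the normalizer's fallbacks for session id and working directory."""
        session_id = event_data.get("session_id") or event_data.get("sessionId")
        cwd = (
            event_data.get("cwd")
            or event_data.get("working_directory")
            or event_data.get("workingDirectory")
        )
        return {"session_id": session_id, "cwd": cwd}

    # Both naming conventions resolve to the same normalized fields.
    raw = {"sessionId": "abc123", "workingDirectory": "/tmp/project", "type": "hook"}
    print(extract_stream_fields(raw))
    # {'session_id': 'abc123', 'cwd': '/tmp/project'}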