claude_mpm-4.25.10-py3-none-any.whl → claude_mpm-5.1.8-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of claude-mpm has been flagged as possibly problematic.
- claude_mpm/VERSION +1 -1
- claude_mpm/agents/BASE_PM.md +12 -0
- claude_mpm/agents/PM_INSTRUCTIONS.md +1055 -2230
- claude_mpm/agents/PM_INSTRUCTIONS_TEACH.md +1322 -0
- claude_mpm/agents/WORKFLOW.md +4 -4
- claude_mpm/agents/__init__.py +6 -0
- claude_mpm/agents/agent_loader.py +1 -4
- claude_mpm/agents/base_agent_loader.py +10 -35
- claude_mpm/agents/templates/{circuit_breakers.md → circuit-breakers.md} +576 -66
- claude_mpm/agents/templates/context-management-examples.md +544 -0
- claude_mpm/agents/templates/pr-workflow-examples.md +427 -0
- claude_mpm/agents/templates/research-gate-examples.md +669 -0
- claude_mpm/agents/templates/structured-questions-examples.md +615 -0
- claude_mpm/agents/templates/ticket-completeness-examples.md +139 -0
- claude_mpm/agents/templates/ticketing-examples.md +277 -0
- claude_mpm/cli/__init__.py +28 -3
- claude_mpm/cli/commands/__init__.py +2 -0
- claude_mpm/cli/commands/agent_source.py +774 -0
- claude_mpm/cli/commands/agent_state_manager.py +188 -30
- claude_mpm/cli/commands/agents.py +959 -36
- claude_mpm/cli/commands/agents_cleanup.py +210 -0
- claude_mpm/cli/commands/agents_discover.py +338 -0
- claude_mpm/cli/commands/auto_configure.py +537 -239
- claude_mpm/cli/commands/config.py +7 -4
- claude_mpm/cli/commands/configure.py +924 -45
- claude_mpm/cli/commands/configure_navigation.py +63 -46
- claude_mpm/cli/commands/doctor.py +10 -2
- claude_mpm/cli/commands/local_deploy.py +1 -4
- claude_mpm/cli/commands/postmortem.py +401 -0
- claude_mpm/cli/commands/run.py +1 -39
- claude_mpm/cli/commands/skill_source.py +694 -0
- claude_mpm/cli/commands/skills.py +322 -19
- claude_mpm/cli/executor.py +22 -3
- claude_mpm/cli/interactive/agent_wizard.py +1028 -43
- claude_mpm/cli/parsers/agent_source_parser.py +171 -0
- claude_mpm/cli/parsers/agents_parser.py +256 -4
- claude_mpm/cli/parsers/auto_configure_parser.py +13 -0
- claude_mpm/cli/parsers/base_parser.py +25 -0
- claude_mpm/cli/parsers/config_parser.py +96 -43
- claude_mpm/cli/parsers/skill_source_parser.py +169 -0
- claude_mpm/cli/parsers/skills_parser.py +7 -0
- claude_mpm/cli/parsers/source_parser.py +138 -0
- claude_mpm/cli/startup.py +456 -103
- claude_mpm/cli/startup_display.py +4 -4
- claude_mpm/commands/{mpm-auto-configure.md → mpm-agents-auto-configure.md} +9 -0
- claude_mpm/commands/mpm-agents-detect.md +9 -0
- claude_mpm/commands/{mpm-agents.md → mpm-agents-list.md} +9 -0
- claude_mpm/commands/mpm-agents-recommend.md +9 -0
- claude_mpm/commands/{mpm-config.md → mpm-config-view.md} +9 -0
- claude_mpm/commands/mpm-doctor.md +9 -0
- claude_mpm/commands/mpm-help.md +14 -2
- claude_mpm/commands/mpm-init.md +9 -0
- claude_mpm/commands/mpm-monitor.md +9 -0
- claude_mpm/commands/mpm-postmortem.md +123 -0
- claude_mpm/commands/{mpm-resume.md → mpm-session-resume.md} +9 -0
- claude_mpm/commands/mpm-status.md +9 -0
- claude_mpm/commands/{mpm-organize.md → mpm-ticket-organize.md} +9 -0
- claude_mpm/commands/mpm-ticket-view.md +552 -0
- claude_mpm/commands/mpm-version.md +9 -0
- claude_mpm/commands/mpm.md +10 -0
- claude_mpm/config/agent_presets.py +488 -0
- claude_mpm/config/agent_sources.py +325 -0
- claude_mpm/config/skill_presets.py +392 -0
- claude_mpm/config/skill_sources.py +590 -0
- claude_mpm/constants.py +1 -0
- claude_mpm/core/claude_runner.py +5 -34
- claude_mpm/core/config.py +16 -0
- claude_mpm/core/framework/__init__.py +3 -16
- claude_mpm/core/framework/loaders/file_loader.py +54 -101
- claude_mpm/core/framework/loaders/instruction_loader.py +25 -5
- claude_mpm/core/interactive_session.py +83 -7
- claude_mpm/core/oneshot_session.py +71 -8
- claude_mpm/core/protocols/__init__.py +23 -0
- claude_mpm/core/protocols/runner_protocol.py +103 -0
- claude_mpm/core/protocols/session_protocol.py +131 -0
- claude_mpm/core/shared/singleton_manager.py +11 -4
- claude_mpm/core/system_context.py +38 -0
- claude_mpm/core/unified_config.py +22 -0
- claude_mpm/experimental/cli_enhancements.py +1 -5
- claude_mpm/hooks/claude_hooks/__pycache__/__init__.cpython-313.pyc +0 -0
- claude_mpm/hooks/claude_hooks/__pycache__/event_handlers.cpython-313.pyc +0 -0
- claude_mpm/hooks/claude_hooks/__pycache__/hook_handler.cpython-313.pyc +0 -0
- claude_mpm/hooks/claude_hooks/__pycache__/memory_integration.cpython-313.pyc +0 -0
- claude_mpm/hooks/claude_hooks/__pycache__/response_tracking.cpython-313.pyc +0 -0
- claude_mpm/hooks/claude_hooks/__pycache__/tool_analysis.cpython-313.pyc +0 -0
- claude_mpm/hooks/claude_hooks/memory_integration.py +12 -1
- claude_mpm/hooks/claude_hooks/services/__pycache__/__init__.cpython-313.pyc +0 -0
- claude_mpm/hooks/claude_hooks/services/__pycache__/connection_manager_http.cpython-313.pyc +0 -0
- claude_mpm/hooks/claude_hooks/services/__pycache__/duplicate_detector.cpython-313.pyc +0 -0
- claude_mpm/hooks/claude_hooks/services/__pycache__/state_manager.cpython-313.pyc +0 -0
- claude_mpm/hooks/claude_hooks/services/__pycache__/subagent_processor.cpython-313.pyc +0 -0
- claude_mpm/hooks/failure_learning/__init__.py +2 -8
- claude_mpm/hooks/failure_learning/failure_detection_hook.py +1 -6
- claude_mpm/hooks/failure_learning/fix_detection_hook.py +1 -6
- claude_mpm/hooks/failure_learning/learning_extraction_hook.py +1 -6
- claude_mpm/hooks/kuzu_response_hook.py +1 -5
- claude_mpm/models/git_repository.py +198 -0
- claude_mpm/services/agents/agent_builder.py +45 -9
- claude_mpm/services/agents/agent_preset_service.py +238 -0
- claude_mpm/services/agents/agent_selection_service.py +484 -0
- claude_mpm/services/agents/auto_deploy_index_parser.py +569 -0
- claude_mpm/services/agents/cache_git_manager.py +621 -0
- claude_mpm/services/agents/deployment/agent_deployment.py +126 -2
- claude_mpm/services/agents/deployment/agent_discovery_service.py +105 -73
- claude_mpm/services/agents/deployment/agent_lifecycle_manager.py +1 -5
- claude_mpm/services/agents/deployment/agent_restore_handler.py +1 -4
- claude_mpm/services/agents/deployment/agent_template_builder.py +236 -15
- claude_mpm/services/agents/deployment/agents_directory_resolver.py +101 -15
- claude_mpm/services/agents/deployment/async_agent_deployment.py +2 -1
- claude_mpm/services/agents/deployment/multi_source_deployment_service.py +115 -15
- claude_mpm/services/agents/deployment/refactored_agent_deployment_service.py +1 -4
- claude_mpm/services/agents/deployment/remote_agent_discovery_service.py +363 -0
- claude_mpm/services/agents/deployment/single_agent_deployer.py +2 -2
- claude_mpm/services/agents/deployment/system_instructions_deployer.py +168 -46
- claude_mpm/services/agents/deployment/validation/deployment_validator.py +2 -2
- claude_mpm/services/agents/git_source_manager.py +629 -0
- claude_mpm/services/agents/loading/framework_agent_loader.py +1 -4
- claude_mpm/services/agents/local_template_manager.py +47 -9
- claude_mpm/services/agents/single_tier_deployment_service.py +696 -0
- claude_mpm/services/agents/sources/__init__.py +13 -0
- claude_mpm/services/agents/sources/agent_sync_state.py +516 -0
- claude_mpm/services/agents/sources/git_source_sync_service.py +1087 -0
- claude_mpm/services/agents/startup_sync.py +239 -0
- claude_mpm/services/agents/toolchain_detector.py +474 -0
- claude_mpm/services/analysis/__init__.py +25 -0
- claude_mpm/services/analysis/postmortem_reporter.py +474 -0
- claude_mpm/services/analysis/postmortem_service.py +765 -0
- claude_mpm/services/command_deployment_service.py +200 -6
- claude_mpm/services/core/base.py +7 -2
- claude_mpm/services/core/interfaces/__init__.py +1 -3
- claude_mpm/services/core/interfaces/health.py +1 -4
- claude_mpm/services/core/models/__init__.py +2 -11
- claude_mpm/services/diagnostics/checks/__init__.py +4 -0
- claude_mpm/services/diagnostics/checks/agent_sources_check.py +577 -0
- claude_mpm/services/diagnostics/checks/mcp_services_check.py +7 -15
- claude_mpm/services/diagnostics/checks/skill_sources_check.py +587 -0
- claude_mpm/services/diagnostics/diagnostic_runner.py +9 -0
- claude_mpm/services/diagnostics/doctor_reporter.py +34 -6
- claude_mpm/services/git/__init__.py +21 -0
- claude_mpm/services/git/git_operations_service.py +494 -0
- claude_mpm/services/github/__init__.py +21 -0
- claude_mpm/services/github/github_cli_service.py +397 -0
- claude_mpm/services/infrastructure/monitoring/__init__.py +1 -5
- claude_mpm/services/infrastructure/monitoring/aggregator.py +1 -6
- claude_mpm/services/instructions/__init__.py +9 -0
- claude_mpm/services/instructions/instruction_cache_service.py +374 -0
- claude_mpm/services/local_ops/__init__.py +3 -13
- claude_mpm/services/local_ops/health_checks/__init__.py +1 -3
- claude_mpm/services/local_ops/health_manager.py +1 -4
- claude_mpm/services/mcp_config_manager.py +75 -145
- claude_mpm/services/mcp_gateway/core/process_pool.py +22 -16
- claude_mpm/services/mcp_gateway/server/mcp_gateway.py +1 -6
- claude_mpm/services/mcp_service_verifier.py +6 -3
- claude_mpm/services/monitor/daemon.py +28 -8
- claude_mpm/services/monitor/daemon_manager.py +96 -19
- claude_mpm/services/pr/__init__.py +14 -0
- claude_mpm/services/pr/pr_template_service.py +329 -0
- claude_mpm/services/project/project_organizer.py +4 -0
- claude_mpm/services/runner_configuration_service.py +16 -3
- claude_mpm/services/session_management_service.py +16 -4
- claude_mpm/services/skills/__init__.py +18 -0
- claude_mpm/services/skills/git_skill_source_manager.py +1169 -0
- claude_mpm/services/skills/skill_discovery_service.py +568 -0
- claude_mpm/services/socketio/server/core.py +1 -4
- claude_mpm/services/socketio/server/main.py +1 -3
- claude_mpm/services/unified/deployment_strategies/vercel.py +1 -5
- claude_mpm/services/unified/unified_deployment.py +1 -5
- claude_mpm/services/visualization/__init__.py +1 -5
- claude_mpm/templates/questions/__init__.py +2 -7
- claude_mpm/templates/questions/pr_strategy.py +1 -4
- claude_mpm/templates/questions/project_init.py +1 -4
- claude_mpm/templates/questions/ticket_mgmt.py +1 -4
- claude_mpm/utils/agent_dependency_loader.py +77 -10
- claude_mpm/utils/agent_filters.py +288 -0
- claude_mpm/utils/gitignore.py +3 -0
- claude_mpm/utils/migration.py +372 -0
- claude_mpm/utils/progress.py +387 -0
- {claude_mpm-4.25.10.dist-info → claude_mpm-5.1.8.dist-info}/METADATA +356 -112
- {claude_mpm-4.25.10.dist-info → claude_mpm-5.1.8.dist-info}/RECORD +188 -439
- claude_mpm/agents/templates/agent-manager.json +0 -273
- claude_mpm/agents/templates/agentic-coder-optimizer.json +0 -248
- claude_mpm/agents/templates/api_qa.json +0 -183
- claude_mpm/agents/templates/clerk-ops.json +0 -235
- claude_mpm/agents/templates/code_analyzer.json +0 -101
- claude_mpm/agents/templates/content-agent.json +0 -358
- claude_mpm/agents/templates/dart_engineer.json +0 -307
- claude_mpm/agents/templates/data_engineer.json +0 -225
- claude_mpm/agents/templates/documentation.json +0 -238
- claude_mpm/agents/templates/engineer.json +0 -210
- claude_mpm/agents/templates/gcp_ops_agent.json +0 -253
- claude_mpm/agents/templates/golang_engineer.json +0 -270
- claude_mpm/agents/templates/imagemagick.json +0 -264
- claude_mpm/agents/templates/java_engineer.json +0 -346
- claude_mpm/agents/templates/javascript_engineer_agent.json +0 -380
- claude_mpm/agents/templates/local_ops_agent.json +0 -1840
- claude_mpm/agents/templates/memory_manager.json +0 -158
- claude_mpm/agents/templates/nextjs_engineer.json +0 -285
- claude_mpm/agents/templates/ops.json +0 -185
- claude_mpm/agents/templates/php-engineer.json +0 -287
- claude_mpm/agents/templates/product_owner.json +0 -338
- claude_mpm/agents/templates/project_organizer.json +0 -144
- claude_mpm/agents/templates/prompt-engineer.json +0 -737
- claude_mpm/agents/templates/python_engineer.json +0 -387
- claude_mpm/agents/templates/qa.json +0 -243
- claude_mpm/agents/templates/react_engineer.json +0 -239
- claude_mpm/agents/templates/refactoring_engineer.json +0 -276
- claude_mpm/agents/templates/research.json +0 -258
- claude_mpm/agents/templates/ruby-engineer.json +0 -280
- claude_mpm/agents/templates/rust_engineer.json +0 -275
- claude_mpm/agents/templates/security.json +0 -202
- claude_mpm/agents/templates/svelte-engineer.json +0 -225
- claude_mpm/agents/templates/tauri_engineer.json +0 -274
- claude_mpm/agents/templates/ticketing.json +0 -181
- claude_mpm/agents/templates/typescript_engineer.json +0 -285
- claude_mpm/agents/templates/vercel_ops_agent.json +0 -412
- claude_mpm/agents/templates/version_control.json +0 -159
- claude_mpm/agents/templates/web_qa.json +0 -400
- claude_mpm/agents/templates/web_ui.json +0 -189
- claude_mpm/cli/README.md +0 -253
- claude_mpm/cli/commands/mcp_install_commands.py.backup +0 -284
- claude_mpm/cli/commands/mpm_init/README.md +0 -365
- claude_mpm/cli_module/refactoring_guide.md +0 -253
- claude_mpm/commands/mpm-tickets.md +0 -151
- claude_mpm/config/agent_capabilities.yaml +0 -658
- claude_mpm/config/async_logging_config.yaml +0 -145
- claude_mpm/core/.claude-mpm/logs/hooks_20250730.log +0 -34
- claude_mpm/d2/.gitignore +0 -22
- claude_mpm/d2/ARCHITECTURE_COMPARISON.md +0 -273
- claude_mpm/d2/FLASK_INTEGRATION.md +0 -156
- claude_mpm/d2/IMPLEMENTATION_SUMMARY.md +0 -452
- claude_mpm/d2/QUICKSTART.md +0 -186
- claude_mpm/d2/README.md +0 -232
- claude_mpm/d2/STORE_FIX_SUMMARY.md +0 -167
- claude_mpm/d2/SVELTE5_STORES_GUIDE.md +0 -180
- claude_mpm/d2/TESTING.md +0 -288
- claude_mpm/d2/index.html +0 -118
- claude_mpm/d2/package.json +0 -19
- claude_mpm/d2/src/App.svelte +0 -110
- claude_mpm/d2/src/components/Header.svelte +0 -153
- claude_mpm/d2/src/components/MainContent.svelte +0 -74
- claude_mpm/d2/src/components/Sidebar.svelte +0 -85
- claude_mpm/d2/src/components/tabs/EventsTab.svelte +0 -326
- claude_mpm/d2/src/lib/socketio.js +0 -144
- claude_mpm/d2/src/main.js +0 -7
- claude_mpm/d2/src/stores/events.js +0 -114
- claude_mpm/d2/src/stores/socket.js +0 -108
- claude_mpm/d2/src/stores/theme.js +0 -65
- claude_mpm/d2/svelte.config.js +0 -12
- claude_mpm/d2/vite.config.js +0 -15
- claude_mpm/dashboard/.claude-mpm/memories/README.md +0 -36
- claude_mpm/dashboard/BUILD_NUMBER +0 -1
- claude_mpm/dashboard/README.md +0 -121
- claude_mpm/dashboard/VERSION +0 -1
- claude_mpm/dashboard/react/components/DataInspector/DataInspector.module.css +0 -188
- claude_mpm/dashboard/react/components/DataInspector/DataInspector.tsx +0 -273
- claude_mpm/dashboard/react/components/ErrorBoundary.tsx +0 -75
- claude_mpm/dashboard/react/components/EventViewer/EventViewer.module.css +0 -156
- claude_mpm/dashboard/react/components/EventViewer/EventViewer.tsx +0 -141
- claude_mpm/dashboard/react/components/shared/ConnectionStatus.module.css +0 -38
- claude_mpm/dashboard/react/components/shared/ConnectionStatus.tsx +0 -36
- claude_mpm/dashboard/react/components/shared/FilterBar.module.css +0 -92
- claude_mpm/dashboard/react/components/shared/FilterBar.tsx +0 -89
- claude_mpm/dashboard/react/contexts/DashboardContext.tsx +0 -215
- claude_mpm/dashboard/react/entries/events.tsx +0 -165
- claude_mpm/dashboard/react/hooks/useEvents.ts +0 -191
- claude_mpm/dashboard/react/hooks/useSocket.ts +0 -225
- claude_mpm/dashboard/static/archive/activity_dashboard_fixed.html +0 -248
- claude_mpm/dashboard/static/built/REFACTORING_SUMMARY.md +0 -170
- claude_mpm/dashboard/static/built/assets/events.DjpNxWNo.css +0 -1
- claude_mpm/dashboard/static/built/components/activity-tree.js +0 -2
- claude_mpm/dashboard/static/built/components/activity-tree.js.map +0 -1
- claude_mpm/dashboard/static/built/components/agent-hierarchy.js +0 -777
- claude_mpm/dashboard/static/built/components/agent-inference.js +0 -2
- claude_mpm/dashboard/static/built/components/agent-inference.js.map +0 -1
- claude_mpm/dashboard/static/built/components/build-tracker.js +0 -333
- claude_mpm/dashboard/static/built/components/code-simple.js +0 -857
- claude_mpm/dashboard/static/built/components/code-tree/tree-breadcrumb.js +0 -353
- claude_mpm/dashboard/static/built/components/code-tree/tree-constants.js +0 -235
- claude_mpm/dashboard/static/built/components/code-tree/tree-search.js +0 -409
- claude_mpm/dashboard/static/built/components/code-tree/tree-utils.js +0 -435
- claude_mpm/dashboard/static/built/components/code-tree.js +0 -2
- claude_mpm/dashboard/static/built/components/code-tree.js.map +0 -1
- claude_mpm/dashboard/static/built/components/code-viewer.js +0 -2
- claude_mpm/dashboard/static/built/components/code-viewer.js.map +0 -1
- claude_mpm/dashboard/static/built/components/connection-debug.js +0 -654
- claude_mpm/dashboard/static/built/components/diff-viewer.js +0 -891
- claude_mpm/dashboard/static/built/components/event-processor.js +0 -2
- claude_mpm/dashboard/static/built/components/event-processor.js.map +0 -1
- claude_mpm/dashboard/static/built/components/event-viewer.js +0 -2
- claude_mpm/dashboard/static/built/components/event-viewer.js.map +0 -1
- claude_mpm/dashboard/static/built/components/export-manager.js +0 -2
- claude_mpm/dashboard/static/built/components/export-manager.js.map +0 -1
- claude_mpm/dashboard/static/built/components/file-change-tracker.js +0 -443
- claude_mpm/dashboard/static/built/components/file-change-viewer.js +0 -690
- claude_mpm/dashboard/static/built/components/file-tool-tracker.js +0 -2
- claude_mpm/dashboard/static/built/components/file-tool-tracker.js.map +0 -1
- claude_mpm/dashboard/static/built/components/file-viewer.js +0 -2
- claude_mpm/dashboard/static/built/components/file-viewer.js.map +0 -1
- claude_mpm/dashboard/static/built/components/hud-library-loader.js +0 -2
- claude_mpm/dashboard/static/built/components/hud-library-loader.js.map +0 -1
- claude_mpm/dashboard/static/built/components/hud-manager.js +0 -2
- claude_mpm/dashboard/static/built/components/hud-manager.js.map +0 -1
- claude_mpm/dashboard/static/built/components/hud-visualizer.js +0 -2
- claude_mpm/dashboard/static/built/components/hud-visualizer.js.map +0 -1
- claude_mpm/dashboard/static/built/components/module-viewer.js +0 -2
- claude_mpm/dashboard/static/built/components/module-viewer.js.map +0 -1
- claude_mpm/dashboard/static/built/components/nav-bar.js +0 -145
- claude_mpm/dashboard/static/built/components/page-structure.js +0 -429
- claude_mpm/dashboard/static/built/components/session-manager.js +0 -2
- claude_mpm/dashboard/static/built/components/session-manager.js.map +0 -1
- claude_mpm/dashboard/static/built/components/socket-manager.js +0 -2
- claude_mpm/dashboard/static/built/components/socket-manager.js.map +0 -1
- claude_mpm/dashboard/static/built/components/ui-state-manager.js +0 -2
- claude_mpm/dashboard/static/built/components/ui-state-manager.js.map +0 -1
- claude_mpm/dashboard/static/built/components/unified-data-viewer.js +0 -2
- claude_mpm/dashboard/static/built/components/unified-data-viewer.js.map +0 -1
- claude_mpm/dashboard/static/built/components/working-directory.js +0 -2
- claude_mpm/dashboard/static/built/components/working-directory.js.map +0 -1
- claude_mpm/dashboard/static/built/connection-manager.js +0 -536
- claude_mpm/dashboard/static/built/dashboard.js +0 -2
- claude_mpm/dashboard/static/built/dashboard.js.map +0 -1
- claude_mpm/dashboard/static/built/extension-error-handler.js +0 -164
- claude_mpm/dashboard/static/built/react/events.js +0 -30
- claude_mpm/dashboard/static/built/react/events.js.map +0 -1
- claude_mpm/dashboard/static/built/shared/dom-helpers.js +0 -396
- claude_mpm/dashboard/static/built/shared/event-bus.js +0 -330
- claude_mpm/dashboard/static/built/shared/event-filter-service.js +0 -540
- claude_mpm/dashboard/static/built/shared/logger.js +0 -385
- claude_mpm/dashboard/static/built/shared/page-structure.js +0 -249
- claude_mpm/dashboard/static/built/shared/tooltip-service.js +0 -253
- claude_mpm/dashboard/static/built/socket-client.js +0 -2
- claude_mpm/dashboard/static/built/socket-client.js.map +0 -1
- claude_mpm/dashboard/static/built/tab-isolation-fix.js +0 -185
- claude_mpm/dashboard/static/events.html +0 -607
- claude_mpm/dashboard/static/index.html +0 -635
- claude_mpm/dashboard/static/js/REFACTORING_SUMMARY.md +0 -170
- claude_mpm/dashboard/static/js/shared/dom-helpers.js +0 -396
- claude_mpm/dashboard/static/js/shared/event-bus.js +0 -330
- claude_mpm/dashboard/static/js/shared/logger.js +0 -385
- claude_mpm/dashboard/static/js/shared/tooltip-service.js +0 -253
- claude_mpm/dashboard/static/js/stores/dashboard-store.js +0 -562
- claude_mpm/dashboard/static/legacy/activity.html +0 -736
- claude_mpm/dashboard/static/legacy/agents.html +0 -786
- claude_mpm/dashboard/static/legacy/files.html +0 -747
- claude_mpm/dashboard/static/legacy/tools.html +0 -831
- claude_mpm/dashboard/static/monitors.html +0 -431
- claude_mpm/dashboard/static/navigation-test-results.md +0 -118
- claude_mpm/dashboard/static/production/events.html +0 -659
- claude_mpm/dashboard/static/production/main.html +0 -698
- claude_mpm/dashboard/static/production/monitors.html +0 -483
- claude_mpm/dashboard/static/test-archive/dashboard.html +0 -635
- claude_mpm/dashboard/static/test-archive/debug-events.html +0 -147
- claude_mpm/dashboard/static/test-archive/test-navigation.html +0 -256
- claude_mpm/dashboard/static/test-archive/test-react-exports.html +0 -180
- claude_mpm/dashboard/templates/.claude-mpm/memories/README.md +0 -36
- claude_mpm/dashboard/templates/.claude-mpm/memories/engineer_agent.md +0 -39
- claude_mpm/dashboard/templates/.claude-mpm/memories/version_control_agent.md +0 -38
- claude_mpm/hooks/README.md +0 -143
- claude_mpm/hooks/templates/README.md +0 -180
- claude_mpm/hooks/templates/settings.json.example +0 -147
- claude_mpm/schemas/agent_schema.json +0 -596
- claude_mpm/schemas/frontmatter_schema.json +0 -165
- claude_mpm/services/event_bus/README.md +0 -244
- claude_mpm/services/events/README.md +0 -303
- claude_mpm/services/framework_claude_md_generator/README.md +0 -119
- claude_mpm/services/mcp_gateway/README.md +0 -185
- claude_mpm/services/socketio/handlers/connection.py.backup +0 -217
- claude_mpm/services/socketio/handlers/hook.py.backup +0 -154
- claude_mpm/services/static/.gitkeep +0 -2
- claude_mpm/services/version_control/VERSION +0 -1
- claude_mpm/skills/bundled/.gitkeep +0 -2
- claude_mpm/skills/bundled/collaboration/brainstorming/SKILL.md +0 -79
- claude_mpm/skills/bundled/collaboration/dispatching-parallel-agents/SKILL.md +0 -178
- claude_mpm/skills/bundled/collaboration/dispatching-parallel-agents/references/agent-prompts.md +0 -577
- claude_mpm/skills/bundled/collaboration/dispatching-parallel-agents/references/coordination-patterns.md +0 -467
- claude_mpm/skills/bundled/collaboration/dispatching-parallel-agents/references/examples.md +0 -537
- claude_mpm/skills/bundled/collaboration/dispatching-parallel-agents/references/troubleshooting.md +0 -730
- claude_mpm/skills/bundled/collaboration/git-worktrees.md +0 -317
- claude_mpm/skills/bundled/collaboration/requesting-code-review/SKILL.md +0 -112
- claude_mpm/skills/bundled/collaboration/requesting-code-review/references/code-reviewer-template.md +0 -146
- claude_mpm/skills/bundled/collaboration/requesting-code-review/references/review-examples.md +0 -412
- claude_mpm/skills/bundled/collaboration/stacked-prs.md +0 -251
- claude_mpm/skills/bundled/collaboration/writing-plans/SKILL.md +0 -81
- claude_mpm/skills/bundled/collaboration/writing-plans/references/best-practices.md +0 -362
- claude_mpm/skills/bundled/collaboration/writing-plans/references/plan-structure-templates.md +0 -312
- claude_mpm/skills/bundled/debugging/root-cause-tracing/SKILL.md +0 -152
- claude_mpm/skills/bundled/debugging/root-cause-tracing/find-polluter.sh +0 -63
- claude_mpm/skills/bundled/debugging/root-cause-tracing/references/advanced-techniques.md +0 -668
- claude_mpm/skills/bundled/debugging/root-cause-tracing/references/examples.md +0 -587
- claude_mpm/skills/bundled/debugging/root-cause-tracing/references/integration.md +0 -438
- claude_mpm/skills/bundled/debugging/root-cause-tracing/references/tracing-techniques.md +0 -391
- claude_mpm/skills/bundled/debugging/systematic-debugging/CREATION-LOG.md +0 -119
- claude_mpm/skills/bundled/debugging/systematic-debugging/SKILL.md +0 -148
- claude_mpm/skills/bundled/debugging/systematic-debugging/references/anti-patterns.md +0 -483
- claude_mpm/skills/bundled/debugging/systematic-debugging/references/examples.md +0 -452
- claude_mpm/skills/bundled/debugging/systematic-debugging/references/troubleshooting.md +0 -449
- claude_mpm/skills/bundled/debugging/systematic-debugging/references/workflow.md +0 -411
- claude_mpm/skills/bundled/debugging/systematic-debugging/test-academic.md +0 -14
- claude_mpm/skills/bundled/debugging/systematic-debugging/test-pressure-1.md +0 -58
- claude_mpm/skills/bundled/debugging/systematic-debugging/test-pressure-2.md +0 -68
- claude_mpm/skills/bundled/debugging/systematic-debugging/test-pressure-3.md +0 -69
- claude_mpm/skills/bundled/debugging/verification-before-completion/SKILL.md +0 -131
- claude_mpm/skills/bundled/debugging/verification-before-completion/references/gate-function.md +0 -325
- claude_mpm/skills/bundled/debugging/verification-before-completion/references/integration-and-workflows.md +0 -490
- claude_mpm/skills/bundled/debugging/verification-before-completion/references/red-flags-and-failures.md +0 -425
- claude_mpm/skills/bundled/debugging/verification-before-completion/references/verification-patterns.md +0 -499
- claude_mpm/skills/bundled/infrastructure/env-manager/INTEGRATION.md +0 -611
- claude_mpm/skills/bundled/infrastructure/env-manager/README.md +0 -596
- claude_mpm/skills/bundled/infrastructure/env-manager/SKILL.md +0 -260
- claude_mpm/skills/bundled/infrastructure/env-manager/examples/nextjs-env-structure.md +0 -315
- claude_mpm/skills/bundled/infrastructure/env-manager/references/frameworks.md +0 -436
- claude_mpm/skills/bundled/infrastructure/env-manager/references/security.md +0 -433
- claude_mpm/skills/bundled/infrastructure/env-manager/references/synchronization.md +0 -452
- claude_mpm/skills/bundled/infrastructure/env-manager/references/troubleshooting.md +0 -404
- claude_mpm/skills/bundled/infrastructure/env-manager/references/validation.md +0 -420
- claude_mpm/skills/bundled/main/artifacts-builder/LICENSE.txt +0 -202
- claude_mpm/skills/bundled/main/artifacts-builder/SKILL.md +0 -86
- claude_mpm/skills/bundled/main/artifacts-builder/scripts/bundle-artifact.sh +0 -54
- claude_mpm/skills/bundled/main/artifacts-builder/scripts/init-artifact.sh +0 -322
- claude_mpm/skills/bundled/main/artifacts-builder/scripts/shadcn-components.tar.gz +0 -0
- claude_mpm/skills/bundled/main/internal-comms/LICENSE.txt +0 -202
- claude_mpm/skills/bundled/main/internal-comms/SKILL.md +0 -43
- claude_mpm/skills/bundled/main/internal-comms/examples/3p-updates.md +0 -47
- claude_mpm/skills/bundled/main/internal-comms/examples/company-newsletter.md +0 -65
- claude_mpm/skills/bundled/main/internal-comms/examples/faq-answers.md +0 -30
- claude_mpm/skills/bundled/main/internal-comms/examples/general-comms.md +0 -16
- claude_mpm/skills/bundled/main/mcp-builder/LICENSE.txt +0 -202
- claude_mpm/skills/bundled/main/mcp-builder/SKILL.md +0 -160
- claude_mpm/skills/bundled/main/mcp-builder/reference/design_principles.md +0 -412
- claude_mpm/skills/bundled/main/mcp-builder/reference/evaluation.md +0 -602
- claude_mpm/skills/bundled/main/mcp-builder/reference/mcp_best_practices.md +0 -915
- claude_mpm/skills/bundled/main/mcp-builder/reference/node_mcp_server.md +0 -916
- claude_mpm/skills/bundled/main/mcp-builder/reference/python_mcp_server.md +0 -752
- claude_mpm/skills/bundled/main/mcp-builder/reference/workflow.md +0 -1237
- claude_mpm/skills/bundled/main/mcp-builder/scripts/example_evaluation.xml +0 -22
- claude_mpm/skills/bundled/main/mcp-builder/scripts/requirements.txt +0 -2
- claude_mpm/skills/bundled/main/skill-creator/LICENSE.txt +0 -202
- claude_mpm/skills/bundled/main/skill-creator/SKILL.md +0 -189
- claude_mpm/skills/bundled/main/skill-creator/references/best-practices.md +0 -500
- claude_mpm/skills/bundled/main/skill-creator/references/creation-workflow.md +0 -464
- claude_mpm/skills/bundled/main/skill-creator/references/examples.md +0 -619
- claude_mpm/skills/bundled/main/skill-creator/references/progressive-disclosure.md +0 -437
- claude_mpm/skills/bundled/main/skill-creator/references/skill-structure.md +0 -231
- claude_mpm/skills/bundled/php/espocrm-development/SKILL.md +0 -170
- claude_mpm/skills/bundled/php/espocrm-development/references/architecture.md +0 -602
- claude_mpm/skills/bundled/php/espocrm-development/references/common-tasks.md +0 -821
- claude_mpm/skills/bundled/php/espocrm-development/references/development-workflow.md +0 -742
- claude_mpm/skills/bundled/php/espocrm-development/references/frontend-customization.md +0 -726
- claude_mpm/skills/bundled/php/espocrm-development/references/hooks-and-services.md +0 -764
- claude_mpm/skills/bundled/php/espocrm-development/references/testing-debugging.md +0 -831
- claude_mpm/skills/bundled/react/flexlayout-react.md +0 -742
- claude_mpm/skills/bundled/rust/desktop-applications/SKILL.md +0 -226
- claude_mpm/skills/bundled/rust/desktop-applications/references/architecture-patterns.md +0 -901
- claude_mpm/skills/bundled/rust/desktop-applications/references/native-gui-frameworks.md +0 -901
- claude_mpm/skills/bundled/rust/desktop-applications/references/platform-integration.md +0 -775
- claude_mpm/skills/bundled/rust/desktop-applications/references/state-management.md +0 -937
- claude_mpm/skills/bundled/rust/desktop-applications/references/tauri-framework.md +0 -770
- claude_mpm/skills/bundled/rust/desktop-applications/references/testing-deployment.md +0 -961
- claude_mpm/skills/bundled/tauri/tauri-async-patterns.md +0 -495
- claude_mpm/skills/bundled/tauri/tauri-build-deploy.md +0 -599
- claude_mpm/skills/bundled/tauri/tauri-command-patterns.md +0 -535
- claude_mpm/skills/bundled/tauri/tauri-error-handling.md +0 -613
- claude_mpm/skills/bundled/tauri/tauri-event-system.md +0 -648
- claude_mpm/skills/bundled/tauri/tauri-file-system.md +0 -673
- claude_mpm/skills/bundled/tauri/tauri-frontend-integration.md +0 -767
- claude_mpm/skills/bundled/tauri/tauri-performance.md +0 -669
- claude_mpm/skills/bundled/tauri/tauri-state-management.md +0 -573
- claude_mpm/skills/bundled/tauri/tauri-testing.md +0 -384
- claude_mpm/skills/bundled/tauri/tauri-window-management.md +0 -628
- claude_mpm/skills/bundled/testing/condition-based-waiting/SKILL.md +0 -119
- claude_mpm/skills/bundled/testing/condition-based-waiting/example.ts +0 -158
- claude_mpm/skills/bundled/testing/condition-based-waiting/references/patterns-and-implementation.md +0 -253
- claude_mpm/skills/bundled/testing/test-driven-development/SKILL.md +0 -145
- claude_mpm/skills/bundled/testing/test-driven-development/references/anti-patterns.md +0 -543
- claude_mpm/skills/bundled/testing/test-driven-development/references/examples.md +0 -741
- claude_mpm/skills/bundled/testing/test-driven-development/references/integration.md +0 -470
- claude_mpm/skills/bundled/testing/test-driven-development/references/philosophy.md +0 -458
- claude_mpm/skills/bundled/testing/test-driven-development/references/workflow.md +0 -639
- claude_mpm/skills/bundled/testing/test-quality-inspector/SKILL.md +0 -458
- claude_mpm/skills/bundled/testing/test-quality-inspector/examples/example-inspection-report.md +0 -411
- claude_mpm/skills/bundled/testing/test-quality-inspector/references/assertion-quality.md +0 -317
- claude_mpm/skills/bundled/testing/test-quality-inspector/references/inspection-checklist.md +0 -270
- claude_mpm/skills/bundled/testing/test-quality-inspector/references/red-flags.md +0 -436
- claude_mpm/skills/bundled/testing/testing-anti-patterns/SKILL.md +0 -140
- claude_mpm/skills/bundled/testing/testing-anti-patterns/references/completeness-anti-patterns.md +0 -572
- claude_mpm/skills/bundled/testing/testing-anti-patterns/references/core-anti-patterns.md +0 -411
- claude_mpm/skills/bundled/testing/testing-anti-patterns/references/detection-guide.md +0 -569
- claude_mpm/skills/bundled/testing/testing-anti-patterns/references/tdd-connection.md +0 -695
- claude_mpm/skills/bundled/testing/webapp-testing/LICENSE.txt +0 -202
- claude_mpm/skills/bundled/testing/webapp-testing/SKILL.md +0 -184
- claude_mpm/skills/bundled/testing/webapp-testing/decision-tree.md +0 -459
- claude_mpm/skills/bundled/testing/webapp-testing/playwright-patterns.md +0 -479
- claude_mpm/skills/bundled/testing/webapp-testing/reconnaissance-pattern.md +0 -687
- claude_mpm/skills/bundled/testing/webapp-testing/server-management.md +0 -758
- claude_mpm/skills/bundled/testing/webapp-testing/troubleshooting.md +0 -868
- claude_mpm/templates/questions/EXAMPLES.md +0 -501
- claude_mpm/tools/README_SOCKETIO_DEBUG.md +0 -224
- claude_mpm/tools/code_tree_analyzer/README.md +0 -64
- /claude_mpm/agents/templates/{git_file_tracking.md → git-file-tracking.md} +0 -0
- /claude_mpm/agents/templates/{pm_examples.md → pm-examples.md} +0 -0
- /claude_mpm/agents/templates/{pm_red_flags.md → pm-red-flags.md} +0 -0
- /claude_mpm/agents/templates/{response_format.md → response-format.md} +0 -0
- /claude_mpm/agents/templates/{validation_templates.md → validation-templates.md} +0 -0
- {claude_mpm-4.25.10.dist-info → claude_mpm-5.1.8.dist-info}/WHEEL +0 -0
- {claude_mpm-4.25.10.dist-info → claude_mpm-5.1.8.dist-info}/entry_points.txt +0 -0
- {claude_mpm-4.25.10.dist-info → claude_mpm-5.1.8.dist-info}/licenses/LICENSE +0 -0
- {claude_mpm-4.25.10.dist-info → claude_mpm-5.1.8.dist-info}/top_level.txt +0 -0
claude_mpm/services/skills/git_skill_source_manager.py (new file)

@@ -0,0 +1,1169 @@

```python
"""Git source manager for multi-repository skill sync and discovery.

This module manages multiple Git-based skill sources with priority resolution.
It orchestrates syncing, caching, and discovery of skills from multiple repositories,
applying priority-based conflict resolution when skills have the same ID.

Design Decision: Reuse GitSourceSyncService for all Git operations

Rationale: The GitSourceSyncService provides robust ETag-based caching and
incremental updates for Git repositories. Rather than duplicating this logic,
we compose it and adapt for skills-specific discovery.

Trade-offs:
- Code Reuse: Leverage proven sync infrastructure
- Maintainability: Single source of truth for Git operations
- Flexibility: Easy to extend with skills-specific features
"""

from concurrent.futures import ThreadPoolExecutor, as_completed
from datetime import datetime, timezone
from pathlib import Path
from threading import Lock
from typing import Any, Dict, List, Optional, Tuple

from claude_mpm.config.skill_sources import SkillSource, SkillSourceConfiguration
from claude_mpm.core.logging_config import get_logger
from claude_mpm.services.agents.sources.git_source_sync_service import (
    GitSourceSyncService,
)
from claude_mpm.services.skills.skill_discovery_service import SkillDiscoveryService

logger = get_logger(__name__)


class GitSkillSourceManager:
    """Manages multiple Git-based skill sources with priority resolution.

    Responsibilities:
    - Coordinate syncing of multiple skill repositories
    - Apply priority-based resolution for duplicate skills
    - Provide unified catalog of available skills
    - Handle caching and updates

    Priority Resolution:
    - Lower priority number = higher precedence
    - Priority 0 reserved for system repository
    - Skills with same ID: lowest priority wins

    Design Pattern: Orchestrator with Dependency Injection

    This class orchestrates multiple services (sync, discovery) without
    reimplementing their logic. Services can be injected for testing.

    Example:
        >>> config = SkillSourceConfiguration()
        >>> manager = GitSkillSourceManager(config)
        >>> results = manager.sync_all_sources()
        >>> skills = manager.get_all_skills()
    """

    def __init__(
        self,
        config: SkillSourceConfiguration,
        cache_dir: Optional[Path] = None,
        sync_service: Optional[GitSourceSyncService] = None,
    ):
        """Initialize skill source manager.

        Args:
            config: Skill source configuration
            cache_dir: Cache directory (defaults to ~/.claude-mpm/cache/skills/)
            sync_service: Git sync service (injected for testing)
        """
        if cache_dir is None:
            cache_dir = Path.home() / ".claude-mpm" / "cache" / "skills"

        self.config = config
        self.cache_dir = cache_dir
        self.cache_dir.mkdir(parents=True, exist_ok=True)
        self.sync_service = sync_service  # Use injected if provided
        self.logger = get_logger(__name__)
        self._etag_cache_lock = Lock()  # Thread-safe ETag cache operations

        self.logger.info(
            f"GitSkillSourceManager initialized with cache: {self.cache_dir}"
        )

    def sync_all_sources(
        self, force: bool = False, progress_callback=None
    ) -> Dict[str, Any]:
        """Sync all enabled skill sources.

        Syncs sources in priority order (lower priority first). Individual
        failures don't stop overall sync.

        Args:
            force: Force re-download even if cached
            progress_callback: Optional callback(increment: int) called for each file synced

        Returns:
            Dict with sync results for each source:
            {
                "synced_count": int,
                "failed_count": int,
                "total_files_updated": int,
                "total_files_cached": int,
                "sources": {
                    "source_id": {
                        "synced": bool,
                        "files_updated": int,
                        "skills_discovered": int,
                        "error": str (if failed)
                    }
                },
                "timestamp": str
            }

        Example:
            >>> manager = GitSkillSourceManager(config)
            >>> results = manager.sync_all_sources()
            >>> print(f"Synced {results['synced_count']} sources")
        """
        sources = self.config.get_enabled_sources()
        self.logger.info(f"Syncing {len(sources)} enabled skill sources")

        results = {
            "synced_count": 0,
            "failed_count": 0,
            "total_files_updated": 0,
            "total_files_cached": 0,
            "sources": {},
            "timestamp": datetime.now(timezone.utc).isoformat(),
        }

        for source in sources:
            try:
                result = self.sync_source(
                    source.id, force=force, progress_callback=progress_callback
                )
                results["sources"][source.id] = result

                if result.get("synced"):
                    results["synced_count"] += 1
                    results["total_files_updated"] += result.get("files_updated", 0)
                    results["total_files_cached"] += result.get("files_cached", 0)
                else:
                    results["failed_count"] += 1

            except Exception as e:
                self.logger.error(f"Exception syncing source {source.id}: {e}")
                results["sources"][source.id] = {"synced": False, "error": str(e)}
                results["failed_count"] += 1

        self.logger.info(
            f"Sync complete: {results['synced_count']} succeeded, "
            f"{results['failed_count']} failed"
        )

        return results
```
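For callers outside the class, the aggregate shape documented above is enough to drive reporting without importing claude-mpm at all. The snippet below is a minimal, illustrative sketch: the `results` literal is hypothetical data written in the documented shape, not output from a real sync.

```python
# Illustrative only: a hand-written results dict in the shape documented by
# sync_all_sources(); the values here are hypothetical, not real sync output.
results = {
    "synced_count": 2,
    "failed_count": 1,
    "total_files_updated": 14,
    "total_files_cached": 250,
    "sources": {
        "system": {"synced": True, "files_updated": 10, "skills_discovered": 120},
        "custom": {"synced": True, "files_updated": 4, "skills_discovered": 32},
        "broken": {"synced": False, "error": "Rate limit exceeded"},
    },
    "timestamp": "2025-01-01T00:00:00+00:00",
}

# Summarize per-source outcomes the way a CLI wrapper might.
for source_id, info in results["sources"].items():
    status = "ok" if info.get("synced") else f"failed: {info.get('error')}"
    print(f"{source_id}: {status}")
print(
    f"{results['synced_count']} synced, {results['failed_count']} failed, "
    f"{results['total_files_updated']} files updated"
)
```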
```python
    def sync_source(
        self, source_id: str, force: bool = False, progress_callback=None
    ) -> Dict[str, Any]:
        """Sync a specific skill source.

        Design Decision: Recursive GitHub directory download for skills

        Rationale: Skills use nested directory structures (e.g., universal/collaboration/SKILL.md)
        unlike agents which are flat .md files. We need to recursively download the entire
        repository structure to discover all SKILL.md files.

        Approach: Use GitHub API to recursively discover all files, then download each via
        raw.githubusercontent.com with ETag caching for efficiency.

        Args:
            source_id: ID of source to sync
            force: Force re-download
            progress_callback: Optional callback(increment: int) called for each file synced

        Returns:
            Sync result dict:
            {
                "synced": bool,
                "files_updated": int,
                "files_cached": int,
                "skills_discovered": int,
                "timestamp": str,
                "error": str (if failed)
            }

        Raises:
            ValueError: If source_id not found

        Example:
            >>> manager = GitSkillSourceManager(config)
            >>> result = manager.sync_source("system")
            >>> print(f"Updated {result['files_updated']} files")
        """
        source = self.config.get_source(source_id)
        if not source:
            raise ValueError(f"Source not found: {source_id}")

        if not source.enabled:
            self.logger.warning(f"Source is disabled: {source_id}")
            return {"synced": False, "error": "Source is disabled"}

        self.logger.info(f"Syncing skill source: {source_id} ({source.url})")

        try:
            # Determine cache path for this source
            cache_path = self._get_source_cache_path(source)
            cache_path.mkdir(parents=True, exist_ok=True)

            # Recursively sync repository structure
            files_updated, files_cached = self._recursive_sync_repository(
                source, cache_path, force, progress_callback
            )

            # Discover skills in cache
            discovery_service = SkillDiscoveryService(cache_path)
            discovered_skills = discovery_service.discover_skills()

            # Build result
            result = {
                "synced": True,
                "files_updated": files_updated,
                "files_cached": files_cached,
                "skills_discovered": len(discovered_skills),
                "timestamp": datetime.now(timezone.utc).isoformat(),
            }

            self.logger.info(
                f"Sync complete for {source_id}: {result['files_updated']} updated, "
                f"{result['skills_discovered']} skills discovered"
            )

            return result

        except Exception as e:
            self.logger.error(f"Failed to sync source {source_id}: {e}")
            return {
                "synced": False,
                "error": str(e),
                "timestamp": datetime.now(timezone.utc).isoformat(),
            }
```
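Note the split error model documented above: an unknown `source_id` raises `ValueError`, while a sync that fails for other reasons comes back as a dict with `"synced": False`. A caller therefore needs both an exception handler and a result check. A minimal sketch follows; the stub manager and its error strings are hypothetical stand-ins so the example runs without claude-mpm installed.

```python
# Sketch of caller-side handling for the two failure modes described above.
# "_StubManager" is a hypothetical stand-in for GitSkillSourceManager.
class _StubManager:
    def sync_source(self, source_id: str) -> dict:
        if source_id == "missing":
            raise ValueError(f"Source not found: {source_id}")
        if source_id == "flaky":
            return {"synced": False, "error": "Rate limit exceeded"}
        return {"synced": True, "files_updated": 3, "skills_discovered": 12}


def sync_or_report(manager, source_id: str) -> bool:
    try:
        result = manager.sync_source(source_id)
    except ValueError as exc:  # unknown source id: a configuration error
        print(f"config error: {exc}")
        return False
    if not result.get("synced"):  # known source, but the sync itself failed
        print(f"{source_id} failed: {result.get('error')}")
        return False
    print(f"{source_id}: {result['files_updated']} files updated")
    return True


for sid in ("system", "flaky", "missing"):
    sync_or_report(_StubManager(), sid)
```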
```python
    def get_all_skills(self) -> List[Dict[str, Any]]:
        """Get all skills from all sources with priority resolution.

        Returns:
            List of resolved skill dicts, each containing:
            {
                "skill_id": str,
                "name": str,
                "description": str,
                "version": str,
                "tags": List[str],
                "agent_types": List[str],
                "content": str,
                "source_id": str,
                "source_priority": int,
                "source_file": str
            }

        Priority Resolution Algorithm:
        1. Load skills from all enabled sources
        2. Group by skill ID (name converted to ID)
        3. For each group, select skill with lowest priority
        4. Return deduplicated skill list

        Example:
            >>> manager = GitSkillSourceManager(config)
            >>> skills = manager.get_all_skills()
            >>> for skill in skills:
            ...     print(f"{skill['name']} from {skill['source_id']}")
        """
        sources = self.config.get_enabled_sources()

        if not sources:
            self.logger.warning("No enabled sources found")
            return []

        # Collect skills from all sources
        skills_by_source = {}

        for source in sources:
            try:
                cache_path = self._get_source_cache_path(source)
                if not cache_path.exists():
                    self.logger.debug(f"Cache not found for source: {source.id}")
                    continue

                discovery_service = SkillDiscoveryService(cache_path)
                source_skills = discovery_service.discover_skills()

                # Tag skills with source metadata
                for skill in source_skills:
                    skill["source_id"] = source.id
                    skill["source_priority"] = source.priority

                skills_by_source[source.id] = source_skills

            except Exception as e:
                self.logger.warning(f"Failed to discover skills from {source.id}: {e}")
                continue

        # Apply priority resolution
        resolved_skills = self._apply_priority_resolution(skills_by_source)

        self.logger.info(
            f"Discovered {len(resolved_skills)} skills from {len(skills_by_source)} sources"
        )

        return resolved_skills
```
```python
    def get_skills_by_source(self, source_id: str) -> List[Dict[str, Any]]:
        """Get skills from a specific source.

        Args:
            source_id: ID of source to query

        Returns:
            List of skill dicts from that source

        Example:
            >>> manager = GitSkillSourceManager(config)
            >>> skills = manager.get_skills_by_source("system")
            >>> print(f"Found {len(skills)} system skills")
        """
        source = self.config.get_source(source_id)
        if not source:
            self.logger.warning(f"Source not found: {source_id}")
            return []

        cache_path = self._get_source_cache_path(source)
        if not cache_path.exists():
            self.logger.debug(f"Cache not found for source: {source_id}")
            return []

        try:
            discovery_service = SkillDiscoveryService(cache_path)
            skills = discovery_service.discover_skills()

            # Tag with source metadata
            for skill in skills:
                skill["source_id"] = source.id
                skill["source_priority"] = source.priority

            return skills

        except Exception as e:
            self.logger.error(f"Failed to discover skills from {source_id}: {e}")
            return []
```
```python
    def _apply_priority_resolution(
        self, skills_by_source: Dict[str, List[Dict[str, Any]]]
    ) -> List[Dict[str, Any]]:
        """Apply priority resolution to skill list.

        Args:
            skills_by_source: Dict mapping source_id to skill list

        Returns:
            Deduplicated skill list with priority resolution applied

        Resolution Strategy:
        - Group skills by skill_id
        - For each group, select skill from source with lowest priority
        - If multiple skills have same priority, use first encountered

        Example:
            skills_by_source = {
                "system": [{"skill_id": "review", "source_priority": 0}],
                "custom": [{"skill_id": "review", "source_priority": 100}]
            }
            # Returns: skill from "system" (priority 0 < 100)
        """
        # Flatten skills from all sources
        all_skills = []
        for skills in skills_by_source.values():
            all_skills.extend(skills)

        if not all_skills:
            return []

        # Group by skill_id
        skills_by_id: Dict[str, List[Dict[str, Any]]] = {}
        for skill in all_skills:
            skill_id = skill.get("skill_id", skill.get("name", "unknown"))
            if skill_id not in skills_by_id:
                skills_by_id[skill_id] = []
            skills_by_id[skill_id].append(skill)

        # Select skill with lowest priority for each group
        resolved_skills = []
        for skill_id, skill_group in skills_by_id.items():
            # Sort by priority (ascending), take first
            skill_group_sorted = sorted(
                skill_group, key=lambda s: s.get("source_priority", 999)
            )
            selected_skill = skill_group_sorted[0]

            # Log if multiple versions exist
            if len(skill_group) > 1:
                sources = [s.get("source_id") for s in skill_group]
                self.logger.debug(
                    f"Skill '{skill_id}' found in multiple sources {sources}, "
                    f"using source '{selected_skill.get('source_id')}'"
                )

            resolved_skills.append(selected_skill)

        return resolved_skills
```
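The lowest-priority-wins rule above is easy to check in isolation. Here is a minimal standalone rerun of the same grouping-and-selection step on made-up skill dicts (the IDs and priorities are hypothetical examples, mirroring the docstring above):

```python
# Standalone re-run of the grouping/selection logic shown above, applied to
# hypothetical skill dicts; "system" (priority 0) should win for "review".
skills = [
    {"skill_id": "review", "source_id": "custom", "source_priority": 100},
    {"skill_id": "review", "source_id": "system", "source_priority": 0},
    {"skill_id": "brainstorming", "source_id": "custom", "source_priority": 100},
]

by_id = {}
for skill in skills:
    by_id.setdefault(skill["skill_id"], []).append(skill)

# min() with a key is equivalent to sorted()[0] here: ties keep the first-seen entry.
resolved = [min(group, key=lambda s: s.get("source_priority", 999)) for group in by_id.values()]

for skill in resolved:
    print(skill["skill_id"], "<-", skill["source_id"])
# review <- system
# brainstorming <- custom
```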
```python
    def _recursive_sync_repository(
        self,
        source: SkillSource,
        cache_path: Path,
        force: bool = False,
        progress_callback=None,
    ) -> Tuple[int, int]:
        """Recursively sync entire GitHub repository structure to cache.

        Design Decision: Two-phase sync architecture (Phase 2 refactoring)

        Rationale: Separates syncing (to cache) from deployment (to project).
        Phase 1: Download ALL repository files to cache with Git Tree API
        Phase 2: Deploy selected skills from cache to project-specific locations

        This refactoring follows the agent sync pattern (git_source_sync_service.py)
        with cache-first architecture for multi-project support.

        Trade-offs:
        - Storage: 2x disk usage (cache + deployments) vs. direct deployment
        - Performance: Copy operation adds ~10ms, but enables offline deployment
        - Flexibility: Multiple projects can deploy from single cache
        - Isolation: Projects have independent skill sets from shared cache

        Args:
            source: SkillSource configuration
            cache_path: Local cache directory (structure preserved)
            force: Force re-download even if ETag cached
            progress_callback: Optional callback(absolute_position: int) for progress tracking

        Returns:
            Tuple of (files_updated, files_cached)

        Algorithm:
        1. Parse GitHub URL to extract owner/repo
        2. Discover ALL files via Git Tree API (recursive=1, single request)
        3. Filter for relevant files (.md, .json, .gitignore)
        4. Download each file to cache with ETag caching
        5. Call progress_callback with ABSOLUTE position (not increment)
        6. Preserve nested directory structure in cache

        Error Handling:
        - Invalid GitHub URL: Raises ValueError
        - Tree API failure: Returns 0, 0 (logged as warning)
        - Individual file failures: Logged but don't stop sync
        """
        # Parse GitHub URL
        url_parts = source.url.rstrip("/").replace(".git", "").split("github.com/")
        if len(url_parts) != 2:
            raise ValueError(f"Invalid GitHub URL: {source.url}")

        repo_path = url_parts[1].strip("/")
        owner_repo = "/".join(repo_path.split("/")[:2])

        # Step 1: Discover all files via GitHub Tree API (single request)
        # This discovers the COMPLETE repository structure (272 files for skills)
        all_files = self._discover_repository_files_via_tree_api(
            owner_repo, source.branch
        )

        if not all_files:
            self.logger.warning(f"No files discovered in repository: {source.url}")
            return 0, 0

        self.logger.info(
            f"Discovered {len(all_files)} files in {owner_repo}/{source.branch} via Tree API"
        )

        # Step 2: Filter to only download relevant files (markdown, JSON metadata)
        relevant_files = [
            f
            for f in all_files
            if f.endswith(".md") or f.endswith(".json") or f == ".gitignore"
        ]

        self.logger.info(
            f"Filtered to {len(relevant_files)} relevant files (.md, .json, .gitignore)"
        )

        # Step 3: Download files to cache with ETag caching (parallel)
        files_updated = 0
        files_cached = 0

        # Use ThreadPoolExecutor for parallel downloads (10 workers for optimal performance)
        # Trade-off: 10 workers balances speed (306 files in ~3-5s) vs. GitHub rate limits
        with ThreadPoolExecutor(max_workers=10) as executor:
            # Submit all download tasks
            future_to_file = {}
            for file_path in relevant_files:
                raw_url = f"https://raw.githubusercontent.com/{owner_repo}/{source.branch}/{file_path}"
                cache_file = cache_path / file_path
                future = executor.submit(
                    self._download_file_with_etag, raw_url, cache_file, force
                )
                future_to_file[future] = file_path

            # Process completed downloads as they finish
            completed = 0
            for future in as_completed(future_to_file):
                completed += 1
                try:
                    updated = future.result()
                    if updated:
                        files_updated += 1
                    else:
                        files_cached += 1
                except Exception as e:
                    file_path = future_to_file[future]
                    self.logger.warning(f"Failed to download {file_path}: {e}")

                # Call progress callback with ABSOLUTE position
                if progress_callback:
                    progress_callback(completed)

        self.logger.info(
            f"Repository sync complete: {files_updated} updated, "
            f"{files_cached} cached from {len(relevant_files)} files"
        )
        return files_updated, files_cached
```
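`_download_file_with_etag` is referenced above but not included in this portion of the hunk. The ETag caching it names is the standard HTTP conditional request: send the stored ETag in `If-None-Match` and treat HTTP 304 as "cache still valid". The sketch below illustrates that general pattern only; the function name, cache index layout, and return convention are assumptions, not the module's actual implementation.

```python
import json
from pathlib import Path

import requests


def conditional_download(url: str, dest: Path, etag_index: Path) -> bool:
    """Download url into dest unless the server says the cached copy is current.

    Returns True if the file was (re)written, False if the cached copy was reused.
    Generic sketch of ETag-based caching, not claude-mpm's internal code.
    """
    etags = json.loads(etag_index.read_text()) if etag_index.exists() else {}
    headers = {}
    if url in etags and dest.exists():
        headers["If-None-Match"] = etags[url]  # ask the server for 304 if unchanged

    response = requests.get(url, headers=headers, timeout=30)
    if response.status_code == 304:  # cached copy is still valid
        return False
    response.raise_for_status()

    dest.parent.mkdir(parents=True, exist_ok=True)
    dest.write_bytes(response.content)
    if "ETag" in response.headers:  # remember the validator for next time
        etags[url] = response.headers["ETag"]
        etag_index.write_text(json.dumps(etags))
    return True
```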
    def _discover_repository_files_via_tree_api(
        self, owner_repo: str, branch: str
    ) -> List[str]:
        """Discover all files in repository using GitHub Git Tree API.

        Design Decision: Two-step Tree API pattern (Phase 2 refactoring)

        Rationale: Git Tree API with recursive=1 discovers entire repository
        structure in a SINGLE request, solving the "limited file discovery" issue.
        This is the same pattern used successfully in agent sync (Phase 1).

        Previous Issue: Contents API only showed top-level files, missing nested
        directories. This caused skills sync to discover only 1-2 files instead
        of 272 files in the repository.

        Trade-offs:
        - Performance: Single API call vs. 50+ recursive Contents API calls
        - Rate Limiting: 1 request vs. dozens (avoids 403 rate limit errors)
        - Discovery: Finds ALL 272 files in nested structure
        - API Complexity: Requires commit SHA lookup before tree fetch

        Algorithm (matches agents pattern from git_source_sync_service.py):
        1. GET /repos/{owner}/{repo}/git/refs/heads/{branch} → commit SHA
        2. GET /repos/{owner}/{repo}/git/trees/{sha}?recursive=1 → all files
        3. Filter for blobs (files), exclude trees (directories)
        4. Return complete file list

        Args:
            owner_repo: GitHub owner/repo (e.g., "bobmatnyc/claude-mpm-skills")
            branch: Branch name (e.g., "main")

        Returns:
            List of all file paths in repository
            (e.g., ["collections/toolchains/python/pytest.md", ...])

        Error Handling:
        - HTTP 404: Branch or repo not found, raises RequestException
        - HTTP 403: Rate limit exceeded (warns about GITHUB_TOKEN)
        - Timeout: 30 second timeout per request
        - Empty tree: Returns empty list (logged as warning)

        Performance:
        - Expected: ~500-800ms for 272 files (2 API calls)
        - Rate Limit: Consumes 2 API calls per sync
        - Scalability: Handles 1000s of files without truncation

        Example:
            >>> files = self._discover_repository_files_via_tree_api(
            ...     "bobmatnyc/claude-mpm-skills", "main"
            ... )
            >>> print(len(files))
            272  # Complete repository (not just top-level)
        """
        import requests

        all_files = []

        try:
            # Step 1: Get the latest commit SHA for the branch
            refs_url = (
                f"https://api.github.com/repos/{owner_repo}/git/refs/heads/{branch}"
            )
            self.logger.debug(f"Fetching commit SHA from {refs_url}")

            refs_response = requests.get(
                refs_url, headers={"Accept": "application/vnd.github+json"}, timeout=30
            )

            # Check for rate limiting
            if refs_response.status_code == 403:
                self.logger.warning(
                    "GitHub API rate limit exceeded (HTTP 403). "
                    "Consider setting GITHUB_TOKEN environment variable for higher limits."
                )
                raise requests.RequestException("Rate limit exceeded")

            refs_response.raise_for_status()
            commit_sha = refs_response.json()["object"]["sha"]
            self.logger.debug(f"Resolved {branch} to commit {commit_sha[:8]}")

            # Step 2: Get the tree for that commit (recursive=1 gets ALL files)
            tree_url = (
                f"https://api.github.com/repos/{owner_repo}/git/trees/{commit_sha}"
            )
            params = {"recursive": "1"}  # Recursively get entire tree

            self.logger.debug(f"Fetching recursive tree from {tree_url}")
            tree_response = requests.get(
                tree_url,
                headers={"Accept": "application/vnd.github+json"},
                params=params,
                timeout=30,
            )
            tree_response.raise_for_status()

            tree_data = tree_response.json()
            all_items = tree_data.get("tree", [])

            self.logger.debug(f"Tree API returned {len(all_items)} total items")

            # Step 3: Extract file paths (filter out directories)
            for item in all_items:
                if item["type"] == "blob":  # blob = file, tree = directory
                    all_files.append(item["path"])

            self.logger.info(
                f"Discovered {len(all_files)} files via Tree API in {owner_repo}/{branch}"
            )

        except requests.exceptions.RequestException as e:
            self.logger.error(f"Failed to discover files via Tree API: {e}")
            # Fall back to empty list (sync will fail gracefully)
            return []
        except (KeyError, ValueError) as e:
            self.logger.error(f"Error parsing GitHub API response: {e}")
            return []

        return all_files

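Condensed, standalone sketch of the same two Tree API requests outside the class; owner_repo and branch are example values and the script assumes network access to api.github.com:

import requests

owner_repo, branch = "bobmatnyc/claude-mpm-skills", "main"
api = "https://api.github.com"
headers = {"Accept": "application/vnd.github+json"}

# Step 1: branch ref -> commit SHA
ref = requests.get(
    f"{api}/repos/{owner_repo}/git/refs/heads/{branch}", headers=headers, timeout=30
)
ref.raise_for_status()
sha = ref.json()["object"]["sha"]

# Step 2: one recursive tree request -> every path in the repository
tree = requests.get(
    f"{api}/repos/{owner_repo}/git/trees/{sha}",
    headers=headers,
    params={"recursive": "1"},
    timeout=30,
)
tree.raise_for_status()
files = [i["path"] for i in tree.json().get("tree", []) if i["type"] == "blob"]
print(f"{len(files)} files discovered with 2 API calls")
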
    def _download_file_with_etag(
        self, url: str, local_path: Path, force: bool = False
    ) -> bool:
        """Download file from URL with ETag caching (thread-safe).

        Args:
            url: Raw GitHub URL
            local_path: Local file path to save to
            force: Force download even if cached

        Returns:
            True if file was updated, False if cached
        """

        import json

        import requests

        # Create parent directory (thread-safe with exist_ok=True)
        local_path.parent.mkdir(parents=True, exist_ok=True)

        # Thread-safe ETag cache operations
        etag_cache_file = local_path.parent / ".etag_cache.json"

        # Read cached ETag (lock required for file read)
        with self._etag_cache_lock:
            etag_cache = {}
            if etag_cache_file.exists():
                try:
                    with open(etag_cache_file, encoding="utf-8") as f:
                        etag_cache = json.load(f)
                except Exception:
                    pass

            cached_etag = etag_cache.get(str(local_path))

        # Make conditional request (no lock needed - independent HTTP call)
        headers = {}
        if cached_etag and not force:
            headers["If-None-Match"] = cached_etag

        try:
            response = requests.get(url, headers=headers, timeout=30)

            # 304 Not Modified - use cached version
            if response.status_code == 304:
                self.logger.debug(f"Cache hit (ETag match): {local_path.name}")
                return False

            response.raise_for_status()

            # Download and save file (no lock needed - independent file write)
            local_path.write_bytes(response.content)

            # Save new ETag (lock required for cache file write)
            if "ETag" in response.headers:
                with self._etag_cache_lock:
                    # Re-read cache in case other threads updated it
                    if etag_cache_file.exists():
                        try:
                            with open(etag_cache_file, encoding="utf-8") as f:
                                etag_cache = json.load(f)
                        except Exception:
                            etag_cache = {}

                    etag_cache[str(local_path)] = response.headers["ETag"]
                    with open(etag_cache_file, "w", encoding="utf-8") as f:
                        json.dump(etag_cache, f, indent=2)

            self.logger.debug(f"Downloaded: {local_path.name}")
            return True

        except requests.exceptions.RequestException as e:
            self.logger.warning(f"Failed to download {url}: {e}")
            return False

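The caching above is plain HTTP conditional GET: send the stored ETag in If-None-Match and treat 304 Not Modified as a cache hit. A minimal sketch against a raw GitHub URL (the path is an example; it assumes the server returns an ETag, as raw.githubusercontent.com does):

import requests

url = "https://raw.githubusercontent.com/bobmatnyc/claude-mpm-skills/main/README.md"

first = requests.get(url, timeout=30)
first.raise_for_status()
etag = first.headers.get("ETag")

headers = {"If-None-Match": etag} if etag else {}
second = requests.get(url, headers=headers, timeout=30)
if second.status_code == 304:
    print("304 Not Modified: cached copy is still current")
else:
    second.raise_for_status()
    print(f"changed: {len(second.content)} bytes downloaded")
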
    def _build_raw_github_url(self, source: SkillSource) -> str:
        """Build raw GitHub URL for source.

        Args:
            source: SkillSource instance

        Returns:
            Raw GitHub content URL

        Example:
            >>> source = SkillSource(
            ...     id="system",
            ...     url="https://github.com/owner/repo",
            ...     branch="main"
            ... )
            >>> url = manager._build_raw_github_url(source)
            >>> print(url)
            'https://raw.githubusercontent.com/owner/repo/main'
        """
        # Parse GitHub URL to extract owner/repo
        url = source.url.rstrip("/")
        if url.endswith(".git"):
            url = url[:-4]

        # Extract path components
        parts = url.split("github.com/")
        if len(parts) != 2:
            raise ValueError(f"Invalid GitHub URL: {source.url}")

        repo_path = parts[1].strip("/")
        owner_repo = "/".join(repo_path.split("/")[:2])

        return f"https://raw.githubusercontent.com/{owner_repo}/{source.branch}"

    def _get_source_cache_path(self, source: SkillSource) -> Path:
        """Get cache directory path for a source.

        Args:
            source: SkillSource instance

        Returns:
            Absolute path to cache directory

        Cache Structure:
            ~/.claude-mpm/cache/skills/{source_id}/

        Example:
            >>> source = SkillSource(id="system", ...)
            >>> path = manager._get_source_cache_path(source)
            >>> print(path)
            Path('/Users/user/.claude-mpm/cache/skills/system')
        """
        return self.cache_dir / source.id

    def deploy_skills_to_project(
        self,
        project_dir: Path,
        skill_list: Optional[List[str]] = None,
        force: bool = False,
    ) -> Dict[str, Any]:
        """Deploy skills from cache to project directory (Phase 2 deployment).

        Design Decision: Deploy from cache to project-specific directory

        Rationale: Follows agent deployment pattern (git_source_sync_service.py).
        Separates sync (cache) from deployment (project), enabling:
        - Multiple projects using same cached skills
        - Offline deployment from cache
        - Project-specific skill selection
        - Consistent two-phase architecture

        This complements deploy_skills() which deploys to global ~/.claude/skills/.
        This method deploys to project-local .claude-mpm/skills/ for project-specific
        skill management.

        Trade-offs:
        - Storage: 2x disk (cache + project deployments)
        - Performance: Copy ~10ms for 50 skills (negligible)
        - Flexibility: Project-specific skill sets from shared cache
        - Isolation: Projects don't affect each other

        Args:
            project_dir: Project root directory (e.g., /path/to/myproject)
            skill_list: Optional list of skill names to deploy (deploys all if None)
            force: Force redeployment even if up-to-date

        Returns:
            Dictionary with deployment results:
            {
                "deployed": ["skill1"],   # Newly deployed
                "updated": ["skill2"],    # Updated existing
                "skipped": ["skill3"],    # Already up-to-date
                "failed": [],             # Copy failures
                "deployment_dir": "/path/.claude-mpm/skills"
            }

        Algorithm:
        1. Create .claude-mpm/skills/ in project directory
        2. Get all skills from cache (or use provided list)
        3. For each skill:
           a. Check if cache file exists
           b. Flatten nested path to deployment name
           c. Compare modification times (skip if up-to-date)
           d. Copy from cache to project
           e. Track result (deployed/updated/skipped/failed)
        4. Return deployment statistics

        Error Handling:
        - Missing cache files: Logged and added to "failed"
        - Permission errors: Individual failures don't stop deployment
        - Path validation: Security check prevents directory traversal

        Example:
            >>> manager = GitSkillSourceManager(config)
            >>> manager.sync_all_sources()  # Sync to cache first
            >>> result = manager.deploy_skills_to_project(Path("/my/project"))
            >>> print(f"Deployed {len(result['deployed'])} skills")
        """
        import shutil

        deployment_dir = project_dir / ".claude-mpm" / "skills"

        # Try to create deployment directory
        try:
            deployment_dir.mkdir(parents=True, exist_ok=True)
        except PermissionError as e:
            self.logger.error(f"Permission denied creating deployment directory: {e}")
            return {
                "deployed": [],
                "deployed_count": 0,
                "updated": [],
                "updated_count": 0,
                "skipped": [],
                "skipped_count": 0,
                "failed": [],
                "failed_count": 0,
                "deployment_dir": str(deployment_dir),
            }

        results = {
            "deployed": [],
            "updated": [],
            "skipped": [],
            "failed": [],
            "deployment_dir": str(deployment_dir),
        }

        # Get all skills from cache or use provided list
        if skill_list is None:
            all_skills = self.get_all_skills()
        else:
            # Filter skills by provided list
            all_skills = [
                s for s in self.get_all_skills() if s.get("name") in skill_list
            ]

        self.logger.info(
            f"Deploying {len(all_skills)} skills from cache to {deployment_dir}"
        )

        for skill in all_skills:
            skill_name = skill.get("name", "unknown")
            deployment_name = skill.get("deployment_name")
            source_file = skill.get("source_file")

            if not deployment_name or not source_file:
                self.logger.warning(
                    f"Skill {skill_name} missing deployment_name or source_file, skipping"
                )
                results["failed"].append(skill_name)
                continue

            try:
                source_path = Path(source_file)
                if not source_path.exists():
                    self.logger.warning(f"Cache file not found: {source_file}")
                    results["failed"].append(skill_name)
                    continue

                # Source is the entire skill directory (not just SKILL.md)
                source_dir = source_path.parent
                target_skill_dir = deployment_dir / deployment_name

                # Check if already deployed and up-to-date
                should_deploy = force
                was_existing = target_skill_dir.exists()

                if not force and was_existing:
                    # Compare modification times of SKILL.md files
                    source_mtime = source_path.stat().st_mtime
                    target_file = target_skill_dir / "SKILL.md"
                    if target_file.exists():
                        target_mtime = target_file.stat().st_mtime
                        should_deploy = source_mtime > target_mtime
                    else:
                        should_deploy = True

                if not should_deploy and was_existing:
                    results["skipped"].append(deployment_name)
                    self.logger.debug(f"Skipped (up-to-date): {deployment_name}")
                    continue

                # Security: Validate paths
                if not self._validate_safe_path(deployment_dir, target_skill_dir):
                    self.logger.error(f"Invalid target path: {target_skill_dir}")
                    results["failed"].append(skill_name)
                    continue

                # Remove existing if force or updating
                if target_skill_dir.exists():
                    if target_skill_dir.is_symlink():
                        self.logger.warning(f"Removing symlink: {target_skill_dir}")
                        target_skill_dir.unlink()
                    else:
                        shutil.rmtree(target_skill_dir)

                # Copy entire skill directory from cache
                shutil.copytree(source_dir, target_skill_dir)

                # Track result
                if was_existing:
                    results["updated"].append(deployment_name)
                    self.logger.info(f"Updated: {deployment_name}")
                else:
                    results["deployed"].append(deployment_name)
                    self.logger.info(f"Deployed: {deployment_name}")

            except PermissionError as e:
                self.logger.error(f"Permission denied deploying {skill_name}: {e}")
                results["failed"].append(skill_name)
            except OSError as e:
                self.logger.error(f"IO error deploying {skill_name}: {e}")
                results["failed"].append(skill_name)
            except Exception as e:
                self.logger.error(f"Unexpected error deploying {skill_name}: {e}")
                results["failed"].append(skill_name)

        # Log summary
        total_success = len(results["deployed"]) + len(results["updated"])
        self.logger.info(
            f"Deployment complete: {total_success} deployed/updated, "
            f"{len(results['skipped'])} skipped, {len(results['failed'])} failed"
        )

        # Return format matching agents deployment pattern
        return {
            "deployed": results["deployed"],
            "deployed_count": len(results["deployed"]),
            "updated": results["updated"],
            "updated_count": len(results["updated"]),
            "skipped": results["skipped"],
            "skipped_count": len(results["skipped"]),
            "failed": results["failed"],
            "failed_count": len(results["failed"]),
            "deployment_dir": results["deployment_dir"],
        }

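Per skill, the method reduces to an mtime comparison on SKILL.md followed by a whole-directory copy. A compact sketch of that decision with illustrative paths (deploy_if_newer is a hypothetical helper, not part of claude-mpm), producing the same deployed/updated/skipped outcomes:

import shutil
from pathlib import Path

def deploy_if_newer(cache_skill_md: Path, target_dir: Path, force: bool = False) -> str:
    """Illustrative helper: copy the cached skill directory if it is newer."""
    target_md = target_dir / "SKILL.md"
    was_existing = target_dir.exists()
    if (
        not force
        and was_existing
        and target_md.exists()
        and cache_skill_md.stat().st_mtime <= target_md.stat().st_mtime
    ):
        return "skipped"
    if was_existing:
        shutil.rmtree(target_dir)  # replace the stale copy wholesale
    shutil.copytree(cache_skill_md.parent, target_dir)
    return "updated" if was_existing else "deployed"
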
    def deploy_skills(
        self,
        target_dir: Optional[Path] = None,
        force: bool = False,
        progress_callback=None,
    ) -> Dict[str, Any]:
        """Deploy skills from cache to target directory with flat structure.

        Flattens nested Git repository structure into Claude Code compatible
        flat directory structure. Each skill directory is copied with a
        hyphen-separated name derived from its path.

        Transformation Example:
            Cache:  collaboration/dispatching-parallel-agents/SKILL.md
            Deploy: collaboration-dispatching-parallel-agents/SKILL.md

        Args:
            target_dir: Target deployment directory (default: ~/.claude/skills/)
            force: Overwrite existing skills
            progress_callback: Optional callback(position: int) called with the
                absolute number of skills processed so far

        Returns:
            Dict with deployment results:
            {
                "deployed_count": int,
                "skipped_count": int,
                "failed_count": int,
                "deployed_skills": List[str],
                "skipped_skills": List[str],
                "errors": List[str]
            }

        Example:
            >>> manager = GitSkillSourceManager(config)
            >>> result = manager.deploy_skills()
            >>> print(f"Deployed {result['deployed_count']} skills")
        """
        if target_dir is None:
            target_dir = Path.home() / ".claude" / "skills"

        target_dir.mkdir(parents=True, exist_ok=True)

        deployed = []
        skipped = []
        errors = []

        # Get all skills from all sources
        all_skills = self.get_all_skills()

        self.logger.info(
            f"Deploying {len(all_skills)} skills to {target_dir} (force={force})"
        )

        for idx, skill in enumerate(all_skills, start=1):
            skill_name = skill.get("name", "unknown")
            deployment_name = skill.get("deployment_name")

            if not deployment_name:
                self.logger.warning(
                    f"Skill {skill_name} missing deployment_name, skipping"
                )
                errors.append(f"{skill_name}: Missing deployment_name")
                if progress_callback:
                    progress_callback(idx)
                continue

            try:
                result = self._deploy_single_skill(
                    skill, target_dir, deployment_name, force
                )

                if result["deployed"]:
                    deployed.append(deployment_name)
                elif result["skipped"]:
                    skipped.append(deployment_name)

                if result["error"]:
                    errors.append(result["error"])

            except Exception as e:
                self.logger.error(f"Failed to deploy {skill_name}: {e}")
                errors.append(f"{skill_name}: {e}")

            # Call progress callback for each skill processed
            if progress_callback:
                progress_callback(idx)

        self.logger.info(
            f"Deployment complete: {len(deployed)} deployed, "
            f"{len(skipped)} skipped, {len(errors)} errors"
        )

        return {
            "deployed_count": len(deployed),
            "skipped_count": len(skipped),
            "failed_count": len(errors),
            "deployed_skills": deployed,
            "skipped_skills": skipped,
            "errors": errors,
        }

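The "Transformation Example" in the docstring implies flattening a skill's cache path into a hyphen-separated directory name. The real deployment_name is computed elsewhere in the manager; a hypothetical sketch of such a derivation, matching the documented example:

from pathlib import Path

def flatten_name(cache_root: Path, skill_md: Path) -> str:
    """Hypothetical: join the skill's relative directory parts with hyphens."""
    relative_dir = skill_md.parent.relative_to(cache_root)
    return "-".join(relative_dir.parts)

print(flatten_name(
    Path("/cache"),
    Path("/cache/collaboration/dispatching-parallel-agents/SKILL.md"),
))  # -> collaboration-dispatching-parallel-agents
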
    def _deploy_single_skill(
        self, skill: Dict[str, Any], target_dir: Path, deployment_name: str, force: bool
    ) -> Dict[str, Any]:
        """Deploy a single skill with flattened directory name.

        Args:
            skill: Skill metadata dict
            target_dir: Target deployment directory
            deployment_name: Flattened deployment directory name
            force: Overwrite if exists

        Returns:
            Dict with deployed, skipped, error flags
        """
        import shutil

        source_file = Path(skill["source_file"])
        source_dir = source_file.parent

        target_skill_dir = target_dir / deployment_name

        # Check if already deployed
        if target_skill_dir.exists() and not force:
            self.logger.debug(f"Skipped {deployment_name} (already exists)")
            return {"deployed": False, "skipped": True, "error": None}

        # Security: Validate paths
        if not self._validate_safe_path(target_dir, target_skill_dir):
            return {
                "deployed": False,
                "skipped": False,
                "error": f"Invalid target path: {target_skill_dir}",
            }

        try:
            # Remove existing if force
            if target_skill_dir.exists():
                if target_skill_dir.is_symlink():
                    self.logger.warning(f"Removing symlink: {target_skill_dir}")
                    target_skill_dir.unlink()
                else:
                    shutil.rmtree(target_skill_dir)

            # Copy entire skill directory with all resources
            shutil.copytree(source_dir, target_skill_dir)

            self.logger.debug(
                f"Deployed {deployment_name} from {source_dir} to {target_skill_dir}"
            )
            return {"deployed": True, "skipped": False, "error": None}

        except Exception as e:
            return {
                "deployed": False,
                "skipped": False,
                "error": f"{deployment_name}: {e}",
            }

    def _validate_safe_path(self, base: Path, target: Path) -> bool:
        """Ensure target path is within base directory (security).

        Args:
            base: Base directory
            target: Target path to validate

        Returns:
            True if path is safe, False otherwise
        """
        try:
            target.resolve().relative_to(base.resolve())
            return True
        except ValueError:
            return False

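The containment check relies on Path.relative_to() raising ValueError when the resolved target falls outside the base directory. A small demonstration with POSIX-style example paths:

from pathlib import Path

base = Path("/tmp/skills")
candidates = [base / "collab-skill", base / ".." / ".." / "etc" / "passwd"]

for candidate in candidates:
    try:
        candidate.resolve().relative_to(base.resolve())
        print(f"{candidate} -> safe")
    except ValueError:
        print(f"{candidate} -> rejected (escapes {base})")
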
    def __repr__(self) -> str:
        """Return string representation."""
        sources = self.config.load()
        enabled_count = len([s for s in sources if s.enabled])
        return (
            f"GitSkillSourceManager(cache='{self.cache_dir}', "
            f"sources={len(sources)}, enabled={enabled_count})"
        )