claude-mpm 5.6.1__py3-none-any.whl → 5.6.76__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- claude_mpm/VERSION +1 -1
- claude_mpm/agents/PM_INSTRUCTIONS.md +8 -3
- claude_mpm/auth/__init__.py +35 -0
- claude_mpm/auth/callback_server.py +328 -0
- claude_mpm/auth/models.py +104 -0
- claude_mpm/auth/oauth_manager.py +266 -0
- claude_mpm/auth/providers/__init__.py +12 -0
- claude_mpm/auth/providers/base.py +165 -0
- claude_mpm/auth/providers/google.py +261 -0
- claude_mpm/auth/token_storage.py +252 -0
- claude_mpm/cli/commands/commander.py +174 -4
- claude_mpm/cli/commands/mcp.py +29 -17
- claude_mpm/cli/commands/mcp_command_router.py +39 -0
- claude_mpm/cli/commands/mcp_service_commands.py +304 -0
- claude_mpm/cli/commands/oauth.py +481 -0
- claude_mpm/cli/commands/skill_source.py +51 -2
- claude_mpm/cli/commands/skills.py +5 -3
- claude_mpm/cli/executor.py +9 -0
- claude_mpm/cli/helpers.py +1 -1
- claude_mpm/cli/parsers/base_parser.py +13 -0
- claude_mpm/cli/parsers/commander_parser.py +43 -10
- claude_mpm/cli/parsers/mcp_parser.py +79 -0
- claude_mpm/cli/parsers/oauth_parser.py +165 -0
- claude_mpm/cli/parsers/skill_source_parser.py +4 -0
- claude_mpm/cli/parsers/skills_parser.py +5 -0
- claude_mpm/cli/startup.py +300 -33
- claude_mpm/cli/startup_display.py +4 -2
- claude_mpm/cli/startup_migrations.py +236 -0
- claude_mpm/commander/__init__.py +6 -0
- claude_mpm/commander/adapters/__init__.py +32 -3
- claude_mpm/commander/adapters/auggie.py +260 -0
- claude_mpm/commander/adapters/base.py +98 -1
- claude_mpm/commander/adapters/claude_code.py +32 -1
- claude_mpm/commander/adapters/codex.py +237 -0
- claude_mpm/commander/adapters/example_usage.py +310 -0
- claude_mpm/commander/adapters/mpm.py +389 -0
- claude_mpm/commander/adapters/registry.py +204 -0
- claude_mpm/commander/api/app.py +32 -16
- claude_mpm/commander/api/errors.py +21 -0
- claude_mpm/commander/api/routes/messages.py +11 -11
- claude_mpm/commander/api/routes/projects.py +20 -20
- claude_mpm/commander/api/routes/sessions.py +37 -26
- claude_mpm/commander/api/routes/work.py +86 -50
- claude_mpm/commander/api/schemas.py +4 -0
- claude_mpm/commander/chat/cli.py +47 -5
- claude_mpm/commander/chat/commands.py +44 -16
- claude_mpm/commander/chat/repl.py +1729 -82
- claude_mpm/commander/config.py +5 -3
- claude_mpm/commander/core/__init__.py +10 -0
- claude_mpm/commander/core/block_manager.py +325 -0
- claude_mpm/commander/core/response_manager.py +323 -0
- claude_mpm/commander/daemon.py +215 -10
- claude_mpm/commander/env_loader.py +59 -0
- claude_mpm/commander/events/manager.py +61 -1
- claude_mpm/commander/frameworks/base.py +91 -1
- claude_mpm/commander/frameworks/mpm.py +9 -14
- claude_mpm/commander/git/__init__.py +5 -0
- claude_mpm/commander/git/worktree_manager.py +212 -0
- claude_mpm/commander/instance_manager.py +546 -15
- claude_mpm/commander/memory/__init__.py +45 -0
- claude_mpm/commander/memory/compression.py +347 -0
- claude_mpm/commander/memory/embeddings.py +230 -0
- claude_mpm/commander/memory/entities.py +310 -0
- claude_mpm/commander/memory/example_usage.py +290 -0
- claude_mpm/commander/memory/integration.py +325 -0
- claude_mpm/commander/memory/search.py +381 -0
- claude_mpm/commander/memory/store.py +657 -0
- claude_mpm/commander/models/events.py +6 -0
- claude_mpm/commander/persistence/state_store.py +95 -1
- claude_mpm/commander/registry.py +10 -4
- claude_mpm/commander/runtime/monitor.py +32 -2
- claude_mpm/commander/tmux_orchestrator.py +3 -2
- claude_mpm/commander/work/executor.py +38 -20
- claude_mpm/commander/workflow/event_handler.py +25 -3
- claude_mpm/config/skill_sources.py +16 -0
- claude_mpm/constants.py +5 -0
- claude_mpm/core/claude_runner.py +152 -0
- claude_mpm/core/config.py +30 -22
- claude_mpm/core/config_constants.py +74 -9
- claude_mpm/core/constants.py +56 -12
- claude_mpm/core/hook_manager.py +2 -1
- claude_mpm/core/interactive_session.py +5 -4
- claude_mpm/core/logger.py +16 -2
- claude_mpm/core/logging_utils.py +40 -16
- claude_mpm/core/network_config.py +148 -0
- claude_mpm/core/oneshot_session.py +7 -6
- claude_mpm/core/output_style_manager.py +37 -7
- claude_mpm/core/socketio_pool.py +47 -15
- claude_mpm/core/unified_paths.py +68 -80
- claude_mpm/hooks/claude_hooks/auto_pause_handler.py +30 -31
- claude_mpm/hooks/claude_hooks/event_handlers.py +285 -194
- claude_mpm/hooks/claude_hooks/hook_handler.py +115 -32
- claude_mpm/hooks/claude_hooks/installer.py +222 -54
- claude_mpm/hooks/claude_hooks/memory_integration.py +52 -32
- claude_mpm/hooks/claude_hooks/response_tracking.py +40 -59
- claude_mpm/hooks/claude_hooks/services/__init__.py +21 -0
- claude_mpm/hooks/claude_hooks/services/connection_manager.py +25 -30
- claude_mpm/hooks/claude_hooks/services/connection_manager_http.py +24 -28
- claude_mpm/hooks/claude_hooks/services/container.py +326 -0
- claude_mpm/hooks/claude_hooks/services/protocols.py +328 -0
- claude_mpm/hooks/claude_hooks/services/state_manager.py +25 -38
- claude_mpm/hooks/claude_hooks/services/subagent_processor.py +49 -75
- claude_mpm/hooks/session_resume_hook.py +22 -18
- claude_mpm/hooks/templates/pre_tool_use_simple.py +6 -6
- claude_mpm/hooks/templates/pre_tool_use_template.py +16 -8
- claude_mpm/init.py +21 -14
- claude_mpm/mcp/__init__.py +9 -0
- claude_mpm/mcp/google_workspace_server.py +610 -0
- claude_mpm/scripts/claude-hook-handler.sh +10 -9
- claude_mpm/services/agents/agent_selection_service.py +2 -2
- claude_mpm/services/agents/single_tier_deployment_service.py +4 -4
- claude_mpm/services/command_deployment_service.py +44 -26
- claude_mpm/services/hook_installer_service.py +77 -8
- claude_mpm/services/mcp_config_manager.py +99 -19
- claude_mpm/services/mcp_service_registry.py +294 -0
- claude_mpm/services/monitor/server.py +6 -1
- claude_mpm/services/pm_skills_deployer.py +5 -3
- claude_mpm/services/skills/git_skill_source_manager.py +79 -8
- claude_mpm/services/skills/selective_skill_deployer.py +28 -0
- claude_mpm/services/skills/skill_discovery_service.py +17 -1
- claude_mpm/services/skills_deployer.py +31 -5
- claude_mpm/skills/__init__.py +2 -1
- claude_mpm/skills/bundled/pm/mpm-session-pause/SKILL.md +170 -0
- claude_mpm/skills/registry.py +295 -90
- {claude_mpm-5.6.1.dist-info → claude_mpm-5.6.76.dist-info}/METADATA +28 -3
- {claude_mpm-5.6.1.dist-info → claude_mpm-5.6.76.dist-info}/RECORD +131 -93
- {claude_mpm-5.6.1.dist-info → claude_mpm-5.6.76.dist-info}/WHEEL +1 -1
- {claude_mpm-5.6.1.dist-info → claude_mpm-5.6.76.dist-info}/entry_points.txt +2 -0
- {claude_mpm-5.6.1.dist-info → claude_mpm-5.6.76.dist-info}/licenses/LICENSE +0 -0
- {claude_mpm-5.6.1.dist-info → claude_mpm-5.6.76.dist-info}/licenses/LICENSE-FAQ.md +0 -0
- {claude_mpm-5.6.1.dist-info → claude_mpm-5.6.76.dist-info}/top_level.txt +0 -0
claude_mpm/commander/daemon.py
CHANGED
@@ -15,12 +15,23 @@ from .api.app import (
     app,
 )
 from .config import DaemonConfig
+from .core.block_manager import BlockManager
+from .env_loader import load_env
 from .events.manager import EventManager
 from .inbox import Inbox
+from .models.events import EventStatus
+from .parsing.output_parser import OutputParser
 from .persistence import EventStore, StateStore
 from .project_session import ProjectSession, SessionState
 from .registry import ProjectRegistry
+from .runtime.monitor import RuntimeMonitor
 from .tmux_orchestrator import TmuxOrchestrator
+from .work.executor import WorkExecutor
+from .work.queue import WorkQueue
+from .workflow.event_handler import EventHandler
+
+# Load environment variables at module import
+load_env()
 
 logger = logging.getLogger(__name__)
 
@@ -38,6 +49,11 @@ class CommanderDaemon:
         event_manager: Event manager
         inbox: Event inbox
         sessions: Active project sessions by project_id
+        work_queues: Work queues by project_id
+        work_executors: Work executors by project_id
+        block_manager: Block manager for automatic work blocking
+        runtime_monitor: Runtime monitor for output monitoring
+        event_handler: Event handler for blocking event workflow
         state_store: StateStore for project/session persistence
         event_store: EventStore for event queue persistence
         running: Whether daemon is currently running
@@ -68,6 +84,8 @@ class CommanderDaemon:
         self.event_manager = EventManager()
         self.inbox = Inbox(self.event_manager, self.registry)
         self.sessions: Dict[str, ProjectSession] = {}
+        self.work_queues: Dict[str, WorkQueue] = {}
+        self.work_executors: Dict[str, WorkExecutor] = {}
         self._running = False
         self._server_task: Optional[asyncio.Task] = None
         self._main_loop_task: Optional[asyncio.Task] = None
@@ -76,6 +94,30 @@ class CommanderDaemon:
         self.state_store = StateStore(config.state_dir)
         self.event_store = EventStore(config.state_dir)
 
+        # Initialize BlockManager with work queues and executors
+        self.block_manager = BlockManager(
+            event_manager=self.event_manager,
+            work_queues=self.work_queues,
+            work_executors=self.work_executors,
+        )
+
+        # Initialize RuntimeMonitor with BlockManager
+        parser = OutputParser(self.event_manager)
+        self.runtime_monitor = RuntimeMonitor(
+            orchestrator=self.orchestrator,
+            parser=parser,
+            event_manager=self.event_manager,
+            poll_interval=config.poll_interval,
+            block_manager=self.block_manager,
+        )
+
+        # Initialize EventHandler with BlockManager
+        self.event_handler = EventHandler(
+            inbox=self.inbox,
+            session_manager=self.sessions,
+            block_manager=self.block_manager,
+        )
+
         # Configure logging
         logging.basicConfig(
             level=getattr(logging, config.log_level.upper()),
@@ -122,12 +164,16 @@ class CommanderDaemon:
         # Set up signal handlers
         self._setup_signal_handlers()
 
-        # Inject
-
-
-
-
-
+        # Inject daemon instances into API app.state (BEFORE lifespan runs)
+        app.state.registry = self.registry
+        app.state.tmux = self.orchestrator
+        app.state.event_manager = self.event_manager
+        app.state.inbox = self.inbox
+        app.state.work_queues = self.work_queues
+        app.state.daemon_instance = self
+        app.state.session_manager = self.sessions
+        app.state.event_handler = self.event_handler
+        logger.info(f"Injected work_queues dict id: {id(self.work_queues)}")
 
         # Start API server in background
         logger.info(f"Starting API server on {self.config.host}:{self.config.port}")
@@ -171,6 +217,16 @@ class CommanderDaemon:
             except Exception as e:
                 logger.error(f"Error stopping session {project_id}: {e}")
 
+        # Clear BlockManager project mappings
+        for project_id in list(self.work_queues.keys()):
+            try:
+                removed = self.block_manager.clear_project_mappings(project_id)
+                logger.debug(
+                    f"Cleared {removed} work mappings for project {project_id}"
+                )
+            except Exception as e:
+                logger.error(f"Error clearing mappings for {project_id}: {e}")
+
         # Cancel main loop task
         if self._main_loop_task and not self._main_loop_task.done():
             self._main_loop_task.cancel()
@@ -210,9 +266,19 @@ class CommanderDaemon:
 
         while self._running:
             try:
-
-
-
+                logger.info(f"🔄 Main loop iteration (running={self._running})")
+                logger.info(
+                    f"work_queues dict id: {id(self.work_queues)}, keys: {list(self.work_queues.keys())}"
+                )
+
+                # Check for resolved events and resume sessions
+                await self._check_and_resume_sessions()
+
+                # Check each ProjectSession for runnable work
+                logger.info(
+                    f"Checking for pending work across {len(self.work_queues)} queues"
+                )
+                await self._execute_pending_work()
 
                 # Periodic state persistence
                 current_time = asyncio.get_event_loop().time()
@@ -241,7 +307,16 @@ class CommanderDaemon:
 
         Registers handlers for SIGINT and SIGTERM that trigger
         daemon shutdown via asyncio event loop.
+
+        Note: Signal handlers can only be registered from the main thread.
+        If called from a background thread, registration is skipped.
         """
+        import threading
+
+        # Signal handlers can only be registered from the main thread
+        if threading.current_thread() is not threading.main_thread():
+            logger.info("Running in background thread - signal handlers skipped")
+            return
 
         def handle_signal(signum: int, frame) -> None:
             """Handle shutdown signal.
@@ -282,7 +357,26 @@ class CommanderDaemon:
         if project is None:
             raise ValueError(f"Project not found: {project_id}")
 
-
+        # Create work queue for project if not exists
+        if project_id not in self.work_queues:
+            self.work_queues[project_id] = WorkQueue(project_id)
+            logger.debug(f"Created work queue for project {project_id}")
+
+        # Create work executor for project if not exists
+        if project_id not in self.work_executors:
+            from .runtime.executor import RuntimeExecutor
+
+            runtime_executor = RuntimeExecutor(self.orchestrator)
+            self.work_executors[project_id] = WorkExecutor(
+                runtime=runtime_executor, queue=self.work_queues[project_id]
+            )
+            logger.debug(f"Created work executor for project {project_id}")
+
+        session = ProjectSession(
+            project=project,
+            orchestrator=self.orchestrator,
+            monitor=self.runtime_monitor,
+        )
         self.sessions[project_id] = session
 
         logger.info(f"Created new session for project {project_id}")
@@ -363,6 +457,117 @@ class CommanderDaemon:
         except Exception as e:
             logger.error(f"Failed to save state: {e}", exc_info=True)
 
+    async def _check_and_resume_sessions(self) -> None:
+        """Check for resolved events and resume paused sessions.
+
+        Iterates through all paused sessions, checks if their blocking events
+        have been resolved, and resumes execution if ready.
+        """
+        for project_id, session in list(self.sessions.items()):
+            # Skip non-paused sessions
+            if session.state != SessionState.PAUSED:
+                continue
+
+            # Check if pause reason (event ID) is resolved
+            if not session.pause_reason:
+                logger.warning(f"Session {project_id} paused with no reason, resuming")
+                await session.resume()
+                continue
+
+            # Check if event is resolved
+            event = self.event_manager.get(session.pause_reason)
+            if event and event.status == EventStatus.RESOLVED:
+                logger.info(
+                    f"Event {event.id} resolved, resuming session for {project_id}"
+                )
+                await session.resume()
+
+                # Unblock any work items that were blocked by this event
+                if project_id in self.work_executors:
+                    executor = self.work_executors[project_id]
+                    queue = self.work_queues[project_id]
+
+                    # Find work items blocked by this event
+                    blocked_items = [
+                        item
+                        for item in queue.list()
+                        if item.state.value == "blocked"
+                        and item.metadata.get("block_reason") == event.id
+                    ]
+
+                    for item in blocked_items:
+                        await executor.handle_unblock(item.id)
+                        logger.info(f"Unblocked work item {item.id}")
+
+    async def _execute_pending_work(self) -> None:
+        """Execute pending work for all ready sessions.
+
+        Scans all work queues for pending work. For projects with work but no session,
+        auto-creates a session. Then executes the next available work item via WorkExecutor.
+        """
+        # First pass: Auto-create and start sessions for projects with pending work
+        for project_id, queue in list(self.work_queues.items()):
+            logger.info(
+                f"Checking queue for {project_id}: pending={queue.pending_count}"
+            )
+            # Skip if no pending work
+            if queue.pending_count == 0:
+                continue
+
+            # Auto-create session if needed
+            if project_id not in self.sessions:
+                try:
+                    logger.info(
+                        f"Auto-creating session for project {project_id} with pending work"
+                    )
+                    session = self.get_or_create_session(project_id)
+
+                    # Start the session so it's ready for work
+                    if session.state.value == "idle":
+                        logger.info(f"Auto-starting session for {project_id}")
+                        await session.start()
+                except Exception as e:
+                    logger.error(
+                        f"Failed to auto-create/start session for {project_id}: {e}",
+                        exc_info=True,
+                    )
+                    continue
+
+        # Second pass: Execute work for ready sessions
+        for project_id, session in list(self.sessions.items()):
+            # Skip sessions that aren't ready for work
+            if not session.is_ready():
+                continue
+
+            # Skip if no work queue exists
+            if project_id not in self.work_queues:
+                continue
+
+            # Get work executor for project
+            executor = self.work_executors.get(project_id)
+            if not executor:
+                logger.warning(
+                    f"No work executor found for project {project_id}, skipping"
+                )
+                continue
+
+            # Check if there's work available
+            queue = self.work_queues[project_id]
+            if queue.pending_count == 0:
+                continue
+
+            # Try to execute next work item
+            try:
+                # Pass the session's active pane for execution
+                executed = await executor.execute_next(pane_target=session.active_pane)
+                if executed:
+                    logger.info(f"Started work execution for project {project_id}")
+            except Exception as e:
+                logger.error(
+                    f"Error executing work for project {project_id}: {e}",
+                    exc_info=True,
+                )
+
 
 async def main(config: Optional[DaemonConfig] = None) -> None:
     """Main entry point for running the daemon.
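The injected app.state attributes are what the API layer reads at request time. Below is a minimal sketch of how a route could consume them, assuming the API is FastAPI/Starlette (implied by the app.state and lifespan usage above); the route path and handler are illustrative and not taken from the package:

    from fastapi import APIRouter, Request

    router = APIRouter()

    @router.get("/projects/{project_id}/work/pending")  # hypothetical route
    async def pending_work(project_id: str, request: Request):
        # work_queues is the same dict the daemon injected via app.state
        queues = request.app.state.work_queues
        queue = queues.get(project_id)
        return {"pending": queue.pending_count if queue else 0}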
claude_mpm/commander/env_loader.py
ADDED
@@ -0,0 +1,59 @@
+"""Environment variable loader for Commander.
+
+This module handles automatic loading of .env and .env.local files
+at Commander startup. Environment files are loaded with the following precedence:
+1. Existing environment variables (not overridden)
+2. .env.local (local overrides)
+3. .env (defaults)
+
+Example:
+    >>> from claude_mpm.commander.env_loader import load_env
+    >>> load_env()
+    # Automatically loads .env.local and .env from project root
+"""
+
+import logging
+from pathlib import Path
+
+from dotenv import load_dotenv
+
+logger = logging.getLogger(__name__)
+
+
+def load_env() -> None:
+    """Load environment variables from .env and .env.local files.
+
+    Searches for .env and .env.local in the project root directory
+    (parent of src/claude_mpm). Files are loaded with override=False,
+    meaning existing environment variables take precedence.
+
+    Precedence (highest to lowest):
+    1. Existing environment variables
+    2. .env.local
+    3. .env
+
+    Example:
+        >>> load_env()
+        # Loads .env.local and .env if they exist
+    """
+    # Find project root (parent of src/claude_mpm)
+    # Current file: src/claude_mpm/commander/env_loader.py
+    # Project root: ../../../ (3 levels up)
+    current_file = Path(__file__)
+    project_root = current_file.parent.parent.parent.parent
+
+    # Try loading .env.local first (higher priority)
+    env_local = project_root / ".env.local"
+    if env_local.exists():
+        load_dotenv(env_local, override=False)
+        logger.debug(f"Loaded environment from {env_local}")
+
+    # Then load .env (lower priority)
+    env_file = project_root / ".env"
+    if env_file.exists():
+        load_dotenv(env_file, override=False)
+        logger.debug(f"Loaded environment from {env_file}")
+
+    # Log if neither file exists
+    if not env_local.exists() and not env_file.exists():
+        logger.debug("No .env or .env.local files found in project root")
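A short usage sketch of the new loader's precedence, assuming python-dotenv is installed; the COMMANDER_API_KEY variable name is illustrative only:

    import os

    from claude_mpm.commander.env_loader import load_env

    # A value already in the environment wins over .env / .env.local
    os.environ["COMMANDER_API_KEY"] = "from-shell"  # hypothetical variable name
    load_env()                                      # override=False keeps it intact
    print(os.environ["COMMANDER_API_KEY"])          # -> "from-shell"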
claude_mpm/commander/events/manager.py
CHANGED
@@ -3,11 +3,13 @@
 Manages event lifecycle, inbox queries, and project event tracking.
 """
 
+import asyncio
 import logging
 import threading
 import uuid
+from asyncio import Queue
 from datetime import datetime, timezone
-from typing import Any, Dict, List, Optional
+from typing import Any, Callable, Dict, List, Optional
 
 from ..models.events import (
     DEFAULT_PRIORITIES,
@@ -48,6 +50,8 @@ class EventManager:
         self._events: Dict[str, Event] = {}
         self._project_index: Dict[str, List[str]] = {}  # project_id -> event_ids
         self._lock = threading.RLock()
+        self._subscribers: Dict[EventType, List[Callable]] = {}
+        self._event_queue: Queue = Queue()
 
     def create(
         self,
@@ -330,3 +334,59 @@ class EventManager:
            for eid in event_ids:
                self._events.pop(eid, None)
            return len(event_ids)
+
+    def subscribe(self, event_type: EventType, callback: Callable) -> None:
+        """Subscribe callback to event type.
+
+        Args:
+            event_type: Type of event to subscribe to
+            callback: Function to call when event occurs (sync or async)
+
+        Example:
+            def on_error(event):
+                print(f"Error: {event.title}")
+
+            manager.subscribe(EventType.ERROR, on_error)
+        """
+        if event_type not in self._subscribers:
+            self._subscribers[event_type] = []
+        self._subscribers[event_type].append(callback)
+
+    def unsubscribe(self, event_type: EventType, callback: Callable) -> None:
+        """Unsubscribe callback from event type.
+
+        Args:
+            event_type: Type of event to unsubscribe from
+            callback: Function to remove from subscribers
+
+        Example:
+            manager.unsubscribe(EventType.ERROR, on_error)
+        """
+        if (
+            event_type in self._subscribers
+            and callback in self._subscribers[event_type]
+        ):
+            self._subscribers[event_type].remove(callback)
+
+    async def emit(self, event: Event) -> None:
+        """Emit event to all subscribers.
+
+        Queues the event and notifies all subscribed callbacks.
+        Supports both sync and async callbacks.
+
+        Args:
+            event: Event to emit
+
+        Example:
+            await manager.emit(event)
+        """
+        await self._event_queue.put(event)
+        if event.type in self._subscribers:
+            for callback in self._subscribers[event.type]:
+                try:
+                    if asyncio.iscoroutinefunction(callback):
+                        await callback(event)
+                    else:
+                        callback(event)
+                except Exception as e:
+                    logger.error(f"Subscriber callback error: {e}")
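A hedged usage sketch of the new subscribe/emit API, following the docstring examples above; the on_error handler is illustrative, and the commented emit call assumes an Event instance obtained elsewhere:

    from claude_mpm.commander.events.manager import EventManager
    from claude_mpm.commander.models.events import EventType

    manager = EventManager()

    async def on_error(event):
        # emit() awaits async callbacks and calls sync ones directly
        print(f"Error event: {event.title}")

    manager.subscribe(EventType.ERROR, on_error)
    # Later, inside the daemon's event loop, with some_error_event in hand:
    #   await manager.emit(some_error_event)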
claude_mpm/commander/frameworks/base.py
CHANGED
@@ -19,6 +19,7 @@ class InstanceInfo:
         pane_target: Tmux pane target (e.g., "%1")
         git_branch: Current git branch if project is a git repo
         git_status: Git status summary if project is a git repo
+        connected: Whether instance has an active adapter connection
 
     Example:
         >>> info = InstanceInfo(
@@ -28,7 +29,8 @@ class InstanceInfo:
         ...     tmux_session="mpm-commander",
         ...     pane_target="%1",
         ...     git_branch="main",
-        ...     git_status="clean"
+        ...     git_status="clean",
+        ...     connected=True
         ... )
     """
 
@@ -39,6 +41,94 @@ class InstanceInfo:
     pane_target: str
     git_branch: Optional[str] = None
     git_status: Optional[str] = None
+    connected: bool = False
+    ready: bool = False
+
+
+@dataclass
+class RegisteredInstance:
+    """Persistent instance configuration (survives daemon restart).
+
+    Attributes:
+        name: Instance identifier
+        path: Original project directory path (stored as string for JSON)
+        framework: Framework identifier ("cc" or "mpm")
+        registered_at: ISO timestamp when instance was registered
+        worktree_path: Path to git worktree (if using worktree isolation)
+        worktree_branch: Branch name in the worktree
+        use_worktree: Whether worktree isolation is enabled
+
+    Example:
+        >>> instance = RegisteredInstance(
+        ...     name="myapp",
+        ...     path="/Users/user/myapp",
+        ...     framework="cc",
+        ...     registered_at="2024-01-15T10:30:00"
+        ... )
+        >>> instance.to_dict()
+        {'name': 'myapp', 'path': '/Users/user/myapp', 'framework': 'cc', ...}
+        >>> instance.working_path
+        '/Users/user/myapp'
+
+        >>> # With worktree enabled
+        >>> instance = RegisteredInstance(
+        ...     name="myapp",
+        ...     path="/Users/user/myapp",
+        ...     framework="cc",
+        ...     registered_at="2024-01-15T10:30:00",
+        ...     worktree_path="/Users/user/.mpm/worktrees/myapp",
+        ...     worktree_branch="feature/new-feature",
+        ...     use_worktree=True
+        ... )
+        >>> instance.working_path
+        '/Users/user/.mpm/worktrees/myapp'
+    """
+
+    name: str
+    path: str  # Original project path
+    framework: str
+    registered_at: str
+    # Worktree fields
+    worktree_path: Optional[str] = None  # Path to worktree (if using)
+    worktree_branch: Optional[str] = None  # Branch in worktree
+    use_worktree: bool = False  # Whether worktree is enabled
+
+    def to_dict(self) -> dict:
+        """Serialize for JSON storage."""
+        return {
+            "name": self.name,
+            "path": self.path,
+            "framework": self.framework,
+            "registered_at": self.registered_at,
+            "worktree_path": self.worktree_path,
+            "worktree_branch": self.worktree_branch,
+            "use_worktree": self.use_worktree,
+        }
+
+    @classmethod
+    def from_dict(cls, data: dict) -> "RegisteredInstance":
+        """Deserialize from JSON."""
+        return cls(
+            name=data["name"],
+            path=data["path"],
+            framework=data["framework"],
+            registered_at=data.get("registered_at", ""),
+            worktree_path=data.get("worktree_path"),
+            worktree_branch=data.get("worktree_branch"),
+            use_worktree=data.get("use_worktree", False),
+        )
+
+    @property
+    def working_path(self) -> str:
+        """Get the actual working path (worktree or original).
+
+        Returns:
+            The worktree path if worktree is enabled and configured,
+            otherwise the original project path.
+        """
+        if self.use_worktree and self.worktree_path:
+            return self.worktree_path
+        return self.path
 
 
 class BaseFramework(ABC):
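A brief sketch of round-tripping the new RegisteredInstance through JSON, mirroring the docstring examples above (the paths are the same illustrative ones used there):

    import json

    from claude_mpm.commander.frameworks.base import RegisteredInstance

    inst = RegisteredInstance(
        name="myapp",
        path="/Users/user/myapp",
        framework="cc",
        registered_at="2024-01-15T10:30:00",
        worktree_path="/Users/user/.mpm/worktrees/myapp",
        worktree_branch="feature/new-feature",
        use_worktree=True,
    )
    # to_dict()/from_dict() give a lossless JSON round trip
    restored = RegisteredInstance.from_dict(json.loads(json.dumps(inst.to_dict())))
    # working_path prefers the worktree when use_worktree is set
    assert restored.working_path == "/Users/user/.mpm/worktrees/myapp"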
claude_mpm/commander/frameworks/mpm.py
CHANGED
@@ -10,9 +10,7 @@ from .base import BaseFramework
 class MPMFramework(BaseFramework):
     """Claude MPM framework.
 
-    This framework launches Claude with
-    It uses the same 'claude' command as Claude Code, but relies on CLAUDE.md
-    in the project to load the MPM agent system.
+    This framework launches Claude MPM with full agent orchestration.
 
     Example:
         >>> framework = MPMFramework()
@@ -21,42 +19,39 @@ class MPMFramework(BaseFramework):
         >>> framework.is_available()
         True
         >>> framework.get_startup_command(Path("/Users/user/myapp"))
-        "cd '/Users/user/myapp' && claude"
+        "cd '/Users/user/myapp' && claude-mpm"
     """
 
     name = "mpm"
     display_name = "Claude MPM"
-    command = "claude"
+    command = "claude-mpm"
 
     def get_startup_command(self, project_path: Path) -> str:
         """Get the command to start Claude MPM in a project.
 
-        The MPM framework uses the standard 'claude' command, but expects
-        a CLAUDE.md file in the project to load the MPM agent system.
-
         Args:
             project_path: Path to the project directory
 
        Returns:
-            Shell command string to start Claude
+            Shell command string to start Claude MPM
 
         Example:
            >>> framework = MPMFramework()
            >>> framework.get_startup_command(Path("/Users/user/myapp"))
-            "cd '/Users/user/myapp' && claude"
+            "cd '/Users/user/myapp' && claude-mpm"
         """
         quoted_path = shlex.quote(str(project_path))
-        return f"cd {quoted_path} && claude"
+        return f"cd {quoted_path} && claude-mpm"
 
     def is_available(self) -> bool:
-        """Check if 'claude' command is available.
+        """Check if 'claude-mpm' command is available.
 
         Returns:
-            True if 'claude' command exists in PATH
+            True if 'claude-mpm' command exists in PATH
 
         Example:
            >>> framework = MPMFramework()
            >>> framework.is_available()
            True
         """
-        return shutil.which("claude") is not None
+        return shutil.which("claude-mpm") is not None
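For reference, a tiny sketch of the changed behavior: the MPM framework now shells out to the dedicated claude-mpm entry point instead of plain claude (output shown as in the updated docstrings):

    from pathlib import Path

    from claude_mpm.commander.frameworks.mpm import MPMFramework

    fw = MPMFramework()
    if fw.is_available():  # True when `claude-mpm` is on PATH
        print(fw.get_startup_command(Path("/Users/user/myapp")))
        # -> "cd '/Users/user/myapp' && claude-mpm"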
|