monoco-toolkit 0.3.11__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- monoco/core/automation/__init__.py +40 -0
- monoco/core/automation/field_watcher.py +296 -0
- monoco/core/automation/handlers.py +805 -0
- monoco/core/config.py +29 -11
- monoco/core/daemon/__init__.py +5 -0
- monoco/core/daemon/pid.py +290 -0
- monoco/core/git.py +15 -0
- monoco/core/hooks/context.py +74 -13
- monoco/core/injection.py +86 -8
- monoco/core/integrations.py +0 -24
- monoco/core/router/__init__.py +17 -0
- monoco/core/router/action.py +202 -0
- monoco/core/scheduler/__init__.py +63 -0
- monoco/core/scheduler/base.py +152 -0
- monoco/core/scheduler/engines.py +175 -0
- monoco/core/scheduler/events.py +197 -0
- monoco/core/scheduler/local.py +377 -0
- monoco/core/setup.py +9 -0
- monoco/core/sync.py +199 -4
- monoco/core/watcher/__init__.py +63 -0
- monoco/core/watcher/base.py +382 -0
- monoco/core/watcher/dropzone.py +152 -0
- monoco/core/watcher/im.py +460 -0
- monoco/core/watcher/issue.py +303 -0
- monoco/core/watcher/memo.py +192 -0
- monoco/core/watcher/task.py +238 -0
- monoco/daemon/app.py +3 -60
- monoco/daemon/commands.py +459 -25
- monoco/daemon/events.py +34 -0
- monoco/daemon/scheduler.py +157 -201
- monoco/daemon/services.py +42 -243
- monoco/features/agent/__init__.py +25 -7
- monoco/features/agent/cli.py +91 -57
- monoco/features/agent/engines.py +31 -170
- monoco/features/agent/resources/en/AGENTS.md +14 -14
- monoco/features/agent/resources/en/skills/monoco_role_engineer/SKILL.md +101 -0
- monoco/features/agent/resources/en/skills/monoco_role_manager/SKILL.md +95 -0
- monoco/features/agent/resources/en/skills/monoco_role_planner/SKILL.md +177 -0
- monoco/features/agent/resources/en/skills/monoco_role_reviewer/SKILL.md +139 -0
- monoco/features/agent/resources/zh/skills/monoco_role_engineer/SKILL.md +101 -0
- monoco/features/agent/resources/zh/skills/monoco_role_manager/SKILL.md +95 -0
- monoco/features/agent/resources/zh/skills/monoco_role_planner/SKILL.md +177 -0
- monoco/features/agent/resources/zh/skills/monoco_role_reviewer/SKILL.md +139 -0
- monoco/features/agent/worker.py +1 -1
- monoco/features/hooks/__init__.py +61 -6
- monoco/features/hooks/commands.py +281 -271
- monoco/features/hooks/dispatchers/__init__.py +23 -0
- monoco/features/hooks/dispatchers/agent_dispatcher.py +486 -0
- monoco/features/hooks/dispatchers/git_dispatcher.py +478 -0
- monoco/features/hooks/manager.py +357 -0
- monoco/features/hooks/models.py +262 -0
- monoco/features/hooks/parser.py +322 -0
- monoco/features/hooks/universal_interceptor.py +503 -0
- monoco/features/im/__init__.py +67 -0
- monoco/features/im/core.py +782 -0
- monoco/features/im/models.py +311 -0
- monoco/features/issue/commands.py +133 -60
- monoco/features/issue/core.py +385 -40
- monoco/features/issue/domain_commands.py +0 -19
- monoco/features/issue/resources/en/AGENTS.md +17 -122
- monoco/features/issue/resources/hooks/agent/before-tool.sh +102 -0
- monoco/features/issue/resources/hooks/agent/session-start.sh +88 -0
- monoco/features/issue/resources/hooks/{post-checkout.sh → git/git-post-checkout.sh} +10 -9
- monoco/features/issue/resources/hooks/git/git-pre-commit.sh +31 -0
- monoco/features/issue/resources/hooks/{pre-push.sh → git/git-pre-push.sh} +7 -13
- monoco/features/issue/resources/zh/AGENTS.md +18 -123
- monoco/features/memo/cli.py +15 -64
- monoco/features/memo/core.py +6 -34
- monoco/features/memo/models.py +24 -15
- monoco/features/memo/resources/en/AGENTS.md +31 -0
- monoco/features/memo/resources/zh/AGENTS.md +28 -5
- monoco/features/spike/commands.py +5 -3
- monoco/main.py +5 -3
- {monoco_toolkit-0.3.11.dist-info → monoco_toolkit-0.4.0.dist-info}/METADATA +1 -1
- monoco_toolkit-0.4.0.dist-info/RECORD +170 -0
- monoco/core/execution.py +0 -67
- monoco/features/agent/apoptosis.py +0 -44
- monoco/features/agent/manager.py +0 -127
- monoco/features/agent/resources/atoms/atom-code-dev.yaml +0 -61
- monoco/features/agent/resources/atoms/atom-issue-lifecycle.yaml +0 -73
- monoco/features/agent/resources/atoms/atom-knowledge.yaml +0 -55
- monoco/features/agent/resources/atoms/atom-review.yaml +0 -60
- monoco/features/agent/resources/en/skills/monoco_atom_core/SKILL.md +0 -99
- monoco/features/agent/resources/en/skills/monoco_workflow_agent_engineer/SKILL.md +0 -94
- monoco/features/agent/resources/en/skills/monoco_workflow_agent_manager/SKILL.md +0 -93
- monoco/features/agent/resources/en/skills/monoco_workflow_agent_planner/SKILL.md +0 -85
- monoco/features/agent/resources/en/skills/monoco_workflow_agent_reviewer/SKILL.md +0 -114
- monoco/features/agent/resources/workflows/workflow-dev.yaml +0 -83
- monoco/features/agent/resources/workflows/workflow-issue-create.yaml +0 -72
- monoco/features/agent/resources/workflows/workflow-review.yaml +0 -94
- monoco/features/agent/resources/zh/roles/monoco_role_engineer.yaml +0 -49
- monoco/features/agent/resources/zh/roles/monoco_role_manager.yaml +0 -46
- monoco/features/agent/resources/zh/roles/monoco_role_planner.yaml +0 -46
- monoco/features/agent/resources/zh/roles/monoco_role_reviewer.yaml +0 -47
- monoco/features/agent/resources/zh/skills/monoco_atom_core/SKILL.md +0 -99
- monoco/features/agent/resources/zh/skills/monoco_workflow_agent_engineer/SKILL.md +0 -94
- monoco/features/agent/resources/zh/skills/monoco_workflow_agent_manager/SKILL.md +0 -88
- monoco/features/agent/resources/zh/skills/monoco_workflow_agent_planner/SKILL.md +0 -259
- monoco/features/agent/resources/zh/skills/monoco_workflow_agent_reviewer/SKILL.md +0 -137
- monoco/features/agent/session.py +0 -169
- monoco/features/artifact/resources/zh/skills/monoco_atom_artifact/SKILL.md +0 -278
- monoco/features/glossary/resources/en/skills/monoco_atom_glossary/SKILL.md +0 -35
- monoco/features/glossary/resources/zh/skills/monoco_atom_glossary/SKILL.md +0 -35
- monoco/features/hooks/adapter.py +0 -67
- monoco/features/hooks/core.py +0 -441
- monoco/features/i18n/resources/en/skills/monoco_atom_i18n/SKILL.md +0 -96
- monoco/features/i18n/resources/en/skills/monoco_workflow_i18n_scan/SKILL.md +0 -105
- monoco/features/i18n/resources/zh/skills/monoco_atom_i18n/SKILL.md +0 -96
- monoco/features/i18n/resources/zh/skills/monoco_workflow_i18n_scan/SKILL.md +0 -105
- monoco/features/issue/resources/en/skills/monoco_atom_issue/SKILL.md +0 -165
- monoco/features/issue/resources/en/skills/monoco_workflow_issue_creation/SKILL.md +0 -167
- monoco/features/issue/resources/en/skills/monoco_workflow_issue_development/SKILL.md +0 -224
- monoco/features/issue/resources/en/skills/monoco_workflow_issue_management/SKILL.md +0 -159
- monoco/features/issue/resources/en/skills/monoco_workflow_issue_refinement/SKILL.md +0 -203
- monoco/features/issue/resources/hooks/pre-commit.sh +0 -41
- monoco/features/issue/resources/zh/skills/monoco_atom_issue_lifecycle/SKILL.md +0 -190
- monoco/features/issue/resources/zh/skills/monoco_workflow_issue_creation/SKILL.md +0 -167
- monoco/features/issue/resources/zh/skills/monoco_workflow_issue_development/SKILL.md +0 -224
- monoco/features/issue/resources/zh/skills/monoco_workflow_issue_management/SKILL.md +0 -159
- monoco/features/issue/resources/zh/skills/monoco_workflow_issue_refinement/SKILL.md +0 -203
- monoco/features/memo/resources/en/skills/monoco_atom_memo/SKILL.md +0 -77
- monoco/features/memo/resources/en/skills/monoco_workflow_note_processing/SKILL.md +0 -140
- monoco/features/memo/resources/zh/skills/monoco_atom_memo/SKILL.md +0 -77
- monoco/features/memo/resources/zh/skills/monoco_workflow_note_processing/SKILL.md +0 -140
- monoco/features/spike/resources/en/skills/monoco_atom_spike/SKILL.md +0 -76
- monoco/features/spike/resources/en/skills/monoco_workflow_research/SKILL.md +0 -121
- monoco/features/spike/resources/zh/skills/monoco_atom_spike/SKILL.md +0 -76
- monoco/features/spike/resources/zh/skills/monoco_workflow_research/SKILL.md +0 -121
- monoco_toolkit-0.3.11.dist-info/RECORD +0 -181
- {monoco_toolkit-0.3.11.dist-info → monoco_toolkit-0.4.0.dist-info}/WHEEL +0 -0
- {monoco_toolkit-0.3.11.dist-info → monoco_toolkit-0.4.0.dist-info}/entry_points.txt +0 -0
- {monoco_toolkit-0.3.11.dist-info → monoco_toolkit-0.4.0.dist-info}/licenses/LICENSE +0 -0
monoco/daemon/scheduler.py
CHANGED
@@ -1,236 +1,192 @@
+"""
+Scheduler Service - Unified event-driven architecture (FEAT-0164).
+
+This module implements a unified event-driven scheduler service that:
+1. Uses AgentScheduler for agent lifecycle management (FEAT-0160)
+2. Integrates Watcher framework for file system events (FEAT-0161)
+3. Uses new Handler framework from core.automation (FEAT-0162)
+
+Replaces the old architecture based on SessionManager + SemaphoreManager + polling loops.
+"""
+
 import asyncio
 import logging
 import os
-from typing import Dict, Optional, List, Any
+from typing import Dict, Optional, List, Any
 from pathlib import Path
 
-from monoco.daemon.services import ProjectManager
-from monoco.
-
-
-
-
-
+from monoco.daemon.services import ProjectManager
+from monoco.core.scheduler import (
+    AgentEventType,
+    event_bus,
+    AgentScheduler,
+    LocalProcessScheduler,
+)
+from monoco.core.watcher import WatchConfig, IssueWatcher, MemoWatcher, TaskWatcher
+from monoco.core.automation.handlers import start_all_handlers, stop_all_handlers
 from monoco.core.config import get_config
 
 logger = logging.getLogger("monoco.daemon.scheduler")
 
+
 class SchedulerService:
+    """
+    Unified event-driven scheduler service.
+
+    Responsibilities:
+    - Initialize and manage AgentScheduler
+    - Setup and manage Watchers for file system events
+    - Start/stop all handlers
+
+    Architecture:
+    ```
+    SchedulerService
+    ├── AgentScheduler (LocalProcessScheduler)
+    │   └── Manages agent process lifecycle
+    ├── Watchers
+    │   ├── IssueWatcher -> EventBus
+    │   ├── MemoWatcher -> EventBus
+    │   └── TaskWatcher -> EventBus
+    └── Handlers (from core.automation)
+        ├── TaskFileHandler
+        ├── IssueStageHandler
+        ├── MemoThresholdHandler
+        └── PRCreatedHandler
+    ```
+    """
+
     def __init__(self, project_manager: ProjectManager):
         self.project_manager = project_manager
-        self.session_managers: Dict[str, SessionManager] = {}
-        self._monitoring_task: Optional[asyncio.Task] = None
-        self.apoptosis_managers: Dict[str, ApoptosisManager] = {}
 
-        #
-
-        self.
-
-
-
+        # AgentScheduler (FEAT-0160)
+        scheduler_config = self._load_scheduler_config()
+        self.agent_scheduler: AgentScheduler = LocalProcessScheduler(
+            max_concurrent=scheduler_config.get("max_concurrent", 5),
+            project_root=Path.cwd(),
+        )
+
+        # Watchers (FEAT-0161)
+        self.watchers: List[Any] = []
+
+        # Handlers (FEAT-0162)
+        self.handlers: List[Any] = []
+
+        # Background tasks
+        self._tasks: List[asyncio.Task] = []
+        self._running = False
+
+    def _load_scheduler_config(self) -> Dict[str, Any]:
+        """Load scheduler configuration from config files and env vars."""
+        config = {"max_concurrent": 5}
+
         try:
             settings = get_config()
-
+
+            # Check for concurrency config
+            if hasattr(settings, "agent") and hasattr(settings.agent, "concurrency"):
+                concurrency_config = settings.agent.concurrency
+                if hasattr(concurrency_config, "global_max"):
+                    config["max_concurrent"] = concurrency_config.global_max
 
             # Check for environment variable override
             env_max_agents = os.environ.get("MONOCO_MAX_AGENTS")
             if env_max_agents:
                 try:
-
-                    logger.info(f"Overriding
+                    config["max_concurrent"] = int(env_max_agents)
+                    logger.info(f"Overriding max_concurrent from environment: {env_max_agents}")
                 except ValueError:
                     logger.warning(f"Invalid MONOCO_MAX_AGENTS value: {env_max_agents}")
 
-            return
+            return config
         except Exception as e:
-            logger.warning(f"Failed to load
-            return
-
-    def get_managers(self, project_path: Path) -> Tuple[SessionManager, ApoptosisManager]:
-        key = str(project_path)
-        if key not in self.session_managers:
-            sm = SessionManager(project_root=project_path)
-            self.session_managers[key] = sm
-            self.apoptosis_managers[key] = ApoptosisManager(sm)
-        return self.session_managers[key], self.apoptosis_managers[key]
-
+            logger.warning(f"Failed to load scheduler config: {e}. Using defaults.")
+            return config
+
     async def start(self):
-
-
-
+        """Start the scheduler service."""
+        logger.info("Starting Scheduler Service (unified event-driven architecture)...")
+        self._running = True
+
+        # 1. Start EventBus
+        await event_bus.start()
+
+        # 2. Start AgentScheduler
+        await self.agent_scheduler.start()
+
+        # 3. Setup and start Watchers
+        self._setup_watchers()
+        for watcher in self.watchers:
+            await watcher.start()
+
+        # 4. Start Handlers (FEAT-0162)
+        self.handlers = start_all_handlers(self.agent_scheduler)
+
+        logger.info("Scheduler Service started with unified event-driven architecture")
+
     def stop(self):
+        """Stop the scheduler service."""
         logger.info("Stopping Scheduler Service...")
-
-        self._monitoring_task.cancel()
-
-        # Terminate all sessions
-        for sm in self.session_managers.values():
-            filtered_sessions = sm.list_sessions()
-            for session in filtered_sessions:
-                session.terminate()
-
-    async def monitor_loop(self):
-        try:
-            while True:
-                await self.tick()
-                await asyncio.sleep(5)
-        except asyncio.CancelledError:
-            pass
-        except Exception as e:
-            logger.error(f"Scheduler loop crashed: {e}", exc_info=True)
-
-    async def tick(self):
-        # We iterate over keys to avoid modification during iteration issues if new projects added
-        projects = list(self.project_manager.projects.values())
-        for project_ctx in projects:
-            await self.process_project(project_ctx)
-
-    async def process_project(self, project_context):
-        sm, am = self.get_managers(project_context.path)
+        self._running = False
 
-        #
-
-
-        # 1.5 Handover Trigger: Architect -> Engineer
-        await self.check_handover_trigger(sm, project_context)
-
-        # 2. Monitor Active Sessions (Supervisor)
-        active_sessions = sm.list_sessions()
-        for session in active_sessions:
-            if session.model.status in ["running", "pending"]:
-                status = session.refresh_status() # Updates model.status
-
-                # Check for timeout/failure
-                if status == "timeout" or status == "failed":
-                    if session.model.status != "crashed":
-                        logger.warning(f"Session {session.model.id} led to {status}. Triggering Autopsy.")
-                        # Record failure for cooldown
-                        self.semaphore_manager.record_failure(
-                            issue_id=session.model.issue_id,
-                            session_id=session.model.id
-                        )
-                        am.trigger_apoptosis(session.model.id, failure_reason=f"Session status became {status}")
-                else:
-                    # Track active session in semaphore manager
-                    self.semaphore_manager.acquire(session.model.id, session.model.role_name)
-
-                # Daemon Logic for Chained Execution
-                if status == "completed":
-                    # Clear failure record on success
-                    self.semaphore_manager.clear_failure(session.model.issue_id)
-                    self.handle_completion(session, sm)
-
-    async def check_inbox_trigger(self, sm: SessionManager, project_context):
-        # Checking existing Architect sessions
-        existing_architects = [s for s in sm.list_sessions() if s.model.role_name == "Architect" and s.model.status == "running"]
-
-        if not existing_architects:
-            # Check semaphore before spawning
-            if not self.semaphore_manager.can_acquire("Architect"):
-                logger.warning("Cannot spawn Architect: concurrency limit reached")
-                return
-
-            trigger_policy = MemoAccumulationPolicy(count_threshold=5)
-            if trigger_policy.evaluate({"issues_root": project_context.issues_root}):
-                logger.info(f"Triggering Architect for project {project_context.id}")
-                self.spawn_architect(sm, project_context)
-
-    async def check_handover_trigger(self, sm: SessionManager, project_context):
-        # Scan for OPEN + DOING issues with no active worker
-        try:
-            all_issues = list_issues(project_context.issues_root)
-            handover_policy = HandoverPolicy(target_status="open", target_stage="doing")
-
-            for issue in all_issues:
-                if handover_policy.evaluate({"issue": issue}):
-                    # Check if session exists
-                    active = [s for s in sm.list_sessions(issue_id=issue.id) if s.model.status in ["running", "pending"]]
-                    if not active:
-                        # Check semaphore before spawning (including cooldown check)
-                        if not self.semaphore_manager.can_acquire("Engineer", issue_id=issue.id):
-                            logger.warning(f"Cannot spawn Engineer for {issue.id}: concurrency limit or cooldown active")
-                            continue
-
-                        logger.info(f"Handover trigger: Spawning Engineer for {issue.id}")
-                        self.spawn_engineer(sm, issue)
-        except Exception as e:
-            logger.error(f"Error in Handover trigger: {e}")
-
-    def spawn_engineer(self, sm: SessionManager, issue):
-        role = RoleTemplate(
-            name="Engineer",
-            description="Software Engineer",
-            trigger="handover",
-            goal=f"Implement feature: {issue.title}",
-            system_prompt="You are a Software Engineer. Read the issue and implement requirements.",
-            engine="gemini"
-        )
-        session = sm.create_session(issue_id=issue.id, role=role)
+        # Cancel background tasks
+        for task in self._tasks:
+            task.cancel()
 
-        #
-        self.
+        # Stop Handlers
+        stop_all_handlers(self.handlers)
+        self.handlers = []
 
-
-
-
-
-            self.semaphore_manager.release(session.model.id)
-            self.semaphore_manager.record_failure(issue.id, session.model.id)
-            logger.error(f"Failed to start Engineer session for {issue.id}: {e}")
-            raise
-
-    def spawn_architect(self, sm: SessionManager, project_context):
-        # Create Architect Session
-        role = RoleTemplate(
-            name="Architect",
-            description="System Architect",
-            trigger="memo.accumulation",
-            goal="Process memo inbox and create issues.",
-            system_prompt="You are the Architect. Process the Memo inbox.",
-            engine="gemini" # Default or from config?
-        )
-        session = sm.create_session(issue_id="architecture-review", role=role)
+        # Stop Watchers
+        for watcher in self.watchers:
+            asyncio.create_task(watcher.stop())
+        self.watchers = []
 
-        #
-        self.
+        # Stop AgentScheduler
+        asyncio.create_task(self.agent_scheduler.stop())
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-            logger.warning(
-                f"Cannot spawn Reviewer for {session.model.issue_id}: "
-                f"concurrency limit reached. Review will be deferred."
-            )
-            return
-
-            logger.info(f"Engineer finished for {session.model.issue_id}. Spawning Reviewer.")
-            reviewer_role = RoleTemplate(
-                name="Reviewer",
-                description="Code Reviewer",
-                trigger="engineer.completion",
-                goal=f"Review work on {session.model.issue_id}",
-                system_prompt="You are a Code Reviewer. Review the code changes.",
-                engine="gemini"
+        # Stop EventBus
+        asyncio.create_task(event_bus.stop())
+
+        logger.info("Scheduler Service stopped")
+
+    def _setup_watchers(self):
+        """Initialize all filesystem watchers."""
+        for project_ctx in self.project_manager.projects.values():
+            # IssueWatcher
+            config = WatchConfig(
+                path=project_ctx.issues_root,
+                patterns=["*.md"],
+                recursive=True,
             )
-
+            self.watchers.append(IssueWatcher(config, event_bus))
 
-        #
-
+            # MemoWatcher
+            memo_path = project_ctx.path / "Memos" / "inbox.md"
+            if memo_path.exists():
+                memo_config = WatchConfig(
+                    path=memo_path,
+                    patterns=["*.md"],
+                )
+                self.watchers.append(MemoWatcher(memo_config, event_bus))
 
-
-
-
-
-
-
-
+            # TaskWatcher (if tasks.md exists)
+            task_path = project_ctx.path / "tasks.md"
+            if task_path.exists():
+                task_config = WatchConfig(
+                    path=task_path,
+                    patterns=["*.md"],
+                )
+                self.watchers.append(TaskWatcher(task_config, event_bus))
+
+        logger.info(f"Setup {len(self.watchers)} watchers")
+
+    def get_stats(self) -> Dict[str, Any]:
+        """Get scheduler service statistics."""
+        return {
+            "running": self._running,
+            "event_bus": event_bus.get_stats(),
+            "agent_scheduler": self.agent_scheduler.get_stats(),
+            "watchers": len(self.watchers),
+            "handlers": len(self.handlers),
+            "projects": len(self.project_manager.projects),
+        }