monoco-toolkit 0.3.11__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- monoco/core/automation/__init__.py +40 -0
- monoco/core/automation/field_watcher.py +296 -0
- monoco/core/automation/handlers.py +805 -0
- monoco/core/config.py +29 -11
- monoco/core/daemon/__init__.py +5 -0
- monoco/core/daemon/pid.py +290 -0
- monoco/core/git.py +15 -0
- monoco/core/hooks/context.py +74 -13
- monoco/core/injection.py +86 -8
- monoco/core/integrations.py +0 -24
- monoco/core/router/__init__.py +17 -0
- monoco/core/router/action.py +202 -0
- monoco/core/scheduler/__init__.py +63 -0
- monoco/core/scheduler/base.py +152 -0
- monoco/core/scheduler/engines.py +175 -0
- monoco/core/scheduler/events.py +197 -0
- monoco/core/scheduler/local.py +377 -0
- monoco/core/setup.py +9 -0
- monoco/core/sync.py +199 -4
- monoco/core/watcher/__init__.py +63 -0
- monoco/core/watcher/base.py +382 -0
- monoco/core/watcher/dropzone.py +152 -0
- monoco/core/watcher/im.py +460 -0
- monoco/core/watcher/issue.py +303 -0
- monoco/core/watcher/memo.py +192 -0
- monoco/core/watcher/task.py +238 -0
- monoco/daemon/app.py +3 -60
- monoco/daemon/commands.py +459 -25
- monoco/daemon/events.py +34 -0
- monoco/daemon/scheduler.py +157 -201
- monoco/daemon/services.py +42 -243
- monoco/features/agent/__init__.py +25 -7
- monoco/features/agent/cli.py +91 -57
- monoco/features/agent/engines.py +31 -170
- monoco/features/agent/resources/en/AGENTS.md +14 -14
- monoco/features/agent/resources/en/skills/monoco_role_engineer/SKILL.md +101 -0
- monoco/features/agent/resources/en/skills/monoco_role_manager/SKILL.md +95 -0
- monoco/features/agent/resources/en/skills/monoco_role_planner/SKILL.md +177 -0
- monoco/features/agent/resources/en/skills/monoco_role_reviewer/SKILL.md +139 -0
- monoco/features/agent/resources/zh/skills/monoco_role_engineer/SKILL.md +101 -0
- monoco/features/agent/resources/zh/skills/monoco_role_manager/SKILL.md +95 -0
- monoco/features/agent/resources/zh/skills/monoco_role_planner/SKILL.md +177 -0
- monoco/features/agent/resources/zh/skills/monoco_role_reviewer/SKILL.md +139 -0
- monoco/features/agent/worker.py +1 -1
- monoco/features/hooks/__init__.py +61 -6
- monoco/features/hooks/commands.py +281 -271
- monoco/features/hooks/dispatchers/__init__.py +23 -0
- monoco/features/hooks/dispatchers/agent_dispatcher.py +486 -0
- monoco/features/hooks/dispatchers/git_dispatcher.py +478 -0
- monoco/features/hooks/manager.py +357 -0
- monoco/features/hooks/models.py +262 -0
- monoco/features/hooks/parser.py +322 -0
- monoco/features/hooks/universal_interceptor.py +503 -0
- monoco/features/im/__init__.py +67 -0
- monoco/features/im/core.py +782 -0
- monoco/features/im/models.py +311 -0
- monoco/features/issue/commands.py +133 -60
- monoco/features/issue/core.py +385 -40
- monoco/features/issue/domain_commands.py +0 -19
- monoco/features/issue/resources/en/AGENTS.md +17 -122
- monoco/features/issue/resources/hooks/agent/before-tool.sh +102 -0
- monoco/features/issue/resources/hooks/agent/session-start.sh +88 -0
- monoco/features/issue/resources/hooks/{post-checkout.sh → git/git-post-checkout.sh} +10 -9
- monoco/features/issue/resources/hooks/git/git-pre-commit.sh +31 -0
- monoco/features/issue/resources/hooks/{pre-push.sh → git/git-pre-push.sh} +7 -13
- monoco/features/issue/resources/zh/AGENTS.md +18 -123
- monoco/features/memo/cli.py +15 -64
- monoco/features/memo/core.py +6 -34
- monoco/features/memo/models.py +24 -15
- monoco/features/memo/resources/en/AGENTS.md +31 -0
- monoco/features/memo/resources/zh/AGENTS.md +28 -5
- monoco/features/spike/commands.py +5 -3
- monoco/main.py +5 -3
- {monoco_toolkit-0.3.11.dist-info → monoco_toolkit-0.4.0.dist-info}/METADATA +1 -1
- monoco_toolkit-0.4.0.dist-info/RECORD +170 -0
- monoco/core/execution.py +0 -67
- monoco/features/agent/apoptosis.py +0 -44
- monoco/features/agent/manager.py +0 -127
- monoco/features/agent/resources/atoms/atom-code-dev.yaml +0 -61
- monoco/features/agent/resources/atoms/atom-issue-lifecycle.yaml +0 -73
- monoco/features/agent/resources/atoms/atom-knowledge.yaml +0 -55
- monoco/features/agent/resources/atoms/atom-review.yaml +0 -60
- monoco/features/agent/resources/en/skills/monoco_atom_core/SKILL.md +0 -99
- monoco/features/agent/resources/en/skills/monoco_workflow_agent_engineer/SKILL.md +0 -94
- monoco/features/agent/resources/en/skills/monoco_workflow_agent_manager/SKILL.md +0 -93
- monoco/features/agent/resources/en/skills/monoco_workflow_agent_planner/SKILL.md +0 -85
- monoco/features/agent/resources/en/skills/monoco_workflow_agent_reviewer/SKILL.md +0 -114
- monoco/features/agent/resources/workflows/workflow-dev.yaml +0 -83
- monoco/features/agent/resources/workflows/workflow-issue-create.yaml +0 -72
- monoco/features/agent/resources/workflows/workflow-review.yaml +0 -94
- monoco/features/agent/resources/zh/roles/monoco_role_engineer.yaml +0 -49
- monoco/features/agent/resources/zh/roles/monoco_role_manager.yaml +0 -46
- monoco/features/agent/resources/zh/roles/monoco_role_planner.yaml +0 -46
- monoco/features/agent/resources/zh/roles/monoco_role_reviewer.yaml +0 -47
- monoco/features/agent/resources/zh/skills/monoco_atom_core/SKILL.md +0 -99
- monoco/features/agent/resources/zh/skills/monoco_workflow_agent_engineer/SKILL.md +0 -94
- monoco/features/agent/resources/zh/skills/monoco_workflow_agent_manager/SKILL.md +0 -88
- monoco/features/agent/resources/zh/skills/monoco_workflow_agent_planner/SKILL.md +0 -259
- monoco/features/agent/resources/zh/skills/monoco_workflow_agent_reviewer/SKILL.md +0 -137
- monoco/features/agent/session.py +0 -169
- monoco/features/artifact/resources/zh/skills/monoco_atom_artifact/SKILL.md +0 -278
- monoco/features/glossary/resources/en/skills/monoco_atom_glossary/SKILL.md +0 -35
- monoco/features/glossary/resources/zh/skills/monoco_atom_glossary/SKILL.md +0 -35
- monoco/features/hooks/adapter.py +0 -67
- monoco/features/hooks/core.py +0 -441
- monoco/features/i18n/resources/en/skills/monoco_atom_i18n/SKILL.md +0 -96
- monoco/features/i18n/resources/en/skills/monoco_workflow_i18n_scan/SKILL.md +0 -105
- monoco/features/i18n/resources/zh/skills/monoco_atom_i18n/SKILL.md +0 -96
- monoco/features/i18n/resources/zh/skills/monoco_workflow_i18n_scan/SKILL.md +0 -105
- monoco/features/issue/resources/en/skills/monoco_atom_issue/SKILL.md +0 -165
- monoco/features/issue/resources/en/skills/monoco_workflow_issue_creation/SKILL.md +0 -167
- monoco/features/issue/resources/en/skills/monoco_workflow_issue_development/SKILL.md +0 -224
- monoco/features/issue/resources/en/skills/monoco_workflow_issue_management/SKILL.md +0 -159
- monoco/features/issue/resources/en/skills/monoco_workflow_issue_refinement/SKILL.md +0 -203
- monoco/features/issue/resources/hooks/pre-commit.sh +0 -41
- monoco/features/issue/resources/zh/skills/monoco_atom_issue_lifecycle/SKILL.md +0 -190
- monoco/features/issue/resources/zh/skills/monoco_workflow_issue_creation/SKILL.md +0 -167
- monoco/features/issue/resources/zh/skills/monoco_workflow_issue_development/SKILL.md +0 -224
- monoco/features/issue/resources/zh/skills/monoco_workflow_issue_management/SKILL.md +0 -159
- monoco/features/issue/resources/zh/skills/monoco_workflow_issue_refinement/SKILL.md +0 -203
- monoco/features/memo/resources/en/skills/monoco_atom_memo/SKILL.md +0 -77
- monoco/features/memo/resources/en/skills/monoco_workflow_note_processing/SKILL.md +0 -140
- monoco/features/memo/resources/zh/skills/monoco_atom_memo/SKILL.md +0 -77
- monoco/features/memo/resources/zh/skills/monoco_workflow_note_processing/SKILL.md +0 -140
- monoco/features/spike/resources/en/skills/monoco_atom_spike/SKILL.md +0 -76
- monoco/features/spike/resources/en/skills/monoco_workflow_research/SKILL.md +0 -121
- monoco/features/spike/resources/zh/skills/monoco_atom_spike/SKILL.md +0 -76
- monoco/features/spike/resources/zh/skills/monoco_workflow_research/SKILL.md +0 -121
- monoco_toolkit-0.3.11.dist-info/RECORD +0 -181
- {monoco_toolkit-0.3.11.dist-info → monoco_toolkit-0.4.0.dist-info}/WHEEL +0 -0
- {monoco_toolkit-0.3.11.dist-info → monoco_toolkit-0.4.0.dist-info}/entry_points.txt +0 -0
- {monoco_toolkit-0.3.11.dist-info → monoco_toolkit-0.4.0.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,805 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Event Handlers - Stateless, Independent Microservices (FEAT-0162).
|
|
3
|
+
|
|
4
|
+
This module implements independent event handlers for Agent collaboration.
|
|
5
|
+
Each handler is a self-contained, stateless microservice that:
|
|
6
|
+
- Listens to specific event types
|
|
7
|
+
- Applies its own filtering logic
|
|
8
|
+
- Triggers appropriate agent actions
|
|
9
|
+
|
|
10
|
+
Architecture: No Workflow class or orchestration. Workflow emerges from
|
|
11
|
+
the natural interaction of independent handlers.
|
|
12
|
+
|
|
13
|
+
Handlers:
|
|
14
|
+
- TaskFileHandler: Monitors tasks.md changes -> triggers Architect
|
|
15
|
+
- IssueStageHandler: Monitors Issue stage=doing -> triggers Engineer
|
|
16
|
+
- MemoThresholdHandler: Monitors Memo accumulation -> triggers Architect
|
|
17
|
+
- PRCreatedHandler: Monitors PR creation -> triggers Reviewer
|
|
18
|
+
"""
|
|
19
|
+
|
|
20
|
+
from __future__ import annotations
|
|
21
|
+
|
|
22
|
+
import logging
|
|
23
|
+
from pathlib import Path
|
|
24
|
+
from typing import Any, Dict, List, Optional, Set
|
|
25
|
+
|
|
26
|
+
from monoco.core.scheduler import (
|
|
27
|
+
AgentEvent,
|
|
28
|
+
AgentEventType,
|
|
29
|
+
AgentScheduler,
|
|
30
|
+
AgentTask,
|
|
31
|
+
event_bus,
|
|
32
|
+
)
|
|
33
|
+
from monoco.core.router import ActionResult
|
|
34
|
+
from monoco.features.memo.models import Memo
|
|
35
|
+
from monoco.features.memo.core import load_memos, get_inbox_path
|
|
36
|
+
|
|
37
|
+
logger = logging.getLogger(__name__)
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
# =============================================================================
|
|
41
|
+
# TaskFileHandler - Independent Microservice
|
|
42
|
+
# =============================================================================
|
|
43
|
+
|
|
44
|
+
class TaskFileHandler:
    """
    Independent handler for task file changes.

    Trigger: ISSUE_UPDATED event (from TaskWatcher)
    Condition: New tasks added to tasks.md
    Action: Spawn Architect agent to analyze and create Issue (stage=draft)

    Emergent Workflow: tasks.md → Architect → Issue (draft)

    This handler is stateless and self-contained. It directly subscribes
    to the EventBus and manages its own lifecycle.

    Example:
        >>> handler = TaskFileHandler(scheduler)
        >>> handler.start()  # Subscribe to events
        >>> # ... handler runs independently ...
        >>> handler.stop()  # Unsubscribe
    """

    def __init__(
        self,
        scheduler: AgentScheduler,
        name: str = "TaskFileHandler",
    ):
        self.scheduler = scheduler
        self.name = name
        self._subscribed = False
        # NOTE(review): never read anywhere in this class — unlike
        # IssueStageHandler, no dedup is performed here. Kept so any
        # external code touching the attribute keeps working; confirm
        # whether per-task dedup was intended.
        self._processed_tasks: Set[str] = set()

    @staticmethod
    def _extract_new_tasks(event: AgentEvent) -> List[Dict[str, Any]]:
        """Return only the newly created task changes from the event payload."""
        task_changes = event.payload.get("task_changes", [])
        return [c for c in task_changes if c.get("type") == "created"]

    def _should_handle(self, event: AgentEvent) -> bool:
        """
        Check if we should handle this event.

        Conditions:
        - Event originates from a TaskWatcher (payload "watcher_name")
        - New tasks were added (not just status changes)
        """
        source = event.payload.get("watcher_name", "")
        if "Task" not in source:
            return False

        if not self._extract_new_tasks(event):
            logger.debug("No new tasks in event, skipping")
            return False

        return True

    async def _handle(self, event: AgentEvent) -> Optional[ActionResult]:
        """
        Handle the event by spawning Architect agent.

        The Architect will:
        1. Read the tasks.md file
        2. Analyze task requirements
        3. Create Issue tickets (stage=draft)

        Returns an ActionResult describing the scheduling outcome, or a
        failure result if scheduling raised.
        """
        file_path = event.payload.get("path", "unknown")
        # Single source of truth for the "new tasks" filter — shared with
        # _should_handle instead of duplicating the list comprehension.
        new_tasks = self._extract_new_tasks(event)

        logger.info(f"TaskFileHandler: Spawning Architect for {len(new_tasks)} new tasks")

        task = AgentTask(
            task_id=f"architect-task-{event.timestamp.timestamp()}",
            role_name="Architect",
            issue_id="task-analysis",
            prompt=self._build_prompt(file_path, new_tasks),
            engine="gemini",
            timeout=600,
            metadata={
                "trigger": "task_file_changed",
                "file_path": file_path,
                "new_tasks": new_tasks,
            },
        )

        try:
            session_id = await self.scheduler.schedule(task)
            logger.info(f"Architect scheduled: session={session_id}")

            return ActionResult.success_result(
                output={
                    "session_id": session_id,
                    "role": "Architect",
                    "trigger": "task_file_changed",
                    "tasks_analyzed": len(new_tasks),
                },
                metadata={"file_path": file_path},
            )

        except Exception as e:
            logger.error(f"Failed to spawn Architect: {e}")
            return ActionResult.failure_result(
                error=f"Failed to schedule Architect: {e}",
                metadata={"file_path": file_path},
            )

    async def __call__(self, event: AgentEvent) -> Optional[ActionResult]:
        """Make handler callable - used as EventBus callback."""
        try:
            if self._should_handle(event):
                return await self._handle(event)
        except Exception as e:
            # Never let a handler error propagate back into the EventBus.
            logger.error(f"Handler error in {self.name}: {e}", exc_info=True)
        return None

    def start(self) -> None:
        """Subscribe this handler to the EventBus (idempotent)."""
        if self._subscribed:
            return

        event_bus.subscribe(AgentEventType.ISSUE_UPDATED, self)
        self._subscribed = True
        logger.info(f"{self.name} started, subscribed to ISSUE_UPDATED")

    def stop(self) -> None:
        """Unsubscribe this handler from the EventBus (idempotent)."""
        if not self._subscribed:
            return

        event_bus.unsubscribe(AgentEventType.ISSUE_UPDATED, self)
        self._subscribed = False
        logger.info(f"{self.name} stopped")

    def _build_prompt(self, file_path: str, new_tasks: list) -> str:
        """Build the prompt for the Architect agent."""
        # Tasks missing a "content" key fall back to a placeholder line.
        tasks_text = "\n".join(
            f"- {t.get('content', 'Unknown task')}" for t in new_tasks
        )

        return f"""You are the Architect. New tasks have been added to {file_path}:

{tasks_text}

Your task:
1. Analyze these tasks for clarity and completeness
2. If they represent feature requests or bugs, create appropriate Issue tickets
3. Set the Issue stage to 'draft' for review
4. Use `monoco issue create` command to create issues

Focus on understanding the intent and creating well-structured issues."""
|
|
190
|
+
|
|
191
|
+
|
|
192
|
+
# =============================================================================
|
|
193
|
+
# IssueStageHandler - Independent Microservice
|
|
194
|
+
# =============================================================================
|
|
195
|
+
|
|
196
|
+
class IssueStageHandler:
    """
    Independent handler for Issue stage changes.

    Trigger: ISSUE_STAGE_CHANGED event
    Condition: Stage changed to 'doing' AND status is 'open'
    Action: Spawn Engineer agent to implement the Issue

    Emergent Workflow: Issue (doing) → Engineer → PR

    This handler is stateless and self-contained.

    Example:
        >>> handler = IssueStageHandler(scheduler)
        >>> handler.start()
        >>> # ... handler runs independently ...
        >>> handler.stop()
    """

    def __init__(
        self,
        scheduler: AgentScheduler,
        name: str = "IssueStageHandler",
    ):
        self.scheduler = scheduler
        self.name = name
        self._subscribed = False
        # Issue ids already dispatched to an Engineer — prevents spawning
        # duplicate agents when the same stage change fires repeatedly.
        # NOTE: grows unboundedly for the lifetime of the process.
        self._processed_issues: Set[str] = set()

    def _should_handle(self, event: AgentEvent) -> bool:
        """
        Check if we should handle this stage change.

        Conditions:
        - New stage is 'doing'
        - Issue status is 'open'
        - Not already processed
        """
        new_stage = event.payload.get("new_stage")
        issue_status = event.payload.get("issue_status")
        issue_id = event.payload.get("issue_id")

        if new_stage != "doing":
            logger.debug(f"Stage is '{new_stage}', not 'doing', skipping")
            return False

        if issue_status != "open":
            logger.debug(f"Issue status is '{issue_status}', not 'open', skipping")
            return False

        if issue_id in self._processed_issues:
            logger.debug(f"Issue {issue_id} already processed, skipping")
            return False

        return True

    async def _handle(self, event: AgentEvent) -> Optional[ActionResult]:
        """
        Handle the event by spawning Engineer agent.

        The Engineer will:
        1. Read the Issue file
        2. Understand requirements
        3. Implement the feature/fix
        4. Create a PR when done
        """
        issue_id = event.payload.get("issue_id", "unknown")
        issue_title = event.payload.get("issue_title", "Unknown")
        file_path = event.payload.get("path", "")

        logger.info(f"IssueStageHandler: Spawning Engineer for {issue_id}")

        # Mark before scheduling so a concurrent duplicate event cannot
        # slip through while we await the scheduler.
        self._processed_issues.add(issue_id)

        task = AgentTask(
            task_id=f"engineer-{issue_id}-{event.timestamp.timestamp()}",
            role_name="Engineer",
            issue_id=issue_id,
            prompt=self._build_prompt(issue_id, issue_title, file_path),
            engine="gemini",
            timeout=1800,
            metadata={
                "trigger": "issue_stage_doing",
                "issue_id": issue_id,
                "issue_title": issue_title,
                "file_path": file_path,
            },
        )

        try:
            session_id = await self.scheduler.schedule(task)
            logger.info(f"Engineer scheduled: session={session_id}")

            return ActionResult.success_result(
                output={
                    "session_id": session_id,
                    "role": "Engineer",
                    "trigger": "issue_stage_doing",
                    "issue_id": issue_id,
                },
                metadata={"issue_id": issue_id},
            )

        except Exception as e:
            logger.error(f"Failed to spawn Engineer for {issue_id}: {e}")
            # Bug fix: un-mark the issue on failure. Previously the id stayed
            # in _processed_issues, permanently blacklisting the issue so no
            # later stage-change event could ever retry the scheduling.
            self._processed_issues.discard(issue_id)
            return ActionResult.failure_result(
                error=f"Failed to schedule Engineer: {e}",
                metadata={"issue_id": issue_id},
            )

    async def __call__(self, event: AgentEvent) -> Optional[ActionResult]:
        """Make handler callable - used as EventBus callback."""
        try:
            if self._should_handle(event):
                return await self._handle(event)
        except Exception as e:
            # Never let a handler error propagate back into the EventBus.
            logger.error(f"Handler error in {self.name}: {e}", exc_info=True)
        return None

    def start(self) -> None:
        """Subscribe this handler to the EventBus (idempotent)."""
        if self._subscribed:
            return

        event_bus.subscribe(AgentEventType.ISSUE_STAGE_CHANGED, self)
        self._subscribed = True
        logger.info(f"{self.name} started, subscribed to ISSUE_STAGE_CHANGED")

    def stop(self) -> None:
        """Unsubscribe this handler from the EventBus (idempotent)."""
        if not self._subscribed:
            return

        event_bus.unsubscribe(AgentEventType.ISSUE_STAGE_CHANGED, self)
        self._subscribed = False
        logger.info(f"{self.name} stopped")

    def _build_prompt(self, issue_id: str, issue_title: str, file_path: str) -> str:
        """Build the prompt for the Engineer agent."""
        return f"""You are a Software Engineer. You have been assigned to implement:

Issue: {issue_id} - {issue_title}
File: {file_path}

Your task:
1. Read and understand the Issue requirements
2. Follow the Git workflow:
- Use `monoco issue start {issue_id} --branch` to create feature branch
- Implement the requirements
- Run tests to ensure quality
- Use `monoco issue sync-files` to track changes
- Submit PR when done
3. Follow coding standards and best practices
4. Ensure all tests pass

Start by reading the Issue file to understand the full requirements."""
|
|
352
|
+
|
|
353
|
+
|
|
354
|
+
# =============================================================================
|
|
355
|
+
# MemoThresholdHandler - Independent Microservice
|
|
356
|
+
# =============================================================================
|
|
357
|
+
|
|
358
|
+
class MemoThresholdHandler:
    """
    Independent handler for Memo threshold events.

    Trigger: MEMO_THRESHOLD event
    Condition: Pending memo count has reached the configured threshold
    Action: Spawn Architect agent to analyze and create Issues

    Signal Queue Model (FEAT-0165):
    - Memos are signals, not assets
    - File existence = signal pending
    - File cleared = signal consumed
    - Git is the archive, not app state

    Emergent Workflow: Memos (threshold) → Architect → Issues

    This handler is stateless and self-contained.

    Example:
        >>> handler = MemoThresholdHandler(scheduler, threshold=5)
        >>> handler.start()
        >>> # ... handler runs independently ...
        >>> handler.stop()
    """

    # Minimum pending-memo count before an Architect run is triggered.
    DEFAULT_THRESHOLD = 5

    def __init__(
        self,
        scheduler: AgentScheduler,
        threshold: int = DEFAULT_THRESHOLD,
        name: str = "MemoThresholdHandler",
    ):
        self.scheduler = scheduler
        self.name = name
        self.threshold = threshold
        self._subscribed = False

    def _should_handle(self, event: AgentEvent) -> bool:
        """
        Check if we should handle this memo threshold event.

        Conditions:
        - Event is MEMO_THRESHOLD
        - Payload "pending_count" is at or above this handler's threshold

        Note: the check is a plain `>=` comparison — it does not detect
        a threshold *crossing*, so repeated events above the threshold
        will all pass this filter. Idempotency comes from the consume
        step in _handle clearing the inbox.
        """
        pending_count = event.payload.get("pending_count", 0)

        if pending_count < self.threshold:
            logger.debug(f"Pending count {pending_count} below threshold {self.threshold}")
            return False

        return True

    async def _handle(self, event: AgentEvent) -> Optional[ActionResult]:
        """
        Handle the event by spawning Architect agent.

        Signal Queue Semantics:
        1. Atomically load and clear inbox BEFORE scheduling
        2. Memos are embedded in prompt, not read from file
        3. File cleared = consumed, no state needed

        This ensures:
        - Natural idempotency (deleted memos won't be reprocessed)
        - No dependency on memory state across restarts
        - Architect always has data even if file is cleared
        """
        file_path_str = event.payload.get("path", "Memos/inbox.md")
        file_path = Path(file_path_str)
        pending_count = event.payload.get("pending_count", 0)

        logger.info(f"MemoThresholdHandler: Processing {pending_count} memos")

        # Phase 1: Atomically load and clear inbox.
        # Consuming BEFORE scheduling is deliberate: if scheduling later
        # fails, we accept losing the in-flight memos (at-most-once) —
        # they remain recoverable from git history.
        try:
            # Load memos before clearing
            memos = self._load_and_clear_memos(file_path)
            if not memos:
                logger.warning("Inbox was empty after locking, skipping")
                return None
        except Exception as e:
            logger.error(f"Failed to load and clear inbox: {e}")
            return ActionResult.failure_result(
                error=f"Failed to consume memos: {e}",
                metadata={"file_path": file_path_str},
            )

        # Phase 2: Schedule Architect with embedded memos
        task = AgentTask(
            task_id=f"architect-memo-{event.timestamp.timestamp()}",
            role_name="Architect",
            issue_id="memo-analysis",
            prompt=self._build_prompt(file_path_str, memos),
            engine="gemini",
            timeout=900,
            metadata={
                "trigger": "memo_threshold",
                "file_path": file_path_str,
                "pending_count": pending_count,
                "threshold": self.threshold,
                "memo_count": len(memos),
            },
        )

        try:
            session_id = await self.scheduler.schedule(task)
            logger.info(f"Architect scheduled: session={session_id} with {len(memos)} memos")

            return ActionResult.success_result(
                output={
                    "session_id": session_id,
                    "role": "Architect",
                    "trigger": "memo_threshold",
                    "memo_count": len(memos),
                },
                metadata={"file_path": file_path_str},
            )

        except Exception as e:
            logger.error(f"Failed to spawn Architect: {e}")
            # Note: At this point memos are already cleared from inbox
            # This is intentional - we trade "at-least-once" for "at-most-once" semantics
            # If Architect fails, the memos are in git history
            return ActionResult.failure_result(
                error=f"Failed to schedule Architect: {e}",
                metadata={"file_path": file_path_str, "memos_consumed": len(memos)},
            )

    def _load_and_clear_memos(self, inbox_path: Path) -> List[Memo]:
        """
        Atomically load all memos and clear the inbox file.

        This implements the "consume" operation in signal queue model.
        File existence is the state - clearing the file marks all signals consumed.
        """
        # Resolve path relative to project root if needed
        if not inbox_path.is_absolute():
            # Local import avoids a module-load-time dependency on config.
            from monoco.core.config import find_monoco_root
            project_root = find_monoco_root()
            inbox_path = project_root / inbox_path

        # Missing inbox means nothing pending — treated as an empty queue.
        if not inbox_path.exists():
            return []

        # Load memos directly from inbox path
        # inbox_path is Memos/inbox.md, issues_root is sibling: Issues/
        # NOTE(review): load_memos is given the Issues directory, not the
        # inbox file itself — presumably it derives the inbox location from
        # the project layout. Verify against load_memos' signature.
        issues_root = inbox_path.parent.parent / "Issues"
        memos = load_memos(issues_root)

        # Clear inbox (atomic write)
        # NOTE(review): Path.write_text is a plain truncate-and-write, not
        # an atomic temp-file rename — a crash mid-write could leave a
        # partially written inbox. Confirm whether this matters here.
        inbox_path.write_text("# Monoco Memos Inbox\n\n", encoding="utf-8")
        logger.info(f"Inbox cleared after consuming {len(memos)} memos")

        return memos

    async def __call__(self, event: AgentEvent) -> Optional[ActionResult]:
        """Make handler callable - used as EventBus callback."""
        try:
            if self._should_handle(event):
                return await self._handle(event)
        except Exception as e:
            # Never let a handler error propagate back into the EventBus.
            logger.error(f"Handler error in {self.name}: {e}", exc_info=True)
        return None

    def start(self) -> None:
        """Subscribe this handler to the EventBus (idempotent)."""
        if self._subscribed:
            return

        event_bus.subscribe(AgentEventType.MEMO_THRESHOLD, self)
        self._subscribed = True
        logger.info(f"{self.name} started, subscribed to MEMO_THRESHOLD")

    def stop(self) -> None:
        """Unsubscribe this handler from the EventBus (idempotent)."""
        if not self._subscribed:
            return

        event_bus.unsubscribe(AgentEventType.MEMO_THRESHOLD, self)
        self._subscribed = False
        logger.info(f"{self.name} stopped")

    def _build_prompt(self, file_path: str, memos: List[Memo]) -> str:
        """Build the prompt for the Architect agent with embedded memos."""
        # Format memos for prompt: one markdown section per memo, 1-indexed.
        memo_sections = []
        for i, memo in enumerate(memos, 1):
            # Context line is omitted entirely when the memo has no context.
            section = f"""### Memo {i} (ID: {memo.uid})
- **Time**: {memo.timestamp.strftime("%Y-%m-%d %H:%M:%S")}
- **Type**: {memo.type}
- **Source**: {memo.source}
- **Author**: {memo.author}
{'' if not memo.context else f'- **Context**: `{memo.context}`'}

{memo.content}
"""
            memo_sections.append(section)

        memos_text = "\n".join(memo_sections)

        return f"""You are the Architect. {len(memos)} memos have been consumed from {file_path}.

## Consumed Memos (Signal Queue Model)

The following memos have been atomically consumed from the inbox.
They are provided here for your analysis - do NOT read the inbox file as it has been cleared.

{memos_text}

## Your Task

1. Analyze the accumulated memos above
2. Categorize and prioritize the ideas
3. Create Issue tickets for actionable items:
- Use `monoco issue create` command
- Set appropriate type (feature, fix, chore)
- Set stage to 'draft' for review
4. Link related memos to created issues via `source_memo` field if applicable

## Signal Queue Semantics

- Memos are signals, not assets - they are consumed (deleted) upon processing
- No need to "resolve" or "link" memos - just create Issues from them
- Historical memos can be found in git history if needed

Focus on turning raw ideas into structured, actionable work items."""
|
|
585
|
+
|
|
586
|
+
|
|
587
|
+
# =============================================================================
|
|
588
|
+
# PRCreatedHandler - Independent Microservice
|
|
589
|
+
# =============================================================================
|
|
590
|
+
|
|
591
|
+
class PRCreatedHandler:
    """
    Independent handler for PR creation events.

    Trigger: PR_CREATED event
    Condition: New PR created for an Issue
    Action: Spawn Reviewer agent to review the PR

    Emergent Workflow: PR → Reviewer → review report

    This handler is stateless and self-contained.

    Example:
        >>> handler = PRCreatedHandler(scheduler)
        >>> handler.start()
        >>> # ... handler runs independently ...
        >>> handler.stop()
    """

    def __init__(
        self,
        scheduler: AgentScheduler,
        name: str = "PRCreatedHandler",
    ):
        # Scheduler used to spawn Reviewer agent sessions.
        self.scheduler = scheduler
        self.name = name
        self._subscribed = False
        # PR identifiers already handled in this process; prevents duplicate
        # Reviewer spawns when the same PR_CREATED event is delivered more
        # than once. NOTE(review): grows without bound over a long-lived
        # process — acceptable for modest PR volumes, revisit if needed.
        self._processed_prs: Set[str] = set()

    def _should_handle(self, event: AgentEvent) -> bool:
        """
        Check if we should handle this PR creation event.

        Conditions:
        - Event is PR_CREATED
        - Has valid PR URL or ID
        - Not already processed
        """
        pr_url = event.payload.get("pr_url", "")
        pr_id = event.payload.get("pr_id", "")

        # Prefer the stable PR id; fall back to the URL.
        pr_identifier = pr_id or pr_url
        if not pr_identifier:
            logger.debug("No PR identifier in event, skipping")
            return False

        if pr_identifier in self._processed_prs:
            logger.debug(f"PR {pr_identifier} already processed, skipping")
            return False

        return True

    async def _handle(self, event: AgentEvent) -> Optional[ActionResult]:
        """
        Handle the event by spawning Reviewer agent.

        The Reviewer will:
        1. Fetch the PR details
        2. Review code changes
        3. Generate a review report
        4. Output findings to file/Memos
        """
        pr_url = event.payload.get("pr_url", "")
        pr_id = event.payload.get("pr_id", "")
        issue_id = event.payload.get("issue_id", "")
        branch = event.payload.get("branch", "")

        # Keep the same fallback chain as _should_handle, plus a synthetic
        # id derived from the issue so the task_id is never empty.
        pr_identifier = pr_id or pr_url or f"{issue_id}-pr"

        logger.info(f"PRCreatedHandler: Spawning Reviewer for PR {pr_identifier}")

        # Mark as processed up front so a concurrent duplicate delivery of
        # the same event is deduplicated by _should_handle.
        self._processed_prs.add(pr_identifier)

        task = AgentTask(
            task_id=f"reviewer-{pr_identifier}-{event.timestamp.timestamp()}",
            role_name="Reviewer",
            issue_id=issue_id or "review",
            prompt=self._build_prompt(pr_url, pr_id, issue_id, branch),
            engine="gemini",
            timeout=900,
            metadata={
                "trigger": "pr_created",
                "pr_url": pr_url,
                "pr_id": pr_id,
                "issue_id": issue_id,
                "branch": branch,
            },
        )

        try:
            session_id = await self.scheduler.schedule(task)
            logger.info(f"Reviewer scheduled: session={session_id}")

            return ActionResult.success_result(
                output={
                    "session_id": session_id,
                    "role": "Reviewer",
                    "trigger": "pr_created",
                    "pr_identifier": pr_identifier,
                },
                metadata={"pr_identifier": pr_identifier},
            )

        except Exception as e:
            # Bug fix: un-mark the PR on scheduling failure. Previously the
            # identifier stayed in _processed_prs, so a retried PR_CREATED
            # event for the same PR was silently dropped by _should_handle.
            self._processed_prs.discard(pr_identifier)
            logger.error(f"Failed to spawn Reviewer: {e}")
            return ActionResult.failure_result(
                error=f"Failed to schedule Reviewer: {e}",
                metadata={"pr_identifier": pr_identifier},
            )

    async def __call__(self, event: AgentEvent) -> Optional[ActionResult]:
        """Make handler callable - used as EventBus callback."""
        try:
            if self._should_handle(event):
                return await self._handle(event)
        except Exception as e:
            # Boundary catch: never let a handler error propagate into the
            # EventBus dispatch loop; log with traceback instead.
            logger.error(f"Handler error in {self.name}: {e}", exc_info=True)
        return None

    def start(self) -> None:
        """Subscribe this handler to the EventBus. Idempotent."""
        if self._subscribed:
            return

        event_bus.subscribe(AgentEventType.PR_CREATED, self)
        self._subscribed = True
        logger.info(f"{self.name} started, subscribed to PR_CREATED")

    def stop(self) -> None:
        """Unsubscribe this handler from the EventBus. Idempotent."""
        if not self._subscribed:
            return

        event_bus.unsubscribe(AgentEventType.PR_CREATED, self)
        self._subscribed = False
        logger.info(f"{self.name} stopped")

    def _build_prompt(
        self,
        pr_url: str,
        pr_id: str,
        issue_id: str,
        branch: str,
    ) -> str:
        """Build the prompt for the Reviewer agent.

        Missing fields are rendered as 'N/A' so the prompt shape is stable
        regardless of which payload keys were present on the event.
        """
        pr_info = f"""
PR URL: {pr_url or 'N/A'}
PR ID: {pr_id or 'N/A'}
Issue: {issue_id or 'N/A'}
Branch: {branch or 'N/A'}
"""

        return f"""You are a Code Reviewer. A new PR has been created:

{pr_info}

Your task:
1. Fetch and review the PR changes
2. Check against the original Issue requirements
3. Review for:
   - Code quality and best practices
   - Test coverage
   - Documentation
   - Security considerations
4. Generate a review report:
   - Use `monoco memo add` to record findings
   - Include specific file/line references
   - Provide actionable feedback

Focus on thorough, constructive review that improves code quality."""
|
|
761
|
+
|
|
762
|
+
|
|
763
|
+
# =============================================================================
|
|
764
|
+
# Convenience Functions
|
|
765
|
+
# =============================================================================
|
|
766
|
+
|
|
767
|
+
def start_all_handlers(scheduler: AgentScheduler, memo_threshold: int = 5) -> list:
    """
    Instantiate and start every event handler.

    Convenience only: each handler stays fully independent — this does not
    introduce a Workflow or orchestration layer on top of them.

    Args:
        scheduler: The AgentScheduler for spawning agents
        memo_threshold: Threshold for memo handler

    Returns:
        List of started handler instances
    """
    handlers: list = []
    handlers.append(TaskFileHandler(scheduler))
    handlers.append(IssueStageHandler(scheduler))
    handlers.append(MemoThresholdHandler(scheduler, threshold=memo_threshold))
    handlers.append(PRCreatedHandler(scheduler))

    for h in handlers:
        h.start()

    logger.info(f"Started {len(handlers)} independent handlers")
    return handlers
|
|
793
|
+
|
|
794
|
+
|
|
795
|
+
def stop_all_handlers(handlers: list) -> None:
    """
    Stop every handler in *handlers*.

    Args:
        handlers: List of handler instances to stop
    """
    for h in handlers:
        h.stop()

    logger.info(f"Stopped {len(handlers)} handlers")
|