monoco-toolkit 0.3.10__py3-none-any.whl → 0.3.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- monoco/__main__.py +8 -0
- monoco/core/artifacts/__init__.py +16 -0
- monoco/core/artifacts/manager.py +575 -0
- monoco/core/artifacts/models.py +161 -0
- monoco/core/automation/__init__.py +51 -0
- monoco/core/automation/config.py +338 -0
- monoco/core/automation/field_watcher.py +296 -0
- monoco/core/automation/handlers.py +723 -0
- monoco/core/config.py +31 -4
- monoco/core/executor/__init__.py +38 -0
- monoco/core/executor/agent_action.py +254 -0
- monoco/core/executor/git_action.py +303 -0
- monoco/core/executor/im_action.py +309 -0
- monoco/core/executor/pytest_action.py +218 -0
- monoco/core/git.py +38 -0
- monoco/core/hooks/context.py +74 -13
- monoco/core/ingestion/__init__.py +20 -0
- monoco/core/ingestion/discovery.py +248 -0
- monoco/core/ingestion/watcher.py +343 -0
- monoco/core/ingestion/worker.py +436 -0
- monoco/core/loader.py +633 -0
- monoco/core/registry.py +34 -25
- monoco/core/router/__init__.py +55 -0
- monoco/core/router/action.py +341 -0
- monoco/core/router/router.py +392 -0
- monoco/core/scheduler/__init__.py +63 -0
- monoco/core/scheduler/base.py +152 -0
- monoco/core/scheduler/engines.py +175 -0
- monoco/core/scheduler/events.py +171 -0
- monoco/core/scheduler/local.py +377 -0
- monoco/core/skills.py +119 -80
- monoco/core/watcher/__init__.py +57 -0
- monoco/core/watcher/base.py +365 -0
- monoco/core/watcher/dropzone.py +152 -0
- monoco/core/watcher/issue.py +303 -0
- monoco/core/watcher/memo.py +200 -0
- monoco/core/watcher/task.py +238 -0
- monoco/daemon/app.py +77 -1
- monoco/daemon/commands.py +10 -0
- monoco/daemon/events.py +34 -0
- monoco/daemon/mailroom_service.py +196 -0
- monoco/daemon/models.py +1 -0
- monoco/daemon/scheduler.py +207 -0
- monoco/daemon/services.py +27 -58
- monoco/daemon/triggers.py +55 -0
- monoco/features/agent/__init__.py +25 -7
- monoco/features/agent/adapter.py +17 -7
- monoco/features/agent/cli.py +91 -57
- monoco/features/agent/engines.py +31 -170
- monoco/{core/resources/en/skills/monoco_core → features/agent/resources/en/skills/monoco_atom_core}/SKILL.md +2 -2
- monoco/features/agent/resources/en/skills/{flow_engineer → monoco_workflow_agent_engineer}/SKILL.md +2 -2
- monoco/features/agent/resources/en/skills/{flow_manager → monoco_workflow_agent_manager}/SKILL.md +2 -2
- monoco/features/agent/resources/en/skills/{flow_planner → monoco_workflow_agent_planner}/SKILL.md +2 -2
- monoco/features/agent/resources/en/skills/{flow_reviewer → monoco_workflow_agent_reviewer}/SKILL.md +2 -2
- monoco/features/agent/resources/{roles/role-engineer.yaml → zh/roles/monoco_role_engineer.yaml} +3 -3
- monoco/features/agent/resources/{roles/role-manager.yaml → zh/roles/monoco_role_manager.yaml} +8 -8
- monoco/features/agent/resources/{roles/role-planner.yaml → zh/roles/monoco_role_planner.yaml} +8 -8
- monoco/features/agent/resources/{roles/role-reviewer.yaml → zh/roles/monoco_role_reviewer.yaml} +8 -8
- monoco/{core/resources/zh/skills/monoco_core → features/agent/resources/zh/skills/monoco_atom_core}/SKILL.md +2 -2
- monoco/features/agent/resources/zh/skills/{flow_engineer → monoco_workflow_agent_engineer}/SKILL.md +2 -2
- monoco/features/agent/resources/zh/skills/{flow_manager → monoco_workflow_agent_manager}/SKILL.md +2 -2
- monoco/features/agent/resources/zh/skills/{flow_planner → monoco_workflow_agent_planner}/SKILL.md +2 -2
- monoco/features/agent/resources/zh/skills/{flow_reviewer → monoco_workflow_agent_reviewer}/SKILL.md +2 -2
- monoco/features/agent/worker.py +1 -1
- monoco/features/artifact/__init__.py +0 -0
- monoco/features/artifact/adapter.py +33 -0
- monoco/features/artifact/resources/zh/AGENTS.md +14 -0
- monoco/features/artifact/resources/zh/skills/monoco_atom_artifact/SKILL.md +278 -0
- monoco/features/glossary/adapter.py +18 -7
- monoco/features/glossary/resources/en/skills/{monoco_glossary → monoco_atom_glossary}/SKILL.md +2 -2
- monoco/features/glossary/resources/zh/skills/{monoco_glossary → monoco_atom_glossary}/SKILL.md +2 -2
- monoco/features/hooks/__init__.py +11 -0
- monoco/features/hooks/adapter.py +67 -0
- monoco/features/hooks/commands.py +309 -0
- monoco/features/hooks/core.py +441 -0
- monoco/features/hooks/resources/ADDING_HOOKS.md +234 -0
- monoco/features/i18n/adapter.py +18 -5
- monoco/features/i18n/core.py +482 -17
- monoco/features/i18n/resources/en/skills/{monoco_i18n → monoco_atom_i18n}/SKILL.md +2 -2
- monoco/features/i18n/resources/en/skills/{i18n_scan_workflow → monoco_workflow_i18n_scan}/SKILL.md +2 -2
- monoco/features/i18n/resources/zh/skills/{monoco_i18n → monoco_atom_i18n}/SKILL.md +2 -2
- monoco/features/i18n/resources/zh/skills/{i18n_scan_workflow → monoco_workflow_i18n_scan}/SKILL.md +2 -2
- monoco/features/issue/adapter.py +19 -6
- monoco/features/issue/commands.py +352 -20
- monoco/features/issue/core.py +475 -16
- monoco/features/issue/engine/machine.py +114 -4
- monoco/features/issue/linter.py +60 -5
- monoco/features/issue/models.py +2 -2
- monoco/features/issue/resources/en/AGENTS.md +109 -0
- monoco/features/issue/resources/en/skills/{monoco_issue → monoco_atom_issue}/SKILL.md +2 -2
- monoco/features/issue/resources/en/skills/{issue_create_workflow → monoco_workflow_issue_creation}/SKILL.md +2 -2
- monoco/features/issue/resources/en/skills/{issue_develop_workflow → monoco_workflow_issue_development}/SKILL.md +2 -2
- monoco/features/issue/resources/en/skills/{issue_lifecycle_workflow → monoco_workflow_issue_management}/SKILL.md +2 -2
- monoco/features/issue/resources/en/skills/{issue_refine_workflow → monoco_workflow_issue_refinement}/SKILL.md +2 -2
- monoco/features/issue/resources/hooks/post-checkout.sh +39 -0
- monoco/features/issue/resources/hooks/pre-commit.sh +41 -0
- monoco/features/issue/resources/hooks/pre-push.sh +35 -0
- monoco/features/issue/resources/zh/AGENTS.md +109 -0
- monoco/features/issue/resources/zh/skills/{monoco_issue → monoco_atom_issue_lifecycle}/SKILL.md +2 -2
- monoco/features/issue/resources/zh/skills/{issue_create_workflow → monoco_workflow_issue_creation}/SKILL.md +2 -2
- monoco/features/issue/resources/zh/skills/{issue_develop_workflow → monoco_workflow_issue_development}/SKILL.md +2 -2
- monoco/features/issue/resources/zh/skills/{issue_lifecycle_workflow → monoco_workflow_issue_management}/SKILL.md +2 -2
- monoco/features/issue/resources/zh/skills/{issue_refine_workflow → monoco_workflow_issue_refinement}/SKILL.md +2 -2
- monoco/features/issue/validator.py +101 -1
- monoco/features/memo/adapter.py +21 -8
- monoco/features/memo/cli.py +103 -10
- monoco/features/memo/core.py +178 -92
- monoco/features/memo/models.py +53 -0
- monoco/features/memo/resources/en/skills/{monoco_memo → monoco_atom_memo}/SKILL.md +2 -2
- monoco/features/memo/resources/en/skills/{note_processing_workflow → monoco_workflow_note_processing}/SKILL.md +2 -2
- monoco/features/memo/resources/zh/skills/{monoco_memo → monoco_atom_memo}/SKILL.md +2 -2
- monoco/features/memo/resources/zh/skills/{note_processing_workflow → monoco_workflow_note_processing}/SKILL.md +2 -2
- monoco/features/spike/adapter.py +18 -5
- monoco/features/spike/commands.py +5 -3
- monoco/features/spike/resources/en/skills/{monoco_spike → monoco_atom_spike}/SKILL.md +2 -2
- monoco/features/spike/resources/en/skills/{research_workflow → monoco_workflow_research}/SKILL.md +2 -2
- monoco/features/spike/resources/zh/skills/{monoco_spike → monoco_atom_spike}/SKILL.md +2 -2
- monoco/features/spike/resources/zh/skills/{research_workflow → monoco_workflow_research}/SKILL.md +2 -2
- monoco/main.py +38 -1
- {monoco_toolkit-0.3.10.dist-info → monoco_toolkit-0.3.12.dist-info}/METADATA +7 -1
- monoco_toolkit-0.3.12.dist-info/RECORD +202 -0
- monoco/features/agent/apoptosis.py +0 -44
- monoco/features/agent/manager.py +0 -91
- monoco/features/agent/session.py +0 -121
- monoco_toolkit-0.3.10.dist-info/RECORD +0 -156
- /monoco/{core → features/agent}/resources/en/AGENTS.md +0 -0
- /monoco/{core → features/agent}/resources/zh/AGENTS.md +0 -0
- {monoco_toolkit-0.3.10.dist-info → monoco_toolkit-0.3.12.dist-info}/WHEEL +0 -0
- {monoco_toolkit-0.3.10.dist-info → monoco_toolkit-0.3.12.dist-info}/entry_points.txt +0 -0
- {monoco_toolkit-0.3.10.dist-info → monoco_toolkit-0.3.12.dist-info}/licenses/LICENSE +0 -0
monoco/core/automation/handlers.py (new file)

@@ -0,0 +1,723 @@
```python
"""
Event Handlers - Stateless, Independent Microservices (FEAT-0162).

This module implements independent event handlers for Agent collaboration.
Each handler is a self-contained, stateless microservice that:
- Listens to specific event types
- Applies its own filtering logic
- Triggers appropriate agent actions

Architecture: No Workflow class or orchestration. Workflow emerges from
the natural interaction of independent handlers.

Handlers:
- TaskFileHandler: Monitors tasks.md changes -> triggers Architect
- IssueStageHandler: Monitors Issue stage=doing -> triggers Engineer
- MemoThresholdHandler: Monitors Memo accumulation -> triggers Architect
- PRCreatedHandler: Monitors PR creation -> triggers Reviewer
"""

from __future__ import annotations

import logging
from pathlib import Path
from typing import Any, Dict, Optional, Set

from monoco.core.scheduler import (
    AgentEvent,
    AgentEventType,
    AgentScheduler,
    AgentTask,
    event_bus,
)
from monoco.core.router import ActionResult

logger = logging.getLogger(__name__)


# =============================================================================
# TaskFileHandler - Independent Microservice
# =============================================================================

class TaskFileHandler:
    """
    Independent handler for task file changes.

    Trigger: ISSUE_UPDATED event (from TaskWatcher)
    Condition: New tasks added to tasks.md
    Action: Spawn Architect agent to analyze and create Issue (stage=draft)

    Emergent Workflow: tasks.md → Architect → Issue (draft)

    This handler is stateless and self-contained. It directly subscribes
    to the EventBus and manages its own lifecycle.

    Example:
        >>> handler = TaskFileHandler(scheduler)
        >>> handler.start()  # Subscribe to events
        >>> # ... handler runs independently ...
        >>> handler.stop()  # Unsubscribe
    """

    def __init__(
        self,
        scheduler: AgentScheduler,
        name: str = "TaskFileHandler",
    ):
        self.scheduler = scheduler
        self.name = name
        self._subscribed = False
        self._processed_tasks: Set[str] = set()

    def _should_handle(self, event: AgentEvent) -> bool:
        """
        Check if we should handle this event.

        Conditions:
        - Event is from TaskWatcher
        - New tasks were added (not just status changes)
        """
        source = event.payload.get("watcher_name", "")
        if "Task" not in source:
            return False

        task_changes = event.payload.get("task_changes", [])
        new_tasks = [c for c in task_changes if c.get("type") == "created"]

        if not new_tasks:
            logger.debug("No new tasks in event, skipping")
            return False

        return True

    async def _handle(self, event: AgentEvent) -> Optional[ActionResult]:
        """
        Handle the event by spawning Architect agent.

        The Architect will:
        1. Read the tasks.md file
        2. Analyze task requirements
        3. Create Issue tickets (stage=draft)
        """
        file_path = event.payload.get("path", "unknown")
        task_changes = event.payload.get("task_changes", [])
        new_tasks = [c for c in task_changes if c.get("type") == "created"]

        logger.info(f"TaskFileHandler: Spawning Architect for {len(new_tasks)} new tasks")

        task = AgentTask(
            task_id=f"architect-task-{event.timestamp.timestamp()}",
            role_name="Architect",
            issue_id="task-analysis",
            prompt=self._build_prompt(file_path, new_tasks),
            engine="gemini",
            timeout=600,
            metadata={
                "trigger": "task_file_changed",
                "file_path": file_path,
                "new_tasks": new_tasks,
            },
        )

        try:
            session_id = await self.scheduler.schedule(task)
            logger.info(f"Architect scheduled: session={session_id}")

            return ActionResult.success_result(
                output={
                    "session_id": session_id,
                    "role": "Architect",
                    "trigger": "task_file_changed",
                    "tasks_analyzed": len(new_tasks),
                },
                metadata={"file_path": file_path},
            )

        except Exception as e:
            logger.error(f"Failed to spawn Architect: {e}")
            return ActionResult.failure_result(
                error=f"Failed to schedule Architect: {e}",
                metadata={"file_path": file_path},
            )

    async def __call__(self, event: AgentEvent) -> Optional[ActionResult]:
        """Make handler callable - used as EventBus callback."""
        try:
            if self._should_handle(event):
                return await self._handle(event)
        except Exception as e:
            logger.error(f"Handler error in {self.name}: {e}", exc_info=True)
        return None

    def start(self) -> None:
        """Subscribe this handler to the EventBus."""
        if self._subscribed:
            return

        event_bus.subscribe(AgentEventType.ISSUE_UPDATED, self)
        self._subscribed = True
        logger.info(f"{self.name} started, subscribed to ISSUE_UPDATED")

    def stop(self) -> None:
        """Unsubscribe this handler from the EventBus."""
        if not self._subscribed:
            return

        event_bus.unsubscribe(AgentEventType.ISSUE_UPDATED, self)
        self._subscribed = False
        logger.info(f"{self.name} stopped")

    def _build_prompt(self, file_path: str, new_tasks: list) -> str:
        """Build the prompt for the Architect agent."""
        tasks_text = "\n".join([
            f"- {t.get('content', 'Unknown task')}"
            for t in new_tasks
        ])

        return f"""You are the Architect. New tasks have been added to {file_path}:

{tasks_text}

Your task:
1. Analyze these tasks for clarity and completeness
2. If they represent feature requests or bugs, create appropriate Issue tickets
3. Set the Issue stage to 'draft' for review
4. Use `monoco issue create` command to create issues

Focus on understanding the intent and creating well-structured issues."""


# =============================================================================
# IssueStageHandler - Independent Microservice
# =============================================================================

class IssueStageHandler:
    """
    Independent handler for Issue stage changes.

    Trigger: ISSUE_STAGE_CHANGED event
    Condition: Stage changed to 'doing' AND status is 'open'
    Action: Spawn Engineer agent to implement the Issue

    Emergent Workflow: Issue (doing) → Engineer → PR

    This handler is stateless and self-contained.

    Example:
        >>> handler = IssueStageHandler(scheduler)
        >>> handler.start()
        >>> # ... handler runs independently ...
        >>> handler.stop()
    """

    def __init__(
        self,
        scheduler: AgentScheduler,
        name: str = "IssueStageHandler",
    ):
        self.scheduler = scheduler
        self.name = name
        self._subscribed = False
        self._processed_issues: Set[str] = set()

    def _should_handle(self, event: AgentEvent) -> bool:
        """
        Check if we should handle this stage change.

        Conditions:
        - New stage is 'doing'
        - Issue status is 'open'
        - Not already processed
        """
        new_stage = event.payload.get("new_stage")
        issue_status = event.payload.get("issue_status")
        issue_id = event.payload.get("issue_id")

        if new_stage != "doing":
            logger.debug(f"Stage is '{new_stage}', not 'doing', skipping")
            return False

        if issue_status != "open":
            logger.debug(f"Issue status is '{issue_status}', not 'open', skipping")
            return False

        if issue_id in self._processed_issues:
            logger.debug(f"Issue {issue_id} already processed, skipping")
            return False

        return True

    async def _handle(self, event: AgentEvent) -> Optional[ActionResult]:
        """
        Handle the event by spawning Engineer agent.

        The Engineer will:
        1. Read the Issue file
        2. Understand requirements
        3. Implement the feature/fix
        4. Create a PR when done
        """
        issue_id = event.payload.get("issue_id", "unknown")
        issue_title = event.payload.get("issue_title", "Unknown")
        file_path = event.payload.get("path", "")

        logger.info(f"IssueStageHandler: Spawning Engineer for {issue_id}")

        self._processed_issues.add(issue_id)

        task = AgentTask(
            task_id=f"engineer-{issue_id}-{event.timestamp.timestamp()}",
            role_name="Engineer",
            issue_id=issue_id,
            prompt=self._build_prompt(issue_id, issue_title, file_path),
            engine="gemini",
            timeout=1800,
            metadata={
                "trigger": "issue_stage_doing",
                "issue_id": issue_id,
                "issue_title": issue_title,
                "file_path": file_path,
            },
        )

        try:
            session_id = await self.scheduler.schedule(task)
            logger.info(f"Engineer scheduled: session={session_id}")

            return ActionResult.success_result(
                output={
                    "session_id": session_id,
                    "role": "Engineer",
                    "trigger": "issue_stage_doing",
                    "issue_id": issue_id,
                },
                metadata={"issue_id": issue_id},
            )

        except Exception as e:
            logger.error(f"Failed to spawn Engineer for {issue_id}: {e}")
            return ActionResult.failure_result(
                error=f"Failed to schedule Engineer: {e}",
                metadata={"issue_id": issue_id},
            )

    async def __call__(self, event: AgentEvent) -> Optional[ActionResult]:
        """Make handler callable - used as EventBus callback."""
        try:
            if self._should_handle(event):
                return await self._handle(event)
        except Exception as e:
            logger.error(f"Handler error in {self.name}: {e}", exc_info=True)
        return None

    def start(self) -> None:
        """Subscribe this handler to the EventBus."""
        if self._subscribed:
            return

        event_bus.subscribe(AgentEventType.ISSUE_STAGE_CHANGED, self)
        self._subscribed = True
        logger.info(f"{self.name} started, subscribed to ISSUE_STAGE_CHANGED")

    def stop(self) -> None:
        """Unsubscribe this handler from the EventBus."""
        if not self._subscribed:
            return

        event_bus.unsubscribe(AgentEventType.ISSUE_STAGE_CHANGED, self)
        self._subscribed = False
        logger.info(f"{self.name} stopped")

    def _build_prompt(self, issue_id: str, issue_title: str, file_path: str) -> str:
        """Build the prompt for the Engineer agent."""
        return f"""You are a Software Engineer. You have been assigned to implement:

Issue: {issue_id} - {issue_title}
File: {file_path}

Your task:
1. Read and understand the Issue requirements
2. Follow the Git workflow:
   - Use `monoco issue start {issue_id} --branch` to create feature branch
   - Implement the requirements
   - Run tests to ensure quality
   - Use `monoco issue sync-files` to track changes
   - Submit PR when done
3. Follow coding standards and best practices
4. Ensure all tests pass

Start by reading the Issue file to understand the full requirements."""


# =============================================================================
# MemoThresholdHandler - Independent Microservice
# =============================================================================

class MemoThresholdHandler:
    """
    Independent handler for Memo threshold events.

    Trigger: MEMO_THRESHOLD event
    Condition: Pending memo count exceeds threshold
    Action: Spawn Architect agent to analyze and create Issues

    Emergent Workflow: Memos (threshold) → Architect → Issues

    This handler is stateless and self-contained.

    Example:
        >>> handler = MemoThresholdHandler(scheduler, threshold=5)
        >>> handler.start()
        >>> # ... handler runs independently ...
        >>> handler.stop()
    """

    DEFAULT_THRESHOLD = 5

    def __init__(
        self,
        scheduler: AgentScheduler,
        threshold: int = DEFAULT_THRESHOLD,
        name: str = "MemoThresholdHandler",
    ):
        self.scheduler = scheduler
        self.name = name
        self.threshold = threshold
        self._subscribed = False
        self._last_processed_count = 0

    def _should_handle(self, event: AgentEvent) -> bool:
        """
        Check if we should handle this memo threshold event.

        Conditions:
        - Event is MEMO_THRESHOLD
        - Threshold was just crossed (not already above)
        """
        pending_count = event.payload.get("pending_count", 0)

        if pending_count < self.threshold:
            logger.debug(f"Pending count {pending_count} below threshold {self.threshold}")
            return False

        if pending_count <= self._last_processed_count:
            logger.debug(f"Already processed {self._last_processed_count} memos, skipping")
            return False

        return True

    async def _handle(self, event: AgentEvent) -> Optional[ActionResult]:
        """
        Handle the event by spawning Architect agent.

        The Architect will:
        1. Read the Memos/inbox.md file
        2. Analyze accumulated ideas
        3. Create appropriate Issue tickets
        4. Clear or organize processed memos
        """
        file_path = event.payload.get("path", "Memos/inbox.md")
        pending_count = event.payload.get("pending_count", 0)

        logger.info(f"MemoThresholdHandler: Spawning Architect for {pending_count} memos")

        self._last_processed_count = pending_count

        task = AgentTask(
            task_id=f"architect-memo-{event.timestamp.timestamp()}",
            role_name="Architect",
            issue_id="memo-analysis",
            prompt=self._build_prompt(file_path, pending_count),
            engine="gemini",
            timeout=900,
            metadata={
                "trigger": "memo_threshold",
                "file_path": file_path,
                "pending_count": pending_count,
                "threshold": self.threshold,
            },
        )

        try:
            session_id = await self.scheduler.schedule(task)
            logger.info(f"Architect scheduled: session={session_id}")

            return ActionResult.success_result(
                output={
                    "session_id": session_id,
                    "role": "Architect",
                    "trigger": "memo_threshold",
                    "pending_count": pending_count,
                },
                metadata={"file_path": file_path},
            )

        except Exception as e:
            logger.error(f"Failed to spawn Architect: {e}")
            return ActionResult.failure_result(
                error=f"Failed to schedule Architect: {e}",
                metadata={"file_path": file_path},
            )

    async def __call__(self, event: AgentEvent) -> Optional[ActionResult]:
        """Make handler callable - used as EventBus callback."""
        try:
            if self._should_handle(event):
                return await self._handle(event)
        except Exception as e:
            logger.error(f"Handler error in {self.name}: {e}", exc_info=True)
        return None

    def start(self) -> None:
        """Subscribe this handler to the EventBus."""
        if self._subscribed:
            return

        event_bus.subscribe(AgentEventType.MEMO_THRESHOLD, self)
        self._subscribed = True
        logger.info(f"{self.name} started, subscribed to MEMO_THRESHOLD")

    def stop(self) -> None:
        """Unsubscribe this handler from the EventBus."""
        if not self._subscribed:
            return

        event_bus.unsubscribe(AgentEventType.MEMO_THRESHOLD, self)
        self._subscribed = False
        logger.info(f"{self.name} stopped")

    def _build_prompt(self, file_path: str, pending_count: int) -> str:
        """Build the prompt for the Architect agent."""
        return f"""You are the Architect. {pending_count} memos have accumulated in {file_path}.

Your task:
1. Read and analyze the accumulated memos
2. Categorize and prioritize the ideas
3. Create Issue tickets for actionable items:
   - Use `monoco issue create` command
   - Set appropriate type (feature, fix, chore)
   - Set stage to 'draft' for review
4. Organize or clear processed memos

Focus on turning raw ideas into structured, actionable work items."""


# =============================================================================
# PRCreatedHandler - Independent Microservice
# =============================================================================

class PRCreatedHandler:
    """
    Independent handler for PR creation events.

    Trigger: PR_CREATED event
    Condition: New PR created for an Issue
    Action: Spawn Reviewer agent to review the PR

    Emergent Workflow: PR → Reviewer → review report

    This handler is stateless and self-contained.

    Example:
        >>> handler = PRCreatedHandler(scheduler)
        >>> handler.start()
        >>> # ... handler runs independently ...
        >>> handler.stop()
    """

    def __init__(
        self,
        scheduler: AgentScheduler,
        name: str = "PRCreatedHandler",
    ):
        self.scheduler = scheduler
        self.name = name
        self._subscribed = False
        self._processed_prs: Set[str] = set()

    def _should_handle(self, event: AgentEvent) -> bool:
        """
        Check if we should handle this PR creation event.

        Conditions:
        - Event is PR_CREATED
        - Has valid PR URL or ID
        - Not already processed
        """
        pr_url = event.payload.get("pr_url", "")
        pr_id = event.payload.get("pr_id", "")

        pr_identifier = pr_id or pr_url
        if not pr_identifier:
            logger.debug("No PR identifier in event, skipping")
            return False

        if pr_identifier in self._processed_prs:
            logger.debug(f"PR {pr_identifier} already processed, skipping")
            return False

        return True

    async def _handle(self, event: AgentEvent) -> Optional[ActionResult]:
        """
        Handle the event by spawning Reviewer agent.

        The Reviewer will:
        1. Fetch the PR details
        2. Review code changes
        3. Generate a review report
        4. Output findings to file/Memos
        """
        pr_url = event.payload.get("pr_url", "")
        pr_id = event.payload.get("pr_id", "")
        issue_id = event.payload.get("issue_id", "")
        branch = event.payload.get("branch", "")

        pr_identifier = pr_id or pr_url or f"{issue_id}-pr"

        logger.info(f"PRCreatedHandler: Spawning Reviewer for PR {pr_identifier}")

        self._processed_prs.add(pr_identifier)

        task = AgentTask(
            task_id=f"reviewer-{pr_identifier}-{event.timestamp.timestamp()}",
            role_name="Reviewer",
            issue_id=issue_id or "review",
            prompt=self._build_prompt(pr_url, pr_id, issue_id, branch),
            engine="gemini",
            timeout=900,
            metadata={
                "trigger": "pr_created",
                "pr_url": pr_url,
                "pr_id": pr_id,
                "issue_id": issue_id,
                "branch": branch,
            },
        )

        try:
            session_id = await self.scheduler.schedule(task)
            logger.info(f"Reviewer scheduled: session={session_id}")

            return ActionResult.success_result(
                output={
                    "session_id": session_id,
                    "role": "Reviewer",
                    "trigger": "pr_created",
                    "pr_identifier": pr_identifier,
                },
                metadata={"pr_identifier": pr_identifier},
            )

        except Exception as e:
            logger.error(f"Failed to spawn Reviewer: {e}")
            return ActionResult.failure_result(
                error=f"Failed to schedule Reviewer: {e}",
                metadata={"pr_identifier": pr_identifier},
            )

    async def __call__(self, event: AgentEvent) -> Optional[ActionResult]:
        """Make handler callable - used as EventBus callback."""
        try:
            if self._should_handle(event):
                return await self._handle(event)
        except Exception as e:
            logger.error(f"Handler error in {self.name}: {e}", exc_info=True)
        return None

    def start(self) -> None:
        """Subscribe this handler to the EventBus."""
        if self._subscribed:
            return

        event_bus.subscribe(AgentEventType.PR_CREATED, self)
        self._subscribed = True
        logger.info(f"{self.name} started, subscribed to PR_CREATED")

    def stop(self) -> None:
        """Unsubscribe this handler from the EventBus."""
        if not self._subscribed:
            return

        event_bus.unsubscribe(AgentEventType.PR_CREATED, self)
        self._subscribed = False
        logger.info(f"{self.name} stopped")

    def _build_prompt(
        self,
        pr_url: str,
        pr_id: str,
        issue_id: str,
        branch: str,
    ) -> str:
        """Build the prompt for the Reviewer agent."""
        pr_info = f"""
PR URL: {pr_url or 'N/A'}
PR ID: {pr_id or 'N/A'}
Issue: {issue_id or 'N/A'}
Branch: {branch or 'N/A'}
"""

        return f"""You are a Code Reviewer. A new PR has been created:

{pr_info}

Your task:
1. Fetch and review the PR changes
2. Check against the original Issue requirements
3. Review for:
   - Code quality and best practices
   - Test coverage
   - Documentation
   - Security considerations
4. Generate a review report:
   - Use `monoco memo add` to record findings
   - Include specific file/line references
   - Provide actionable feedback

Focus on thorough, constructive review that improves code quality."""


# =============================================================================
# Convenience Functions
# =============================================================================

def start_all_handlers(scheduler: AgentScheduler, memo_threshold: int = 5) -> list:
    """
    Start all event handlers.

    This is a convenience function - handlers remain independent
    and do not form a Workflow or orchestration layer.

    Args:
        scheduler: The AgentScheduler for spawning agents
        memo_threshold: Threshold for memo handler

    Returns:
        List of started handler instances
    """
    handlers = [
        TaskFileHandler(scheduler),
        IssueStageHandler(scheduler),
        MemoThresholdHandler(scheduler, threshold=memo_threshold),
        PRCreatedHandler(scheduler),
    ]

    for handler in handlers:
        handler.start()

    logger.info(f"Started {len(handlers)} independent handlers")
    return handlers


def stop_all_handlers(handlers: list) -> None:
    """
    Stop all event handlers.

    Args:
        handlers: List of handler instances to stop
    """
    for handler in handlers:
        handler.stop()

    logger.info(f"Stopped {len(handlers)} handlers")
```