monoco-toolkit 0.3.10__py3-none-any.whl → 0.3.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- monoco/__main__.py +8 -0
- monoco/core/artifacts/__init__.py +16 -0
- monoco/core/artifacts/manager.py +575 -0
- monoco/core/artifacts/models.py +161 -0
- monoco/core/automation/__init__.py +51 -0
- monoco/core/automation/config.py +338 -0
- monoco/core/automation/field_watcher.py +296 -0
- monoco/core/automation/handlers.py +723 -0
- monoco/core/config.py +31 -4
- monoco/core/executor/__init__.py +38 -0
- monoco/core/executor/agent_action.py +254 -0
- monoco/core/executor/git_action.py +303 -0
- monoco/core/executor/im_action.py +309 -0
- monoco/core/executor/pytest_action.py +218 -0
- monoco/core/git.py +38 -0
- monoco/core/hooks/context.py +74 -13
- monoco/core/ingestion/__init__.py +20 -0
- monoco/core/ingestion/discovery.py +248 -0
- monoco/core/ingestion/watcher.py +343 -0
- monoco/core/ingestion/worker.py +436 -0
- monoco/core/loader.py +633 -0
- monoco/core/registry.py +34 -25
- monoco/core/router/__init__.py +55 -0
- monoco/core/router/action.py +341 -0
- monoco/core/router/router.py +392 -0
- monoco/core/scheduler/__init__.py +63 -0
- monoco/core/scheduler/base.py +152 -0
- monoco/core/scheduler/engines.py +175 -0
- monoco/core/scheduler/events.py +171 -0
- monoco/core/scheduler/local.py +377 -0
- monoco/core/skills.py +119 -80
- monoco/core/watcher/__init__.py +57 -0
- monoco/core/watcher/base.py +365 -0
- monoco/core/watcher/dropzone.py +152 -0
- monoco/core/watcher/issue.py +303 -0
- monoco/core/watcher/memo.py +200 -0
- monoco/core/watcher/task.py +238 -0
- monoco/daemon/app.py +77 -1
- monoco/daemon/commands.py +10 -0
- monoco/daemon/events.py +34 -0
- monoco/daemon/mailroom_service.py +196 -0
- monoco/daemon/models.py +1 -0
- monoco/daemon/scheduler.py +207 -0
- monoco/daemon/services.py +27 -58
- monoco/daemon/triggers.py +55 -0
- monoco/features/agent/__init__.py +25 -7
- monoco/features/agent/adapter.py +17 -7
- monoco/features/agent/cli.py +91 -57
- monoco/features/agent/engines.py +31 -170
- monoco/{core/resources/en/skills/monoco_core → features/agent/resources/en/skills/monoco_atom_core}/SKILL.md +2 -2
- monoco/features/agent/resources/en/skills/{flow_engineer → monoco_workflow_agent_engineer}/SKILL.md +2 -2
- monoco/features/agent/resources/en/skills/{flow_manager → monoco_workflow_agent_manager}/SKILL.md +2 -2
- monoco/features/agent/resources/en/skills/{flow_planner → monoco_workflow_agent_planner}/SKILL.md +2 -2
- monoco/features/agent/resources/en/skills/{flow_reviewer → monoco_workflow_agent_reviewer}/SKILL.md +2 -2
- monoco/features/agent/resources/{roles/role-engineer.yaml → zh/roles/monoco_role_engineer.yaml} +3 -3
- monoco/features/agent/resources/{roles/role-manager.yaml → zh/roles/monoco_role_manager.yaml} +8 -8
- monoco/features/agent/resources/{roles/role-planner.yaml → zh/roles/monoco_role_planner.yaml} +8 -8
- monoco/features/agent/resources/{roles/role-reviewer.yaml → zh/roles/monoco_role_reviewer.yaml} +8 -8
- monoco/{core/resources/zh/skills/monoco_core → features/agent/resources/zh/skills/monoco_atom_core}/SKILL.md +2 -2
- monoco/features/agent/resources/zh/skills/{flow_engineer → monoco_workflow_agent_engineer}/SKILL.md +2 -2
- monoco/features/agent/resources/zh/skills/{flow_manager → monoco_workflow_agent_manager}/SKILL.md +2 -2
- monoco/features/agent/resources/zh/skills/{flow_planner → monoco_workflow_agent_planner}/SKILL.md +2 -2
- monoco/features/agent/resources/zh/skills/{flow_reviewer → monoco_workflow_agent_reviewer}/SKILL.md +2 -2
- monoco/features/agent/worker.py +1 -1
- monoco/features/artifact/__init__.py +0 -0
- monoco/features/artifact/adapter.py +33 -0
- monoco/features/artifact/resources/zh/AGENTS.md +14 -0
- monoco/features/artifact/resources/zh/skills/monoco_atom_artifact/SKILL.md +278 -0
- monoco/features/glossary/adapter.py +18 -7
- monoco/features/glossary/resources/en/skills/{monoco_glossary → monoco_atom_glossary}/SKILL.md +2 -2
- monoco/features/glossary/resources/zh/skills/{monoco_glossary → monoco_atom_glossary}/SKILL.md +2 -2
- monoco/features/hooks/__init__.py +11 -0
- monoco/features/hooks/adapter.py +67 -0
- monoco/features/hooks/commands.py +309 -0
- monoco/features/hooks/core.py +441 -0
- monoco/features/hooks/resources/ADDING_HOOKS.md +234 -0
- monoco/features/i18n/adapter.py +18 -5
- monoco/features/i18n/core.py +482 -17
- monoco/features/i18n/resources/en/skills/{monoco_i18n → monoco_atom_i18n}/SKILL.md +2 -2
- monoco/features/i18n/resources/en/skills/{i18n_scan_workflow → monoco_workflow_i18n_scan}/SKILL.md +2 -2
- monoco/features/i18n/resources/zh/skills/{monoco_i18n → monoco_atom_i18n}/SKILL.md +2 -2
- monoco/features/i18n/resources/zh/skills/{i18n_scan_workflow → monoco_workflow_i18n_scan}/SKILL.md +2 -2
- monoco/features/issue/adapter.py +19 -6
- monoco/features/issue/commands.py +352 -20
- monoco/features/issue/core.py +475 -16
- monoco/features/issue/engine/machine.py +114 -4
- monoco/features/issue/linter.py +60 -5
- monoco/features/issue/models.py +2 -2
- monoco/features/issue/resources/en/AGENTS.md +109 -0
- monoco/features/issue/resources/en/skills/{monoco_issue → monoco_atom_issue}/SKILL.md +2 -2
- monoco/features/issue/resources/en/skills/{issue_create_workflow → monoco_workflow_issue_creation}/SKILL.md +2 -2
- monoco/features/issue/resources/en/skills/{issue_develop_workflow → monoco_workflow_issue_development}/SKILL.md +2 -2
- monoco/features/issue/resources/en/skills/{issue_lifecycle_workflow → monoco_workflow_issue_management}/SKILL.md +2 -2
- monoco/features/issue/resources/en/skills/{issue_refine_workflow → monoco_workflow_issue_refinement}/SKILL.md +2 -2
- monoco/features/issue/resources/hooks/post-checkout.sh +39 -0
- monoco/features/issue/resources/hooks/pre-commit.sh +41 -0
- monoco/features/issue/resources/hooks/pre-push.sh +35 -0
- monoco/features/issue/resources/zh/AGENTS.md +109 -0
- monoco/features/issue/resources/zh/skills/{monoco_issue → monoco_atom_issue_lifecycle}/SKILL.md +2 -2
- monoco/features/issue/resources/zh/skills/{issue_create_workflow → monoco_workflow_issue_creation}/SKILL.md +2 -2
- monoco/features/issue/resources/zh/skills/{issue_develop_workflow → monoco_workflow_issue_development}/SKILL.md +2 -2
- monoco/features/issue/resources/zh/skills/{issue_lifecycle_workflow → monoco_workflow_issue_management}/SKILL.md +2 -2
- monoco/features/issue/resources/zh/skills/{issue_refine_workflow → monoco_workflow_issue_refinement}/SKILL.md +2 -2
- monoco/features/issue/validator.py +101 -1
- monoco/features/memo/adapter.py +21 -8
- monoco/features/memo/cli.py +103 -10
- monoco/features/memo/core.py +178 -92
- monoco/features/memo/models.py +53 -0
- monoco/features/memo/resources/en/skills/{monoco_memo → monoco_atom_memo}/SKILL.md +2 -2
- monoco/features/memo/resources/en/skills/{note_processing_workflow → monoco_workflow_note_processing}/SKILL.md +2 -2
- monoco/features/memo/resources/zh/skills/{monoco_memo → monoco_atom_memo}/SKILL.md +2 -2
- monoco/features/memo/resources/zh/skills/{note_processing_workflow → monoco_workflow_note_processing}/SKILL.md +2 -2
- monoco/features/spike/adapter.py +18 -5
- monoco/features/spike/commands.py +5 -3
- monoco/features/spike/resources/en/skills/{monoco_spike → monoco_atom_spike}/SKILL.md +2 -2
- monoco/features/spike/resources/en/skills/{research_workflow → monoco_workflow_research}/SKILL.md +2 -2
- monoco/features/spike/resources/zh/skills/{monoco_spike → monoco_atom_spike}/SKILL.md +2 -2
- monoco/features/spike/resources/zh/skills/{research_workflow → monoco_workflow_research}/SKILL.md +2 -2
- monoco/main.py +38 -1
- {monoco_toolkit-0.3.10.dist-info → monoco_toolkit-0.3.12.dist-info}/METADATA +7 -1
- monoco_toolkit-0.3.12.dist-info/RECORD +202 -0
- monoco/features/agent/apoptosis.py +0 -44
- monoco/features/agent/manager.py +0 -91
- monoco/features/agent/session.py +0 -121
- monoco_toolkit-0.3.10.dist-info/RECORD +0 -156
- /monoco/{core → features/agent}/resources/en/AGENTS.md +0 -0
- /monoco/{core → features/agent}/resources/zh/AGENTS.md +0 -0
- {monoco_toolkit-0.3.10.dist-info → monoco_toolkit-0.3.12.dist-info}/WHEEL +0 -0
- {monoco_toolkit-0.3.10.dist-info → monoco_toolkit-0.3.12.dist-info}/entry_points.txt +0 -0
- {monoco_toolkit-0.3.10.dist-info → monoco_toolkit-0.3.12.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,238 @@
|
|
|
1
|
+
"""
|
|
2
|
+
TaskWatcher - Monitors task files for changes.
|
|
3
|
+
|
|
4
|
+
Part of Layer 1 (File Watcher) in the event automation framework.
|
|
5
|
+
Emits events for task status changes and completion.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
import asyncio
|
|
11
|
+
import logging
|
|
12
|
+
import re
|
|
13
|
+
from dataclasses import dataclass
|
|
14
|
+
from pathlib import Path
|
|
15
|
+
from typing import Any, Dict, List, Optional, Set
|
|
16
|
+
|
|
17
|
+
from monoco.core.scheduler import AgentEventType, EventBus, event_bus
|
|
18
|
+
|
|
19
|
+
from .base import (
|
|
20
|
+
ChangeType,
|
|
21
|
+
FieldChange,
|
|
22
|
+
FileEvent,
|
|
23
|
+
FilesystemWatcher,
|
|
24
|
+
WatchConfig,
|
|
25
|
+
PollingWatcher,
|
|
26
|
+
)
|
|
27
|
+
|
|
28
|
+
logger = logging.getLogger(__name__)
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class TaskFileEvent(FileEvent):
    """A FileEvent carrying task-level change details for a task file."""

    def __init__(
        self,
        path: Path,
        change_type: ChangeType,
        task_changes: Optional[List[Dict[str, Any]]] = None,
        **kwargs,
    ):
        # Tag every event with this watcher's name so downstream
        # consumers can tell task events apart from other file events.
        super().__init__(
            path=path,
            change_type=change_type,
            watcher_name="TaskWatcher",
            **kwargs,
        )
        # A falsy (None or empty) change list normalizes to a fresh empty list.
        self.task_changes = task_changes if task_changes else []

    def to_agent_event_type(self) -> Optional[AgentEventType]:
        """Map task-file activity onto the closest agent event (issue update)."""
        return AgentEventType.ISSUE_UPDATED

    def to_payload(self) -> Dict[str, Any]:
        """Build the base payload and attach the task-specific change list."""
        base = super().to_payload()
        base["task_changes"] = self.task_changes
        return base
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
@dataclass
class TaskItem:
    """A single checkbox-style task parsed from a markdown task file."""

    content: str      # task text after the checkbox marker
    state: str        # checkbox char: " ", "x", "X", "-", "/"
    line_number: int  # 1-based line number in the source file
    level: int = 0    # nesting depth derived from leading indentation

    @property
    def is_completed(self) -> bool:
        """True when the checkbox is marked done ("x" or "X")."""
        return self.state in ("x", "X")

    @property
    def is_in_progress(self) -> bool:
        """True when the checkbox marks work as started ("-" or "/")."""
        return self.state in {"-", "/"}
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
class TaskWatcher(PollingWatcher):
    """
    Watcher for task files.

    Monitors task files (e.g., tasks.md, TODO.md) for:
    - Task creation
    - Task status changes (todo -> doing -> done)
    - Task completion

    Example:
        >>> config = WatchConfig(
        ...     path=Path("./tasks.md"),
        ...     patterns=["*.md"],
        ... )
        >>> watcher = TaskWatcher(config)
        >>> await watcher.start()
    """

    # Matches markdown checkbox items ("- [x] text"), capturing
    # (indent, state character, content).
    TASK_PATTERN = re.compile(
        r"^(\s*)-\s*\[([ xX\-/])\]\s*(.+)$",
        re.MULTILINE,
    )

    def __init__(
        self,
        config: WatchConfig,
        event_bus: Optional[EventBus] = None,
        name: str = "TaskWatcher",
    ):
        super().__init__(config, event_bus, name)
        # Snapshot of the last successful parse, keyed by task_id; used to
        # diff successive polls.
        self._task_cache: Dict[str, TaskItem] = {}  # task_id -> TaskItem

    async def _check_changes(self) -> None:
        """Poll the watched file, diff against the cache, and emit events.

        Errors are logged and swallowed so a transient read/parse failure
        does not kill the polling loop.
        """
        if not self.config.path.exists():
            return

        try:
            content = self._read_file_content(self.config.path) or ""
            current_tasks = self._parse_tasks(content)

            # Detect changes relative to the previous snapshot
            task_changes = self._detect_task_changes(current_tasks)

            if task_changes:
                await self._emit_task_changes(task_changes)

            # Update cache only after a successful parse/diff
            self._task_cache = current_tasks

        except Exception as e:
            logger.error(f"Error checking task file: {e}")

    def _parse_tasks(self, content: str) -> Dict[str, TaskItem]:
        """Parse checkbox task items from *content*.

        Returns a mapping of task_id -> TaskItem. The id is an MD5 digest
        prefix of "<line_number>:<content>", so a task that merely moves to
        another line is reported as deleted + created rather than modified.
        """
        # Hoisted out of the per-line loop (it previously re-ran the import
        # machinery for every matching line); module level is not touched
        # here because this block owns only the class.
        import hashlib

        tasks: Dict[str, TaskItem] = {}
        lines = content.split("\n")

        for line_num, line in enumerate(lines, 1):
            match = self.TASK_PATTERN.match(line)
            if not match:
                continue

            indent = match.group(1)
            state = match.group(2)
            task_content = match.group(3).strip()

            # Non-cryptographic content hash; 12 hex chars is plenty to
            # identify tasks within a single file.
            task_id = hashlib.md5(
                f"{line_num}:{task_content}".encode()
            ).hexdigest()[:12]

            tasks[task_id] = TaskItem(
                content=task_content,
                state=state,
                line_number=line_num,
                level=len(indent) // 2,  # assumes 2-space indent per level
            )

        return tasks

    def _detect_task_changes(
        self,
        current_tasks: Dict[str, TaskItem],
    ) -> List[Dict[str, Any]]:
        """Detect changes between cached and current tasks.

        Produces a list of change dicts of type "created", "deleted", or
        "state_changed".
        """
        changes: List[Dict[str, Any]] = []
        current_ids = set(current_tasks.keys())
        cached_ids = set(self._task_cache.keys())

        # New tasks
        for task_id in current_ids - cached_ids:
            task = current_tasks[task_id]
            changes.append({
                "type": "created",
                "task_id": task_id,
                "content": task.content,
                "state": task.state,
            })

        # Deleted tasks
        for task_id in cached_ids - current_ids:
            task = self._task_cache[task_id]
            changes.append({
                "type": "deleted",
                "task_id": task_id,
                "content": task.content,
            })

        # Modified tasks: same id present on both sides, state differs
        for task_id in current_ids & cached_ids:
            current = current_tasks[task_id]
            cached = self._task_cache[task_id]

            if current.state != cached.state:
                changes.append({
                    "type": "state_changed",
                    "task_id": task_id,
                    "content": current.content,
                    "old_state": cached.state,
                    "new_state": current.state,
                    "is_completed": current.is_completed,
                })

        return changes

    async def _emit_task_changes(self, changes: List[Dict[str, Any]]) -> None:
        """Wrap *changes* in a single TaskFileEvent and emit it on the bus."""
        event = TaskFileEvent(
            path=self.config.path,
            change_type=ChangeType.MODIFIED,
            task_changes=changes,
            metadata={
                "total_changes": len(changes),
                # Only "state_changed" entries carry is_completed; .get()
                # keeps the sum safe for created/deleted entries too.
                "completed_tasks": sum(1 for c in changes if c.get("is_completed")),
            },
        )
        await self.emit(event)

        # Log summary
        created = sum(1 for c in changes if c["type"] == "created")
        completed = sum(
            1 for c in changes
            if c["type"] == "state_changed" and c.get("is_completed")
        )
        logger.debug(f"Task changes: {created} created, {completed} completed")

    def get_task_stats(self) -> Dict[str, int]:
        """Get task statistics derived from the last parsed snapshot."""
        total = len(self._task_cache)
        completed = sum(1 for t in self._task_cache.values() if t.is_completed)
        in_progress = sum(1 for t in self._task_cache.values() if t.is_in_progress)

        return {
            "total": total,
            "completed": completed,
            "in_progress": in_progress,
            "pending": total - completed - in_progress,
        }

    def get_stats(self) -> Dict[str, Any]:
        """Get watcher statistics (base watcher stats merged with task stats)."""
        stats = super().get_stats()
        stats.update(self.get_task_stats())
        return stats
|
monoco/daemon/app.py
CHANGED
|
@@ -9,6 +9,8 @@ from typing import Optional, List, Dict
|
|
|
9
9
|
from monoco.daemon.services import Broadcaster, ProjectManager
|
|
10
10
|
from monoco.core.git import GitMonitor
|
|
11
11
|
from monoco.core.config import get_config, ConfigMonitor, ConfigScope, get_config_path
|
|
12
|
+
from monoco.daemon.scheduler import SchedulerService
|
|
13
|
+
from monoco.daemon.mailroom_service import MailroomService
|
|
12
14
|
|
|
13
15
|
# Configure logging
|
|
14
16
|
logging.basicConfig(level=logging.INFO)
|
|
@@ -34,6 +36,8 @@ broadcaster = Broadcaster()
|
|
|
34
36
|
git_monitor: GitMonitor | None = None
|
|
35
37
|
config_monitors: List[ConfigMonitor] = []
|
|
36
38
|
project_manager: ProjectManager | None = None
|
|
39
|
+
scheduler_service: SchedulerService | None = None
|
|
40
|
+
mailroom_service: MailroomService | None = None
|
|
37
41
|
|
|
38
42
|
|
|
39
43
|
@asynccontextmanager
|
|
@@ -41,7 +45,7 @@ async def lifespan(app: FastAPI):
|
|
|
41
45
|
# Startup
|
|
42
46
|
logger.info("Starting Monoco Daemon services...")
|
|
43
47
|
|
|
44
|
-
global project_manager, git_monitor, config_monitors
|
|
48
|
+
global project_manager, git_monitor, config_monitors, scheduler_service, mailroom_service
|
|
45
49
|
# Use MONOCO_SERVER_ROOT if set, otherwise CWD
|
|
46
50
|
env_root = os.getenv("MONOCO_SERVER_ROOT")
|
|
47
51
|
workspace_root = Path(env_root) if env_root else Path.cwd()
|
|
@@ -72,6 +76,17 @@ async def lifespan(app: FastAPI):
|
|
|
72
76
|
]
|
|
73
77
|
|
|
74
78
|
await project_manager.start_all()
|
|
79
|
+
# Start Scheduler
|
|
80
|
+
scheduler_service = SchedulerService(project_manager)
|
|
81
|
+
await scheduler_service.start()
|
|
82
|
+
|
|
83
|
+
# Start Mailroom Service
|
|
84
|
+
mailroom_service = MailroomService(
|
|
85
|
+
workspace_root=workspace_root,
|
|
86
|
+
broadcaster=broadcaster,
|
|
87
|
+
)
|
|
88
|
+
await mailroom_service.start()
|
|
89
|
+
|
|
75
90
|
git_task = asyncio.create_task(git_monitor.start())
|
|
76
91
|
config_tasks = [asyncio.create_task(m.start()) for m in config_monitors]
|
|
77
92
|
|
|
@@ -84,6 +99,10 @@ async def lifespan(app: FastAPI):
|
|
|
84
99
|
m.stop()
|
|
85
100
|
if project_manager:
|
|
86
101
|
project_manager.stop_all()
|
|
102
|
+
if scheduler_service:
|
|
103
|
+
scheduler_service.stop()
|
|
104
|
+
if mailroom_service:
|
|
105
|
+
await mailroom_service.stop()
|
|
87
106
|
|
|
88
107
|
await git_task
|
|
89
108
|
await asyncio.gather(*config_tasks)
|
|
@@ -301,6 +320,21 @@ async def create_issue_endpoint(payload: CreateIssueRequest):
|
|
|
301
320
|
related=payload.related,
|
|
302
321
|
subdir=payload.subdir,
|
|
303
322
|
)
|
|
323
|
+
|
|
324
|
+
# Link memos to the newly created issue
|
|
325
|
+
if payload.from_memos:
|
|
326
|
+
from monoco.features.memo.core import load_memos, update_memo
|
|
327
|
+
|
|
328
|
+
existing_memos = {m.uid: m for m in load_memos(project.issues_root)}
|
|
329
|
+
|
|
330
|
+
for memo_id in payload.from_memos:
|
|
331
|
+
if memo_id in existing_memos:
|
|
332
|
+
# Only update if not already linked to this issue (idempotency)
|
|
333
|
+
memo = existing_memos[memo_id]
|
|
334
|
+
if memo.ref != issue.id:
|
|
335
|
+
update_memo(project.issues_root, memo_id, {"status": "tracked", "ref": issue.id})
|
|
336
|
+
# Non-blocking: ignore missing memos (just log warning)
|
|
337
|
+
|
|
304
338
|
return issue
|
|
305
339
|
except Exception as e:
|
|
306
340
|
raise HTTPException(status_code=400, detail=str(e))
|
|
@@ -489,3 +523,45 @@ async def update_workspace_state(state: WorkspaceState):
|
|
|
489
523
|
raise HTTPException(
|
|
490
524
|
status_code=500, detail=f"Failed to persist state: {str(e)}"
|
|
491
525
|
)
|
|
526
|
+
|
|
527
|
+
|
|
528
|
+
# --- Mailroom API Endpoints ---
|
|
529
|
+
|
|
530
|
+
|
|
531
|
+
@app.get("/api/v1/mailroom/status")
async def get_mailroom_status():
    """Expose the Mailroom service's status, capabilities, and statistics."""
    service = mailroom_service
    if not service:
        # 503: the daemon is up but the mailroom subsystem never started.
        raise HTTPException(status_code=503, detail="Mailroom service not initialized")
    return service.get_status()
|
|
540
|
+
|
|
541
|
+
|
|
542
|
+
@app.post("/api/v1/mailroom/discover")
async def trigger_mailroom_discovery():
    """Force a re-scan of the environment for document conversion tools."""
    if not mailroom_service:
        # 503: the daemon is up but the mailroom subsystem never started.
        raise HTTPException(status_code=503, detail="Mailroom service not initialized")

    discovery = mailroom_service.get_discovery()
    # force=True bypasses any cached discovery results.
    grouped_tools = discovery.discover(force=True)
    discovered_count = sum(len(group) for group in grouped_tools.values())

    tool_entries = [
        {
            "name": tool.name,
            "type": tool.tool_type.value,
            "version": tool.version,
        }
        for tool in discovery.get_all_tools()
    ]

    return {
        "discovered": discovered_count,
        "capabilities": discovery.get_capabilities_summary(),
        "tools": tool_entries,
    }
|
monoco/daemon/commands.py
CHANGED
|
@@ -15,6 +15,9 @@ def serve(
|
|
|
15
15
|
False, "--reload", "-r", help="Enable auto-reload for dev"
|
|
16
16
|
),
|
|
17
17
|
root: Optional[str] = typer.Option(None, "--root", help="Workspace root directory"),
|
|
18
|
+
max_agents: Optional[int] = typer.Option(
|
|
19
|
+
None, "--max-agents", help="Override global maximum concurrent agents (default: 3)"
|
|
20
|
+
),
|
|
18
21
|
):
|
|
19
22
|
"""
|
|
20
23
|
Start the Monoco Daemon server.
|
|
@@ -26,6 +29,13 @@ def serve(
|
|
|
26
29
|
print_output(
|
|
27
30
|
f"Workspace Root: {os.environ['MONOCO_SERVER_ROOT']}", title="Monoco Serve"
|
|
28
31
|
)
|
|
32
|
+
|
|
33
|
+
# Set max agents override if provided
|
|
34
|
+
if max_agents is not None:
|
|
35
|
+
os.environ["MONOCO_MAX_AGENTS"] = str(max_agents)
|
|
36
|
+
print_output(
|
|
37
|
+
f"Max Agents: {max_agents}", title="Monoco Serve"
|
|
38
|
+
)
|
|
29
39
|
|
|
30
40
|
print_output(
|
|
31
41
|
f"Starting Monoco Daemon on http://{host}:{port}", title="Monoco Serve"
|
monoco/daemon/events.py
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
"""
EventBus - Central event system for Agent scheduling.

DEPRECATED: This module has been moved to monoco.core.scheduler.
This file is kept for backward compatibility and re-exports from the new location.

Migration:
    Old: from monoco.daemon.events import AgentEventType, event_bus
    New: from monoco.core.scheduler import AgentEventType, event_bus
"""

import warnings

# Re-export the event API from its new home so existing imports keep working.
from monoco.core.scheduler import (
    AgentEventType,
    AgentEvent,
    EventBus,
    event_bus,
    EventHandler,
)

# Warn once at import time; stacklevel=2 attributes the warning to the
# importing module rather than to this shim.
warnings.warn(
    "monoco.daemon.events is deprecated. "
    "Use monoco.core.scheduler instead.",
    DeprecationWarning,
    stacklevel=2
)

# Public surface kept identical to the pre-move module.
__all__ = [
    "AgentEventType",
    "AgentEvent",
    "EventBus",
    "event_bus",
    "EventHandler",
]
|
|
@@ -0,0 +1,196 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Mailroom Service for Monoco Daemon.
|
|
3
|
+
|
|
4
|
+
Manages automated document ingestion with concurrent processing,
|
|
5
|
+
environment discovery, and artifact registration.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
import asyncio
|
|
11
|
+
import logging
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
from typing import Optional, Dict, Any
|
|
14
|
+
|
|
15
|
+
from monoco.core.ingestion.discovery import EnvironmentDiscovery
|
|
16
|
+
from monoco.core.ingestion.worker import ConversionWorker
|
|
17
|
+
from monoco.core.ingestion.watcher import DropzoneWatcher, IngestionEvent
|
|
18
|
+
from monoco.core.artifacts.manager import ArtifactManager
|
|
19
|
+
|
|
20
|
+
logger = logging.getLogger(__name__)
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class MailroomService:
    """
    Service for automated document ingestion in Monoco Daemon.

    Features:
    - Environment discovery for conversion tools
    - Dropzone monitoring for new files
    - Concurrent conversion processing
    - Artifact registration
    - SSE event broadcasting
    """

    # Maps IngestionEvent type values onto the stats counters they bump.
    _EVENT_STAT_KEYS = {
        "file_detected": "files_detected",
        "conversion_completed": "conversions_success",
        "conversion_failed": "conversions_failed",
        "artifact_registered": "artifacts_registered",
    }

    def __init__(
        self,
        workspace_root: Path,
        broadcaster: Optional[Any] = None,
        dropzone_path: Optional[Path] = None,
        max_concurrent: int = 4,
    ):
        """
        Initialize the Mailroom service.

        Args:
            workspace_root: Root directory of the workspace
            broadcaster: SSE broadcaster for events
            dropzone_path: Path to dropzone directory (default: workspace/.monoco/dropzone)
            max_concurrent: Maximum concurrent conversion tasks
        """
        self.workspace_root = Path(workspace_root)
        self.broadcaster = broadcaster

        # Default dropzone location
        self.dropzone_path = dropzone_path or (self.workspace_root / ".monoco" / "dropzone")

        # Initialize components
        self.discovery = EnvironmentDiscovery()
        self.conversion_worker = ConversionWorker(
            discovery=self.discovery,
            max_concurrent=max_concurrent,
        )

        # Artifact manager (lazy init)
        self._artifact_manager: Optional[ArtifactManager] = None

        # Watcher (lazy init)
        self._watcher: Optional[DropzoneWatcher] = None

        # Event loop captured in start(); needed so watcher-thread callbacks
        # can schedule coroutines back onto the service's loop.
        self._loop: Optional[asyncio.AbstractEventLoop] = None

        # State
        self._running = False
        self._stats: Dict[str, Any] = {
            "files_detected": 0,
            "conversions_success": 0,
            "conversions_failed": 0,
            "artifacts_registered": 0,
        }

    @property
    def artifact_manager(self) -> ArtifactManager:
        """Get or create the artifact manager."""
        if self._artifact_manager is None:
            self._artifact_manager = ArtifactManager(self.workspace_root)
        return self._artifact_manager

    async def start(self) -> None:
        """Start the Mailroom service (idempotent)."""
        if self._running:
            return

        logger.info("Starting Mailroom service...")

        # Remember the loop we started on: start() is a coroutine, so a
        # running loop is guaranteed to exist here.
        self._loop = asyncio.get_running_loop()

        # Perform environment discovery
        tools = self.discovery.discover()
        total_tools = sum(len(t) for t in tools.values())
        logger.info(f"Discovered {total_tools} conversion tools")

        # Log discovered capabilities
        capabilities = self.discovery.get_capabilities_summary()
        logger.info(f"Capabilities: {capabilities}")

        # Initialize and start dropzone watcher
        self._watcher = DropzoneWatcher(
            dropzone_path=self.dropzone_path,
            artifact_manager=self.artifact_manager,
            conversion_worker=self.conversion_worker,
            process_existing=False,  # Don't process existing files on startup
        )

        # Set up event callback
        self._watcher.set_event_callback(self._on_ingestion_event)

        # Start watching (this is synchronous, runs in background thread)
        self._watcher.start()

        self._running = True
        logger.info(f"Mailroom service started. Dropzone: {self.dropzone_path}")

    async def stop(self) -> None:
        """Stop the Mailroom service (idempotent)."""
        if not self._running:
            return

        logger.info("Stopping Mailroom service...")

        if self._watcher:
            self._watcher.stop()
            self._watcher = None

        self._loop = None
        self._running = False
        logger.info("Mailroom service stopped")

    def _on_ingestion_event(self, event: IngestionEvent) -> None:
        """Handle ingestion events from the watcher.

        NOTE(review): this callback is presumably invoked on the watcher's
        background thread (the watcher "runs in background thread"), so the
        broadcast coroutine must be scheduled thread-safely on the service's
        loop. The previous asyncio.create_task() call raises RuntimeError
        when no event loop is running in the calling thread.
        """
        # Update stats via the event-type -> counter mapping.
        stat_key = self._EVENT_STAT_KEYS.get(event.event_type.value)
        if stat_key is not None:
            self._stats[stat_key] += 1

        # Broadcast via SSE if broadcaster available.
        if self.broadcaster:
            loop = self._loop
            if loop is not None and not loop.is_closed():
                # Safe to call from any thread, including the loop thread.
                asyncio.run_coroutine_threadsafe(self._broadcast_event(event), loop)
            else:
                logger.warning("Mailroom event not broadcast: no event loop available")

    async def _broadcast_event(self, event: IngestionEvent) -> None:
        """Broadcast ingestion event to SSE clients."""
        try:
            payload = {
                "type": event.event_type.value,
                "file_path": str(event.file_path),
                "task_id": event.task_id,
                "artifact_id": event.artifact_id,
                "error_message": event.error_message,
                "metadata": event.metadata,
                "timestamp": event.timestamp.isoformat(),
            }
            await self.broadcaster.broadcast("MAILROOM_EVENT", payload)
        except Exception as e:
            # Best-effort: a broken SSE client must not disrupt ingestion.
            logger.error(f"Failed to broadcast mailroom event: {e}")

    def get_status(self) -> Dict[str, Any]:
        """Get current service status and statistics."""
        capabilities = self.discovery.get_capabilities_summary()

        return {
            "running": self._running,
            "dropzone_path": str(self.dropzone_path),
            "capabilities": capabilities,
            "supported_extensions": self.conversion_worker.get_supported_extensions(),
            "stats": self._stats.copy(),
            "tools": [
                {
                    "name": tool.name,
                    "type": tool.tool_type.value,
                    "version": tool.version,
                    "capabilities": [c.value for c in tool.capabilities],
                }
                for tool in self.discovery.get_all_tools()
            ],
        }

    def is_running(self) -> bool:
        """Check if the service is running."""
        return self._running

    def get_discovery(self) -> EnvironmentDiscovery:
        """Get the environment discovery instance."""
        return self.discovery

    def get_worker(self) -> ConversionWorker:
        """Get the conversion worker instance."""
        return self.conversion_worker
|
monoco/daemon/models.py
CHANGED
|
@@ -18,6 +18,7 @@ class CreateIssueRequest(BaseModel):
|
|
|
18
18
|
related: List[str] = []
|
|
19
19
|
subdir: Optional[str] = None
|
|
20
20
|
project_id: Optional[str] = None # Added for multi-project support
|
|
21
|
+
from_memos: List[str] = [] # Memo IDs to link to this issue
|
|
21
22
|
|
|
22
23
|
|
|
23
24
|
class UpdateIssueRequest(BaseModel):
|