monoco-toolkit 0.3.11__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff compares two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- monoco/core/automation/__init__.py +40 -0
- monoco/core/automation/field_watcher.py +296 -0
- monoco/core/automation/handlers.py +805 -0
- monoco/core/config.py +29 -11
- monoco/core/daemon/__init__.py +5 -0
- monoco/core/daemon/pid.py +290 -0
- monoco/core/git.py +15 -0
- monoco/core/hooks/context.py +74 -13
- monoco/core/injection.py +86 -8
- monoco/core/integrations.py +0 -24
- monoco/core/router/__init__.py +17 -0
- monoco/core/router/action.py +202 -0
- monoco/core/scheduler/__init__.py +63 -0
- monoco/core/scheduler/base.py +152 -0
- monoco/core/scheduler/engines.py +175 -0
- monoco/core/scheduler/events.py +197 -0
- monoco/core/scheduler/local.py +377 -0
- monoco/core/setup.py +9 -0
- monoco/core/sync.py +199 -4
- monoco/core/watcher/__init__.py +63 -0
- monoco/core/watcher/base.py +382 -0
- monoco/core/watcher/dropzone.py +152 -0
- monoco/core/watcher/im.py +460 -0
- monoco/core/watcher/issue.py +303 -0
- monoco/core/watcher/memo.py +192 -0
- monoco/core/watcher/task.py +238 -0
- monoco/daemon/app.py +3 -60
- monoco/daemon/commands.py +459 -25
- monoco/daemon/events.py +34 -0
- monoco/daemon/scheduler.py +157 -201
- monoco/daemon/services.py +42 -243
- monoco/features/agent/__init__.py +25 -7
- monoco/features/agent/cli.py +91 -57
- monoco/features/agent/engines.py +31 -170
- monoco/features/agent/resources/en/AGENTS.md +14 -14
- monoco/features/agent/resources/en/skills/monoco_role_engineer/SKILL.md +101 -0
- monoco/features/agent/resources/en/skills/monoco_role_manager/SKILL.md +95 -0
- monoco/features/agent/resources/en/skills/monoco_role_planner/SKILL.md +177 -0
- monoco/features/agent/resources/en/skills/monoco_role_reviewer/SKILL.md +139 -0
- monoco/features/agent/resources/zh/skills/monoco_role_engineer/SKILL.md +101 -0
- monoco/features/agent/resources/zh/skills/monoco_role_manager/SKILL.md +95 -0
- monoco/features/agent/resources/zh/skills/monoco_role_planner/SKILL.md +177 -0
- monoco/features/agent/resources/zh/skills/monoco_role_reviewer/SKILL.md +139 -0
- monoco/features/agent/worker.py +1 -1
- monoco/features/hooks/__init__.py +61 -6
- monoco/features/hooks/commands.py +281 -271
- monoco/features/hooks/dispatchers/__init__.py +23 -0
- monoco/features/hooks/dispatchers/agent_dispatcher.py +486 -0
- monoco/features/hooks/dispatchers/git_dispatcher.py +478 -0
- monoco/features/hooks/manager.py +357 -0
- monoco/features/hooks/models.py +262 -0
- monoco/features/hooks/parser.py +322 -0
- monoco/features/hooks/universal_interceptor.py +503 -0
- monoco/features/im/__init__.py +67 -0
- monoco/features/im/core.py +782 -0
- monoco/features/im/models.py +311 -0
- monoco/features/issue/commands.py +133 -60
- monoco/features/issue/core.py +385 -40
- monoco/features/issue/domain_commands.py +0 -19
- monoco/features/issue/resources/en/AGENTS.md +17 -122
- monoco/features/issue/resources/hooks/agent/before-tool.sh +102 -0
- monoco/features/issue/resources/hooks/agent/session-start.sh +88 -0
- monoco/features/issue/resources/hooks/{post-checkout.sh → git/git-post-checkout.sh} +10 -9
- monoco/features/issue/resources/hooks/git/git-pre-commit.sh +31 -0
- monoco/features/issue/resources/hooks/{pre-push.sh → git/git-pre-push.sh} +7 -13
- monoco/features/issue/resources/zh/AGENTS.md +18 -123
- monoco/features/memo/cli.py +15 -64
- monoco/features/memo/core.py +6 -34
- monoco/features/memo/models.py +24 -15
- monoco/features/memo/resources/en/AGENTS.md +31 -0
- monoco/features/memo/resources/zh/AGENTS.md +28 -5
- monoco/features/spike/commands.py +5 -3
- monoco/main.py +5 -3
- {monoco_toolkit-0.3.11.dist-info → monoco_toolkit-0.4.0.dist-info}/METADATA +1 -1
- monoco_toolkit-0.4.0.dist-info/RECORD +170 -0
- monoco/core/execution.py +0 -67
- monoco/features/agent/apoptosis.py +0 -44
- monoco/features/agent/manager.py +0 -127
- monoco/features/agent/resources/atoms/atom-code-dev.yaml +0 -61
- monoco/features/agent/resources/atoms/atom-issue-lifecycle.yaml +0 -73
- monoco/features/agent/resources/atoms/atom-knowledge.yaml +0 -55
- monoco/features/agent/resources/atoms/atom-review.yaml +0 -60
- monoco/features/agent/resources/en/skills/monoco_atom_core/SKILL.md +0 -99
- monoco/features/agent/resources/en/skills/monoco_workflow_agent_engineer/SKILL.md +0 -94
- monoco/features/agent/resources/en/skills/monoco_workflow_agent_manager/SKILL.md +0 -93
- monoco/features/agent/resources/en/skills/monoco_workflow_agent_planner/SKILL.md +0 -85
- monoco/features/agent/resources/en/skills/monoco_workflow_agent_reviewer/SKILL.md +0 -114
- monoco/features/agent/resources/workflows/workflow-dev.yaml +0 -83
- monoco/features/agent/resources/workflows/workflow-issue-create.yaml +0 -72
- monoco/features/agent/resources/workflows/workflow-review.yaml +0 -94
- monoco/features/agent/resources/zh/roles/monoco_role_engineer.yaml +0 -49
- monoco/features/agent/resources/zh/roles/monoco_role_manager.yaml +0 -46
- monoco/features/agent/resources/zh/roles/monoco_role_planner.yaml +0 -46
- monoco/features/agent/resources/zh/roles/monoco_role_reviewer.yaml +0 -47
- monoco/features/agent/resources/zh/skills/monoco_atom_core/SKILL.md +0 -99
- monoco/features/agent/resources/zh/skills/monoco_workflow_agent_engineer/SKILL.md +0 -94
- monoco/features/agent/resources/zh/skills/monoco_workflow_agent_manager/SKILL.md +0 -88
- monoco/features/agent/resources/zh/skills/monoco_workflow_agent_planner/SKILL.md +0 -259
- monoco/features/agent/resources/zh/skills/monoco_workflow_agent_reviewer/SKILL.md +0 -137
- monoco/features/agent/session.py +0 -169
- monoco/features/artifact/resources/zh/skills/monoco_atom_artifact/SKILL.md +0 -278
- monoco/features/glossary/resources/en/skills/monoco_atom_glossary/SKILL.md +0 -35
- monoco/features/glossary/resources/zh/skills/monoco_atom_glossary/SKILL.md +0 -35
- monoco/features/hooks/adapter.py +0 -67
- monoco/features/hooks/core.py +0 -441
- monoco/features/i18n/resources/en/skills/monoco_atom_i18n/SKILL.md +0 -96
- monoco/features/i18n/resources/en/skills/monoco_workflow_i18n_scan/SKILL.md +0 -105
- monoco/features/i18n/resources/zh/skills/monoco_atom_i18n/SKILL.md +0 -96
- monoco/features/i18n/resources/zh/skills/monoco_workflow_i18n_scan/SKILL.md +0 -105
- monoco/features/issue/resources/en/skills/monoco_atom_issue/SKILL.md +0 -165
- monoco/features/issue/resources/en/skills/monoco_workflow_issue_creation/SKILL.md +0 -167
- monoco/features/issue/resources/en/skills/monoco_workflow_issue_development/SKILL.md +0 -224
- monoco/features/issue/resources/en/skills/monoco_workflow_issue_management/SKILL.md +0 -159
- monoco/features/issue/resources/en/skills/monoco_workflow_issue_refinement/SKILL.md +0 -203
- monoco/features/issue/resources/hooks/pre-commit.sh +0 -41
- monoco/features/issue/resources/zh/skills/monoco_atom_issue_lifecycle/SKILL.md +0 -190
- monoco/features/issue/resources/zh/skills/monoco_workflow_issue_creation/SKILL.md +0 -167
- monoco/features/issue/resources/zh/skills/monoco_workflow_issue_development/SKILL.md +0 -224
- monoco/features/issue/resources/zh/skills/monoco_workflow_issue_management/SKILL.md +0 -159
- monoco/features/issue/resources/zh/skills/monoco_workflow_issue_refinement/SKILL.md +0 -203
- monoco/features/memo/resources/en/skills/monoco_atom_memo/SKILL.md +0 -77
- monoco/features/memo/resources/en/skills/monoco_workflow_note_processing/SKILL.md +0 -140
- monoco/features/memo/resources/zh/skills/monoco_atom_memo/SKILL.md +0 -77
- monoco/features/memo/resources/zh/skills/monoco_workflow_note_processing/SKILL.md +0 -140
- monoco/features/spike/resources/en/skills/monoco_atom_spike/SKILL.md +0 -76
- monoco/features/spike/resources/en/skills/monoco_workflow_research/SKILL.md +0 -121
- monoco/features/spike/resources/zh/skills/monoco_atom_spike/SKILL.md +0 -76
- monoco/features/spike/resources/zh/skills/monoco_workflow_research/SKILL.md +0 -121
- monoco_toolkit-0.3.11.dist-info/RECORD +0 -181
- {monoco_toolkit-0.3.11.dist-info → monoco_toolkit-0.4.0.dist-info}/WHEEL +0 -0
- {monoco_toolkit-0.3.11.dist-info → monoco_toolkit-0.4.0.dist-info}/entry_points.txt +0 -0
- {monoco_toolkit-0.3.11.dist-info → monoco_toolkit-0.4.0.dist-info}/licenses/LICENSE +0 -0
monoco/features/im/core.py (new file, @@ -0,0 +1,782 @@):

```python
"""
IM Core Management Classes (FEAT-0167).

Provides channel management, message storage, and message routing
for the IM system.
"""

from __future__ import annotations

import json
import logging
import secrets
from datetime import datetime, timedelta
from pathlib import Path
from typing import Any, Dict, List, Optional, Callable

from .models import (
    IMChannel,
    IMMessage,
    IMParticipant,
    IMAgentSession,
    IMWebhookConfig,
    IMStats,
    PlatformType,
    MessageStatus,
    ContentType,
)

logger = logging.getLogger(__name__)


class IMStorageError(Exception):
    """Base exception for IM storage errors."""
    pass


class ChannelNotFoundError(IMStorageError):
    """Raised when a channel is not found."""
    pass


class MessageNotFoundError(IMStorageError):
    """Raised when a message is not found."""
    pass


class IMChannelManager:
    """
    Manages IM channels (groups, private chats, threads).

    Responsibilities:
    - Channel CRUD operations
    - Participant management
    - Channel configuration
    """

    def __init__(self, storage_dir: Path):
        self.storage_dir = storage_dir
        self.channels_file = storage_dir / "channels.jsonl"
        self._channels: Dict[str, IMChannel] = {}
        self._loaded = False

    def _ensure_storage(self) -> None:
        """Ensure storage directory exists."""
        self.storage_dir.mkdir(parents=True, exist_ok=True)

    def _load_channels(self) -> None:
        """Load all channels from storage."""
        if self._loaded:
            return

        self._ensure_storage()

        if not self.channels_file.exists():
            self._loaded = True
            return

        try:
            with open(self.channels_file, "r", encoding="utf-8") as f:
                for line in f:
                    line = line.strip()
                    if not line:
                        continue
                    try:
                        data = json.loads(line)
                        channel = IMChannel.model_validate(data)
                        self._channels[channel.channel_id] = channel
                    except (json.JSONDecodeError, Exception) as e:
                        logger.warning(f"Failed to load channel: {e}")

        except Exception as e:
            logger.error(f"Error loading channels: {e}")

        self._loaded = True

    def _save_channels(self) -> None:
        """Save all channels to storage."""
        self._ensure_storage()

        with open(self.channels_file, "w", encoding="utf-8") as f:
            for channel in self._channels.values():
                f.write(json.dumps(channel.model_dump(), default=str) + "\n")

    def create_channel(
        self,
        channel_id: str,
        platform: PlatformType,
        channel_type: str = "group",
        name: Optional[str] = None,
        **kwargs
    ) -> IMChannel:
        """
        Create a new channel.

        Args:
            channel_id: Unique channel ID (platform-specific)
            platform: Platform type
            channel_type: Type of channel (group, private, thread)
            name: Optional channel name
            **kwargs: Additional channel attributes

        Returns:
            The created IMChannel
        """
        self._load_channels()

        if channel_id in self._channels:
            logger.warning(f"Channel {channel_id} already exists, returning existing")
            return self._channels[channel_id]

        channel = IMChannel(
            channel_id=channel_id,
            platform=platform,
            channel_type=channel_type,
            name=name,
            **kwargs
        )

        self._channels[channel_id] = channel
        self._save_channels()

        logger.info(f"Created channel {channel_id} ({platform.value})")
        return channel

    def get_channel(self, channel_id: str) -> Optional[IMChannel]:
        """Get a channel by ID."""
        self._load_channels()
        return self._channels.get(channel_id)

    def get_or_create_channel(
        self,
        channel_id: str,
        platform: PlatformType,
        **kwargs
    ) -> IMChannel:
        """Get existing channel or create new one."""
        channel = self.get_channel(channel_id)
        if channel:
            return channel
        return self.create_channel(channel_id, platform, **kwargs)

    def update_channel(self, channel_id: str, **updates) -> Optional[IMChannel]:
        """Update channel attributes."""
        self._load_channels()

        if channel_id not in self._channels:
            return None

        channel = self._channels[channel_id]
        data = channel.model_dump()
        data.update(updates)
        data["last_activity"] = datetime.now()

        self._channels[channel_id] = IMChannel.model_validate(data)
        self._save_channels()

        return self._channels[channel_id]

    def delete_channel(self, channel_id: str) -> bool:
        """Delete a channel."""
        self._load_channels()

        if channel_id not in self._channels:
            return False

        del self._channels[channel_id]
        self._save_channels()

        logger.info(f"Deleted channel {channel_id}")
        return True

    def list_channels(
        self,
        platform: Optional[PlatformType] = None,
        project_binding: Optional[str] = None
    ) -> List[IMChannel]:
        """List channels with optional filters."""
        self._load_channels()

        channels = list(self._channels.values())

        if platform:
            channels = [c for c in channels if c.platform == platform]

        if project_binding:
            channels = [c for c in channels if c.project_binding == project_binding]

        return sorted(channels, key=lambda c: c.last_activity, reverse=True)

    def add_participant(self, channel_id: str, participant: IMParticipant) -> bool:
        """Add a participant to a channel."""
        self._load_channels()

        if channel_id not in self._channels:
            return False

        channel = self._channels[channel_id]
        channel.add_participant(participant)
        channel.update_activity()

        self._save_channels()
        return True

    def remove_participant(self, channel_id: str, participant_id: str) -> bool:
        """Remove a participant from a channel."""
        self._load_channels()

        if channel_id not in self._channels:
            return False

        channel = self._channels[channel_id]
        channel.remove_participant(participant_id)
        channel.update_activity()

        self._save_channels()
        return True

    def bind_project(self, channel_id: str, project_path: str) -> bool:
        """Bind a channel to a project."""
        return self.update_channel(channel_id, project_binding=project_path) is not None

    def unbind_project(self, channel_id: str) -> bool:
        """Unbind a channel from a project."""
        return self.update_channel(channel_id, project_binding=None) is not None


class MessageStore:
    """
    Stores and manages IM messages.

    Uses JSONL format for append-only message storage.
    Messages are stored per-channel for efficient querying.
    """

    def __init__(self, storage_dir: Path):
        self.storage_dir = storage_dir
        self.messages_dir = storage_dir / "messages"
        self._message_cache: Dict[str, IMMessage] = {}
        self._cache_size = 1000

    def _ensure_storage(self) -> None:
        """Ensure storage directory exists."""
        self.messages_dir.mkdir(parents=True, exist_ok=True)

    def _get_channel_file(self, channel_id: str) -> Path:
        """Get the message file path for a channel."""
        # Hash channel_id to avoid filesystem issues
        safe_name = channel_id.replace("/", "_").replace("\\", "_")
        return self.messages_dir / f"{safe_name}.jsonl"

    def save_message(self, message: IMMessage) -> None:
        """
        Save a message to storage.

        Appends to the channel's message file.
        """
        self._ensure_storage()

        channel_file = self._get_channel_file(message.channel_id)

        with open(channel_file, "a", encoding="utf-8") as f:
            f.write(json.dumps(message.model_dump(), default=str) + "\n")

        # Update cache
        self._message_cache[message.message_id] = message

        # Trim cache if needed
        if len(self._message_cache) > self._cache_size:
            # Remove oldest 20% of cache
            remove_count = self._cache_size // 5
            keys = list(self._message_cache.keys())[:remove_count]
            for key in keys:
                del self._message_cache[key]

        logger.debug(f"Saved message {message.message_id} to {channel_file}")

    def get_message(self, message_id: str) -> Optional[IMMessage]:
        """Get a message by ID (uses cache first)."""
        # Check cache first
        if message_id in self._message_cache:
            return self._message_cache[message_id]

        # Search all channel files
        if not self.messages_dir.exists():
            return None

        for channel_file in self.messages_dir.glob("*.jsonl"):
            try:
                with open(channel_file, "r", encoding="utf-8") as f:
                    for line in f:
                        data = json.loads(line.strip())
                        if data.get("message_id") == message_id:
                            message = IMMessage.model_validate(data)
                            self._message_cache[message_id] = message
                            return message
            except Exception as e:
                logger.warning(f"Error reading {channel_file}: {e}")

        return None

    def get_channel_messages(
        self,
        channel_id: str,
        limit: int = 100,
        before: Optional[datetime] = None,
        after: Optional[datetime] = None,
        status: Optional[MessageStatus] = None
    ) -> List[IMMessage]:
        """
        Get messages for a channel.

        Args:
            channel_id: Channel ID
            limit: Maximum number of messages to return
            before: Only return messages before this timestamp
            after: Only return messages after this timestamp
            status: Filter by message status
        """
        channel_file = self._get_channel_file(channel_id)

        if not channel_file.exists():
            return []

        messages = []

        try:
            with open(channel_file, "r", encoding="utf-8") as f:
                for line in f:
                    line = line.strip()
                    if not line:
                        continue
                    try:
                        data = json.loads(line)

                        # Apply filters
                        timestamp = datetime.fromisoformat(data["timestamp"])

                        if before and timestamp >= before:
                            continue
                        if after and timestamp <= after:
                            continue
                        if status and data.get("status") != status.value:
                            continue

                        messages.append(IMMessage.model_validate(data))
                    except Exception as e:
                        logger.warning(f"Failed to parse message: {e}")

        except Exception as e:
            logger.error(f"Error reading messages: {e}")

        # Sort by timestamp descending, then limit
        messages.sort(key=lambda m: m.timestamp, reverse=True)
        return messages[:limit]

    def update_message_status(
        self,
        message_id: str,
        status: MessageStatus,
        step: Optional[str] = None
    ) -> bool:
        """
        Update message status.

        Note: This rewrites the entire channel file. For high-volume
        scenarios, consider using a proper database.
        """
        message = self.get_message(message_id)
        if not message:
            return False

        message.status = status
        if step:
            from .models import ProcessingStep
            message.processing_log.append(ProcessingStep(
                step=step,
                status="completed" if status != MessageStatus.ERROR else "failed"
            ))

        # Rewrite channel file
        channel_file = self._get_channel_file(message.channel_id)

        if not channel_file.exists():
            return False

        try:
            # Read all messages
            messages = []
            with open(channel_file, "r", encoding="utf-8") as f:
                for line in f:
                    line = line.strip()
                    if not line:
                        continue
                    data = json.loads(line)
                    if data["message_id"] == message_id:
                        messages.append(message.model_dump())
                    else:
                        messages.append(data)

            # Write back
            with open(channel_file, "w", encoding="utf-8") as f:
                for msg_data in messages:
                    f.write(json.dumps(msg_data, default=str) + "\n")

            # Update cache
            self._message_cache[message_id] = message

            return True

        except Exception as e:
            logger.error(f"Error updating message status: {e}")
            return False

    def get_thread_messages(self, thread_id: str) -> List[IMMessage]:
        """Get all messages in a thread."""
        messages = []

        if not self.messages_dir.exists():
            return messages

        for channel_file in self.messages_dir.glob("*.jsonl"):
            try:
                with open(channel_file, "r", encoding="utf-8") as f:
                    for line in f:
                        data = json.loads(line.strip())
                        if data.get("thread_id") == thread_id:
                            messages.append(IMMessage.model_validate(data))
            except Exception as e:
                logger.warning(f"Error reading {channel_file}: {e}")

        messages.sort(key=lambda m: m.timestamp)
        return messages

    def get_message_context(
        self,
        message_id: str,
        window_size: int = 10
    ) -> List[IMMessage]:
        """
        Get context messages around a specific message.

        Returns messages before and after the target message.
        """
        message = self.get_message(message_id)
        if not message:
            return []

        # Get all messages in channel
        channel_messages = self.get_channel_messages(
            message.channel_id,
            limit=window_size * 2
        )

        # Find index of target message
        try:
            idx = next(
                i for i, m in enumerate(channel_messages)
                if m.message_id == message_id
            )
        except StopIteration:
            return []

        # Return context window
        start = max(0, idx - window_size // 2)
        end = min(len(channel_messages), idx + window_size // 2 + 1)

        return channel_messages[start:end]


class IMRouter:
    """
    Routes incoming IM messages to appropriate handlers.

    Makes routing decisions based on:
    - Channel configuration
    - Message content
    - Agent availability
    """

    def __init__(
        self,
        channel_manager: IMChannelManager,
        message_store: MessageStore
    ):
        self.channel_manager = channel_manager
        self.message_store = message_store
        self._handlers: Dict[str, Callable[[IMMessage], Any]] = {}
        self._default_handler: Optional[Callable[[IMMessage], Any]] = None

    def register_handler(
        self,
        handler_id: str,
        handler: Callable[[IMMessage], Any]
    ) -> None:
        """Register a message handler."""
        self._handlers[handler_id] = handler
        logger.debug(f"Registered handler: {handler_id}")

    def unregister_handler(self, handler_id: str) -> None:
        """Unregister a message handler."""
        if handler_id in self._handlers:
            del self._handlers[handler_id]

    def set_default_handler(self, handler: Callable[[IMMessage], Any]) -> None:
        """Set the default handler for unrouted messages."""
        self._default_handler = handler

    def route(self, message: IMMessage) -> Optional[str]:
        """
        Route a message to the appropriate handler.

        Returns:
            Handler ID if routed, None otherwise
        """
        channel = self.channel_manager.get_channel(message.channel_id)

        if not channel:
            logger.warning(f"Unknown channel: {message.channel_id}")
            return None

        # Check if auto-reply is enabled
        if not channel.auto_reply:
            logger.debug(f"Auto-reply disabled for channel {channel.channel_id}")
            return None

        # Check if mention is required
        if channel.require_mention:
            # Check if any agent is mentioned
            agent_mentioned = False
            for participant in channel.participants:
                if participant.participant_type.value == "agent":
                    if participant.participant_id in message.mentions:
                        agent_mentioned = True
                        break

            if not agent_mentioned and not message.mention_all:
                logger.debug(f"No agent mentioned in message {message.message_id}")
                return None

        # Determine handler based on message type and content
        handler_id = self._determine_handler(message, channel)

        if handler_id and handler_id in self._handlers:
            try:
                self._handlers[handler_id](message)
                return handler_id
            except Exception as e:
                logger.error(f"Handler error for {handler_id}: {e}")
                return None

        # Use default handler
        if self._default_handler:
            try:
                self._default_handler(message)
                return "default"
            except Exception as e:
                logger.error(f"Default handler error: {e}")

        return None

    def _determine_handler(
        self,
        message: IMMessage,
        channel: IMChannel
    ) -> Optional[str]:
        """
        Determine which handler should process this message.

        Override this method for custom routing logic.
        """
        # Check for specific keywords or patterns
        text = message.content.text or ""

        # Route to specific agents based on keywords
        if any(keyword in text.lower() for keyword in ["review", "审阅", "审核"]):
            return "reviewer"

        if any(keyword in text.lower() for keyword in ["plan", "规划", "计划", "设计"]):
            return "planner"

        if any(keyword in text.lower() for keyword in ["fix", "bug", "错误", "修复"]):
            return "engineer"

        # Use channel's default agent
        if channel.default_agent:
            return channel.default_agent

        return None


class IMAgentSessionManager:
    """
    Manages Agent sessions bound to IM channels.

    Tracks active Agent interactions with channels.
    """

    def __init__(self, storage_dir: Path):
        self.storage_dir = storage_dir
        self.sessions_dir = storage_dir / "sessions"
        self._active_sessions: Dict[str, IMAgentSession] = {}
        self._loaded = False

    def _ensure_storage(self) -> None:
        """Ensure storage directory exists."""
        self.sessions_dir.mkdir(parents=True, exist_ok=True)

    def _generate_session_id(self) -> str:
        """Generate a unique session ID."""
        return f"im-{secrets.token_hex(8)}"

    def create_session(
        self,
        channel_id: str,
        agent_role: str,
        linked_issue_id: Optional[str] = None,
        linked_task_id: Optional[str] = None
    ) -> IMAgentSession:
        """Create a new Agent session."""
        self._ensure_storage()

        session_id = self._generate_session_id()
        session = IMAgentSession(
            session_id=session_id,
            channel_id=channel_id,
            agent_role=agent_role,
            linked_issue_id=linked_issue_id,
            linked_task_id=linked_task_id,
        )

        self._active_sessions[session_id] = session
        self._save_session(session)

        logger.info(f"Created session {session_id} for channel {channel_id}")
        return session

    def get_session(self, session_id: str) -> Optional[IMAgentSession]:
        """Get a session by ID."""
        # Check active sessions first
        if session_id in self._active_sessions:
            return self._active_sessions[session_id]

        # Load from disk
        session_file = self.sessions_dir / f"{session_id}.json"
        if session_file.exists():
            try:
                data = json.loads(session_file.read_text(encoding="utf-8"))
                session = IMAgentSession.model_validate(data)
                self._active_sessions[session_id] = session
                return session
            except Exception as e:
                logger.error(f"Error loading session: {e}")

        return None

    def get_channel_sessions(self, channel_id: str) -> List[IMAgentSession]:
        """Get all sessions for a channel."""
        return [
            s for s in self._active_sessions.values()
            if s.channel_id == channel_id and s.status == "active"
        ]

    def update_session(self, session: IMAgentSession) -> None:
        """Update a session."""
        session.update_activity()
        self._active_sessions[session.session_id] = session
        self._save_session(session)

    def _save_session(self, session: IMAgentSession) -> None:
        """Save a session to disk."""
        self._ensure_storage()

        session_file = self.sessions_dir / f"{session.session_id}.json"
        session_file.write_text(
            json.dumps(session.model_dump(), default=str),
            encoding="utf-8"
        )

    def end_session(
        self,
        session_id: str,
        status: str = "completed",
        result_summary: Optional[str] = None
    ) -> bool:
        """End a session."""
        session = self.get_session(session_id)
        if not session:
            return False

        session.end_session(status)
        if result_summary:
            session.result_summary = result_summary

        self._save_session(session)

        if session_id in self._active_sessions:
            del self._active_sessions[session_id]

        logger.info(f"Ended session {session_id} with status {status}")
        return True

    def add_message_to_session(self, session_id: str, message_id: str) -> bool:
        """Add a message to a session's history."""
        session = self.get_session(session_id)
        if not session:
            return False

        session.message_ids.append(message_id)
        session.context_message_count = len(session.message_ids)
        self.update_session(session)

        return True

    def cleanup_stale_sessions(self, max_age_hours: int = 24) -> int:
        """Clean up sessions that have been inactive for too long."""
        cutoff = datetime.now() - timedelta(hours=max_age_hours)
        stale_sessions = [
            sid for sid, s in self._active_sessions.items()
            if s.last_activity < cutoff
        ]

        for sid in stale_sessions:
            self.end_session(sid, status="completed", result_summary="Session expired due to inactivity")

        return len(stale_sessions)


class IMManager:
    """
    Main entry point for IM system.

    Provides unified access to all IM functionality.
    """

    def __init__(self, project_root: Path):
        self.storage_dir = project_root / ".monoco" / "im"
        self.channels = IMChannelManager(self.storage_dir)
        self.messages = MessageStore(self.storage_dir)
        self.router = IMRouter(self.channels, self.messages)
        self.sessions = IMAgentSessionManager(self.storage_dir)

        # Ensure directory structure
        self._init_storage()

    def _init_storage(self) -> None:
        """Initialize storage directory structure."""
        self.storage_dir.mkdir(parents=True, exist_ok=True)
        (self.storage_dir / "messages").mkdir(exist_ok=True)
        (self.storage_dir / "sessions").mkdir(exist_ok=True)
        (self.storage_dir / "webhooks").mkdir(exist_ok=True)

        logger.info(f"IM storage initialized at {self.storage_dir}")

    def get_stats(self) -> IMStats:
        """Get IM system statistics."""
        channels = self.channels.list_channels()

        return IMStats(
            total_channels=len(channels),
            active_channels=len([c for c in channels if c.auto_reply]),
            active_sessions=len(self.sessions._active_sessions),
        )
```