monoco-toolkit 0.3.11__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- monoco/core/automation/__init__.py +40 -0
- monoco/core/automation/field_watcher.py +296 -0
- monoco/core/automation/handlers.py +805 -0
- monoco/core/config.py +29 -11
- monoco/core/daemon/__init__.py +5 -0
- monoco/core/daemon/pid.py +290 -0
- monoco/core/git.py +15 -0
- monoco/core/hooks/context.py +74 -13
- monoco/core/injection.py +86 -8
- monoco/core/integrations.py +0 -24
- monoco/core/router/__init__.py +17 -0
- monoco/core/router/action.py +202 -0
- monoco/core/scheduler/__init__.py +63 -0
- monoco/core/scheduler/base.py +152 -0
- monoco/core/scheduler/engines.py +175 -0
- monoco/core/scheduler/events.py +197 -0
- monoco/core/scheduler/local.py +377 -0
- monoco/core/setup.py +9 -0
- monoco/core/sync.py +199 -4
- monoco/core/watcher/__init__.py +63 -0
- monoco/core/watcher/base.py +382 -0
- monoco/core/watcher/dropzone.py +152 -0
- monoco/core/watcher/im.py +460 -0
- monoco/core/watcher/issue.py +303 -0
- monoco/core/watcher/memo.py +192 -0
- monoco/core/watcher/task.py +238 -0
- monoco/daemon/app.py +3 -60
- monoco/daemon/commands.py +459 -25
- monoco/daemon/events.py +34 -0
- monoco/daemon/scheduler.py +157 -201
- monoco/daemon/services.py +42 -243
- monoco/features/agent/__init__.py +25 -7
- monoco/features/agent/cli.py +91 -57
- monoco/features/agent/engines.py +31 -170
- monoco/features/agent/resources/en/AGENTS.md +14 -14
- monoco/features/agent/resources/en/skills/monoco_role_engineer/SKILL.md +101 -0
- monoco/features/agent/resources/en/skills/monoco_role_manager/SKILL.md +95 -0
- monoco/features/agent/resources/en/skills/monoco_role_planner/SKILL.md +177 -0
- monoco/features/agent/resources/en/skills/monoco_role_reviewer/SKILL.md +139 -0
- monoco/features/agent/resources/zh/skills/monoco_role_engineer/SKILL.md +101 -0
- monoco/features/agent/resources/zh/skills/monoco_role_manager/SKILL.md +95 -0
- monoco/features/agent/resources/zh/skills/monoco_role_planner/SKILL.md +177 -0
- monoco/features/agent/resources/zh/skills/monoco_role_reviewer/SKILL.md +139 -0
- monoco/features/agent/worker.py +1 -1
- monoco/features/hooks/__init__.py +61 -6
- monoco/features/hooks/commands.py +281 -271
- monoco/features/hooks/dispatchers/__init__.py +23 -0
- monoco/features/hooks/dispatchers/agent_dispatcher.py +486 -0
- monoco/features/hooks/dispatchers/git_dispatcher.py +478 -0
- monoco/features/hooks/manager.py +357 -0
- monoco/features/hooks/models.py +262 -0
- monoco/features/hooks/parser.py +322 -0
- monoco/features/hooks/universal_interceptor.py +503 -0
- monoco/features/im/__init__.py +67 -0
- monoco/features/im/core.py +782 -0
- monoco/features/im/models.py +311 -0
- monoco/features/issue/commands.py +133 -60
- monoco/features/issue/core.py +385 -40
- monoco/features/issue/domain_commands.py +0 -19
- monoco/features/issue/resources/en/AGENTS.md +17 -122
- monoco/features/issue/resources/hooks/agent/before-tool.sh +102 -0
- monoco/features/issue/resources/hooks/agent/session-start.sh +88 -0
- monoco/features/issue/resources/hooks/{post-checkout.sh → git/git-post-checkout.sh} +10 -9
- monoco/features/issue/resources/hooks/git/git-pre-commit.sh +31 -0
- monoco/features/issue/resources/hooks/{pre-push.sh → git/git-pre-push.sh} +7 -13
- monoco/features/issue/resources/zh/AGENTS.md +18 -123
- monoco/features/memo/cli.py +15 -64
- monoco/features/memo/core.py +6 -34
- monoco/features/memo/models.py +24 -15
- monoco/features/memo/resources/en/AGENTS.md +31 -0
- monoco/features/memo/resources/zh/AGENTS.md +28 -5
- monoco/features/spike/commands.py +5 -3
- monoco/main.py +5 -3
- {monoco_toolkit-0.3.11.dist-info → monoco_toolkit-0.4.0.dist-info}/METADATA +1 -1
- monoco_toolkit-0.4.0.dist-info/RECORD +170 -0
- monoco/core/execution.py +0 -67
- monoco/features/agent/apoptosis.py +0 -44
- monoco/features/agent/manager.py +0 -127
- monoco/features/agent/resources/atoms/atom-code-dev.yaml +0 -61
- monoco/features/agent/resources/atoms/atom-issue-lifecycle.yaml +0 -73
- monoco/features/agent/resources/atoms/atom-knowledge.yaml +0 -55
- monoco/features/agent/resources/atoms/atom-review.yaml +0 -60
- monoco/features/agent/resources/en/skills/monoco_atom_core/SKILL.md +0 -99
- monoco/features/agent/resources/en/skills/monoco_workflow_agent_engineer/SKILL.md +0 -94
- monoco/features/agent/resources/en/skills/monoco_workflow_agent_manager/SKILL.md +0 -93
- monoco/features/agent/resources/en/skills/monoco_workflow_agent_planner/SKILL.md +0 -85
- monoco/features/agent/resources/en/skills/monoco_workflow_agent_reviewer/SKILL.md +0 -114
- monoco/features/agent/resources/workflows/workflow-dev.yaml +0 -83
- monoco/features/agent/resources/workflows/workflow-issue-create.yaml +0 -72
- monoco/features/agent/resources/workflows/workflow-review.yaml +0 -94
- monoco/features/agent/resources/zh/roles/monoco_role_engineer.yaml +0 -49
- monoco/features/agent/resources/zh/roles/monoco_role_manager.yaml +0 -46
- monoco/features/agent/resources/zh/roles/monoco_role_planner.yaml +0 -46
- monoco/features/agent/resources/zh/roles/monoco_role_reviewer.yaml +0 -47
- monoco/features/agent/resources/zh/skills/monoco_atom_core/SKILL.md +0 -99
- monoco/features/agent/resources/zh/skills/monoco_workflow_agent_engineer/SKILL.md +0 -94
- monoco/features/agent/resources/zh/skills/monoco_workflow_agent_manager/SKILL.md +0 -88
- monoco/features/agent/resources/zh/skills/monoco_workflow_agent_planner/SKILL.md +0 -259
- monoco/features/agent/resources/zh/skills/monoco_workflow_agent_reviewer/SKILL.md +0 -137
- monoco/features/agent/session.py +0 -169
- monoco/features/artifact/resources/zh/skills/monoco_atom_artifact/SKILL.md +0 -278
- monoco/features/glossary/resources/en/skills/monoco_atom_glossary/SKILL.md +0 -35
- monoco/features/glossary/resources/zh/skills/monoco_atom_glossary/SKILL.md +0 -35
- monoco/features/hooks/adapter.py +0 -67
- monoco/features/hooks/core.py +0 -441
- monoco/features/i18n/resources/en/skills/monoco_atom_i18n/SKILL.md +0 -96
- monoco/features/i18n/resources/en/skills/monoco_workflow_i18n_scan/SKILL.md +0 -105
- monoco/features/i18n/resources/zh/skills/monoco_atom_i18n/SKILL.md +0 -96
- monoco/features/i18n/resources/zh/skills/monoco_workflow_i18n_scan/SKILL.md +0 -105
- monoco/features/issue/resources/en/skills/monoco_atom_issue/SKILL.md +0 -165
- monoco/features/issue/resources/en/skills/monoco_workflow_issue_creation/SKILL.md +0 -167
- monoco/features/issue/resources/en/skills/monoco_workflow_issue_development/SKILL.md +0 -224
- monoco/features/issue/resources/en/skills/monoco_workflow_issue_management/SKILL.md +0 -159
- monoco/features/issue/resources/en/skills/monoco_workflow_issue_refinement/SKILL.md +0 -203
- monoco/features/issue/resources/hooks/pre-commit.sh +0 -41
- monoco/features/issue/resources/zh/skills/monoco_atom_issue_lifecycle/SKILL.md +0 -190
- monoco/features/issue/resources/zh/skills/monoco_workflow_issue_creation/SKILL.md +0 -167
- monoco/features/issue/resources/zh/skills/monoco_workflow_issue_development/SKILL.md +0 -224
- monoco/features/issue/resources/zh/skills/monoco_workflow_issue_management/SKILL.md +0 -159
- monoco/features/issue/resources/zh/skills/monoco_workflow_issue_refinement/SKILL.md +0 -203
- monoco/features/memo/resources/en/skills/monoco_atom_memo/SKILL.md +0 -77
- monoco/features/memo/resources/en/skills/monoco_workflow_note_processing/SKILL.md +0 -140
- monoco/features/memo/resources/zh/skills/monoco_atom_memo/SKILL.md +0 -77
- monoco/features/memo/resources/zh/skills/monoco_workflow_note_processing/SKILL.md +0 -140
- monoco/features/spike/resources/en/skills/monoco_atom_spike/SKILL.md +0 -76
- monoco/features/spike/resources/en/skills/monoco_workflow_research/SKILL.md +0 -121
- monoco/features/spike/resources/zh/skills/monoco_atom_spike/SKILL.md +0 -76
- monoco/features/spike/resources/zh/skills/monoco_workflow_research/SKILL.md +0 -121
- monoco_toolkit-0.3.11.dist-info/RECORD +0 -181
- {monoco_toolkit-0.3.11.dist-info → monoco_toolkit-0.4.0.dist-info}/WHEEL +0 -0
- {monoco_toolkit-0.3.11.dist-info → monoco_toolkit-0.4.0.dist-info}/entry_points.txt +0 -0
- {monoco_toolkit-0.3.11.dist-info → monoco_toolkit-0.4.0.dist-info}/licenses/LICENSE +0 -0
monoco/core/sync.py
CHANGED

@@ -1,4 +1,5 @@
 import typer
+import subprocess
 from pathlib import Path
 from typing import Optional, List
 from monoco.core.registry import FeatureRegistry
@@ -21,8 +22,10 @@ def _get_targets(root: Path, config, cli_target: Optional[Path]) -> List[Path]:
         return targets

     # 2. Registry Defaults (Dynamic Detection)
+    # We now default to ALL integrations instead of auto-detecting
+    # because we want to enable all agents by default.
     integrations = get_active_integrations(
-        root, config_overrides=None, auto_detect=
+        root, config_overrides=None, auto_detect=False
     )

     if integrations:
@@ -133,8 +136,9 @@ def sync_command(
     skill_manager = SkillManager(root, active_features)

     # Get active integrations
+    # Disable auto-detect to distribute to all supported frameworks
     integrations = get_active_integrations(
-        root, config_overrides=None, auto_detect=
+        root, config_overrides=None, auto_detect=False
     )

     if integrations:
@@ -165,7 +169,7 @@ def sync_command(
     # 5. Distribute Workflows (if --workflows flag is set)
     if workflows:
         console.print("[bold blue]Distributing Flow Skills as Workflows...[/bold blue]")
-
+
         try:
             workflow_results = skill_manager.distribute_workflows(force=False, lang=skill_lang)
             success_count = sum(1 for v in workflow_results.values() if v)
@@ -182,6 +186,103 @@ def sync_command(
                 f"[red] Failed to distribute workflows: {e}[/red]"
             )

+    # 6. Sync Universal Hooks (Git & Agent)
+    console.print("[bold blue]Synchronizing Universal Hooks...[/bold blue]")
+
+    try:
+        from monoco.features.hooks import UniversalHookManager, HookType
+        from monoco.features.hooks.dispatchers import (
+            GitHookDispatcher,
+            ClaudeCodeDispatcher,
+            GeminiDispatcher,
+        )
+
+        hooks_manager = UniversalHookManager()
+
+        # Register Dispatchers
+        git_dispatcher = GitHookDispatcher()
+        hooks_manager.register_dispatcher(HookType.GIT, git_dispatcher)
+
+        # Register Agent Dispatchers for active platforms
+        # NOTE: Only Claude Code and Gemini CLI are officially supported
+        agent_dispatchers = {
+            "claude-code": ClaudeCodeDispatcher(),
+            "gemini-cli": GeminiDispatcher(),
+        }
+        for dispatcher in agent_dispatchers.values():
+            hooks_manager.register_dispatcher(HookType.AGENT, dispatcher)
+
+        # 6.1 Scan for hooks
+        all_hooks = []
+
+        # 6.1.1 Scan builtin hooks from hooks feature
+        try:
+            from monoco.features import hooks as hooks_module
+            hooks_feature_dir = Path(hooks_module.__file__).parent
+            builtin_hooks_dir = hooks_feature_dir / "resources" / "hooks"
+            if builtin_hooks_dir.exists():
+                groups = hooks_manager.scan(builtin_hooks_dir)
+                for group in groups.values():
+                    all_hooks.extend(group.hooks)
+        except Exception as e:
+            console.print(f"[dim] No builtin hooks found: {e}[/dim]")
+
+        # 6.1.2 Scan for hooks in all active features
+        for feature in active_features:
+            if feature.name == "hooks":
+                continue  # Already scanned
+
+            import importlib
+            try:
+                # Use the module where the feature class is defined (usually adapter.py)
+                module_name = feature.__class__.__module__
+                module = importlib.import_module(module_name)
+
+                if hasattr(module, "__file__") and module.__file__:
+                    # feature_dir is the directory containing adapter.py
+                    feature_dir = Path(module.__file__).parent
+                    hooks_resource_dir = feature_dir / "resources" / "hooks"
+
+                    if hooks_resource_dir.exists():
+                        groups = hooks_manager.scan(hooks_resource_dir)
+                        for group in groups.values():
+                            all_hooks.extend(group.hooks)
+            except Exception:
+                continue
+
+        # 6.2 Sync Git Hooks
+        git_hooks = [h for h in all_hooks if h.metadata.type == HookType.GIT]
+
+        if not (root / ".git").exists():
+            console.print("[dim] Git repository not found. Initializing...[/dim]")
+            # Set global default branch to main
+            subprocess.run(["git", "config", "--global", "init.defaultBranch", "main"], check=False)
+            subprocess.run(["git", "init"], cwd=root, check=False)
+
+        git_results = git_dispatcher.sync(git_hooks, root)
+
+        git_installed = sum(1 for v in git_results.values() if v)
+        if git_installed > 0:
+            console.print(f"[green] ✓ Synchronized {git_installed} Git hooks[/green]")
+        elif git_hooks:
+            console.print("[yellow] No Git hooks were successfully synchronized[/yellow]")
+
+        # 6.3 Sync Agent Hooks using the new ACL-based dispatchers
+        agent_hooks = [h for h in all_hooks if h.metadata.type == HookType.AGENT]
+
+        for provider, dispatcher in agent_dispatchers.items():
+            provider_hooks = [h for h in agent_hooks if h.metadata.provider == provider]
+            if provider_hooks:
+                results = dispatcher.sync(provider_hooks, root)
+                success_count = sum(1 for v in results.values() if v)
+                if success_count > 0:
+                    console.print(f"[green] ✓ Synchronized {success_count} agent hooks to {provider}[/green]")
+
+    except Exception as e:
+        console.print(f"[red] Failed to synchronize Universal Hooks: {e}[/red]")
+        import traceback
+        console.print(f"[dim]{traceback.format_exc()}[/dim]")
+
     # 4. Determine Targets
     targets = _get_targets(root, config, target)

@@ -299,7 +400,7 @@ def uninstall_command(

     # 3. Clean up Workflows
     console.print("[bold blue]Cleaning up distributed workflows...[/bold blue]")
-
+
     try:
         removed_count = skill_manager.cleanup_workflows()
         if removed_count > 0:
@@ -310,3 +411,97 @@ def uninstall_command(
            console.print(
                f"[red] Failed to clean workflows: {e}[/red]"
            )
+
+    # 4. Clean up Git Hooks
+    console.print("[bold blue]Cleaning up Git Hooks...[/bold blue]")
+
+    try:
+        from monoco.features.hooks.dispatchers import GitHookDispatcher
+
+        git_dispatcher = GitHookDispatcher()
+        installed = git_dispatcher.list_installed(root)
+
+        uninstalled = 0
+        for hook_info in installed:
+            if git_dispatcher.uninstall(hook_info["event"], root):
+                uninstalled += 1
+
+        if uninstalled > 0:
+            console.print(
+                f"[green] ✓ Removed {uninstalled} Git hooks[/green]"
+            )
+        else:
+            console.print("[dim] No Monoco Git hooks to clean up[/dim]")
+    except Exception as e:
+        console.print(f"[red] Failed to clean Git hooks: {e}[/red]")
+
+    # 5. Clean up Agent Hooks
+    console.print("[bold blue]Cleaning up Agent Hooks...[/bold blue]")
+
+    try:
+        from monoco.features.hooks.dispatchers import (
+            ClaudeCodeDispatcher,
+            GeminiDispatcher,
+        )
+
+        # Clean up Claude Code hooks
+        claude_dispatcher = ClaudeCodeDispatcher()
+        claude_settings = claude_dispatcher.get_settings_path(root)
+        if claude_settings and claude_settings.exists():
+            try:
+                import json
+                with open(claude_settings, "r", encoding="utf-8") as f:
+                    settings = json.load(f)
+
+                if "hooks" in settings:
+                    original_count = len(settings["hooks"])
+                    # Remove Monoco-managed hooks
+                    for event in list(settings["hooks"].keys()):
+                        configs = settings["hooks"][event]
+                        if isinstance(configs, list):
+                            settings["hooks"][event] = [
+                                c for c in configs if not c.get("_monoco_managed")
+                            ]
+                    # Clean up empty events
+                    settings["hooks"] = {
+                        k: v for k, v in settings["hooks"].items() if v
+                    }
+
+                    with open(claude_settings, "w", encoding="utf-8") as f:
+                        json.dump(settings, f, indent=2, ensure_ascii=False)
+
+                console.print("[green] ✓ Cleaned up Claude Code hooks[/green]")
+            except Exception as e:
+                console.print(f"[red] Failed to clean Claude Code hooks: {e}[/red]")
+
+        # Clean up Gemini CLI hooks
+        gemini_dispatcher = GeminiDispatcher()
+        gemini_settings = gemini_dispatcher.get_settings_path(root)
+        if gemini_settings and gemini_settings.exists():
+            try:
+                import json
+                with open(gemini_settings, "r", encoding="utf-8") as f:
+                    settings = json.load(f)
+
+                if "hooks" in settings:
+                    # Remove Monoco-managed hooks
+                    for event in list(settings["hooks"].keys()):
+                        configs = settings["hooks"][event]
+                        if isinstance(configs, list):
+                            settings["hooks"][event] = [
+                                c for c in configs if not c.get("_monoco_managed")
+                            ]
+                    # Clean up empty events
+                    settings["hooks"] = {
+                        k: v for k, v in settings["hooks"].items() if v
+                    }
+
+                    with open(gemini_settings, "w", encoding="utf-8") as f:
+                        json.dump(settings, f, indent=2, ensure_ascii=False)
+
+                console.print("[green] ✓ Cleaned up Gemini CLI hooks[/green]")
+            except Exception as e:
+                console.print(f"[red] Failed to clean Gemini CLI hooks: {e}[/red]")
+
+    except Exception as e:
+        console.print(f"[red] Failed to clean Agent hooks: {e}[/red]")
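Taken together, the sync_command changes above route both Git hooks and agent hooks through one manager and a set of per-platform dispatchers, and the uninstall_command changes reverse it by filtering any settings entries tagged _monoco_managed back out. The sketch below shows roughly how those pieces compose when driven directly rather than via monoco sync; it is illustrative only, it reuses the names that appear in the diff (UniversalHookManager, HookType, the dispatchers, scan and sync), and the ./my-hooks directory is a made-up example path.

# Illustrative sketch, not part of the package: drive the hook sync by hand,
# assuming the classes behave as they are used in sync_command above.
from pathlib import Path

from monoco.features.hooks import UniversalHookManager, HookType
from monoco.features.hooks.dispatchers import GitHookDispatcher, ClaudeCodeDispatcher

root = Path(".")  # project root, as in sync_command
manager = UniversalHookManager()

git_dispatcher = GitHookDispatcher()
manager.register_dispatcher(HookType.GIT, git_dispatcher)
manager.register_dispatcher(HookType.AGENT, ClaudeCodeDispatcher())

# Scan a hooks directory (hypothetical path) and collect all discovered hooks.
all_hooks = []
for group in manager.scan(Path("./my-hooks")).values():
    all_hooks.extend(group.hooks)

# Install only the Git hooks into the repository at root; sync() returns a
# mapping whose truthy values are counted as successes, as in the diff above.
git_hooks = [h for h in all_hooks if h.metadata.type == HookType.GIT]
results = git_dispatcher.sync(git_hooks, root)
print(sum(1 for ok in results.values() if ok), "Git hooks installed")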
monoco/core/watcher/__init__.py
ADDED

@@ -0,0 +1,63 @@
+"""
+Watcher Module - Layer 1 of the Event Automation Framework.
+
+This module provides file system watching capabilities with event emission.
+It is part of the three-layer architecture:
+- Layer 1: File Watcher (this module)
+- Layer 2: Action Router
+- Layer 3: Action Executor
+
+Example Usage:
+    >>> from monoco.core.watcher import IssueWatcher, WatchConfig
+    >>> from pathlib import Path
+    >>>
+    >>> config = WatchConfig(
+    ...     path=Path("./Issues"),
+    ...     patterns=["*.md"],
+    ...     recursive=True,
+    ... )
+    >>> watcher = IssueWatcher(config)
+    >>> await watcher.start()
+    >>> # Events are automatically emitted to EventBus
+    >>> await watcher.stop()
+"""
+
+from .base import (
+    ChangeType,
+    FieldChange,
+    FileEvent,
+    FilesystemWatcher,
+    PollingWatcher,
+    WatchdogWatcher,
+    WatchConfig,
+)
+from .issue import IssueWatcher, IssueFileEvent
+from .memo import MemoWatcher, MemoFileEvent
+from .task import TaskWatcher, TaskFileEvent
+from .dropzone import DropzoneWatcher, DropzoneFileEvent
+from .im import IMWatcher, IMFileEvent, IMInboundWatcher, IMWebhookWatcher
+
+__all__ = [
+    # Base classes
+    "ChangeType",
+    "FieldChange",
+    "FileEvent",
+    "FilesystemWatcher",
+    "PollingWatcher",
+    "WatchdogWatcher",
+    "WatchConfig",
+    # Concrete watchers
+    "IssueWatcher",
+    "IssueFileEvent",
+    "MemoWatcher",
+    "MemoFileEvent",
+    "TaskWatcher",
+    "TaskFileEvent",
+    "DropzoneWatcher",
+    "DropzoneFileEvent",
+    # IM watchers (FEAT-0167)
+    "IMWatcher",
+    "IMFileEvent",
+    "IMInboundWatcher",
+    "IMWebhookWatcher",
+]
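The module docstring above shows the EventBus-driven path. The same watchers also accept plain callbacks through register_callback on the base class (defined in base.py, shown next). The following is a minimal, illustrative sketch under the assumption that an Issues directory exists next to the script; it is not part of the package.

# Illustrative sketch: watch ./Issues with a plain callback instead of the EventBus.
import asyncio
from pathlib import Path

from monoco.core.watcher import IssueWatcher, WatchConfig, FileEvent


async def main() -> None:
    config = WatchConfig(
        path=Path("./Issues"),       # directory to watch
        patterns=["*.md"],           # only Markdown files
        exclude_patterns=["*.tmp"],  # skip temp files
        recursive=True,
    )
    watcher = IssueWatcher(config)

    def on_change(event: FileEvent) -> None:
        # Callbacks may be sync or async; emit() dispatches both (see base.py below).
        print(f"{event.change_type.value}: {event.path}")

    watcher.register_callback(on_change)
    await watcher.start()
    await asyncio.sleep(30)  # watch for a while, then shut down
    await watcher.stop()


asyncio.run(main())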
monoco/core/watcher/base.py
ADDED

@@ -0,0 +1,382 @@
+"""
+Base abstractions for FilesystemWatcher - Layer 1 of the event automation framework.
+
+This module defines the core abstractions for file system event watching:
+- FilesystemWatcher: Abstract base class for all file watchers
+- FileEvent: Dataclass representing a file system event
+- WatchConfig: Configuration for file watching
+- ChangeType: Enum for types of file changes
+"""
+
+from __future__ import annotations
+
+import asyncio
+import inspect
+import logging
+from abc import ABC, abstractmethod
+from dataclasses import dataclass, field
+from datetime import datetime
+from enum import Enum, auto
+from pathlib import Path
+from typing import Any, Callable, Dict, List, Optional, Set, Union
+
+from monoco.core.scheduler import AgentEventType, EventBus, event_bus
+
+logger = logging.getLogger(__name__)
+
+
+class ChangeType(Enum):
+    """Types of file system changes."""
+    CREATED = "created"
+    MODIFIED = "modified"
+    DELETED = "deleted"
+    MOVED = "moved"
+    RENAMED = "renamed"
+
+
+@dataclass
+class FileEvent:
+    """
+    Represents a file system event.
+
+    Attributes:
+        path: Path to the file or directory
+        change_type: Type of change (created, modified, deleted, etc.)
+        watcher_name: Name of the watcher that emitted this event
+        old_path: Original path for move/rename events
+        old_content: Previous content hash or snapshot (for content tracking)
+        new_content: Current content hash or snapshot
+        metadata: Additional event metadata
+        timestamp: Event timestamp
+    """
+    path: Path
+    change_type: ChangeType
+    watcher_name: str
+    old_path: Optional[Path] = None
+    old_content: Optional[str] = None
+    new_content: Optional[str] = None
+    metadata: Dict[str, Any] = field(default_factory=dict)
+    timestamp: datetime = field(default_factory=datetime.now)
+
+    def to_agent_event_type(self) -> Optional[AgentEventType]:
+        """Convert FileEvent to AgentEventType if applicable."""
+        # This will be overridden by specific watchers
+        return None
+
+    def to_payload(self) -> Dict[str, Any]:
+        """Convert to payload dict for EventBus."""
+        return {
+            "path": str(self.path),
+            "change_type": self.change_type.value,
+            "watcher_name": self.watcher_name,
+            "old_path": str(self.old_path) if self.old_path else None,
+            "old_content": self.old_content,
+            "new_content": self.new_content,
+            "metadata": self.metadata,
+            "timestamp": self.timestamp.isoformat(),
+        }
+
+
+@dataclass
+class WatchConfig:
+    """
+    Configuration for file watching.
+
+    Attributes:
+        path: Path to watch (file or directory)
+        patterns: Glob patterns to match (e.g., "*.md", "*.yaml")
+        exclude_patterns: Patterns to exclude
+        recursive: Whether to watch recursively
+        field_extractors: Optional field extractors for content parsing
+        poll_interval: Polling interval in seconds (for polling-based watchers)
+    """
+    path: Path
+    patterns: List[str] = field(default_factory=lambda: ["*"])
+    exclude_patterns: List[str] = field(default_factory=list)
+    recursive: bool = True
+    field_extractors: Dict[str, Callable[[str], Any]] = field(default_factory=dict)
+    poll_interval: float = 5.0
+
+    def should_watch(self, file_path: Path) -> bool:
+        """Check if a file should be watched based on patterns."""
+        # Check exclude patterns first
+        for pattern in self.exclude_patterns:
+            if file_path.match(pattern):
+                return False
+
+        # Check include patterns
+        for pattern in self.patterns:
+            if file_path.match(pattern):
+                return True
+
+        return False
+
+
+@dataclass
+class FieldChange:
+    """Represents a change in a specific field."""
+    field_name: str
+    old_value: Any
+    new_value: Any
+    change_type: ChangeType = ChangeType.MODIFIED
+
+
+class FilesystemWatcher(ABC):
+    """
+    Abstract base class for file system watchers (Layer 1).
+
+    Responsibilities:
+    - Monitor file system changes
+    - Emit FileEvent objects
+    - Integrate with EventBus for event publishing
+
+    Lifecycle:
+    1. Create watcher with config
+    2. Call start() to begin watching
+    3. File events are emitted via emit() or callbacks
+    4. Call stop() to cleanup
+
+    Example:
+        >>> config = WatchConfig(path=Path("./Issues"), patterns=["*.md"])
+        >>> watcher = IssueWatcher(config)
+        >>> await watcher.start()
+        >>> # Events are automatically emitted to EventBus
+        >>> await watcher.stop()
+    """
+
+    def __init__(
+        self,
+        config: WatchConfig,
+        event_bus: Optional[EventBus] = None,
+        name: Optional[str] = None,
+    ):
+        self.config = config
+        self.event_bus = event_bus or event_bus
+        self.name = name or self.__class__.__name__
+        self._running = False
+        self._callbacks: List[Callable[[FileEvent], None]] = []
+        self._state_cache: Dict[str, Any] = {}  # For tracking state changes
+
+    @abstractmethod
+    async def start(self) -> None:
+        """Start watching the file system."""
+        pass
+
+    @abstractmethod
+    async def stop(self) -> None:
+        """Stop watching and cleanup resources."""
+        pass
+
+    def is_running(self) -> bool:
+        """Check if the watcher is currently running."""
+        return self._running
+
+    def register_callback(self, callback: Callable[[FileEvent], None]) -> None:
+        """Register a callback for file events."""
+        self._callbacks.append(callback)
+
+    def unregister_callback(self, callback: Callable[[FileEvent], None]) -> None:
+        """Unregister a callback."""
+        if callback in self._callbacks:
+            self._callbacks.remove(callback)
+
+    def _is_async_callable(self, callback: Callable) -> bool:
+        """
+        Check if a callable is async (coroutine function or has async __call__).
+
+        This handles both:
+        - Regular async functions: async def func(): ...
+        - Callable objects with async __call__: class Handler: async def __call__(self, ...): ...
+        """
+        # Direct check for coroutine function
+        if inspect.iscoroutinefunction(callback):
+            return True
+        # Check for callable object with async __call__ method
+        if hasattr(callback, "__call__") and not inspect.ismethod(callback):
+            if inspect.iscoroutinefunction(callback.__call__):
+                return True
+        return False
+
+    async def emit(self, event: FileEvent) -> None:
+        """
+        Emit a file event to all registered callbacks and EventBus.
+
+        Args:
+            event: The FileEvent to emit
+        """
+        # Call local callbacks
+        for callback in self._callbacks:
+            try:
+                if self._is_async_callable(callback):
+                    await callback(event)
+                else:
+                    callback(event)
+            except Exception as e:
+                logger.error(f"Error in callback for {event}: {e}")
+
+        # Publish to EventBus if available
+        if self.event_bus:
+            try:
+                agent_event_type = event.to_agent_event_type()
+                if agent_event_type:
+                    await self.event_bus.publish(
+                        agent_event_type,
+                        event.to_payload(),
+                        source=f"watcher.{self.name}",
+                    )
+            except Exception as e:
+                logger.error(f"Error publishing to EventBus: {e}")
+
+    def _get_file_hash(self, file_path: Path) -> Optional[str]:
+        """Get a hash of file content for change detection."""
+        try:
+            import hashlib
+            content = file_path.read_text(encoding="utf-8")
+            return hashlib.md5(content.encode()).hexdigest()
+        except Exception:
+            return None
+
+    def _read_file_content(self, file_path: Path) -> Optional[str]:
+        """Read file content safely."""
+        try:
+            return file_path.read_text(encoding="utf-8")
+        except Exception as e:
+            logger.debug(f"Could not read {file_path}: {e}")
+            return None
+
+    def get_stats(self) -> Dict[str, Any]:
+        """Get watcher statistics."""
+        return {
+            "name": self.name,
+            "running": self._running,
+            "config": {
+                "path": str(self.config.path),
+                "patterns": self.config.patterns,
+                "recursive": self.config.recursive,
+            },
+            "callbacks": len(self._callbacks),
+        }
+
+
+class PollingWatcher(FilesystemWatcher):
+    """
+    Base class for polling-based file watchers.
+
+    Useful for watching specific files or when native file system
+    events are not available/reliable.
+    """
+
+    def __init__(
+        self,
+        config: WatchConfig,
+        event_bus: Optional[EventBus] = None,
+        name: Optional[str] = None,
+    ):
+        super().__init__(config, event_bus, name)
+        self._poll_task: Optional[asyncio.Task] = None
+        self._file_states: Dict[Path, Dict[str, Any]] = {}
+
+    async def start(self) -> None:
+        """Start polling loop."""
+        if self._running:
+            return
+
+        self._running = True
+        self._poll_task = asyncio.create_task(self._poll_loop())
+        logger.info(f"Started polling watcher: {self.name}")
+
+    async def stop(self) -> None:
+        """Stop polling loop."""
+        if not self._running:
+            return
+
+        self._running = False
+
+        if self._poll_task:
+            self._poll_task.cancel()
+            try:
+                await self._poll_task
+            except asyncio.CancelledError:
+                pass
+            self._poll_task = None
+
+        logger.info(f"Stopped polling watcher: {self.name}")
+
+    async def _poll_loop(self) -> None:
+        """Main polling loop."""
+        while self._running:
+            try:
+                await self._check_changes()
+                await asyncio.sleep(self.config.poll_interval)
+            except asyncio.CancelledError:
+                break
+            except Exception as e:
+                logger.error(f"Error in poll loop: {e}")
+                await asyncio.sleep(self.config.poll_interval)
+
+    @abstractmethod
+    async def _check_changes(self) -> None:
+        """Check for changes - implement in subclass."""
+        pass
+
+    def _scan_files(self) -> Dict[Path, Dict[str, Any]]:
+        """Scan watched path and return file states."""
+        states = {}
+
+        if self.config.path.is_file():
+            files = [self.config.path]
+        else:
+            if self.config.recursive:
+                files = list(self.config.path.rglob("*"))
+            else:
+                files = list(self.config.path.glob("*"))
+
+        for file_path in files:
+            if not file_path.is_file():
+                continue
+
+            if not self.config.should_watch(file_path):
+                continue
+
+            try:
+                stat = file_path.stat()
+                content = self._read_file_content(file_path)
+                states[file_path] = {
+                    "mtime": stat.st_mtime,
+                    "size": stat.st_size,
+                    "content": content,
+                    "hash": self._get_file_hash(file_path) if content else None,
+                }
+            except Exception as e:
+                logger.debug(f"Could not stat {file_path}: {e}")
+
+        return states
+
+
+class WatchdogWatcher(FilesystemWatcher):
+    """
+    Base class for watchdog-based file watchers.
+
+    Uses the watchdog library for efficient native file system events.
+    """
+
+    def __init__(
+        self,
+        config: WatchConfig,
+        event_bus: Optional[EventBus] = None,
+        name: Optional[str] = None,
+    ):
+        super().__init__(config, event_bus, name)
+        self._observer: Optional[Any] = None
+
+    def _should_process(self, file_path: Path) -> bool:
+        """Check if a file should be processed."""
+        # Skip hidden files
+        if file_path.name.startswith("."):
+            return False
+
+        # Skip temporary files
+        if file_path.suffix in (".tmp", ".temp", ".part", ".swp", "~"):
+            return False
+
+        return self.config.should_watch(file_path)
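For orientation, a concrete polling watcher only has to implement _check_changes(). The sketch below diffs successive _scan_files() snapshots kept in self._file_states and emits created, modified, and deleted events through emit(); the SnapshotDiffWatcher name is illustrative and not part of the package, and the concrete watchers shipped in this release (issue.py, memo.py, task.py, etc.) may differ in detail.

# Illustrative sketch: a minimal PollingWatcher subclass built only on the
# base-class API shown above.
from monoco.core.watcher import PollingWatcher, FileEvent, ChangeType


class SnapshotDiffWatcher(PollingWatcher):
    """Compare successive _scan_files() snapshots and emit change events."""

    async def _check_changes(self) -> None:
        current = self._scan_files()
        previous = self._file_states

        for path, state in current.items():
            if path not in previous:
                await self.emit(FileEvent(
                    path=path,
                    change_type=ChangeType.CREATED,
                    watcher_name=self.name,
                    new_content=state.get("content"),
                ))
            elif state.get("hash") != previous[path].get("hash"):
                await self.emit(FileEvent(
                    path=path,
                    change_type=ChangeType.MODIFIED,
                    watcher_name=self.name,
                    old_content=previous[path].get("content"),
                    new_content=state.get("content"),
                ))

        for path in set(previous) - set(current):
            await self.emit(FileEvent(
                path=path,
                change_type=ChangeType.DELETED,
                watcher_name=self.name,
            ))

        # Keep the new snapshot for the next polling cycle.
        self._file_states = current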