monoco-toolkit 0.3.11-py3-none-any.whl → 0.3.12-py3-none-any.whl
This diff compares the contents of two publicly available package versions as released to their public registry. It is provided for informational purposes only.
- monoco/core/automation/__init__.py +51 -0
- monoco/core/automation/config.py +338 -0
- monoco/core/automation/field_watcher.py +296 -0
- monoco/core/automation/handlers.py +723 -0
- monoco/core/config.py +1 -1
- monoco/core/executor/__init__.py +38 -0
- monoco/core/executor/agent_action.py +254 -0
- monoco/core/executor/git_action.py +303 -0
- monoco/core/executor/im_action.py +309 -0
- monoco/core/executor/pytest_action.py +218 -0
- monoco/core/git.py +15 -0
- monoco/core/hooks/context.py +74 -13
- monoco/core/router/__init__.py +55 -0
- monoco/core/router/action.py +341 -0
- monoco/core/router/router.py +392 -0
- monoco/core/scheduler/__init__.py +63 -0
- monoco/core/scheduler/base.py +152 -0
- monoco/core/scheduler/engines.py +175 -0
- monoco/core/scheduler/events.py +171 -0
- monoco/core/scheduler/local.py +377 -0
- monoco/core/watcher/__init__.py +57 -0
- monoco/core/watcher/base.py +365 -0
- monoco/core/watcher/dropzone.py +152 -0
- monoco/core/watcher/issue.py +303 -0
- monoco/core/watcher/memo.py +200 -0
- monoco/core/watcher/task.py +238 -0
- monoco/daemon/events.py +34 -0
- monoco/daemon/scheduler.py +172 -201
- monoco/daemon/services.py +27 -243
- monoco/features/agent/__init__.py +25 -7
- monoco/features/agent/cli.py +91 -57
- monoco/features/agent/engines.py +31 -170
- monoco/features/agent/worker.py +1 -1
- monoco/features/issue/commands.py +90 -32
- monoco/features/issue/core.py +249 -4
- monoco/features/spike/commands.py +5 -3
- {monoco_toolkit-0.3.11.dist-info → monoco_toolkit-0.3.12.dist-info}/METADATA +1 -1
- {monoco_toolkit-0.3.11.dist-info → monoco_toolkit-0.3.12.dist-info}/RECORD +41 -20
- monoco/features/agent/apoptosis.py +0 -44
- monoco/features/agent/manager.py +0 -127
- monoco/features/agent/session.py +0 -169
- {monoco_toolkit-0.3.11.dist-info → monoco_toolkit-0.3.12.dist-info}/WHEEL +0 -0
- {monoco_toolkit-0.3.11.dist-info → monoco_toolkit-0.3.12.dist-info}/entry_points.txt +0 -0
- {monoco_toolkit-0.3.11.dist-info → monoco_toolkit-0.3.12.dist-info}/licenses/LICENSE +0 -0
monoco/core/automation/__init__.py
@@ -0,0 +1,51 @@
+"""
+Automation Module - Event-driven automation framework.
+
+This module provides:
+- YAML/JSON configuration parsing
+- Trigger configuration management
+- Field change detection
+- Independent Event Handlers for Agent collaboration (FEAT-0162)
+
+Architecture: No Workflow or Orchestration. Each handler is an independent,
+stateless microservice that responds to specific events. Workflow emerges
+from the natural interaction of handlers.
+"""
+
+from .config import (
+    TriggerConfig,
+    AutomationConfig,
+    load_automation_config,
+)
+from .field_watcher import (
+    YAMLFrontMatterExtractor,
+    FieldWatcher,
+    FieldCondition,
+)
+from .handlers import (
+    TaskFileHandler,
+    IssueStageHandler,
+    MemoThresholdHandler,
+    PRCreatedHandler,
+    start_all_handlers,
+    stop_all_handlers,
+)
+
+__all__ = [
+    # Config
+    "TriggerConfig",
+    "AutomationConfig",
+    "load_automation_config",
+    # Field watching
+    "YAMLFrontMatterExtractor",
+    "FieldWatcher",
+    "FieldCondition",
+    # Independent Event Handlers (FEAT-0162)
+    "TaskFileHandler",
+    "IssueStageHandler",
+    "MemoThresholdHandler",
+    "PRCreatedHandler",
+    # Convenience functions
+    "start_all_handlers",
+    "stop_all_handlers",
+]
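For orientation, here is a minimal sketch of how the new public surface is meant to be used. It relies only on names exported above; the config path is illustrative, and the argument-free calls to start_all_handlers() / stop_all_handlers() are an assumption, since their signatures live in handlers.py and are not shown in this hunk.

# Sketch only: wiring the new automation package together.
# load_automation_config() and get_enabled_triggers() match the code added in
# config.py below; the path and the no-argument start_all_handlers() /
# stop_all_handlers() calls are illustrative assumptions.
from monoco.core.automation import (
    load_automation_config,
    start_all_handlers,
    stop_all_handlers,
)

config = load_automation_config("automation.yaml", create_default=True)
for trigger in config.get_enabled_triggers():
    print(trigger.name, trigger.event_type, [a.type for a in trigger.actions])

start_all_handlers()   # assumed to take no required arguments
try:
    ...                # the daemon's scheduler loop would run here
finally:
    stop_all_handlers()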
monoco/core/automation/config.py
@@ -0,0 +1,338 @@
+"""
+Automation Configuration - YAML/JSON configuration for triggers.
+
+Part of the Event Automation Framework.
+Provides configuration schema and loading for automation triggers.
+"""
+
+from __future__ import annotations
+
+import json
+import logging
+from dataclasses import dataclass, field as dataclass_field
+from pathlib import Path
+from typing import Any, Dict, List, Optional, Union
+
+import yaml
+
+from monoco.core.scheduler import AgentEventType
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class ActionConfig:
+    """Configuration for an action."""
+    type: str
+    params: Dict[str, Any] = dataclass_field(default_factory=dict)
+
+    @classmethod
+    def from_dict(cls, data: Dict[str, Any]) -> "ActionConfig":
+        """Create from dict."""
+        return cls(
+            type=data.get("type", ""),
+            params=data.get("params", {}),
+        )
+
+
+@dataclass
+class TriggerConfig:
+    """
+    Configuration for a trigger.
+
+    Attributes:
+        name: Unique trigger name
+        watcher: Watcher type (IssueWatcher, MemoWatcher, etc.)
+        event_type: Event type to listen for
+        condition: Optional condition expression
+        field: Optional field to watch (for field-level triggers)
+        actions: List of actions to execute
+        enabled: Whether trigger is enabled
+    """
+    name: str
+    watcher: str
+    event_type: Optional[str] = None
+    condition: Optional[str] = None
+    field: Optional[str] = None
+    actions: List[ActionConfig] = dataclass_field(default_factory=list)
+    enabled: bool = True
+    priority: int = 0
+
+    @classmethod
+    def from_dict(cls, data: Dict[str, Any]) -> "TriggerConfig":
+        """Create from dict."""
+        actions = [
+            ActionConfig.from_dict(a) if isinstance(a, dict) else ActionConfig(type=a)
+            for a in data.get("actions", [])
+        ]
+
+        return cls(
+            name=data.get("name", "unnamed"),
+            watcher=data.get("watcher", ""),
+            event_type=data.get("event_type"),
+            condition=data.get("condition"),
+            field=data.get("field"),
+            actions=actions,
+            enabled=data.get("enabled", True),
+            priority=data.get("priority", 0),
+        )
+
+    def to_agent_event_type(self) -> Optional[AgentEventType]:
+        """Convert event_type string to AgentEventType."""
+        if not self.event_type:
+            return None
+
+        try:
+            return AgentEventType(self.event_type)
+        except ValueError:
+            # Try to map common patterns
+            mapping = {
+                "issue.created": AgentEventType.ISSUE_CREATED,
+                "issue.updated": AgentEventType.ISSUE_UPDATED,
+                "issue.stage_changed": AgentEventType.ISSUE_STAGE_CHANGED,
+                "issue.status_changed": AgentEventType.ISSUE_STATUS_CHANGED,
+                "memo.created": AgentEventType.MEMO_CREATED,
+                "memo.threshold": AgentEventType.MEMO_THRESHOLD,
+                "session.completed": AgentEventType.SESSION_COMPLETED,
+                "session.failed": AgentEventType.SESSION_FAILED,
+                "pr.created": AgentEventType.PR_CREATED,
+            }
+            return mapping.get(self.event_type)
+
+
+@dataclass
+class AutomationConfig:
+    """
+    Complete automation configuration.
+
+    Attributes:
+        version: Configuration version
+        triggers: List of trigger configurations
+        settings: Global settings
+    """
+    version: str = "1.0"
+    triggers: List[TriggerConfig] = dataclass_field(default_factory=list)
+    settings: Dict[str, Any] = dataclass_field(default_factory=dict)
+
+    @classmethod
+    def from_dict(cls, data: Dict[str, Any]) -> "AutomationConfig":
+        """Create from dict."""
+        triggers = [
+            TriggerConfig.from_dict(t)
+            for t in data.get("triggers", [])
+        ]
+
+        return cls(
+            version=data.get("version", "1.0"),
+            triggers=triggers,
+            settings=data.get("settings", {}),
+        )
+
+    @classmethod
+    def from_yaml(cls, yaml_content: str) -> "AutomationConfig":
+        """Load from YAML string."""
+        data = yaml.safe_load(yaml_content)
+        return cls.from_dict(data or {})
+
+    @classmethod
+    def from_json(cls, json_content: str) -> "AutomationConfig":
+        """Load from JSON string."""
+        data = json.loads(json_content)
+        return cls.from_dict(data)
+
+    def to_yaml(self) -> str:
+        """Export to YAML string."""
+        data = {
+            "version": self.version,
+            "triggers": [
+                {
+                    "name": t.name,
+                    "watcher": t.watcher,
+                    "event_type": t.event_type,
+                    "condition": t.condition,
+                    "field": t.field,
+                    "actions": [
+                        {"type": a.type, "params": a.params}
+                        for a in t.actions
+                    ],
+                    "enabled": t.enabled,
+                    "priority": t.priority,
+                }
+                for t in self.triggers
+            ],
+            "settings": self.settings,
+        }
+        return yaml.dump(data, default_flow_style=False, sort_keys=False)
+
+    def to_json(self) -> str:
+        """Export to JSON string."""
+        data = {
+            "version": self.version,
+            "triggers": [
+                {
+                    "name": t.name,
+                    "watcher": t.watcher,
+                    "event_type": t.event_type,
+                    "condition": t.condition,
+                    "field": t.field,
+                    "actions": [
+                        {"type": a.type, "params": a.params}
+                        for a in t.actions
+                    ],
+                    "enabled": t.enabled,
+                    "priority": t.priority,
+                }
+                for t in self.triggers
+            ],
+            "settings": self.settings,
+        }
+        return json.dumps(data, indent=2)
+
+    def get_enabled_triggers(self) -> List[TriggerConfig]:
+        """Get all enabled triggers."""
+        return [t for t in self.triggers if t.enabled]
+
+    def get_trigger(self, name: str) -> Optional[TriggerConfig]:
+        """Get trigger by name."""
+        for trigger in self.triggers:
+            if trigger.name == name:
+                return trigger
+        return None
+
+    def add_trigger(self, trigger: TriggerConfig) -> None:
+        """Add a trigger."""
+        # Remove existing trigger with same name
+        self.triggers = [t for t in self.triggers if t.name != trigger.name]
+        self.triggers.append(trigger)
+
+    def remove_trigger(self, name: str) -> bool:
+        """Remove a trigger by name."""
+        original_count = len(self.triggers)
+        self.triggers = [t for t in self.triggers if t.name != name]
+        return len(self.triggers) < original_count
+
+
+def load_automation_config(
+    path: Union[str, Path],
+    create_default: bool = False,
+) -> AutomationConfig:
+    """
+    Load automation configuration from file.
+
+    Supports .yaml, .yml, and .json files.
+
+    Args:
+        path: Path to configuration file
+        create_default: If True and file doesn't exist, create default config
+
+    Returns:
+        AutomationConfig instance
+    """
+    path = Path(path)
+
+    if not path.exists():
+        if create_default:
+            default_config = create_default_config()
+            path.write_text(default_config.to_yaml())
+            logger.info(f"Created default automation config at {path}")
+            return default_config
+        else:
+            logger.warning(f"Config file not found: {path}")
+            return AutomationConfig()
+
+    content = path.read_text(encoding="utf-8")
+
+    if path.suffix in (".yaml", ".yml"):
+        return AutomationConfig.from_yaml(content)
+    elif path.suffix == ".json":
+        return AutomationConfig.from_json(content)
+    else:
+        # Try YAML first, then JSON
+        try:
+            return AutomationConfig.from_yaml(content)
+        except yaml.YAMLError:
+            return AutomationConfig.from_json(content)
+
+
+def create_default_config() -> AutomationConfig:
+    """Create a default automation configuration."""
+    return AutomationConfig(
+        version="1.0",
+        triggers=[
+            TriggerConfig(
+                name="memo_threshold",
+                watcher="MemoWatcher",
+                event_type="memo.threshold",
+                condition="pending_count >= 5",
+                actions=[
+                    ActionConfig(
+                        type="SpawnAgentAction",
+                        params={"role": "Architect"},
+                    ),
+                ],
+            ),
+            TriggerConfig(
+                name="issue_doing",
+                watcher="IssueWatcher",
+                event_type="issue.stage_changed",
+                field="stage",
+                condition="value == 'doing'",
+                actions=[
+                    ActionConfig(
+                        type="SpawnAgentAction",
+                        params={"role": "Engineer"},
+                    ),
+                ],
+            ),
+            TriggerConfig(
+                name="issue_completed",
+                watcher="IssueWatcher",
+                event_type="issue.stage_changed",
+                field="stage",
+                condition="value == 'done'",
+                actions=[
+                    ActionConfig(
+                        type="SendIMAction",
+                        params={
+                            "channel": "console",
+                            "message_template": "Issue {issue_id} completed!",
+                        },
+                    ),
+                ],
+            ),
+        ],
+        settings={
+            "default_poll_interval": 5.0,
+            "max_concurrent_actions": 10,
+            "action_timeout": 300,
+        },
+    )
+
+
+def save_automation_config(
+    config: AutomationConfig,
+    path: Union[str, Path],
+    format: str = "yaml",
+) -> None:
+    """
+    Save automation configuration to file.
+
+    Args:
+        config: Configuration to save
+        path: Path to save to
+        format: "yaml" or "json"
+    """
+    path = Path(path)
+    path.parent.mkdir(parents=True, exist_ok=True)
+
+    if format == "yaml":
+        content = config.to_yaml()
+        if path.suffix not in (".yaml", ".yml"):
+            path = path.with_suffix(".yaml")
+    else:
+        content = config.to_json()
+        if path.suffix != ".json":
+            path = path.with_suffix(".json")
+
+    path.write_text(content, encoding="utf-8")
+    logger.info(f"Saved automation config to {path}")
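Taken together, TriggerConfig.from_dict and create_default_config imply a YAML schema along the following lines. The sketch below uses only functions and classes added in this file; the inline YAML document and the output file name are illustrative.

# Sketch: the trigger schema implied by TriggerConfig.from_dict() and
# create_default_config(). Only APIs shown in this diff are used;
# the YAML content and file name are illustrative.
from monoco.core.automation.config import AutomationConfig, save_automation_config

EXAMPLE_YAML = """
version: "1.0"
triggers:
  - name: issue_doing
    watcher: IssueWatcher
    event_type: issue.stage_changed
    field: stage
    condition: "value == 'doing'"
    actions:
      - type: SpawnAgentAction
        params:
          role: Engineer
settings:
  default_poll_interval: 5.0
"""

config = AutomationConfig.from_yaml(EXAMPLE_YAML)
trigger = config.get_trigger("issue_doing")
assert trigger is not None and trigger.enabled
print(trigger.to_agent_event_type())  # "issue.stage_changed" -> AgentEventType.ISSUE_STAGE_CHANGED
save_automation_config(config, "automation.yaml", format="yaml")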
monoco/core/automation/field_watcher.py
@@ -0,0 +1,296 @@
+"""
+Field Watcher - YAML Front Matter field change detection.
+
+Part of the Event Automation Framework.
+Provides field-level change detection for Markdown files with YAML Front Matter.
+"""
+
+from __future__ import annotations
+
+import logging
+import re
+from dataclasses import dataclass, field
+from pathlib import Path
+from typing import Any, Callable, Dict, List, Optional, Union
+
+import yaml
+
+from monoco.core.watcher.base import FieldChange, ChangeType
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class FieldCondition:
+    """
+    Condition for field value matching.
+
+    Attributes:
+        field: Field name to check
+        operator: Comparison operator (eq, ne, gt, lt, gte, lte, in, contains)
+        value: Expected value
+    """
+    field: str
+    operator: str  # eq, ne, gt, lt, gte, lte, in, contains
+    value: Any
+
+    OPERATORS = {
+        "eq": lambda a, b: a == b,
+        "ne": lambda a, b: a != b,
+        "gt": lambda a, b: a is not None and b is not None and a > b,
+        "lt": lambda a, b: a is not None and b is not None and a < b,
+        "gte": lambda a, b: a is not None and b is not None and a >= b,
+        "lte": lambda a, b: a is not None and b is not None and a <= b,
+        "in": lambda a, b: a in b if b is not None else False,
+        "contains": lambda a, b: b in a if a is not None else False,
+        "exists": lambda a, b: a is not None,
+    }
+
+    def evaluate(self, data: Dict[str, Any]) -> bool:
+        """Evaluate condition against data."""
+        actual_value = data.get(self.field)
+
+        op_func = self.OPERATORS.get(self.operator)
+        if not op_func:
+            logger.warning(f"Unknown operator: {self.operator}")
+            return False
+
+        try:
+            return op_func(actual_value, self.value)
+        except Exception as e:
+            logger.debug(f"Condition evaluation failed: {e}")
+            return False
+
+
+class YAMLFrontMatterExtractor:
+    """
+    Extracts YAML Front Matter from Markdown files.
+
+    Provides methods to:
+    - Parse YAML Front Matter from content
+    - Extract specific fields
+    - Detect field changes between versions
+    """
+
+    FRONTMATTER_PATTERN = re.compile(
+        r'^---\s*\n(.*?)\n---\s*\n',
+        re.MULTILINE | re.DOTALL,
+    )
+
+    @classmethod
+    def extract(cls, content: str) -> Optional[Dict[str, Any]]:
+        """
+        Extract YAML Front Matter from markdown content.
+
+        Args:
+            content: Markdown file content
+
+        Returns:
+            Dict of front matter fields, or None if not found
+        """
+        match = cls.FRONTMATTER_PATTERN.match(content)
+        if not match:
+            return None
+
+        yaml_content = match.group(1)
+
+        try:
+            return yaml.safe_load(yaml_content) or {}
+        except yaml.YAMLError as e:
+            logger.warning(f"Failed to parse YAML front matter: {e}")
+            return None
+
+    @classmethod
+    def extract_from_file(cls, file_path: Path) -> Optional[Dict[str, Any]]:
+        """Extract YAML Front Matter from a file."""
+        try:
+            content = file_path.read_text(encoding="utf-8")
+            return cls.extract(content)
+        except Exception as e:
+            logger.debug(f"Could not read {file_path}: {e}")
+            return None
+
+    @classmethod
+    def get_field(cls, content: str, field_name: str) -> Any:
+        """Get a specific field from front matter."""
+        frontmatter = cls.extract(content)
+        if frontmatter is None:
+            return None
+        return frontmatter.get(field_name)
+
+    @classmethod
+    def detect_changes(
+        cls,
+        old_content: str,
+        new_content: str,
+        tracked_fields: Optional[List[str]] = None,
+    ) -> List[FieldChange]:
+        """
+        Detect changes in front matter fields.
+
+        Args:
+            old_content: Previous file content
+            new_content: Current file content
+            tracked_fields: List of fields to track (None = all)
+
+        Returns:
+            List of FieldChange objects
+        """
+        old_fm = cls.extract(old_content) or {}
+        new_fm = cls.extract(new_content) or {}
+
+        changes = []
+
+        # Determine which fields to check
+        if tracked_fields:
+            fields_to_check = tracked_fields
+        else:
+            fields_to_check = list(set(old_fm.keys()) | set(new_fm.keys()))
+
+        for field_name in fields_to_check:
+            old_value = old_fm.get(field_name)
+            new_value = new_fm.get(field_name)
+
+            if old_value != new_value:
+                # Determine change type
+                if old_value is None and new_value is not None:
+                    change_type = ChangeType.CREATED
+                elif old_value is not None and new_value is None:
+                    change_type = ChangeType.DELETED
+                else:
+                    change_type = ChangeType.MODIFIED
+
+                changes.append(FieldChange(
+                    field_name=field_name,
+                    old_value=old_value,
+                    new_value=new_value,
+                    change_type=change_type,
+                ))
+
+        return changes
+
+
+class FieldWatcher:
+    """
+    Watches specific fields for changes and triggers conditions.
+
+    Maintains a cache of field values and emits events when:
+    - Fields change
+    - Conditions are met
+
+    Example:
+        >>> watcher = FieldWatcher(
+        ...     tracked_fields=["status", "stage"],
+        ... )
+        >>>
+        >>> # Add a condition
+        >>> watcher.add_condition(FieldCondition(
+        ...     field="stage",
+        ...     operator="eq",
+        ...     value="doing",
+        ... ))
+        >>>
+        >>> # Check file
+        >>> changes = watcher.check_file(path, content)
+    """
+
+    def __init__(
+        self,
+        tracked_fields: Optional[List[str]] = None,
+    ):
+        self.tracked_fields = tracked_fields
+        self._field_cache: Dict[str, Dict[str, Any]] = {}  # file_path -> field_values
+        self._conditions: List[FieldCondition] = []
+        self._condition_callbacks: List[Callable[[str, FieldCondition, Dict[str, Any]], None]] = []
+
+    def add_condition(self, condition: FieldCondition) -> None:
+        """Add a condition to watch for."""
+        self._conditions.append(condition)
+
+    def add_callback(
+        self,
+        callback: Callable[[str, FieldCondition, Dict[str, Any]], None],
+    ) -> None:
+        """Add a callback for when conditions are met."""
+        self._condition_callbacks.append(callback)
+
+    def check_file(
+        self,
+        file_path: Union[str, Path],
+        content: str,
+    ) -> List[FieldChange]:
+        """
+        Check a file for field changes.
+
+        Args:
+            file_path: Path to the file
+            content: Current file content
+
+        Returns:
+            List of field changes
+        """
+        path_key = str(file_path)
+
+        # Extract current fields
+        current_fm = YAMLFrontMatterExtractor.extract(content) or {}
+
+        if self.tracked_fields:
+            current_fields = {
+                f: current_fm.get(f)
+                for f in self.tracked_fields
+            }
+        else:
+            current_fields = current_fm
+
+        # Get cached fields
+        cached_fields = self._field_cache.get(path_key, {})
+
+        # Detect changes
+        changes = []
+        for field_name, new_value in current_fields.items():
+            old_value = cached_fields.get(field_name)
+            if old_value != new_value:
+                changes.append(FieldChange(
+                    field_name=field_name,
+                    old_value=old_value,
+                    new_value=new_value,
+                    change_type=ChangeType.MODIFIED if old_value is not None else ChangeType.CREATED,
+                ))
+
+        # Update cache
+        self._field_cache[path_key] = current_fields
+
+        # Check conditions
+        if changes:
+            self._check_conditions(path_key, current_fields)
+
+        return changes
+
+    def _check_conditions(self, file_path: str, fields: Dict[str, Any]) -> None:
+        """Check if any conditions are met."""
+        for condition in self._conditions:
+            if condition.evaluate(fields):
+                for callback in self._condition_callbacks:
+                    try:
+                        callback(file_path, condition, fields)
+                    except Exception as e:
+                        logger.error(f"Condition callback error: {e}")
+
+    def get_cached_fields(self, file_path: Union[str, Path]) -> Optional[Dict[str, Any]]:
+        """Get cached fields for a file."""
+        return self._field_cache.get(str(file_path))
+
+    def clear_cache(self, file_path: Optional[Union[str, Path]] = None) -> None:
+        """Clear the field cache."""
+        if file_path:
+            self._field_cache.pop(str(file_path), None)
+        else:
+            self._field_cache.clear()
+
+    def get_stats(self) -> Dict[str, Any]:
+        """Get watcher statistics."""
+        return {
+            "tracked_files": len(self._field_cache),
+            "tracked_fields": self.tracked_fields,
+            "conditions": len(self._conditions),
+        }
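To make the change-detection flow concrete, a small self-contained sketch using only the classes defined above; the Markdown contents, file name, and callback are illustrative.

# Sketch: detecting a front-matter field change and reacting to a condition.
# Uses only YAMLFrontMatterExtractor, FieldWatcher and FieldCondition as
# defined above; the document contents, file name and callback are illustrative.
from monoco.core.automation import (
    FieldCondition,
    FieldWatcher,
    YAMLFrontMatterExtractor,
)

OLD = "---\nstage: draft\nstatus: open\n---\nbody\n"
NEW = "---\nstage: doing\nstatus: open\n---\nbody\n"

# One-shot comparison of two content versions.
for change in YAMLFrontMatterExtractor.detect_changes(OLD, NEW, tracked_fields=["stage"]):
    print(change.field_name, change.old_value, "->", change.new_value)

# Stateful watching with a condition callback.
watcher = FieldWatcher(tracked_fields=["stage"])
watcher.add_condition(FieldCondition(field="stage", operator="eq", value="doing"))
watcher.add_callback(lambda path, cond, fields: print(f"{path}: stage -> {fields['stage']}"))

watcher.check_file("ISSUE-0001.md", OLD)            # primes the cache; condition not met yet
changes = watcher.check_file("ISSUE-0001.md", NEW)  # stage change fires the callback
print([c.field_name for c in changes])              # ['stage']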