monoco-toolkit 0.3.10__py3-none-any.whl → 0.3.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- monoco/__main__.py +8 -0
- monoco/core/artifacts/__init__.py +16 -0
- monoco/core/artifacts/manager.py +575 -0
- monoco/core/artifacts/models.py +161 -0
- monoco/core/automation/__init__.py +51 -0
- monoco/core/automation/config.py +338 -0
- monoco/core/automation/field_watcher.py +296 -0
- monoco/core/automation/handlers.py +723 -0
- monoco/core/config.py +31 -4
- monoco/core/executor/__init__.py +38 -0
- monoco/core/executor/agent_action.py +254 -0
- monoco/core/executor/git_action.py +303 -0
- monoco/core/executor/im_action.py +309 -0
- monoco/core/executor/pytest_action.py +218 -0
- monoco/core/git.py +38 -0
- monoco/core/hooks/context.py +74 -13
- monoco/core/ingestion/__init__.py +20 -0
- monoco/core/ingestion/discovery.py +248 -0
- monoco/core/ingestion/watcher.py +343 -0
- monoco/core/ingestion/worker.py +436 -0
- monoco/core/loader.py +633 -0
- monoco/core/registry.py +34 -25
- monoco/core/router/__init__.py +55 -0
- monoco/core/router/action.py +341 -0
- monoco/core/router/router.py +392 -0
- monoco/core/scheduler/__init__.py +63 -0
- monoco/core/scheduler/base.py +152 -0
- monoco/core/scheduler/engines.py +175 -0
- monoco/core/scheduler/events.py +171 -0
- monoco/core/scheduler/local.py +377 -0
- monoco/core/skills.py +119 -80
- monoco/core/watcher/__init__.py +57 -0
- monoco/core/watcher/base.py +365 -0
- monoco/core/watcher/dropzone.py +152 -0
- monoco/core/watcher/issue.py +303 -0
- monoco/core/watcher/memo.py +200 -0
- monoco/core/watcher/task.py +238 -0
- monoco/daemon/app.py +77 -1
- monoco/daemon/commands.py +10 -0
- monoco/daemon/events.py +34 -0
- monoco/daemon/mailroom_service.py +196 -0
- monoco/daemon/models.py +1 -0
- monoco/daemon/scheduler.py +207 -0
- monoco/daemon/services.py +27 -58
- monoco/daemon/triggers.py +55 -0
- monoco/features/agent/__init__.py +25 -7
- monoco/features/agent/adapter.py +17 -7
- monoco/features/agent/cli.py +91 -57
- monoco/features/agent/engines.py +31 -170
- monoco/{core/resources/en/skills/monoco_core → features/agent/resources/en/skills/monoco_atom_core}/SKILL.md +2 -2
- monoco/features/agent/resources/en/skills/{flow_engineer → monoco_workflow_agent_engineer}/SKILL.md +2 -2
- monoco/features/agent/resources/en/skills/{flow_manager → monoco_workflow_agent_manager}/SKILL.md +2 -2
- monoco/features/agent/resources/en/skills/{flow_planner → monoco_workflow_agent_planner}/SKILL.md +2 -2
- monoco/features/agent/resources/en/skills/{flow_reviewer → monoco_workflow_agent_reviewer}/SKILL.md +2 -2
- monoco/features/agent/resources/{roles/role-engineer.yaml → zh/roles/monoco_role_engineer.yaml} +3 -3
- monoco/features/agent/resources/{roles/role-manager.yaml → zh/roles/monoco_role_manager.yaml} +8 -8
- monoco/features/agent/resources/{roles/role-planner.yaml → zh/roles/monoco_role_planner.yaml} +8 -8
- monoco/features/agent/resources/{roles/role-reviewer.yaml → zh/roles/monoco_role_reviewer.yaml} +8 -8
- monoco/{core/resources/zh/skills/monoco_core → features/agent/resources/zh/skills/monoco_atom_core}/SKILL.md +2 -2
- monoco/features/agent/resources/zh/skills/{flow_engineer → monoco_workflow_agent_engineer}/SKILL.md +2 -2
- monoco/features/agent/resources/zh/skills/{flow_manager → monoco_workflow_agent_manager}/SKILL.md +2 -2
- monoco/features/agent/resources/zh/skills/{flow_planner → monoco_workflow_agent_planner}/SKILL.md +2 -2
- monoco/features/agent/resources/zh/skills/{flow_reviewer → monoco_workflow_agent_reviewer}/SKILL.md +2 -2
- monoco/features/agent/worker.py +1 -1
- monoco/features/artifact/__init__.py +0 -0
- monoco/features/artifact/adapter.py +33 -0
- monoco/features/artifact/resources/zh/AGENTS.md +14 -0
- monoco/features/artifact/resources/zh/skills/monoco_atom_artifact/SKILL.md +278 -0
- monoco/features/glossary/adapter.py +18 -7
- monoco/features/glossary/resources/en/skills/{monoco_glossary → monoco_atom_glossary}/SKILL.md +2 -2
- monoco/features/glossary/resources/zh/skills/{monoco_glossary → monoco_atom_glossary}/SKILL.md +2 -2
- monoco/features/hooks/__init__.py +11 -0
- monoco/features/hooks/adapter.py +67 -0
- monoco/features/hooks/commands.py +309 -0
- monoco/features/hooks/core.py +441 -0
- monoco/features/hooks/resources/ADDING_HOOKS.md +234 -0
- monoco/features/i18n/adapter.py +18 -5
- monoco/features/i18n/core.py +482 -17
- monoco/features/i18n/resources/en/skills/{monoco_i18n → monoco_atom_i18n}/SKILL.md +2 -2
- monoco/features/i18n/resources/en/skills/{i18n_scan_workflow → monoco_workflow_i18n_scan}/SKILL.md +2 -2
- monoco/features/i18n/resources/zh/skills/{monoco_i18n → monoco_atom_i18n}/SKILL.md +2 -2
- monoco/features/i18n/resources/zh/skills/{i18n_scan_workflow → monoco_workflow_i18n_scan}/SKILL.md +2 -2
- monoco/features/issue/adapter.py +19 -6
- monoco/features/issue/commands.py +352 -20
- monoco/features/issue/core.py +475 -16
- monoco/features/issue/engine/machine.py +114 -4
- monoco/features/issue/linter.py +60 -5
- monoco/features/issue/models.py +2 -2
- monoco/features/issue/resources/en/AGENTS.md +109 -0
- monoco/features/issue/resources/en/skills/{monoco_issue → monoco_atom_issue}/SKILL.md +2 -2
- monoco/features/issue/resources/en/skills/{issue_create_workflow → monoco_workflow_issue_creation}/SKILL.md +2 -2
- monoco/features/issue/resources/en/skills/{issue_develop_workflow → monoco_workflow_issue_development}/SKILL.md +2 -2
- monoco/features/issue/resources/en/skills/{issue_lifecycle_workflow → monoco_workflow_issue_management}/SKILL.md +2 -2
- monoco/features/issue/resources/en/skills/{issue_refine_workflow → monoco_workflow_issue_refinement}/SKILL.md +2 -2
- monoco/features/issue/resources/hooks/post-checkout.sh +39 -0
- monoco/features/issue/resources/hooks/pre-commit.sh +41 -0
- monoco/features/issue/resources/hooks/pre-push.sh +35 -0
- monoco/features/issue/resources/zh/AGENTS.md +109 -0
- monoco/features/issue/resources/zh/skills/{monoco_issue → monoco_atom_issue_lifecycle}/SKILL.md +2 -2
- monoco/features/issue/resources/zh/skills/{issue_create_workflow → monoco_workflow_issue_creation}/SKILL.md +2 -2
- monoco/features/issue/resources/zh/skills/{issue_develop_workflow → monoco_workflow_issue_development}/SKILL.md +2 -2
- monoco/features/issue/resources/zh/skills/{issue_lifecycle_workflow → monoco_workflow_issue_management}/SKILL.md +2 -2
- monoco/features/issue/resources/zh/skills/{issue_refine_workflow → monoco_workflow_issue_refinement}/SKILL.md +2 -2
- monoco/features/issue/validator.py +101 -1
- monoco/features/memo/adapter.py +21 -8
- monoco/features/memo/cli.py +103 -10
- monoco/features/memo/core.py +178 -92
- monoco/features/memo/models.py +53 -0
- monoco/features/memo/resources/en/skills/{monoco_memo → monoco_atom_memo}/SKILL.md +2 -2
- monoco/features/memo/resources/en/skills/{note_processing_workflow → monoco_workflow_note_processing}/SKILL.md +2 -2
- monoco/features/memo/resources/zh/skills/{monoco_memo → monoco_atom_memo}/SKILL.md +2 -2
- monoco/features/memo/resources/zh/skills/{note_processing_workflow → monoco_workflow_note_processing}/SKILL.md +2 -2
- monoco/features/spike/adapter.py +18 -5
- monoco/features/spike/commands.py +5 -3
- monoco/features/spike/resources/en/skills/{monoco_spike → monoco_atom_spike}/SKILL.md +2 -2
- monoco/features/spike/resources/en/skills/{research_workflow → monoco_workflow_research}/SKILL.md +2 -2
- monoco/features/spike/resources/zh/skills/{monoco_spike → monoco_atom_spike}/SKILL.md +2 -2
- monoco/features/spike/resources/zh/skills/{research_workflow → monoco_workflow_research}/SKILL.md +2 -2
- monoco/main.py +38 -1
- {monoco_toolkit-0.3.10.dist-info → monoco_toolkit-0.3.12.dist-info}/METADATA +7 -1
- monoco_toolkit-0.3.12.dist-info/RECORD +202 -0
- monoco/features/agent/apoptosis.py +0 -44
- monoco/features/agent/manager.py +0 -91
- monoco/features/agent/session.py +0 -121
- monoco_toolkit-0.3.10.dist-info/RECORD +0 -156
- /monoco/{core → features/agent}/resources/en/AGENTS.md +0 -0
- /monoco/{core → features/agent}/resources/zh/AGENTS.md +0 -0
- {monoco_toolkit-0.3.10.dist-info → monoco_toolkit-0.3.12.dist-info}/WHEEL +0 -0
- {monoco_toolkit-0.3.10.dist-info → monoco_toolkit-0.3.12.dist-info}/entry_points.txt +0 -0
- {monoco_toolkit-0.3.10.dist-info → monoco_toolkit-0.3.12.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,161 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Artifact data models for Monoco Artifact System.
|
|
3
|
+
|
|
4
|
+
Defines the metadata structure, enums, and data classes for artifact management.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
import hashlib
|
|
10
|
+
import json
|
|
11
|
+
from datetime import datetime, timezone
|
|
12
|
+
from enum import Enum
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
from typing import Any, Optional
|
|
15
|
+
|
|
16
|
+
from pydantic import BaseModel, Field, field_validator
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class ArtifactSourceType(str, Enum):
    """How an artifact came into existence."""

    # Produced by an AI model.
    GENERATED = "generated"
    # Supplied directly by a user.
    UPLOADED = "uploaded"
    # Pulled in from an external source.
    IMPORTED = "imported"
    # Built from another, pre-existing artifact.
    DERIVED = "derived"
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class ArtifactStatus(str, Enum):
    """Where an artifact currently sits in its lifecycle."""

    # Normal, usable artifact.
    ACTIVE = "active"
    # Kept for the record but no longer in active use.
    ARCHIVED = "archived"
    # Passed its expiration timestamp.
    EXPIRED = "expired"
    # Marked for removal.
    DELETED = "deleted"
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
class ArtifactMetadata(BaseModel):
    """
    Metadata record for an artifact in the manifest.

    Each artifact is uniquely identified by its content hash (SHA256), which
    doubles as its content-addressable-storage (CAS) address.
    The manifest.jsonl contains one JSON line per artifact metadata.
    """

    artifact_id: str = Field(
        description="Unique identifier (ULID or UUID) for the artifact instance"
    )
    content_hash: str = Field(
        description="SHA256 hash of the artifact content (CAS address)"
    )
    source_type: ArtifactSourceType = Field(description="How the artifact was created")
    status: ArtifactStatus = Field(
        default=ArtifactStatus.ACTIVE, description="Current lifecycle status"
    )
    created_at: datetime = Field(
        default_factory=lambda: datetime.now(timezone.utc), description="Creation timestamp (UTC)"
    )
    updated_at: datetime = Field(
        default_factory=lambda: datetime.now(timezone.utc), description="Last update timestamp (UTC)"
    )
    expires_at: Optional[datetime] = Field(
        default=None, description="Optional expiration timestamp"
    )
    content_type: str = Field(
        default="application/octet-stream", description="MIME type of the content"
    )
    size_bytes: int = Field(default=0, description="Size of the artifact in bytes")
    original_filename: Optional[str] = Field(
        default=None, description="Original filename if uploaded"
    )
    source_url: Optional[str] = Field(
        default=None, description="Source URL if imported from external"
    )
    parent_artifact_id: Optional[str] = Field(
        default=None, description="Parent artifact ID if this is derived"
    )
    tags: list[str] = Field(default_factory=list, description="User-defined tags")
    metadata: dict[str, Any] = Field(
        default_factory=dict, description="Additional metadata key-value pairs"
    )

    @field_validator("content_hash")
    @classmethod
    def validate_content_hash(cls, v: str) -> str:
        """Validate that content_hash is a valid 64-character SHA256 hex string.

        Raises:
            ValueError: if the value has the wrong length or non-hex characters.
        """
        if len(v) != 64:
            raise ValueError("content_hash must be a 64-character SHA256 hex string")
        try:
            int(v, 16)
        except ValueError:
            # Re-raise with a clearer message; suppress the int() traceback.
            raise ValueError("content_hash must be a valid hex string") from None
        return v

    def to_jsonl_line(self) -> str:
        """Serialize to a single newline-terminated JSON line for manifest.jsonl."""
        return json.dumps(self.model_dump(mode="json"), ensure_ascii=False) + "\n"

    @classmethod
    def from_jsonl_line(cls, line: str) -> ArtifactMetadata:
        """Deserialize from a JSON line (surrounding whitespace is ignored)."""
        data = json.loads(line.strip())
        return cls.model_validate(data)

    @property
    def is_expired(self) -> bool:
        """Check if the artifact has expired.

        NOTE(review): assumes expires_at is timezone-aware; comparing a naive
        datetime against the aware "now" would raise TypeError — confirm inputs.
        """
        if self.expires_at is None:
            return False
        return datetime.now(timezone.utc) > self.expires_at

    @property
    def cas_path_components(self) -> tuple[str, str, str]:
        """
        Generate CAS storage path components from content_hash.

        Returns (prefix1, prefix2, filename) for tiered directory structure.
        Example: hash='abc123...' -> ('ab', 'c1', 'abc123...')

        Raises:
            ValueError: if content_hash is too short to derive both prefixes.
        """
        if len(self.content_hash) < 4:
            raise ValueError("content_hash too short for path generation")
        return (
            self.content_hash[:2],
            self.content_hash[2:4],
            self.content_hash,
        )

    @property
    def cas_relative_path(self) -> str:
        """Get the relative CAS path ('<p1>/<p2>/<hash>') for this artifact."""
        p1, p2, filename = self.cas_path_components
        # Bug fix: the path must end with the content-hash filename; the
        # previous version emitted a literal placeholder string instead,
        # leaving `filename` unused and producing an unusable path.
        return f"{p1}/{p2}/{filename}"
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
def compute_content_hash(content: bytes) -> str:
    """
    Compute the SHA256 hash of content for CAS addressing.

    Args:
        content: Raw bytes of the artifact content

    Returns:
        64-character lowercase hex string of the SHA256 hash
    """
    digest = hashlib.sha256()
    digest.update(content)
    return digest.hexdigest()
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
def compute_file_hash(file_path: Path) -> str:
    """
    Compute the SHA256 hash of a file for CAS addressing.

    Streams the file in 8 KiB chunks so large files are hashed without
    being loaded into memory all at once.

    Args:
        file_path: Path to the file to hash

    Returns:
        64-character lowercase hex string of the SHA256 hash
    """
    digest = hashlib.sha256()
    with open(file_path, "rb") as stream:
        while chunk := stream.read(8192):
            digest.update(chunk)
    return digest.hexdigest()
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Automation Module - Event-driven automation framework.
|
|
3
|
+
|
|
4
|
+
This module provides:
|
|
5
|
+
- YAML/JSON configuration parsing
|
|
6
|
+
- Trigger configuration management
|
|
7
|
+
- Field change detection
|
|
8
|
+
- Independent Event Handlers for Agent collaboration (FEAT-0162)
|
|
9
|
+
|
|
10
|
+
Architecture: No Workflow or Orchestration. Each handler is an independent,
|
|
11
|
+
stateless microservice that responds to specific events. Workflow emerges
|
|
12
|
+
from the natural interaction of handlers.
|
|
13
|
+
"""
|
|
14
|
+
|
|
15
|
+
from .config import (
|
|
16
|
+
TriggerConfig,
|
|
17
|
+
AutomationConfig,
|
|
18
|
+
load_automation_config,
|
|
19
|
+
)
|
|
20
|
+
from .field_watcher import (
|
|
21
|
+
YAMLFrontMatterExtractor,
|
|
22
|
+
FieldWatcher,
|
|
23
|
+
FieldCondition,
|
|
24
|
+
)
|
|
25
|
+
from .handlers import (
|
|
26
|
+
TaskFileHandler,
|
|
27
|
+
IssueStageHandler,
|
|
28
|
+
MemoThresholdHandler,
|
|
29
|
+
PRCreatedHandler,
|
|
30
|
+
start_all_handlers,
|
|
31
|
+
stop_all_handlers,
|
|
32
|
+
)
|
|
33
|
+
|
|
34
|
+
__all__ = [
|
|
35
|
+
# Config
|
|
36
|
+
"TriggerConfig",
|
|
37
|
+
"AutomationConfig",
|
|
38
|
+
"load_automation_config",
|
|
39
|
+
# Field watching
|
|
40
|
+
"YAMLFrontMatterExtractor",
|
|
41
|
+
"FieldWatcher",
|
|
42
|
+
"FieldCondition",
|
|
43
|
+
# Independent Event Handlers (FEAT-0162)
|
|
44
|
+
"TaskFileHandler",
|
|
45
|
+
"IssueStageHandler",
|
|
46
|
+
"MemoThresholdHandler",
|
|
47
|
+
"PRCreatedHandler",
|
|
48
|
+
# Convenience functions
|
|
49
|
+
"start_all_handlers",
|
|
50
|
+
"stop_all_handlers",
|
|
51
|
+
]
|
|
@@ -0,0 +1,338 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Automation Configuration - YAML/JSON configuration for triggers.
|
|
3
|
+
|
|
4
|
+
Part of the Event Automation Framework.
|
|
5
|
+
Provides configuration schema and loading for automation triggers.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
import json
|
|
11
|
+
import logging
|
|
12
|
+
from dataclasses import dataclass, field as dataclass_field
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
from typing import Any, Dict, List, Optional, Union
|
|
15
|
+
|
|
16
|
+
import yaml
|
|
17
|
+
|
|
18
|
+
from monoco.core.scheduler import AgentEventType
|
|
19
|
+
|
|
20
|
+
logger = logging.getLogger(__name__)
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
@dataclass
class ActionConfig:
    """Declarative description of a single action: its type plus parameters."""

    type: str
    params: Dict[str, Any] = dataclass_field(default_factory=dict)

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "ActionConfig":
        """Build an ActionConfig from a plain mapping; missing keys get defaults."""
        action_type = data.get("type", "")
        action_params = data.get("params", {})
        return cls(type=action_type, params=action_params)
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
@dataclass
class TriggerConfig:
    """
    Configuration for a single automation trigger.

    Attributes:
        name: Unique trigger name
        watcher: Watcher type (IssueWatcher, MemoWatcher, etc.)
        event_type: Event type to listen for
        condition: Optional condition expression
        field: Optional field to watch (for field-level triggers)
        actions: List of actions to execute
        enabled: Whether trigger is enabled
        priority: Ordering hint among triggers
    """

    name: str
    watcher: str
    event_type: Optional[str] = None
    condition: Optional[str] = None
    field: Optional[str] = None
    actions: List[ActionConfig] = dataclass_field(default_factory=list)
    enabled: bool = True
    priority: int = 0

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "TriggerConfig":
        """Build a TriggerConfig from a plain mapping; missing keys get defaults."""
        parsed_actions = []
        for raw_action in data.get("actions", []):
            if isinstance(raw_action, dict):
                parsed_actions.append(ActionConfig.from_dict(raw_action))
            else:
                # Bare string entries are treated as an action type alone.
                parsed_actions.append(ActionConfig(type=raw_action))

        return cls(
            name=data.get("name", "unnamed"),
            watcher=data.get("watcher", ""),
            event_type=data.get("event_type"),
            condition=data.get("condition"),
            field=data.get("field"),
            actions=parsed_actions,
            enabled=data.get("enabled", True),
            priority=data.get("priority", 0),
        )

    def to_agent_event_type(self) -> Optional[AgentEventType]:
        """Map the event_type string onto an AgentEventType, or None if unknown."""
        if not self.event_type:
            return None

        try:
            return AgentEventType(self.event_type)
        except ValueError:
            pass

        # The string is not a raw enum value; fall back to well-known
        # dotted event names.
        aliases = {
            "issue.created": AgentEventType.ISSUE_CREATED,
            "issue.updated": AgentEventType.ISSUE_UPDATED,
            "issue.stage_changed": AgentEventType.ISSUE_STAGE_CHANGED,
            "issue.status_changed": AgentEventType.ISSUE_STATUS_CHANGED,
            "memo.created": AgentEventType.MEMO_CREATED,
            "memo.threshold": AgentEventType.MEMO_THRESHOLD,
            "session.completed": AgentEventType.SESSION_COMPLETED,
            "session.failed": AgentEventType.SESSION_FAILED,
            "pr.created": AgentEventType.PR_CREATED,
        }
        return aliases.get(self.event_type)
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
@dataclass
class AutomationConfig:
    """
    Complete automation configuration.

    Attributes:
        version: Configuration version
        triggers: List of trigger configurations
        settings: Global settings
    """

    version: str = "1.0"
    triggers: List[TriggerConfig] = dataclass_field(default_factory=list)
    settings: Dict[str, Any] = dataclass_field(default_factory=dict)

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "AutomationConfig":
        """Build an AutomationConfig from a plain mapping."""
        return cls(
            version=data.get("version", "1.0"),
            triggers=[TriggerConfig.from_dict(t) for t in data.get("triggers", [])],
            settings=data.get("settings", {}),
        )

    @classmethod
    def from_yaml(cls, yaml_content: str) -> "AutomationConfig":
        """Load from a YAML string; an empty document yields all defaults."""
        return cls.from_dict(yaml.safe_load(yaml_content) or {})

    @classmethod
    def from_json(cls, json_content: str) -> "AutomationConfig":
        """Load from a JSON string."""
        return cls.from_dict(json.loads(json_content))

    def _as_dict(self) -> Dict[str, Any]:
        """Plain-dict representation shared by to_yaml and to_json."""
        return {
            "version": self.version,
            "triggers": [
                {
                    "name": t.name,
                    "watcher": t.watcher,
                    "event_type": t.event_type,
                    "condition": t.condition,
                    "field": t.field,
                    "actions": [
                        {"type": a.type, "params": a.params} for a in t.actions
                    ],
                    "enabled": t.enabled,
                    "priority": t.priority,
                }
                for t in self.triggers
            ],
            "settings": self.settings,
        }

    def to_yaml(self) -> str:
        """Export to YAML string."""
        return yaml.dump(self._as_dict(), default_flow_style=False, sort_keys=False)

    def to_json(self) -> str:
        """Export to JSON string."""
        return json.dumps(self._as_dict(), indent=2)

    def get_enabled_triggers(self) -> List[TriggerConfig]:
        """Get all enabled triggers."""
        return [t for t in self.triggers if t.enabled]

    def get_trigger(self, name: str) -> Optional[TriggerConfig]:
        """Get trigger by name, or None if no trigger matches."""
        for candidate in self.triggers:
            if candidate.name == name:
                return candidate
        return None

    def add_trigger(self, trigger: TriggerConfig) -> None:
        """Add a trigger, replacing any existing trigger with the same name."""
        self.triggers = [t for t in self.triggers if t.name != trigger.name]
        self.triggers.append(trigger)

    def remove_trigger(self, name: str) -> bool:
        """Remove a trigger by name; return True if one was removed."""
        before = len(self.triggers)
        self.triggers = [t for t in self.triggers if t.name != name]
        return len(self.triggers) < before
|
|
213
|
+
|
|
214
|
+
|
|
215
|
+
def load_automation_config(
    path: Union[str, Path],
    create_default: bool = False,
) -> AutomationConfig:
    """
    Load automation configuration from file.

    Supports .yaml, .yml, and .json files; any other extension is parsed
    as YAML first with JSON as a fallback.

    Args:
        path: Path to configuration file
        create_default: If True and file doesn't exist, create default config

    Returns:
        AutomationConfig instance
    """
    path = Path(path)

    if not path.exists():
        if not create_default:
            logger.warning(f"Config file not found: {path}")
            return AutomationConfig()
        default_config = create_default_config()
        path.write_text(default_config.to_yaml())
        logger.info(f"Created default automation config at {path}")
        return default_config

    content = path.read_text(encoding="utf-8")

    suffix = path.suffix
    if suffix in (".yaml", ".yml"):
        return AutomationConfig.from_yaml(content)
    if suffix == ".json":
        return AutomationConfig.from_json(content)

    # Unknown extension: try YAML first, then JSON.
    try:
        return AutomationConfig.from_yaml(content)
    except yaml.YAMLError:
        return AutomationConfig.from_json(content)
|
|
255
|
+
|
|
256
|
+
|
|
257
|
+
def create_default_config() -> AutomationConfig:
    """Create a default automation configuration with three sample triggers."""
    # Spawn an Architect once enough memos have piled up.
    memo_trigger = TriggerConfig(
        name="memo_threshold",
        watcher="MemoWatcher",
        event_type="memo.threshold",
        condition="pending_count >= 5",
        actions=[
            ActionConfig(
                type="SpawnAgentAction",
                params={"role": "Architect"},
            ),
        ],
    )
    # Spawn an Engineer when an issue enters the 'doing' stage.
    doing_trigger = TriggerConfig(
        name="issue_doing",
        watcher="IssueWatcher",
        event_type="issue.stage_changed",
        field="stage",
        condition="value == 'doing'",
        actions=[
            ActionConfig(
                type="SpawnAgentAction",
                params={"role": "Engineer"},
            ),
        ],
    )
    # Announce completion when an issue reaches 'done'.
    done_trigger = TriggerConfig(
        name="issue_completed",
        watcher="IssueWatcher",
        event_type="issue.stage_changed",
        field="stage",
        condition="value == 'done'",
        actions=[
            ActionConfig(
                type="SendIMAction",
                params={
                    "channel": "console",
                    "message_template": "Issue {issue_id} completed!",
                },
            ),
        ],
    )
    return AutomationConfig(
        version="1.0",
        triggers=[memo_trigger, doing_trigger, done_trigger],
        settings={
            "default_poll_interval": 5.0,
            "max_concurrent_actions": 10,
            "action_timeout": 300,
        },
    )
|
|
310
|
+
|
|
311
|
+
|
|
312
|
+
def save_automation_config(
    config: AutomationConfig,
    path: Union[str, Path],
    format: str = "yaml",
) -> None:
    """
    Save automation configuration to file.

    The file suffix is normalized to match *format* and parent directories
    are created as needed.

    Args:
        config: Configuration to save
        path: Path to save to
        format: "yaml" or "json"
    """
    path = Path(path)
    path.parent.mkdir(parents=True, exist_ok=True)

    if format == "yaml":
        serialized = config.to_yaml()
        if path.suffix not in (".yaml", ".yml"):
            path = path.with_suffix(".yaml")
    else:
        # Any non-"yaml" format falls through to JSON.
        serialized = config.to_json()
        if path.suffix != ".json":
            path = path.with_suffix(".json")

    path.write_text(serialized, encoding="utf-8")
    logger.info(f"Saved automation config to {path}")
|