monoco-toolkit 0.2.7__py3-none-any.whl → 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- monoco/cli/project.py +35 -31
- monoco/cli/workspace.py +26 -16
- monoco/core/agent/__init__.py +0 -2
- monoco/core/agent/action.py +44 -20
- monoco/core/agent/adapters.py +20 -16
- monoco/core/agent/protocol.py +5 -4
- monoco/core/agent/state.py +21 -21
- monoco/core/config.py +90 -33
- monoco/core/execution.py +21 -16
- monoco/core/feature.py +8 -5
- monoco/core/git.py +61 -30
- monoco/core/hooks.py +57 -0
- monoco/core/injection.py +47 -44
- monoco/core/integrations.py +50 -35
- monoco/core/lsp.py +12 -1
- monoco/core/output.py +35 -16
- monoco/core/registry.py +3 -2
- monoco/core/setup.py +190 -124
- monoco/core/skills.py +121 -107
- monoco/core/state.py +12 -10
- monoco/core/sync.py +85 -56
- monoco/core/telemetry.py +10 -6
- monoco/core/workspace.py +26 -19
- monoco/daemon/app.py +123 -79
- monoco/daemon/commands.py +14 -13
- monoco/daemon/models.py +11 -3
- monoco/daemon/reproduce_stats.py +8 -8
- monoco/daemon/services.py +32 -33
- monoco/daemon/stats.py +59 -40
- monoco/features/config/commands.py +38 -25
- monoco/features/i18n/adapter.py +4 -5
- monoco/features/i18n/commands.py +83 -49
- monoco/features/i18n/core.py +94 -54
- monoco/features/issue/adapter.py +6 -7
- monoco/features/issue/commands.py +500 -260
- monoco/features/issue/core.py +504 -293
- monoco/features/issue/domain/lifecycle.py +33 -23
- monoco/features/issue/domain/models.py +71 -38
- monoco/features/issue/domain/parser.py +92 -69
- monoco/features/issue/domain/workspace.py +19 -16
- monoco/features/issue/engine/__init__.py +3 -3
- monoco/features/issue/engine/config.py +18 -25
- monoco/features/issue/engine/machine.py +72 -39
- monoco/features/issue/engine/models.py +4 -2
- monoco/features/issue/linter.py +326 -111
- monoco/features/issue/lsp/definition.py +26 -19
- monoco/features/issue/migration.py +45 -34
- monoco/features/issue/models.py +30 -13
- monoco/features/issue/monitor.py +24 -8
- monoco/features/issue/resources/en/AGENTS.md +5 -0
- monoco/features/issue/resources/en/SKILL.md +30 -2
- monoco/features/issue/resources/zh/AGENTS.md +5 -0
- monoco/features/issue/resources/zh/SKILL.md +26 -1
- monoco/features/issue/validator.py +417 -172
- monoco/features/skills/__init__.py +0 -1
- monoco/features/skills/core.py +24 -18
- monoco/features/spike/adapter.py +4 -5
- monoco/features/spike/commands.py +51 -38
- monoco/features/spike/core.py +24 -16
- monoco/main.py +34 -21
- {monoco_toolkit-0.2.7.dist-info → monoco_toolkit-0.3.0.dist-info}/METADATA +10 -3
- monoco_toolkit-0.3.0.dist-info/RECORD +84 -0
- monoco_toolkit-0.2.7.dist-info/RECORD +0 -83
- {monoco_toolkit-0.2.7.dist-info → monoco_toolkit-0.3.0.dist-info}/WHEEL +0 -0
- {monoco_toolkit-0.2.7.dist-info → monoco_toolkit-0.3.0.dist-info}/entry_points.txt +0 -0
- {monoco_toolkit-0.2.7.dist-info → monoco_toolkit-0.3.0.dist-info}/licenses/LICENSE +0 -0
monoco/features/issue/lsp/definition.py
CHANGED

@@ -2,7 +2,8 @@ from pathlib import Path
 from typing import Optional, List
 from monoco.core.lsp import Location, Position, Range
 from ..domain.parser import MarkdownParser
-from ..domain.workspace import WorkspaceSymbolIndex
+from ..domain.workspace import WorkspaceSymbolIndex
+

 class DefinitionProvider:
     def __init__(self, workspace_root: Path):
@@ -18,25 +19,29 @@ class DefinitionProvider:
             return []

         content = file_path.read_text()
-
+
         # 1. Parse the document to find spans
         # We only need to find the span at the specific line
         issue = MarkdownParser.parse(content, path=str(file_path))
-
+
         target_span = None
         for block in issue.body.blocks:
             # Check if position is within block
-
+            # Note: block.line_start is inclusive, line_end is exclusive for content
             if block.line_start <= position.line < block.line_end:
-
-
-
-
-
-
+                for span in block.spans:
+                    if span.range.start.line == position.line:
+                        # Check character range
+                        if (
+                            span.range.start.character
+                            <= position.character
+                            <= span.range.end.character
+                        ):
+                            target_span = span
+                            break
                 if target_span:
                     break
-
+
         if not target_span:
             return []

@@ -45,28 +50,30 @@
         issue_id = target_span.metadata.get("issue_id")
         if issue_id:
             # Resolve using Workspace Index
-            location = self.index.resolve(
+            location = self.index.resolve(
+                issue_id, context_project=self._get_context_project(file_path)
+            )
             if location:
                 return [
                     Location(
                         uri=f"file://{location.file_path}",
                         range=Range(
                             start=Position(line=0, character=0),
-                            end=Position(line=0, character=0)
-                        )
+                            end=Position(line=0, character=0),
+                        ),
                     )
                 ]
-
+
         return []

     def _get_context_project(self, file_path: Path) -> Optional[str]:
         # Simple heuristic: look for parent directory name if it's a known project structure?
-        # Or rely on configuration.
+        # Or rely on configuration.
         # For now, let's assume the index handles context if passed, or we pass None.
         # Actually resolving context project from file path is tricky without config loaded for that specific root.
         # Let's try to deduce from path relative to workspace root.
         try:
-
-
+            rel = file_path.relative_to(self.workspace_root)
+            return rel.parts[0]  # First dir is likely project name in a workspace
         except ValueError:
-
+            return "local"
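
The reformatted hunk above makes the hit-test explicit: a span is selected only when the cursor sits on the span's start line and its column falls inside the span's character range. A minimal sketch of that check, using simplified stand-ins for the `Position`/`Range` types that live in `monoco.core.lsp`:

```python
# Minimal sketch of the span hit-test expressed in the diff above.
# Position/Range here are simplified stand-ins, not the monoco.core.lsp types.
from dataclasses import dataclass


@dataclass
class Position:
    line: int
    character: int


@dataclass
class Range:
    start: Position
    end: Position


def hits(span_range: Range, position: Position) -> bool:
    # A span matches only when the cursor is on the span's start line
    # and its column falls within the character range (inclusive).
    return (
        span_range.start.line == position.line
        and span_range.start.character
        <= position.character
        <= span_range.end.character
    )


# Example: a [[FEAT-0001]] link spanning columns 4..16 on line 12.
link = Range(Position(12, 4), Position(12, 16))
assert hits(link, Position(12, 10))
assert not hits(link, Position(13, 10))
```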

monoco/features/issue/migration.py
CHANGED

@@ -1,11 +1,7 @@
 import os
 import re
 import yaml
-import hashlib
-import secrets
 from pathlib import Path
-from typing import List, Dict, Any
-from datetime import datetime
 from .models import generate_uid

 # Migration Mappings
@@ -21,20 +17,13 @@ DIR_MAP = {
     "features": "Features",
     "chores": "Chores",
     "fixes": "Fixes",
-    "epics": "Epics"
+    "epics": "Epics",
 }

-TYPE_MAP = {
-
-
-    "bug": "fix"
-}
+TYPE_MAP = {"story": "feature", "task": "chore", "bug": "fix"}
+
+ID_PREFIX_MAP = {"STORY": "FEAT", "TASK": "CHORE", "BUG": "FIX"}

-ID_PREFIX_MAP = {
-    "STORY": "FEAT",
-    "TASK": "CHORE",
-    "BUG": "FIX"
-}

 def migrate_issues_directory(issues_dir: Path):
     """
@@ -48,7 +37,7 @@ def migrate_issues_directory(issues_dir: Path):
         old_path = issues_dir / old_name
         if old_path.exists():
             new_path = issues_dir / new_name
-
+
             # Case sensitivity check for some filesystems
             same_inode = False
             try:
@@ -64,6 +53,7 @@ def migrate_issues_directory(issues_dir: Path):

         if new_path.exists():
             import shutil
+
             for item in old_path.iterdir():
                 dest = new_path / item.name
                 if dest.exists() and item.is_dir():
@@ -81,20 +71,37 @@ def migrate_issues_directory(issues_dir: Path):
         subdir = issues_dir / subdir_name
         if not subdir.exists():
             continue
-
+
         for file_path in subdir.rglob("*.md"):
             content = file_path.read_text(encoding="utf-8")
             new_content = content
-
+
             # Replace Type in Frontmatter
             for old_type, new_type in TYPE_MAP.items():
-                new_content = re.sub(
-
+                new_content = re.sub(
+                    rf"^type:\s*{old_type}",
+                    f"type: {new_type}",
+                    new_content,
+                    flags=re.IGNORECASE | re.MULTILINE,
+                )
+
             # Replace ID Prefixes
             for old_prefix, new_prefix in ID_PREFIX_MAP.items():
-                new_content = new_content.replace(
-
-
+                new_content = new_content.replace(
+                    f"[[{old_prefix}-", f"[[{new_prefix}-"
+                )
+                new_content = re.sub(
+                    rf"^id: {old_prefix}-",
+                    f"id: {new_prefix}-",
+                    new_content,
+                    flags=re.MULTILINE,
+                )
+                new_content = re.sub(
+                    rf"^parent: {old_prefix}-",
+                    f"parent: {new_prefix}-",
+                    new_content,
+                    flags=re.MULTILINE,
+                )
                 new_content = new_content.replace(f"{old_prefix}-", f"{new_prefix}-")

             # Structural Updates (UID, Stage)
@@ -104,18 +111,20 @@ def migrate_issues_directory(issues_dir: Path):
             try:
                 data = yaml.safe_load(yaml_str) or {}
                 changed = False
-
-                if
-                    data[
+
+                if "uid" not in data:
+                    data["uid"] = generate_uid()
                     changed = True
-
-                if
-                    data[
+
+                if "stage" in data and data["stage"] == "todo":
+                    data["stage"] = "draft"
                     changed = True
-
+
                 if changed:
-
-
+                    new_yaml = yaml.dump(data, sort_keys=False, allow_unicode=True)
+                    new_content = new_content.replace(
+                        match.group(1), "\n" + new_yaml
+                    )
             except yaml.YAMLError:
                 pass

@@ -127,8 +136,10 @@ def migrate_issues_directory(issues_dir: Path):
             new_filename = filename
             for old_prefix, new_prefix in ID_PREFIX_MAP.items():
                 if filename.startswith(f"{old_prefix}-"):
-                    new_filename = filename.replace(
+                    new_filename = filename.replace(
+                        f"{old_prefix}-", f"{new_prefix}-", 1
+                    )
                     break
-
+
             if new_filename != filename:
                 file_path.rename(file_path.parent / new_filename)
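
For reference, the rewrite rules in the hunks above (TYPE_MAP plus the ID-prefix substitutions) can be exercised on a small front matter sample. The sample text below is invented purely to illustrate the mapping; it is not part of the package:

```python
# Illustration only: applying the TYPE_MAP / ID_PREFIX_MAP rewrites shown above
# to a made-up front matter block.
import re

TYPE_MAP = {"story": "feature", "task": "chore", "bug": "fix"}
ID_PREFIX_MAP = {"STORY": "FEAT", "TASK": "CHORE", "BUG": "FIX"}

sample = """id: STORY-0001
type: story
parent: STORY-0002
Depends on [[TASK-0003]].
"""

# Rename legacy types in the frontmatter.
for old_type, new_type in TYPE_MAP.items():
    sample = re.sub(
        rf"^type:\s*{old_type}", f"type: {new_type}", sample,
        flags=re.IGNORECASE | re.MULTILINE,
    )

# Rename legacy ID prefixes in wiki-links, id/parent fields, and remaining text.
for old_prefix, new_prefix in ID_PREFIX_MAP.items():
    sample = sample.replace(f"[[{old_prefix}-", f"[[{new_prefix}-")
    sample = re.sub(rf"^id: {old_prefix}-", f"id: {new_prefix}-", sample, flags=re.MULTILINE)
    sample = re.sub(rf"^parent: {old_prefix}-", f"parent: {new_prefix}-", sample, flags=re.MULTILINE)
    sample = sample.replace(f"{old_prefix}-", f"{new_prefix}-")

print(sample)
# id: FEAT-0001
# type: feature
# parent: FEAT-0002
# Depends on [[CHORE-0003]].
```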

monoco/features/issue/models.py
CHANGED

@@ -10,6 +10,7 @@ class IssueID:
     """
     Helper for parsing Issue IDs that might be namespaced (e.g. 'toolkit::FEAT-0001').
     """
+
     def __init__(self, raw: str):
         self.raw = raw
         if "::" in raw:
@@ -22,7 +23,7 @@ class IssueID:
         if self.namespace:
             return f"{self.namespace}::{self.local_id}"
         return self.local_id
-
+
     def __repr__(self):
         return f"IssueID({self.raw})"

@@ -34,9 +35,11 @@ class IssueID:
         """Check if this ID matches another ID string."""
         return str(self) == other_id or (self.is_local and self.local_id == other_id)

+
 def current_time() -> datetime:
     return datetime.now().replace(microsecond=0)

+
 def generate_uid() -> str:
     """
     Generate a globally unique 6-character short hash for issue identity.
@@ -55,11 +58,13 @@ class IssueType(str, Enum):
     CHORE = "chore"
     FIX = "fix"

+
 class IssueStatus(str, Enum):
     OPEN = "open"
     CLOSED = "closed"
     BACKLOG = "backlog"

+
 class IssueStage(str, Enum):
     DRAFT = "draft"
     DOING = "doing"
@@ -67,43 +72,48 @@ class IssueStage(str, Enum):
     DONE = "done"
     FREEZED = "freezed"

+
 class IssueSolution(str, Enum):
     IMPLEMENTED = "implemented"
     CANCELLED = "cancelled"
     WONTFIX = "wontfix"
     DUPLICATE = "duplicate"

+
 class IsolationType(str, Enum):
     BRANCH = "branch"
     WORKTREE = "worktree"

+
 class IssueIsolation(BaseModel):
     type: str
     ref: str  # Git branch name
     path: Optional[str] = None  # Worktree path (relative to repo root or absolute)
     created_at: datetime = Field(default_factory=current_time)

+
 class IssueAction(BaseModel):
     label: str
     target_status: Optional[str] = None
     target_stage: Optional[str] = None
     target_solution: Optional[str] = None
     icon: Optional[str] = None
-
+
     # Generic execution extensions
     command: Optional[str] = None
     params: Dict[str, Any] = {}

+
 class IssueMetadata(BaseModel):
     model_config = {"extra": "allow"}
-
+
     id: str
     uid: Optional[str] = None  # Global unique identifier for cross-project identity
     type: str
     status: str = "open"
     stage: Optional[str] = None
     title: str
-
+
     # Time Anchors
     created_at: datetime = Field(default_factory=current_time)
     opened_at: Optional[datetime] = None
@@ -116,15 +126,16 @@ class IssueMetadata(BaseModel):
     isolation: Optional[IssueIsolation] = None
     dependencies: List[str] = []
     related: List[str] = []
+    domains: List[str] = []
     tags: List[str] = []
+    files: List[str] = []
     path: Optional[str] = None  # Absolute path to the issue file
-
+
     # Proxy UI Actions (Excluded from file persistence)
     # Modified: Remove exclude=True to allow API/CLI inspection. Must be manually excluded during YAML Dump.
     actions: List[IssueAction] = Field(default=[])

-
-    @model_validator(mode='before')
+    @model_validator(mode="before")
     @classmethod
     def normalize_fields(cls, v: Any) -> Any:
         if isinstance(v, dict):
@@ -138,10 +149,13 @@ class IssueMetadata(BaseModel):
                 "Parent": "parent",
                 "Solution": "solution",
                 "Sprint": "sprint",
+                "Domains": "domains",
             }
             for old_k, new_k in field_map.items():
                 if old_k in v and new_k not in v:
-                    v[new_k] = v[
+                    v[new_k] = v[
+                        old_k
+                    ]  # Don't pop yet to avoid mutation issues if used elsewhere, or pop if safe.
             # Pydantic v2 mode='before' is usually a copy if we want to be safe, but let's just add it.

             # Normalize type and status to lowercase for compatibility
@@ -158,18 +172,21 @@ class IssueMetadata(BaseModel):
                 v["stage"] = "draft"
         return v

-    @model_validator(mode=
-    def validate_lifecycle(self) ->
+    @model_validator(mode="after")
+    def validate_lifecycle(self) -> "IssueMetadata":
         # Logic Definition:
         # status: backlog -> stage: freezed
         # status: closed -> stage: done
         # status: open -> stage: draft | doing | review | done (default draft)
-
+
         # NOTE: We do NOT auto-correct state here anymore to allow Linter to detect inconsistencies.
         # Auto-correction should be applied explicitly by 'create' or 'update' commands via core logic.
-
+
         return self

+
 class IssueDetail(IssueMetadata):
     body: str = ""
-    raw_content: Optional[
+    raw_content: Optional[
+        str
+    ] = None  # Full file content including frontmatter for editing
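
The IssueID fragments above show namespaced IDs of the form `toolkit::FEAT-0001`. The sketch below is a simplified stand-in that reproduces only the behavior visible in the hunks (`__str__` and `matches`); the exact split logic inside `__init__` and the form of `is_local` are assumptions:

```python
# Simplified stand-in for IssueID, based on the fragments visible above.
# Assumption: __init__ splits on the first "::" and is_local means "no namespace".
from typing import Optional


class IssueID:
    def __init__(self, raw: str):
        self.raw = raw
        self.namespace: Optional[str] = None
        if "::" in raw:
            self.namespace, self.local_id = raw.split("::", 1)
        else:
            self.local_id = raw

    @property
    def is_local(self) -> bool:
        return self.namespace is None

    def __str__(self) -> str:
        if self.namespace:
            return f"{self.namespace}::{self.local_id}"
        return self.local_id

    def matches(self, other_id: str) -> bool:
        """Check if this ID matches another ID string."""
        return str(self) == other_id or (self.is_local and self.local_id == other_id)


# A namespaced ID only matches its fully qualified form; a local ID matches as-is.
assert IssueID("toolkit::FEAT-0001").matches("toolkit::FEAT-0001")
assert IssueID("FEAT-0001").matches("FEAT-0001")
```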

monoco/features/issue/monitor.py
CHANGED

@@ -2,15 +2,21 @@ import re
 import asyncio
 import logging
 from pathlib import Path
-from typing import Callable, Awaitable
+from typing import Callable, Awaitable

 from watchdog.observers import Observer
 from watchdog.events import FileSystemEventHandler

 logger = logging.getLogger("monoco.features.issue.monitor")

+
 class IssueEventHandler(FileSystemEventHandler):
-    def __init__(
+    def __init__(
+        self,
+        loop,
+        on_upsert: Callable[[dict], Awaitable[None]],
+        on_delete: Callable[[dict], Awaitable[None]],
+    ):
         self.loop = loop
         self.on_upsert = on_upsert
         self.on_delete = on_delete
@@ -19,16 +25,17 @@ class IssueEventHandler(FileSystemEventHandler):
         if not path_str.endswith(".md"):
             return
         asyncio.run_coroutine_threadsafe(self._handle_upsert(path_str), self.loop)
-
+
     async def _handle_upsert(self, path_str: str):
         try:
             from monoco.features.issue.core import parse_issue
+
             path = Path(path_str)
             if not path.exists():
                 return
             issue = parse_issue(path)
             if issue:
-                await self.on_upsert(issue.model_dump(mode=
+                await self.on_upsert(issue.model_dump(mode="json"))
         except Exception as e:
             logger.error(f"Error handling upsert for {path_str}: {e}")

@@ -54,7 +61,7 @@ class IssueEventHandler(FileSystemEventHandler):
     def on_modified(self, event):
         if not event.is_directory:
             self._process_upsert(event.src_path)
-
+
     def on_deleted(self, event):
         if not event.is_directory:
             self._process_delete(event.src_path)
@@ -64,11 +71,18 @@
         self._process_delete(event.src_path)
         self._process_upsert(event.dest_path)

+
 class IssueMonitor:
     """
     Monitor the Issues directory for changes using Watchdog and trigger callbacks.
     """
-
+
+    def __init__(
+        self,
+        issues_root: Path,
+        on_upsert: Callable[[dict], Awaitable[None]],
+        on_delete: Callable[[dict], Awaitable[None]],
+    ):
         self.issues_root = issues_root
         self.on_upsert = on_upsert
         self.on_delete = on_delete
@@ -78,9 +92,11 @@ class IssueMonitor:
     async def start(self):
         self.loop = asyncio.get_running_loop()
         event_handler = IssueEventHandler(self.loop, self.on_upsert, self.on_delete)
-
+
         if not self.issues_root.exists():
-            logger.warning(
+            logger.warning(
+                f"Issues root {self.issues_root} does not exist. creating..."
+            )
             self.issues_root.mkdir(parents=True, exist_ok=True)

         self.observer.schedule(event_handler, str(self.issues_root), recursive=True)
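
The handler above bridges watchdog's observer thread into asyncio: callbacks fire on the worker thread, so coroutines are scheduled onto the event loop captured in `start()` via `asyncio.run_coroutine_threadsafe`. A self-contained sketch of that same pattern, with illustrative names rather than Monoco's own:

```python
# Minimal sketch of the watchdog -> asyncio bridge used above.
# watchdog invokes handlers on its observer thread, so coroutines must be
# handed to the event loop with run_coroutine_threadsafe.
import asyncio
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler


class MarkdownHandler(FileSystemEventHandler):
    def __init__(self, loop: asyncio.AbstractEventLoop):
        self.loop = loop

    def on_modified(self, event):
        if not event.is_directory and event.src_path.endswith(".md"):
            # Called from the observer thread; hop back onto the event loop.
            asyncio.run_coroutine_threadsafe(self.handle(event.src_path), self.loop)

    async def handle(self, path: str):
        print(f"changed: {path}")


async def main():
    loop = asyncio.get_running_loop()
    observer = Observer()
    observer.schedule(MarkdownHandler(loop), ".", recursive=True)
    observer.start()
    try:
        await asyncio.sleep(60)  # keep the loop alive while watching
    finally:
        observer.stop()
        observer.join()


# asyncio.run(main())
```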

monoco/features/issue/resources/en/AGENTS.md
CHANGED

@@ -8,8 +8,13 @@ System for managing tasks using `monoco issue`.
 - **Status**: `monoco issue open|close|backlog <id>`
 - **Check**: `monoco issue lint` (Must run after manual edits)
 - **Lifecycle**: `monoco issue start|submit|delete <id>`
+- **Sync Context**: `monoco issue sync-files [id]` (Update file tracking)
 - **Structure**: `Issues/{CapitalizedPluralType}/{lowercase_status}/` (e.g. `Issues/Features/open/`). Do not deviate.
 - **Rules**:
   1. **Heading**: Must have `## {ID}: {Title}` (matches metadata).
   2. **Checkboxes**: Min 2 using `- [ ]`, `- [x]`, `- [-]`, `- [/]`.
   3. **Review**: `## Review Comments` section required for Review/Done stages.
+  4. **Environment Policies**:
+     - Must use `monoco issue start --branch`.
+     - 🛑 **NO** direct coding on `main`/`master` (Linter will fail).
+     - Must update `files` field after coding (via `sync-files` or manual).

monoco/features/issue/resources/en/SKILL.md
CHANGED

@@ -23,6 +23,28 @@ Use this skill to create and manage **Issues** (Universal Atoms) in Monoco proje
 - **🧹 CHORE**: Engineering maintenance, no direct user value. Mindset: Builder.
 - **🐞 FIX**: Correcting deviations. Mindset: Debugger.

+## Workflow Policies
+
+### 1. Strict Git Workflow
+
+Monoco enforces a **Feature Branch** model.
+
+- **Start**: Agents **MUST** use `monoco issue start <ID> --branch` to start working.
+  - This creates and switches to a standard `feat/<ID>-<slug>` branch.
+  - **Do NOT** manually create branches using `git checkout -b`.
+- **Protected Main**: **NO** direct modification on `main`, `master`, or `production` branches. Linter will block this.
+- **Submit**: Run `monoco issue submit <ID>` when work is ready for review.
+  - This moves the issue to `Review` stage and generates a Delivery Report.
+  - **Note**: This does **not** merge the code. You (or the user) must handle the Merge/PR process.
+
+### 2. File Tracking
+
+Agents must track modified files to maintain Self-Contained Context.
+
+- **Mechanism**: Issue Ticket Front Matter contains a `files: []` field.
+- **Automated (Recommended)**: Run `monoco issue sync-files` inside the Feature Branch. It diffs against the base branch.
+- **Manual (Fallback)**: If working without branches, Agent MUST **actively** append modified paths to the `files` list.
+
 ## Guidelines

 ### Directory Structure & Naming
@@ -45,7 +67,6 @@ Issues are validated via `monoco issue lint`. key constraints:
 Use `monoco issue`:

 1. **Create**: `monoco issue create <type> --title "..."`
-
    - Params: `--parent <id>`, `--dependency <id>`, `--related <id>`, `--sprint <id>`, `--tags <tag>`

 2. **Transition**: `monoco issue open/close/backlog <id>`
@@ -56,7 +77,8 @@ Use `monoco issue`:

 5. **Modification**: `monoco issue start/submit/delete <id>`

-6. **
+6. **Sync**: `monoco issue sync-files [id]` (Sync code changes to Issue file)
+
 7. **Validation**: `monoco issue lint` (Enforces compliance)

 ## Validation Rules (FEAT-0082)
@@ -85,3 +107,9 @@ The `status` (folder) and `stage` (front matter) must be compatible:
 - **open**: Draft, Doing, Review, Done
 - **backlog**: Draft, Doing, Review
 - **closed**: Done
+
+### 5. Environment Policy
+
+Linter includes environment-aware guardrails:
+
+- 🛑 **Dirty Main Protection**: Fails if uncommitted changes are detected on protected branches (`main`/`master`).
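
The SKILL text above only states that `sync-files` diffs the feature branch against the base branch to populate `files:`. As an illustration (not Monoco's implementation, and `main` as the base branch is an assumption), such a list could be computed with plain git:

```python
# Illustration only: one way a sync-files-style helper could compute the `files:`
# list described above. Assumes "main" is the base branch; this is NOT Monoco's code.
import subprocess


def changed_files(base: str = "main") -> list[str]:
    # `git diff --name-only base...HEAD` lists files touched since branching off base.
    result = subprocess.run(
        ["git", "diff", "--name-only", f"{base}...HEAD"],
        capture_output=True, text=True, check=True,
    )
    return [line for line in result.stdout.splitlines() if line]


# These paths would then be written into the issue's front matter `files: []` field.
print(changed_files())
```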

monoco/features/issue/resources/zh/AGENTS.md
CHANGED

@@ -8,8 +8,13 @@
 - **状态**: `monoco issue open|close|backlog <id>`
 - **检查**: `monoco issue lint` (手动编辑后必须运行)
 - **生命周期**: `monoco issue start|submit|delete <id>`
+- **上下文同步**: `monoco issue sync-files [id]` (更新文件追踪)
 - **结构**: `Issues/{CapitalizedPluralType}/{lowercase_status}/` (如 `Issues/Features/open/`)。
 - **强制规则**:
   1. **标题**: 必须包含 `## {ID}: {Title}` 标题(与 Front Matter 一致)。
   2. **内容**: 至少 2 个 Checkbox,使用 `- [ ]`, `- [x]`, `- [-]`, `- [/]`。
   3. **评审**: `review`/`done` 阶段必须包含 `## Review Comments` 章节且内容不为空。
+  4. **环境策略**:
+     - 必须使用 `monoco issue start --branch` 创建 Feature 分支。
+     - 🛑 **禁止**直接在 `main`/`master` 分支修改代码 (Linter 会报错)。
+     - 修改代码后**必须**更新 `files` 字段(通过 `sync-files` 或手动)。

monoco/features/issue/resources/zh/SKILL.md
CHANGED

@@ -57,6 +57,24 @@ Monoco 不仅仅复刻 Jira,而是基于 **"思维模式 (Mindset)"** 重新
 - **次要**: `CHORE` (工程维护/支撑) - 通常独立存在。
 - **原子性原则**: Feature = Design + Dev + Test + Doc + i18n。它们是一体的。

+## 工作流策略 (Workflow Policies)
+
+### 1. 严格 Git 工作流 (Strict Git Workflow)
+
+Monoco 强制采用 **Feature Branch** 模式。
+
+- **Start**: 必须使用 `monoco issue start <ID> --branch` 启动任务。这会自动创建 `feat/<ID>-<slug>` 分支。
+- **禁止主干开发**: **严禁** 直接在 `main`, `master`, `production` 分支上修改代码。Linter 会拦截此类行为。
+- **Submit**: 在提交 PR 前,运行 `monoco issue submit <ID>` 进行清理和预发布检查。
+
+### 2. 文件追踪 (File Tracking)
+
+为了保证上下文的自包含性 (Self-Contained Context),Agent 必须记录修改过的文件。
+
+- **机制**: Issue Ticket 的 Front Matter 包含 `files: []` 字段。
+- **自动化 (推荐)**: 在 Feature Branch 中运行 `monoco issue sync-files`。它会自动对比当前分支与 Base 分支的差异并更新列表。
+- **手动 (备选)**: 如果进行非分支开发,Agent 必须**主动**将修改的文件路径写入 `files` 列表。
+
 ## 准则 (Guidelines)

 ### 目录结构
@@ -81,7 +99,8 @@ Monoco 不仅仅复刻 Jira,而是基于 **"思维模式 (Mindset)"** 重新

 5. **Modification**: `monoco issue start/submit/delete <id>`

-6. **
+6. **Sync**: `monoco issue sync-files [id]` (同步代码变更到 Issue 文件)
+
 7. **Validation**: `monoco issue lint` (强制执行合规性检查)

 ## 合规与结构校验 (Validation Rules)
@@ -111,3 +130,9 @@ Monoco 不仅仅复刻 Jira,而是基于 **"思维模式 (Mindset)"** 重新
 - **open**: Draft, Doing, Review, Done
 - **backlog**: Draft, Doing, Review
 - **closed**: Done
+
+### 5. 环境策略 (Environment Policy)
+
+Linter 包含环境感知防护:
+
+- 🛑 **Dirty Main Protection**: 当检测到处于受保护分支 (`main`/`master`) 且存在未提交变更时,Lint 将失败并阻止操作。