monoco-toolkit 0.3.10__py3-none-any.whl → 0.3.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- monoco/__main__.py +8 -0
- monoco/core/artifacts/__init__.py +16 -0
- monoco/core/artifacts/manager.py +575 -0
- monoco/core/artifacts/models.py +161 -0
- monoco/core/config.py +31 -4
- monoco/core/git.py +23 -0
- monoco/core/ingestion/__init__.py +20 -0
- monoco/core/ingestion/discovery.py +248 -0
- monoco/core/ingestion/watcher.py +343 -0
- monoco/core/ingestion/worker.py +436 -0
- monoco/core/loader.py +633 -0
- monoco/core/registry.py +34 -25
- monoco/core/skills.py +119 -80
- monoco/daemon/app.py +77 -1
- monoco/daemon/commands.py +10 -0
- monoco/daemon/mailroom_service.py +196 -0
- monoco/daemon/models.py +1 -0
- monoco/daemon/scheduler.py +236 -0
- monoco/daemon/services.py +185 -0
- monoco/daemon/triggers.py +55 -0
- monoco/features/agent/adapter.py +17 -7
- monoco/features/agent/apoptosis.py +4 -4
- monoco/features/agent/manager.py +41 -5
- monoco/{core/resources/en/skills/monoco_core → features/agent/resources/en/skills/monoco_atom_core}/SKILL.md +2 -2
- monoco/features/agent/resources/en/skills/{flow_engineer → monoco_workflow_agent_engineer}/SKILL.md +2 -2
- monoco/features/agent/resources/en/skills/{flow_manager → monoco_workflow_agent_manager}/SKILL.md +2 -2
- monoco/features/agent/resources/en/skills/{flow_planner → monoco_workflow_agent_planner}/SKILL.md +2 -2
- monoco/features/agent/resources/en/skills/{flow_reviewer → monoco_workflow_agent_reviewer}/SKILL.md +2 -2
- monoco/features/agent/resources/{roles/role-engineer.yaml → zh/roles/monoco_role_engineer.yaml} +3 -3
- monoco/features/agent/resources/{roles/role-manager.yaml → zh/roles/monoco_role_manager.yaml} +8 -8
- monoco/features/agent/resources/{roles/role-planner.yaml → zh/roles/monoco_role_planner.yaml} +8 -8
- monoco/features/agent/resources/{roles/role-reviewer.yaml → zh/roles/monoco_role_reviewer.yaml} +8 -8
- monoco/{core/resources/zh/skills/monoco_core → features/agent/resources/zh/skills/monoco_atom_core}/SKILL.md +2 -2
- monoco/features/agent/resources/zh/skills/{flow_engineer → monoco_workflow_agent_engineer}/SKILL.md +2 -2
- monoco/features/agent/resources/zh/skills/{flow_manager → monoco_workflow_agent_manager}/SKILL.md +2 -2
- monoco/features/agent/resources/zh/skills/{flow_planner → monoco_workflow_agent_planner}/SKILL.md +2 -2
- monoco/features/agent/resources/zh/skills/{flow_reviewer → monoco_workflow_agent_reviewer}/SKILL.md +2 -2
- monoco/features/agent/session.py +59 -11
- monoco/features/artifact/__init__.py +0 -0
- monoco/features/artifact/adapter.py +33 -0
- monoco/features/artifact/resources/zh/AGENTS.md +14 -0
- monoco/features/artifact/resources/zh/skills/monoco_atom_artifact/SKILL.md +278 -0
- monoco/features/glossary/adapter.py +18 -7
- monoco/features/glossary/resources/en/skills/{monoco_glossary → monoco_atom_glossary}/SKILL.md +2 -2
- monoco/features/glossary/resources/zh/skills/{monoco_glossary → monoco_atom_glossary}/SKILL.md +2 -2
- monoco/features/hooks/__init__.py +11 -0
- monoco/features/hooks/adapter.py +67 -0
- monoco/features/hooks/commands.py +309 -0
- monoco/features/hooks/core.py +441 -0
- monoco/features/hooks/resources/ADDING_HOOKS.md +234 -0
- monoco/features/i18n/adapter.py +18 -5
- monoco/features/i18n/core.py +482 -17
- monoco/features/i18n/resources/en/skills/{monoco_i18n → monoco_atom_i18n}/SKILL.md +2 -2
- monoco/features/i18n/resources/en/skills/{i18n_scan_workflow → monoco_workflow_i18n_scan}/SKILL.md +2 -2
- monoco/features/i18n/resources/zh/skills/{monoco_i18n → monoco_atom_i18n}/SKILL.md +2 -2
- monoco/features/i18n/resources/zh/skills/{i18n_scan_workflow → monoco_workflow_i18n_scan}/SKILL.md +2 -2
- monoco/features/issue/adapter.py +19 -6
- monoco/features/issue/commands.py +281 -7
- monoco/features/issue/core.py +227 -13
- monoco/features/issue/engine/machine.py +114 -4
- monoco/features/issue/linter.py +60 -5
- monoco/features/issue/models.py +2 -2
- monoco/features/issue/resources/en/AGENTS.md +109 -0
- monoco/features/issue/resources/en/skills/{monoco_issue → monoco_atom_issue}/SKILL.md +2 -2
- monoco/features/issue/resources/en/skills/{issue_create_workflow → monoco_workflow_issue_creation}/SKILL.md +2 -2
- monoco/features/issue/resources/en/skills/{issue_develop_workflow → monoco_workflow_issue_development}/SKILL.md +2 -2
- monoco/features/issue/resources/en/skills/{issue_lifecycle_workflow → monoco_workflow_issue_management}/SKILL.md +2 -2
- monoco/features/issue/resources/en/skills/{issue_refine_workflow → monoco_workflow_issue_refinement}/SKILL.md +2 -2
- monoco/features/issue/resources/hooks/post-checkout.sh +39 -0
- monoco/features/issue/resources/hooks/pre-commit.sh +41 -0
- monoco/features/issue/resources/hooks/pre-push.sh +35 -0
- monoco/features/issue/resources/zh/AGENTS.md +109 -0
- monoco/features/issue/resources/zh/skills/{monoco_issue → monoco_atom_issue_lifecycle}/SKILL.md +2 -2
- monoco/features/issue/resources/zh/skills/{issue_create_workflow → monoco_workflow_issue_creation}/SKILL.md +2 -2
- monoco/features/issue/resources/zh/skills/{issue_develop_workflow → monoco_workflow_issue_development}/SKILL.md +2 -2
- monoco/features/issue/resources/zh/skills/{issue_lifecycle_workflow → monoco_workflow_issue_management}/SKILL.md +2 -2
- monoco/features/issue/resources/zh/skills/{issue_refine_workflow → monoco_workflow_issue_refinement}/SKILL.md +2 -2
- monoco/features/issue/validator.py +101 -1
- monoco/features/memo/adapter.py +21 -8
- monoco/features/memo/cli.py +103 -10
- monoco/features/memo/core.py +178 -92
- monoco/features/memo/models.py +53 -0
- monoco/features/memo/resources/en/skills/{monoco_memo → monoco_atom_memo}/SKILL.md +2 -2
- monoco/features/memo/resources/en/skills/{note_processing_workflow → monoco_workflow_note_processing}/SKILL.md +2 -2
- monoco/features/memo/resources/zh/skills/{monoco_memo → monoco_atom_memo}/SKILL.md +2 -2
- monoco/features/memo/resources/zh/skills/{note_processing_workflow → monoco_workflow_note_processing}/SKILL.md +2 -2
- monoco/features/spike/adapter.py +18 -5
- monoco/features/spike/resources/en/skills/{monoco_spike → monoco_atom_spike}/SKILL.md +2 -2
- monoco/features/spike/resources/en/skills/{research_workflow → monoco_workflow_research}/SKILL.md +2 -2
- monoco/features/spike/resources/zh/skills/{monoco_spike → monoco_atom_spike}/SKILL.md +2 -2
- monoco/features/spike/resources/zh/skills/{research_workflow → monoco_workflow_research}/SKILL.md +2 -2
- monoco/main.py +38 -1
- {monoco_toolkit-0.3.10.dist-info → monoco_toolkit-0.3.11.dist-info}/METADATA +7 -1
- monoco_toolkit-0.3.11.dist-info/RECORD +181 -0
- monoco_toolkit-0.3.10.dist-info/RECORD +0 -156
- /monoco/{core → features/agent}/resources/en/AGENTS.md +0 -0
- /monoco/{core → features/agent}/resources/zh/AGENTS.md +0 -0
- {monoco_toolkit-0.3.10.dist-info → monoco_toolkit-0.3.11.dist-info}/WHEEL +0 -0
- {monoco_toolkit-0.3.10.dist-info → monoco_toolkit-0.3.11.dist-info}/entry_points.txt +0 -0
- {monoco_toolkit-0.3.10.dist-info → monoco_toolkit-0.3.11.dist-info}/licenses/LICENSE +0 -0
monoco/features/issue/core.py
CHANGED
@@ -108,6 +108,8 @@ def _serialize_metadata(metadata: IssueMetadata) -> str:
         data["domains"] = []
     if "files" not in data:
         data["files"] = []
+    if "solution" not in data:
+        data["solution"] = None

     # Custom YAML Dumper to preserve None as 'null' and order
     # Helper to order keys: id, uid, type, status, stage, title, ... graph ...
@@ -147,6 +149,10 @@ def _serialize_metadata(metadata: IssueMetadata) -> str:
     if "criticality" in data:
         ordered_data["criticality"] = data["criticality"]

+    # Add solution if present (for template generation)
+    if "solution" in data:
+        ordered_data["solution"] = data["solution"]
+
     # Add remaining
     for k, v in data.items():
         if k not in ordered_data:
@@ -161,6 +167,10 @@ def _serialize_metadata(metadata: IssueMetadata) -> str:
     yaml_header = yaml_header.replace(
         "parent: null", "parent: null # <EPIC-ID> Optional"
     )
+    if "solution" in ordered_data and ordered_data["solution"] is None:
+        yaml_header = yaml_header.replace(
+            "solution: null", "solution: null # implemented, cancelled, wontfix, duplicate"
+        )

     return yaml_header

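PyYAML cannot attach comments to dumped keys, so the serializer post-processes the header string. A minimal standalone sketch of that mechanism (the field set here is illustrative, not the real issue template):

```python
import yaml

# Mirrors the str.replace trick in _serialize_metadata above: dump first,
# then append guidance comments to the "null" placeholders.
data = {"parent": None, "solution": None}
header = yaml.dump(data, sort_keys=False, allow_unicode=True, default_flow_style=False)
header = header.replace("parent: null", "parent: null # <EPIC-ID> Optional")
header = header.replace(
    "solution: null", "solution: null # implemented, cancelled, wontfix, duplicate"
)
print(header)
# parent: null # <EPIC-ID> Optional
# solution: null # implemented, cancelled, wontfix, duplicate
```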
@@ -189,15 +199,19 @@ def parse_issue_detail(file_path: Path) -> Optional[IssueDetail]:
         return None


-def find_next_id(issue_type: str, issues_root: Path) -> str:
+def find_next_id(issue_type: str, issues_root: Path, include_archived: bool = False) -> str:
     prefix_map = get_prefix_map(issues_root)
     prefix = prefix_map.get(issue_type, "ISSUE")
     pattern = re.compile(rf"{prefix}-(\d+)")
     max_id = 0

     base_dir = get_issue_dir(issue_type, issues_root)
-    # Scan all subdirs: open, backlog, closed
-    for status_dir in ["open", "backlog", "closed"]:
+    # Scan all subdirs: open, backlog, closed, archived (optional)
+    status_dirs = ["open", "backlog", "closed"]
+    if include_archived:
+        status_dirs.append("archived")
+
+    for status_dir in status_dirs:
         d = base_dir / status_dir
         if d.exists():
             for f in d.rglob("*.md"):
@@ -388,7 +402,15 @@ def get_available_actions(meta: IssueMetadata) -> List[Any]:
     return actions


-def find_issue_path(issues_root: Path, issue_id: str) -> Optional[Path]:
+def find_issue_path(issues_root: Path, issue_id: str, include_archived: bool = True) -> Optional[Path]:
+    """
+    Find the path of an issue file.
+
+    Args:
+        issues_root: Root directory of issues
+        issue_id: Issue ID to find
+        include_archived: Whether to search in archived directory (default: True for find operations)
+    """
     parsed = IssueID(issue_id)

     if not parsed.is_local:
@@ -414,7 +436,7 @@ def find_issue_path(issues_root: Path, issue_id: str) -> Optional[Path]:
             return None

         # Recursively search in member project
-        return find_issue_path(member_issues, parsed.local_id)
+        return find_issue_path(member_issues, parsed.local_id, include_archived)

     # Local Search
     try:
@@ -428,9 +450,19 @@ def find_issue_path(issues_root: Path, issue_id: str) -> Optional[Path]:
         return None

     base_dir = get_issue_dir(issue_type, issues_root)
-    # Search in
-    for
-
+    # Search in standard status subdirs first
+    for status_dir in ["open", "backlog", "closed"]:
+        d = base_dir / status_dir
+        if d.exists():
+            for f in d.rglob(f"{parsed.local_id}-*.md"):
+                return f
+
+    # Search in archived if enabled
+    if include_archived:
+        archived_dir = base_dir / "archived"
+        if archived_dir.exists():
+            for f in archived_dir.rglob(f"{parsed.local_id}-*.md"):
+                return f
     return None

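Net effect of the two signature changes: ID allocation ignores `archived/` unless asked (so an archived issue's number could be re-issued without `include_archived=True`), while path lookup includes the archive by default. A hypothetical caller, assuming the module import path shown in the file list above:

```python
from pathlib import Path
from monoco.features.issue import core  # path per the file list above

issues_root = Path("Issues")  # illustrative project layout

# Allocation: pass include_archived=True if archived IDs must stay reserved.
next_id = core.find_next_id("feature", issues_root, include_archived=True)

# Lookup: archived issues are found by default; opt out for active-only checks.
path = core.find_issue_path(issues_root, "FEAT-0042")
active = core.find_issue_path(issues_root, "FEAT-0042", include_archived=False)
```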
@@ -952,6 +984,8 @@ def sync_issue_files(issues_root: Path, issue_id: str, project_root: Path) -> Li

     if issue.isolation and issue.isolation.ref:
         target_ref = issue.isolation.ref
+        if target_ref == "current":
+            target_ref = git.get_current_branch(project_root)
     else:
         # Heuristic Search
         # 1. Is current branch related?
@@ -1003,6 +1037,118 @@ def sync_issue_files(issues_root: Path, issue_id: str, project_root: Path) -> Li
     return []


+def merge_issue_changes(
+    issues_root: Path, issue_id: str, project_root: Path
+) -> List[str]:
+    """
+    Perform Smart Atomic Merge or selection checkout of touched files.
+    Ensures safe mainline synchronization by only merging files tracked in 'files' field.
+    """
+    path = find_issue_path(issues_root, issue_id)
+    if not path:
+        raise FileNotFoundError(f"Issue {issue_id} not found.")
+
+    issue = parse_issue(path)
+    if not issue:
+        raise ValueError(f"Could not parse issue {issue_id}")
+
+    if not issue.files:
+        # If no files tracked, nothing to merge.
+        return []
+
+    # Determine Source (Feature Branch)
+    # We prioritize what's in the issue metadata on LOCAL (main).
+    # If not there, we try heuristic.
+    source_ref = None
+    if issue.isolation and issue.isolation.ref:
+        source_ref = issue.isolation.ref
+    else:
+        # Heuristic: Search for branch by convention
+        # We can't use 'current' here safely if we are on main,
+        # but let's assume we might be calling this from elsewhere?
+        # Actually, for 'close', we are likely on main.
+        # So we search for a branch named 'feat/{id}-*' or similar?
+        pass
+
+    # If local metadata doesn't have isolation ref, we might be stuck.
+    # But let's assume valid workflow.
+    if not source_ref:
+        # Try to find a branch starting with feat/{id} or {id}
+        # This is a bit weak, needs better implementation in 'git' or 'issue' module
+        # For now, if we can't find it, we error.
+        pass
+
+    if not source_ref or not git.branch_exists(project_root, source_ref):
+        # Fallback: maybe we are currently ON the feature branch?
+        # If so, source_ref should be current. But we expect to call this from MAIN.
+        pass
+
+    if not source_ref:
+        raise RuntimeError(f"Could not determine source branch for Issue {issue_id}. Ensure isolation ref is set.")
+
+    if not git.branch_exists(project_root, source_ref):
+        raise RuntimeError(f"Source branch {source_ref} does not exist.")
+
+    # RE-READ Issue from Source Branch
+    # The 'files' list on main might be outdated. We need the list from the feature branch.
+    relative_path = path.relative_to(project_root)
+
+    try:
+        # Read file content from git
+        code, access_content, _ = git._run_git(["show", f"{source_ref}:{relative_path}"], project_root)
+        if code == 0:
+            # Parse it
+            # We need to extract yaml frontmatter manually or use existing parser if it supported text input
+            import re
+            import yaml
+            match = re.search(r"^---(.*?)---", access_content, re.DOTALL | re.MULTILINE)
+            if match:
+                data = yaml.safe_load(match.group(1)) or {}
+                source_files = data.get("files", [])
+                if source_files:
+                    # Update issue object with latest files for this operation
+                    issue.files = source_files
+    except Exception as e:
+        # If reading fails (maybe path changed?), we fall back to local 'files'
+        pass
+
+    if not issue.files:
+        return []
+
+    # 1. Conflict Check
+    # A conflict occurs if a file in 'files' has changed on HEAD (main)
+    # since the common ancestor of HEAD and source_ref.
+
+    current_head = git.get_current_branch(project_root)
+    try:
+        base = git.get_merge_base(project_root, current_head, source_ref)
+    except Exception as e:
+        raise RuntimeError(f"Failed to determine merge base: {e}")
+
+    conflicts = []
+    for f in issue.files:
+        # Has main changed this file?
+        if git.has_diff(project_root, base, current_head, [f]):
+            # Has feature also changed this file?
+            if git.has_diff(project_root, base, source_ref, [f]):
+                conflicts.append(f)
+
+    if conflicts:
+        raise RuntimeError(
+            f"Atomic Merge Conflict: The following files changed on both mainline and {issue_id}:\n"
+            + "\n".join([f"  - {f}" for f in conflicts])
+            + "\n\nPlease resolve manually using Cherry-Pick as per AGENTS.md policy."
+        )
+
+    # 2. Perform Atomic Merge (Selective Checkout)
+    try:
+        git.git_checkout_files(project_root, source_ref, issue.files)
+    except Exception as e:
+        raise RuntimeError(f"Selective checkout failed: {e}")
+
+    return issue.files
+
+
 # Resources
 SKILL_CONTENT = """
 ---
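The conflict rule above is two-sided: a tracked file only blocks the merge when both mainline and the feature branch changed it relative to their merge base; one-sided changes are taken via selective checkout. `git.get_merge_base`, `git.has_diff`, and `git.git_checkout_files` are monoco-internal helpers (see `monoco/core/git.py` in the file list); a rough plain-git sketch of the same check, not monoco's actual implementation:

```python
import subprocess
from pathlib import Path

def _git(root: Path, *args: str) -> str:
    # Thin wrapper around the git CLI; raises on non-zero exit.
    out = subprocess.run(["git", *args], cwd=root, check=True,
                         capture_output=True, text=True)
    return out.stdout.strip()

def two_sided_conflicts(root: Path, main: str, feature: str,
                        files: list[str]) -> list[str]:
    base = _git(root, "merge-base", main, feature)

    def changed(ref: str, f: str) -> bool:
        # 'git diff --quiet' exits 1 when the file differs between the commits.
        rc = subprocess.run(["git", "diff", "--quiet", base, ref, "--", f],
                            cwd=root).returncode
        return rc != 0

    return [f for f in files if changed(main, f) and changed(feature, f)]

# When the returned list is empty, the selective checkout is essentially:
#   git checkout <feature-branch> -- <file1> <file2> ...
```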
@@ -1068,10 +1214,15 @@ def get_resources() -> Dict[str, Any]:


 def list_issues(
-    issues_root: Path, recursive_workspace: bool = False
+    issues_root: Path, recursive_workspace: bool = False, include_archived: bool = False
 ) -> List[IssueMetadata]:
     """
     List all issues in the project.
+
+    Args:
+        issues_root: Root directory of issues
+        recursive_workspace: Include issues from workspace members
+        include_archived: Include archived issues (default: False)
     """
     issues = []
     engine = get_engine(str(issues_root.parent))
@@ -1079,7 +1230,11 @@ def list_issues(

     for issue_type in all_types:
         base_dir = get_issue_dir(issue_type, issues_root)
-        for status_dir in ["open", "backlog", "closed"]:
+        status_dirs = ["open", "backlog", "closed"]
+        if include_archived:
+            status_dirs.append("archived")
+
+        for status_dir in status_dirs:
             d = base_dir / status_dir
             if d.exists():
                 for f in d.rglob("*.md"):
@@ -1249,6 +1404,56 @@ def update_issue_content(
         os.unlink(tmp_path)


+def update_issue_field(
+    issue_path: Path,
+    field: str,
+    value: Any,
+) -> IssueMetadata:
+    """
+    Update a specific field in an issue's frontmatter.
+
+    Args:
+        issue_path: Path to the issue file
+        field: Field name to update
+        value: New value for the field
+
+    Returns:
+        Updated IssueMetadata
+    """
+    # Read full content
+    content = issue_path.read_text()
+
+    # Split Frontmatter and Body
+    match = re.search(r"^---(.*?)---\n(.*)", content, re.DOTALL | re.MULTILINE)
+    if not match:
+        raise ValueError(f"Could not parse frontmatter for {issue_path}")
+
+    yaml_str = match.group(1)
+    body = match.group(2)
+
+    try:
+        data = yaml.safe_load(yaml_str) or {}
+    except yaml.YAMLError as e:
+        raise ValueError(f"Invalid YAML metadata: {e}")
+
+    # Update the field
+    data[field] = value
+    data["updated_at"] = current_time()
+
+    # Serialize back directly (not through model) to preserve all fields
+    yaml_header = yaml.dump(
+        data, sort_keys=False, allow_unicode=True, default_flow_style=False
+    )
+
+    # Reconstruct File
+    new_content = f"---\n{yaml_header}---\n{body}"
+    issue_path.write_text(new_content)
+
+    # Re-hydrate through Model to validate and return
+    updated_meta = IssueMetadata(**data)
+    return updated_meta
+
+
 def generate_delivery_report(
     issues_root: Path, issue_id: str, project_root: Path
 ) -> IssueMetadata:
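`update_issue_field` deliberately round-trips raw YAML instead of the Pydantic model when writing, so unknown keys survive, then re-validates via `IssueMetadata(**data)` on the way out. A hypothetical call setting the new `solution` field (the path and the attribute access are assumptions, not taken from the diff):

```python
from pathlib import Path
from monoco.features.issue.core import update_issue_field

issue_path = Path("Issues/Features/closed/FEAT-0042-example.md")  # illustrative
meta = update_issue_field(issue_path, "solution", "implemented")
# Returned metadata has been re-validated through the model; assuming
# IssueMetadata exposes the field as an attribute:
assert meta.solution == "implemented"
```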
@@ -1436,10 +1641,15 @@ def check_issue_match(
     return True


-def search_issues(issues_root: Path, query: str) -> List[IssueMetadata]:
+def search_issues(issues_root: Path, query: str, include_archived: bool = False) -> List[IssueMetadata]:
     """
     Search issues using advanced query syntax.
     Returns list of matching IssueMetadata.
+
+    Args:
+        issues_root: Root directory of issues
+        query: Search query string
+        include_archived: Include archived issues in search (default: False)
     """
     explicit_positives, terms, negatives = parse_search_query(query)

@@ -1449,7 +1659,7 @@ def search_issues(issues_root: Path, query: str) -> List[IssueMetadata]:
     # Let's align with "grep": empty pattern matches everything?
     # Or strict: empty query -> all.
     if not explicit_positives and not terms and not negatives:
-        return list_issues(issues_root)
+        return list_issues(issues_root, include_archived=include_archived)

     matches = []
     all_files = []
@@ -1469,7 +1679,11 @@ def search_issues(issues_root: Path, query: str) -> List[IssueMetadata]:

     for issue_type in all_types:
         base_dir = get_issue_dir(issue_type, issues_root)
-        for status_dir in ["open", "backlog", "closed"]:
+        status_dirs = ["open", "backlog", "closed"]
+        if include_archived:
+            status_dirs.append("archived")
+
+        for status_dir in status_dirs:
             d = base_dir / status_dir
             if d.exists():
                 for f in d.rglob("*.md"):
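Search follows the same opt-in contract, and the empty-query fast path forwards the flag so `search_issues` and `list_issues` stay consistent. Hypothetical usage:

```python
from pathlib import Path
from monoco.features.issue.core import search_issues

issues_root = Path("Issues")  # illustrative

hits = search_issues(issues_root, "auth")                             # active dirs only
all_hits = search_issues(issues_root, "auth", include_archived=True)  # plus archived/
```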
monoco/features/issue/engine/machine.py
CHANGED
@@ -79,6 +79,113 @@ class StateMachine:
             allowed.append(t)
         return allowed

+    def get_available_solutions(self, from_status: str, from_stage: Optional[str]) -> List[str]:
+        """Get all valid solutions for transitions from the current state."""
+        solutions = set()
+        for t in self.transitions:
+            # Skip non-transitions (agent actions with same status/stage)
+            if t.from_status is None and t.from_stage is None:
+                continue
+
+            if t.from_status and t.from_status != from_status:
+                continue
+            if t.from_stage and t.from_stage != from_stage:
+                continue
+
+            if t.required_solution:
+                solutions.add(t.required_solution)
+        return sorted(list(solutions))
+
+    def get_valid_transitions_from_state(
+        self, from_status: str, from_stage: Optional[str]
+    ) -> List[TransitionConfig]:
+        """Get all valid transitions from a given state."""
+        valid = []
+        for t in self.transitions:
+            # Skip non-transitions (agent actions with same status/stage)
+            if t.from_status is None and t.from_stage is None:
+                continue
+
+            if t.from_status and t.from_status != from_status:
+                continue
+            if t.from_stage and t.from_stage != from_stage:
+                continue
+
+            valid.append(t)
+        return valid
+
+    def _format_state(self, status: str, stage: Optional[str]) -> str:
+        """Format a state for display in error messages."""
+        # Handle Enum values
+        if hasattr(status, 'value'):
+            status = status.value
+        if stage and hasattr(stage, 'value'):
+            stage = stage.value
+
+        if stage:
+            return f"{status}({stage})"
+        return status
+
+    def _build_transition_not_found_error(
+        self,
+        from_status: str,
+        from_stage: Optional[str],
+        to_status: str,
+        to_stage: Optional[str],
+    ) -> str:
+        """Build a descriptive error message when no transition is found."""
+        current_state = self._format_state(from_status, from_stage)
+        target_state = self._format_state(to_status, to_stage)
+
+        error_msg = (
+            f"Lifecycle Policy: Transition from '{current_state}' "
+            f"to '{target_state}' is not defined."
+        )
+
+        # Add available transitions hint
+        valid_transitions = self.get_valid_transitions_from_state(from_status, from_stage)
+        if valid_transitions:
+            error_msg += " Available transitions from this state:"
+            for t in valid_transitions:
+                target = self._format_state(t.to_status, t.to_stage)
+                if t.required_solution:
+                    error_msg += f"\n - {t.name}: '{current_state}' -> '{target}' (requires --solution {t.required_solution})"
+                else:
+                    error_msg += f"\n - {t.name}: '{current_state}' -> '{target}'"
+        else:
+            error_msg += " No transitions are available from this state."
+
+        return error_msg
+
+    def _build_invalid_solution_error(
+        self,
+        transition: TransitionConfig,
+        provided_solution: Optional[str],
+        from_status: str,
+        from_stage: Optional[str],
+    ) -> str:
+        """Build a descriptive error message when solution is invalid or missing."""
+        current_state = self._format_state(from_status, from_stage)
+        target_state = self._format_state(transition.to_status, transition.to_stage)
+
+        if provided_solution:
+            error_msg = (
+                f"Lifecycle Policy: Transition '{transition.label}' from '{current_state}' "
+                f"to '{target_state}' does not accept solution '{provided_solution}'."
+            )
+        else:
+            error_msg = (
+                f"Lifecycle Policy: Transition '{transition.label}' from '{current_state}' "
+                f"to '{target_state}' requires a solution."
+            )
+
+        # Get valid solutions for this transition
+        valid_solutions = self.get_available_solutions(from_status, from_stage)
+        if valid_solutions:
+            error_msg += f" Valid solutions are: {', '.join(valid_solutions)}."
+
+        return error_msg
+
     def find_transition(
         self,
         from_status: str,
@@ -134,7 +241,7 @@ class StateMachine:
         to_stage: Optional[str],
         solution: Optional[str] = None,
         meta: Optional[IssueMetadata] = None,
-    ) ->
+    ) -> TransitionConfig:
         """
         Validate if a transition is allowed. Raises ValueError if not.
         If meta is provided, also validates criticality-based policies.
@@ -149,13 +256,16 @@ class StateMachine:

         if not transition:
             raise ValueError(
-
-
+                self._build_transition_not_found_error(
+                    from_status, from_stage, to_status, to_stage
+                )
             )

         if transition.required_solution and solution != transition.required_solution:
             raise ValueError(
-
+                self._build_invalid_solution_error(
+                    transition, solution, from_status, from_stage
+                )
             )

         # Criticality-based policy checks
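The practical payoff of the new `StateMachine` helpers is that rejected transitions now enumerate what would have been legal. The shared filtering is small enough to demo standalone; the mock below mirrors the logic of `get_available_solutions` with made-up transitions (the real `TransitionConfig` entries come from the lifecycle config):

```python
from dataclasses import dataclass
from typing import Optional

@dataclass
class T:  # stand-in for TransitionConfig, fields per the diff above
    from_status: Optional[str]
    from_stage: Optional[str]
    required_solution: Optional[str]

transitions = [
    T(None, None, None),                 # agent action: skipped entirely
    T("open", "review", "implemented"),  # exact status+stage match
    T("open", None, "cancelled"),        # None stage acts as a wildcard
    T("backlog", None, "wontfix"),       # wrong status: filtered out
]

def available_solutions(from_status: str, from_stage: Optional[str]) -> list[str]:
    out = set()
    for t in transitions:
        if t.from_status is None and t.from_stage is None:
            continue
        if t.from_status and t.from_status != from_status:
            continue
        if t.from_stage and t.from_stage != from_stage:
            continue
        if t.required_solution:
            out.add(t.required_solution)
    return sorted(out)

print(available_solutions("open", "review"))  # ['cancelled', 'implemented']
```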
monoco/features/issue/linter.py
CHANGED
@@ -29,7 +29,7 @@ def check_integrity(issues_root: Path, recursive: bool = False) -> List[Diagnost

     # 1. Collection Phase (Build Index)
     # Helper to collect issues from a project
-    def collect_project_issues(project_issues_root: Path, project_name: str = "local"):
+    def collect_project_issues(project_issues_root: Path, project_name: str = "local", include_archived: bool = False):
         project_issues = []
         project_diagnostics = []
         for subdir in ["Epics", "Features", "Chores", "Fixes", "Domains"]:
@@ -128,10 +128,30 @@ def check_integrity(issues_root: Path, recursive: bool = False) -> List[Diagnost
             else:
                 # Standard Issues (Epics/Features/etc)
                 files = []
-
-
-
-
+                # Standard status directories
+                status_dirs = ["open", "closed", "backlog"]
+                # Include archived if requested (for full validation)
+                if include_archived:
+                    status_dirs.append("archived")
+
+                for item in d.iterdir():
+                    if item.is_dir():
+                        status = item.name.lower()
+                        if status in status_dirs:
+                            files.extend(item.rglob("*.md"))
+                        elif status != "archived":  # archived is handled separately if include_archived
+                            # Report Illegal Directory immediately
+                            project_diagnostics.append(
+                                Diagnostic(
+                                    range=Range(
+                                        start=Position(line=0, character=0),
+                                        end=Position(line=0, character=0),
+                                    ),
+                                    message=f"Illegal Directory: Issues should be in 'open/', 'closed/', or 'backlog/' directories, not '{status}/' directory.",
+                                    severity=DiagnosticSeverity.Error,
+                                    source="System",
+                                )
+                            )

             for f in files:
                 try:
@@ -283,12 +303,45 @@ def check_integrity(issues_root: Path, recursive: bool = False) -> List[Diagnost

     # 2. Validation Phase
     valid_domains = set()
+    # Build list of actual IssueMetadata objects for domain governance checks
+    all_issue_metas = []
+
     # Now validate
     for path, meta, project_name in all_issues:
         if meta == "DOMAIN":
             valid_domains.add(
                 project_name
             )  # Record the domain name (which was stored in project_name slot)
+        else:
+            all_issue_metas.append(meta)
+
+    # FEAT-0136: Project-Level Domain Governance Check
+    # Calculate scale metrics
+    num_issues = len(all_issue_metas)
+    num_epics = len([i for i in all_issue_metas if i.type == "epic"])
+    is_large_scale = num_issues > 128 or num_epics > 32
+
+    # Check Domain Coverage for large-scale projects
+    if is_large_scale and num_epics > 0:
+        epics = [i for i in all_issue_metas if i.type == "epic"]
+        untracked_epics = [e for e in epics if not e.domains]
+        untracked_ratio = len(untracked_epics) / len(epics)
+
+        # Rule: Untracked Epics / Total Epics <= 25%
+        if untracked_ratio > 0.25:
+            # Report this as a project-level diagnostic (attached to first epic or general)
+            diagnostics.append(
+                Diagnostic(
+                    range=Range(
+                        start=Position(line=0, character=0),
+                        end=Position(line=0, character=0),
+                    ),
+                    message=f"Domain Governance: Coverage is too low for a project of this scale ({len(untracked_epics)}/{len(epics)} Epics untracked). "
+                    f"At least 75% of Epics must have domains assigned.",
+                    severity=DiagnosticSeverity.Error,
+                    source="DomainGovernance",
+                )
+            )

     for path, meta, project_name in all_issues:
         if meta == "DOMAIN":
@@ -325,6 +378,7 @@ def check_integrity(issues_root: Path, recursive: bool = False) -> List[Diagnost

         # A. Run Core Validator
         # Pass valid_domains kwarg (Validator needs update to accept it)
+        # FEAT-0136: Also pass all_issue_metas for domain governance checks
         file_diagnostics = validator.validate(
             meta,
             content,
@@ -332,6 +386,7 @@ def check_integrity(issues_root: Path, recursive: bool = False) -> List[Diagnost
             current_project=project_name,
             workspace_root=workspace_root_name,
             valid_domains=valid_domains,
+            all_issues=all_issue_metas,
         )

         # Add context to diagnostics (Path)
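The FEAT-0136 governance gate is plain arithmetic: once a project exceeds 128 issues or 32 epics, more than 25% of epics lacking a `domains` assignment trips an error. A minimal check of just the threshold logic, with made-up counts:

```python
# Illustrative counts; the linter derives these from parsed issue metadata.
num_issues, num_epics, num_untracked = 200, 40, 12

is_large_scale = num_issues > 128 or num_epics > 32
untracked_ratio = num_untracked / num_epics  # 0.30 > 0.25 -> violation

if is_large_scale and num_epics > 0 and untracked_ratio > 0.25:
    print(f"Domain Governance: {num_untracked}/{num_epics} Epics untracked")
```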
monoco/features/issue/models.py
CHANGED
@@ -74,6 +74,7 @@ class IssueStatus(str, Enum):
     OPEN = "open"
     CLOSED = "closed"
     BACKLOG = "backlog"
+    ARCHIVED = "archived"


 class IssueStage(str, Enum):
@@ -223,8 +224,7 @@ class IssueMetadata(BaseModel):
         # Stage normalization
         if "stage" in v and isinstance(v["stage"], str):
             v["stage"] = v["stage"].lower()
-
-            v["stage"] = "draft"
+
         try:
             v["stage"] = IssueStage(v["stage"])
         except ValueError: