monoco-toolkit 0.3.9__py3-none-any.whl → 0.3.11__py3-none-any.whl
- monoco/__main__.py +8 -0
- monoco/core/artifacts/__init__.py +16 -0
- monoco/core/artifacts/manager.py +575 -0
- monoco/core/artifacts/models.py +161 -0
- monoco/core/config.py +38 -4
- monoco/core/git.py +23 -0
- monoco/core/hooks/builtin/git_cleanup.py +1 -1
- monoco/core/ingestion/__init__.py +20 -0
- monoco/core/ingestion/discovery.py +248 -0
- monoco/core/ingestion/watcher.py +343 -0
- monoco/core/ingestion/worker.py +436 -0
- monoco/core/injection.py +63 -29
- monoco/core/integrations.py +2 -2
- monoco/core/loader.py +633 -0
- monoco/core/output.py +5 -5
- monoco/core/registry.py +34 -19
- monoco/core/resource/__init__.py +5 -0
- monoco/core/resource/finder.py +98 -0
- monoco/core/resource/manager.py +91 -0
- monoco/core/resource/models.py +35 -0
- monoco/core/skill_framework.py +292 -0
- monoco/core/skills.py +524 -385
- monoco/core/sync.py +73 -1
- monoco/core/workflow_converter.py +420 -0
- monoco/daemon/app.py +77 -1
- monoco/daemon/commands.py +10 -0
- monoco/daemon/mailroom_service.py +196 -0
- monoco/daemon/models.py +1 -0
- monoco/daemon/scheduler.py +236 -0
- monoco/daemon/services.py +185 -0
- monoco/daemon/triggers.py +55 -0
- monoco/features/agent/__init__.py +2 -2
- monoco/features/agent/adapter.py +41 -0
- monoco/features/agent/apoptosis.py +44 -0
- monoco/features/agent/cli.py +101 -144
- monoco/features/agent/config.py +35 -21
- monoco/features/agent/defaults.py +6 -49
- monoco/features/agent/engines.py +32 -6
- monoco/features/agent/manager.py +47 -6
- monoco/features/agent/models.py +2 -2
- monoco/features/agent/resources/atoms/atom-code-dev.yaml +61 -0
- monoco/features/agent/resources/atoms/atom-issue-lifecycle.yaml +73 -0
- monoco/features/agent/resources/atoms/atom-knowledge.yaml +55 -0
- monoco/features/agent/resources/atoms/atom-review.yaml +60 -0
- monoco/{core/resources/en → features/agent/resources/en/skills/monoco_atom_core}/SKILL.md +3 -1
- monoco/features/agent/resources/en/skills/monoco_workflow_agent_engineer/SKILL.md +94 -0
- monoco/features/agent/resources/en/skills/monoco_workflow_agent_manager/SKILL.md +93 -0
- monoco/features/agent/resources/en/skills/monoco_workflow_agent_planner/SKILL.md +85 -0
- monoco/features/agent/resources/en/skills/monoco_workflow_agent_reviewer/SKILL.md +114 -0
- monoco/features/agent/resources/workflows/workflow-dev.yaml +83 -0
- monoco/features/agent/resources/workflows/workflow-issue-create.yaml +72 -0
- monoco/features/agent/resources/workflows/workflow-review.yaml +94 -0
- monoco/features/agent/resources/zh/roles/monoco_role_engineer.yaml +49 -0
- monoco/features/agent/resources/zh/roles/monoco_role_manager.yaml +46 -0
- monoco/features/agent/resources/zh/roles/monoco_role_planner.yaml +46 -0
- monoco/features/agent/resources/zh/roles/monoco_role_reviewer.yaml +47 -0
- monoco/{core/resources/zh → features/agent/resources/zh/skills/monoco_atom_core}/SKILL.md +3 -1
- monoco/features/agent/resources/{skills/flow_engineer → zh/skills/monoco_workflow_agent_engineer}/SKILL.md +2 -2
- monoco/features/agent/resources/{skills/flow_manager → zh/skills/monoco_workflow_agent_manager}/SKILL.md +2 -2
- monoco/features/agent/resources/zh/skills/monoco_workflow_agent_planner/SKILL.md +259 -0
- monoco/features/agent/resources/zh/skills/monoco_workflow_agent_reviewer/SKILL.md +137 -0
- monoco/features/agent/session.py +59 -11
- monoco/features/agent/worker.py +38 -2
- monoco/features/artifact/__init__.py +0 -0
- monoco/features/artifact/adapter.py +33 -0
- monoco/features/artifact/resources/zh/AGENTS.md +14 -0
- monoco/features/artifact/resources/zh/skills/monoco_atom_artifact/SKILL.md +278 -0
- monoco/features/glossary/__init__.py +0 -0
- monoco/features/glossary/adapter.py +42 -0
- monoco/features/glossary/config.py +5 -0
- monoco/features/glossary/resources/en/AGENTS.md +29 -0
- monoco/features/glossary/resources/en/skills/monoco_atom_glossary/SKILL.md +35 -0
- monoco/features/glossary/resources/zh/AGENTS.md +29 -0
- monoco/features/glossary/resources/zh/skills/monoco_atom_glossary/SKILL.md +35 -0
- monoco/features/hooks/__init__.py +11 -0
- monoco/features/hooks/adapter.py +67 -0
- monoco/features/hooks/commands.py +309 -0
- monoco/features/hooks/core.py +441 -0
- monoco/features/hooks/resources/ADDING_HOOKS.md +234 -0
- monoco/features/i18n/adapter.py +18 -5
- monoco/features/i18n/core.py +482 -17
- monoco/features/i18n/resources/en/{SKILL.md → skills/monoco_atom_i18n/SKILL.md} +3 -1
- monoco/features/i18n/resources/en/skills/monoco_workflow_i18n_scan/SKILL.md +105 -0
- monoco/features/i18n/resources/zh/{SKILL.md → skills/monoco_atom_i18n/SKILL.md} +3 -1
- monoco/features/i18n/resources/{skills/i18n_scan_workflow → zh/skills/monoco_workflow_i18n_scan}/SKILL.md +2 -2
- monoco/features/issue/adapter.py +19 -6
- monoco/features/issue/commands.py +281 -7
- monoco/features/issue/core.py +272 -19
- monoco/features/issue/engine/machine.py +118 -5
- monoco/features/issue/linter.py +60 -5
- monoco/features/issue/models.py +3 -2
- monoco/features/issue/resources/en/AGENTS.md +109 -0
- monoco/features/issue/resources/en/{SKILL.md → skills/monoco_atom_issue/SKILL.md} +3 -1
- monoco/features/issue/resources/en/skills/monoco_workflow_issue_creation/SKILL.md +167 -0
- monoco/features/issue/resources/en/skills/monoco_workflow_issue_development/SKILL.md +224 -0
- monoco/features/issue/resources/en/skills/monoco_workflow_issue_management/SKILL.md +159 -0
- monoco/features/issue/resources/en/skills/monoco_workflow_issue_refinement/SKILL.md +203 -0
- monoco/features/issue/resources/hooks/post-checkout.sh +39 -0
- monoco/features/issue/resources/hooks/pre-commit.sh +41 -0
- monoco/features/issue/resources/hooks/pre-push.sh +35 -0
- monoco/features/issue/resources/zh/AGENTS.md +109 -0
- monoco/features/issue/resources/zh/{SKILL.md → skills/monoco_atom_issue_lifecycle/SKILL.md} +3 -1
- monoco/features/issue/resources/zh/skills/monoco_workflow_issue_creation/SKILL.md +167 -0
- monoco/features/issue/resources/zh/skills/monoco_workflow_issue_development/SKILL.md +224 -0
- monoco/features/issue/resources/{skills/issue_lifecycle_workflow → zh/skills/monoco_workflow_issue_management}/SKILL.md +2 -2
- monoco/features/issue/resources/zh/skills/monoco_workflow_issue_refinement/SKILL.md +203 -0
- monoco/features/issue/validator.py +101 -1
- monoco/features/memo/adapter.py +21 -8
- monoco/features/memo/cli.py +103 -10
- monoco/features/memo/core.py +178 -92
- monoco/features/memo/models.py +53 -0
- monoco/features/memo/resources/en/skills/monoco_atom_memo/SKILL.md +77 -0
- monoco/features/memo/resources/en/skills/monoco_workflow_note_processing/SKILL.md +140 -0
- monoco/features/memo/resources/zh/{SKILL.md → skills/monoco_atom_memo/SKILL.md} +3 -1
- monoco/features/memo/resources/{skills/note_processing_workflow → zh/skills/monoco_workflow_note_processing}/SKILL.md +2 -2
- monoco/features/spike/adapter.py +18 -5
- monoco/features/spike/resources/en/{SKILL.md → skills/monoco_atom_spike/SKILL.md} +3 -1
- monoco/features/spike/resources/en/skills/monoco_workflow_research/SKILL.md +121 -0
- monoco/features/spike/resources/zh/{SKILL.md → skills/monoco_atom_spike/SKILL.md} +3 -1
- monoco/features/spike/resources/{skills/research_workflow → zh/skills/monoco_workflow_research}/SKILL.md +2 -2
- monoco/main.py +38 -1
- monoco_toolkit-0.3.11.dist-info/METADATA +130 -0
- monoco_toolkit-0.3.11.dist-info/RECORD +181 -0
- monoco/features/agent/reliability.py +0 -106
- monoco/features/agent/resources/skills/flow_reviewer/SKILL.md +0 -114
- monoco_toolkit-0.3.9.dist-info/METADATA +0 -127
- monoco_toolkit-0.3.9.dist-info/RECORD +0 -115
- /monoco/{core → features/agent}/resources/en/AGENTS.md +0 -0
- /monoco/{core → features/agent}/resources/zh/AGENTS.md +0 -0
- {monoco_toolkit-0.3.9.dist-info → monoco_toolkit-0.3.11.dist-info}/WHEEL +0 -0
- {monoco_toolkit-0.3.9.dist-info → monoco_toolkit-0.3.11.dist-info}/entry_points.txt +0 -0
- {monoco_toolkit-0.3.9.dist-info → monoco_toolkit-0.3.11.dist-info}/licenses/LICENSE +0 -0
monoco/features/issue/core.py
CHANGED

@@ -108,6 +108,8 @@ def _serialize_metadata(metadata: IssueMetadata) -> str:
         data["domains"] = []
     if "files" not in data:
         data["files"] = []
+    if "solution" not in data:
+        data["solution"] = None

     # Custom YAML Dumper to preserve None as 'null' and order
     # Helper to order keys: id, uid, type, status, stage, title, ... graph ...
@@ -147,6 +149,10 @@ def _serialize_metadata(metadata: IssueMetadata) -> str:
     if "criticality" in data:
         ordered_data["criticality"] = data["criticality"]

+    # Add solution if present (for template generation)
+    if "solution" in data:
+        ordered_data["solution"] = data["solution"]
+
     # Add remaining
     for k, v in data.items():
         if k not in ordered_data:
@@ -161,6 +167,10 @@ def _serialize_metadata(metadata: IssueMetadata) -> str:
         yaml_header = yaml_header.replace(
             "parent: null", "parent: null # <EPIC-ID> Optional"
         )
+    if "solution" in ordered_data and ordered_data["solution"] is None:
+        yaml_header = yaml_header.replace(
+            "solution: null", "solution: null # implemented, cancelled, wontfix, duplicate"
+        )

     return yaml_header

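The effect of the annotation step is easiest to see on its output. A minimal sketch, where the surrounding fields are illustrative and only the solution rewrite mirrors the code above:

```python
# After yaml.dump emits "solution: null", the serializer appends an inline
# hint so generated issue templates are self-documenting.
yaml_header = "id: DEMO-001\nstatus: open\nsolution: null\n"
yaml_header = yaml_header.replace(
    "solution: null", "solution: null # implemented, cancelled, wontfix, duplicate"
)
print(yaml_header)
# id: DEMO-001
# status: open
# solution: null # implemented, cancelled, wontfix, duplicate
```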
@@ -189,15 +199,19 @@ def parse_issue_detail(file_path: Path) -> Optional[IssueDetail]:
         return None


-def find_next_id(issue_type: str, issues_root: Path) -> str:
+def find_next_id(issue_type: str, issues_root: Path, include_archived: bool = False) -> str:
     prefix_map = get_prefix_map(issues_root)
     prefix = prefix_map.get(issue_type, "ISSUE")
     pattern = re.compile(rf"{prefix}-(\d+)")
     max_id = 0

     base_dir = get_issue_dir(issue_type, issues_root)
-    # Scan all subdirs: open, backlog, closed
-    for status_dir in ["open", "backlog", "closed"]:
+    # Scan all subdirs: open, backlog, closed, archived (optional)
+    status_dirs = ["open", "backlog", "closed"]
+    if include_archived:
+        status_dirs.append("archived")
+
+    for status_dir in status_dirs:
         d = base_dir / status_dir
         if d.exists():
             for f in d.rglob("*.md"):
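A hedged usage sketch of the new parameter; the issue type and resulting prefix are illustrative, and the default of False preserves the old behavior for existing callers:

```python
# Hypothetical call: treat archived issues as still reserving their numbers,
# so a new issue never reuses the ID of an archived one.
new_id = find_next_id("feature", issues_root, include_archived=True)
# e.g. "FEAT-043" even when FEAT-042 only exists under archived/
```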
@@ -388,7 +402,15 @@ def get_available_actions(meta: IssueMetadata) -> List[Any]:
     return actions


-def find_issue_path(issues_root: Path, issue_id: str) -> Optional[Path]:
+def find_issue_path(issues_root: Path, issue_id: str, include_archived: bool = True) -> Optional[Path]:
+    """
+    Find the path of an issue file.
+
+    Args:
+        issues_root: Root directory of issues
+        issue_id: Issue ID to find
+        include_archived: Whether to search in archived directory (default: True for find operations)
+    """
     parsed = IssueID(issue_id)

     if not parsed.is_local:
@@ -414,7 +436,7 @@ def find_issue_path(issues_root: Path, issue_id: str) -> Optional[Path]:
             return None

         # Recursively search in member project
-        return find_issue_path(member_issues, parsed.local_id)
+        return find_issue_path(member_issues, parsed.local_id, include_archived)

     # Local Search
     try:
@@ -428,9 +450,19 @@ def find_issue_path(issues_root: Path, issue_id: str) -> Optional[Path]:
         return None

     base_dir = get_issue_dir(issue_type, issues_root)
-    # Search in
-    for
-
+    # Search in standard status subdirs first
+    for status_dir in ["open", "backlog", "closed"]:
+        d = base_dir / status_dir
+        if d.exists():
+            for f in d.rglob(f"{parsed.local_id}-*.md"):
+                return f
+
+    # Search in archived if enabled
+    if include_archived:
+        archived_dir = base_dir / "archived"
+        if archived_dir.exists():
+            for f in archived_dir.rglob(f"{parsed.local_id}-*.md"):
+                return f
     return None


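Note the asymmetric defaults: find_issue_path defaults include_archived to True (lookups should still resolve archived issues), while find_next_id, list_issues, and search_issues default to False (listings stay uncluttered). A brief sketch, with an illustrative issue ID:

```python
# Resolves even when the file lives under .../archived/
path = find_issue_path(issues_root, "FEAT-042")

# Opt out to search only open/backlog/closed:
path = find_issue_path(issues_root, "FEAT-042", include_archived=False)
```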
@@ -515,7 +547,7 @@ def update_issue(
         temp_meta = IssueMetadata(**data)

         # Use engine to validate the transition
-        engine.validate_transition(
+        transition = engine.validate_transition(
             from_status=current_status,
             from_stage=current_stage,
             to_status=target_status,
@@ -659,6 +691,8 @@ def update_issue(
     path.write_text(new_content)

     # 3. Handle physical move if status changed
+    # Save old path before move for git tracking
+    old_path_before_move = path
     if status and status != current_status:
         # Move file
         prefix = issue_id.split("-")[0].upper()
@@ -702,17 +736,16 @@ def update_issue(
     # Only auto-commit if we're in a git repo
     git_service = IssueGitService(project_root)
     if git_service.is_git_repository():
-        #
+        # Use the saved old path before file move for git tracking
         old_path_for_git = None
-        if status and status != current_status:
-
-            old_path_for_git = find_issue_path(issues_root, issue_id)
+        if status and status != current_status and old_path_before_move != path:
+            old_path_for_git = old_path_before_move

         commit_result = git_service.commit_issue_change(
             issue_id=issue_id,
             action=action,
             issue_file_path=path,
-            old_file_path=old_path_for_git
+            old_file_path=old_path_for_git,
             no_commit=no_commit,
         )
         # Attach commit result to metadata for optional inspection
@@ -721,9 +754,47 @@ def update_issue(
     # Update returned metadata with final absolute path
     updated_meta.path = str(path.absolute())
     updated_meta.actions = get_available_actions(updated_meta)
+
+    # Execute Post Actions (Trigger)
+    if transition and hasattr(transition, "post_actions") and transition.post_actions:
+        _execute_post_actions(transition.post_actions, updated_meta)
+
     return updated_meta


+def _execute_post_actions(actions: List[str], meta: IssueMetadata):
+    """
+    Execute a list of shell commands as post-actions.
+    Supports template substitution with issue metadata.
+    """
+    import shlex
+    import subprocess
+    from rich.console import Console
+
+    console = Console()
+    data = meta.model_dump(mode="json")
+
+    for action in actions:
+        try:
+            # Safe template substitution
+            cmd = action.format(**data)
+        except KeyError as e:
+            console.print(f"[yellow]Trigger Warning:[/yellow] Missing key for template '{action}': {e}")
+            continue
+
+        console.print(f"[bold cyan]Triggering:[/bold cyan] {cmd}")
+
+        args = shlex.split(cmd)
+
+        try:
+            # Run in foreground to allow interaction if needed (e.g. agent output)
+            subprocess.run(args, check=True)
+        except subprocess.CalledProcessError as e:
+            console.print(f"[red]Trigger Failed:[/red] Command '{cmd}' exited with code {e.returncode}")
+        except Exception as e:
+            console.print(f"[red]Trigger Error:[/red] {e}")
+
+
 def start_issue_isolation(
     issues_root: Path, issue_id: str, mode: str, project_root: Path
 ) -> IssueMetadata:
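The templating here is plain str.format over the JSON-dumped metadata; where post_actions are declared is not visible in this diff (presumably the lifecycle transition config). A minimal sketch of the substitution step, with made-up metadata:

```python
# Mirrors the cmd = action.format(**data) step above.
data = {"id": "FEAT-042", "status": "closed", "title": "Demo"}
action = "echo Issue {id} is now {status}"
cmd = action.format(**data)  # -> "echo Issue FEAT-042 is now closed"

# A template that references a key absent from the metadata raises KeyError,
# which the loop above reports as a warning and skips instead of aborting.
```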
@@ -913,6 +984,8 @@ def sync_issue_files(issues_root: Path, issue_id: str, project_root: Path) -> List[str]:

     if issue.isolation and issue.isolation.ref:
         target_ref = issue.isolation.ref
+        if target_ref == "current":
+            target_ref = git.get_current_branch(project_root)
     else:
         # Heuristic Search
         # 1. Is current branch related?
@@ -964,6 +1037,118 @@ def sync_issue_files(issues_root: Path, issue_id: str, project_root: Path) -> List[str]:
         return []


+def merge_issue_changes(
+    issues_root: Path, issue_id: str, project_root: Path
+) -> List[str]:
+    """
+    Perform Smart Atomic Merge or selection checkout of touched files.
+    Ensures safe mainline synchronization by only merging files tracked in 'files' field.
+    """
+    path = find_issue_path(issues_root, issue_id)
+    if not path:
+        raise FileNotFoundError(f"Issue {issue_id} not found.")
+
+    issue = parse_issue(path)
+    if not issue:
+        raise ValueError(f"Could not parse issue {issue_id}")
+
+    if not issue.files:
+        # If no files tracked, nothing to merge.
+        return []
+
+    # Determine Source (Feature Branch)
+    # We prioritize what's in the issue metadata on LOCAL (main).
+    # If not there, we try heuristic.
+    source_ref = None
+    if issue.isolation and issue.isolation.ref:
+        source_ref = issue.isolation.ref
+    else:
+        # Heuristic: Search for branch by convention
+        # We can't use 'current' here safely if we are on main,
+        # but let's assume we might be calling this from elsewhere?
+        # Actually, for 'close', we are likely on main.
+        # So we search for a branch named 'feat/{id}-*' or similar?
+        pass
+
+    # If local metadata doesn't have isolation ref, we might be stuck.
+    # But let's assume valid workflow.
+    if not source_ref:
+        # Try to find a branch starting with feat/{id} or {id}
+        # This is a bit weak, needs better implementation in 'git' or 'issue' module
+        # For now, if we can't find it, we error.
+        pass
+
+    if not source_ref or not git.branch_exists(project_root, source_ref):
+        # Fallback: maybe we are currently ON the feature branch?
+        # If so, source_ref should be current. But we expect to call this from MAIN.
+        pass
+
+    if not source_ref:
+        raise RuntimeError(f"Could not determine source branch for Issue {issue_id}. Ensure isolation ref is set.")
+
+    if not git.branch_exists(project_root, source_ref):
+        raise RuntimeError(f"Source branch {source_ref} does not exist.")
+
+    # RE-READ Issue from Source Branch
+    # The 'files' list on main might be outdated. We need the list from the feature branch.
+    relative_path = path.relative_to(project_root)
+
+    try:
+        # Read file content from git
+        code, access_content, _ = git._run_git(["show", f"{source_ref}:{relative_path}"], project_root)
+        if code == 0:
+            # Parse it
+            # We need to extract yaml frontmatter manually or use existing parser if it supported text input
+            import re
+            import yaml
+            match = re.search(r"^---(.*?)---", access_content, re.DOTALL | re.MULTILINE)
+            if match:
+                data = yaml.safe_load(match.group(1)) or {}
+                source_files = data.get("files", [])
+                if source_files:
+                    # Update issue object with latest files for this operation
+                    issue.files = source_files
+    except Exception as e:
+        # If reading fails (maybe path changed?), we fall back to local 'files'
+        pass
+
+    if not issue.files:
+        return []
+
+    # 1. Conflict Check
+    # A conflict occurs if a file in 'files' has changed on HEAD (main)
+    # since the common ancestor of HEAD and source_ref.
+
+    current_head = git.get_current_branch(project_root)
+    try:
+        base = git.get_merge_base(project_root, current_head, source_ref)
+    except Exception as e:
+        raise RuntimeError(f"Failed to determine merge base: {e}")
+
+    conflicts = []
+    for f in issue.files:
+        # Has main changed this file?
+        if git.has_diff(project_root, base, current_head, [f]):
+            # Has feature also changed this file?
+            if git.has_diff(project_root, base, source_ref, [f]):
+                conflicts.append(f)
+
+    if conflicts:
+        raise RuntimeError(
+            f"Atomic Merge Conflict: The following files changed on both mainline and {issue_id}:\n"
+            + "\n".join([f" - {f}" for f in conflicts])
+            + "\n\nPlease resolve manually using Cherry-Pick as per AGENTS.md policy."
+        )
+
+    # 2. Perform Atomic Merge (Selective Checkout)
+    try:
+        git.git_checkout_files(project_root, source_ref, issue.files)
+    except Exception as e:
+        raise RuntimeError(f"Selective checkout failed: {e}")
+
+    return issue.files


 # Resources
 SKILL_CONTENT = """
 ---
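The conflict predicate is: a tracked file conflicts iff it changed on both mainline and the feature branch since their merge base. The helpers git.has_diff and git.get_merge_base are not shown in this diff; a minimal sketch of what they plausibly wrap, assuming plain git plumbing:

```python
import subprocess
from pathlib import Path

def has_diff(root: Path, base: str, ref: str, paths: list[str]) -> bool:
    # "git diff --quiet A B -- <paths>" exits 0 when identical, non-zero otherwise.
    result = subprocess.run(["git", "diff", "--quiet", base, ref, "--", *paths], cwd=root)
    return result.returncode != 0

def get_merge_base(root: Path, a: str, b: str) -> str:
    out = subprocess.run(
        ["git", "merge-base", a, b],
        cwd=root, capture_output=True, text=True, check=True,
    )
    return out.stdout.strip()
```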
@@ -1029,10 +1214,15 @@ def get_resources() -> Dict[str, Any]:


 def list_issues(
-    issues_root: Path, recursive_workspace: bool = False
+    issues_root: Path, recursive_workspace: bool = False, include_archived: bool = False
 ) -> List[IssueMetadata]:
     """
     List all issues in the project.
+
+    Args:
+        issues_root: Root directory of issues
+        recursive_workspace: Include issues from workspace members
+        include_archived: Include archived issues (default: False)
     """
     issues = []
     engine = get_engine(str(issues_root.parent))
@@ -1040,7 +1230,11 @@ def list_issues(

     for issue_type in all_types:
         base_dir = get_issue_dir(issue_type, issues_root)
-        for status_dir in ["open", "backlog", "closed"]:
+        status_dirs = ["open", "backlog", "closed"]
+        if include_archived:
+            status_dirs.append("archived")
+
+        for status_dir in status_dirs:
             d = base_dir / status_dir
             if d.exists():
                 for f in d.rglob("*.md"):
@@ -1210,6 +1404,56 @@ def update_issue_content(
         os.unlink(tmp_path)


+def update_issue_field(
+    issue_path: Path,
+    field: str,
+    value: Any,
+) -> IssueMetadata:
+    """
+    Update a specific field in an issue's frontmatter.
+
+    Args:
+        issue_path: Path to the issue file
+        field: Field name to update
+        value: New value for the field
+
+    Returns:
+        Updated IssueMetadata
+    """
+    # Read full content
+    content = issue_path.read_text()
+
+    # Split Frontmatter and Body
+    match = re.search(r"^---(.*?)---\n(.*)", content, re.DOTALL | re.MULTILINE)
+    if not match:
+        raise ValueError(f"Could not parse frontmatter for {issue_path}")
+
+    yaml_str = match.group(1)
+    body = match.group(2)
+
+    try:
+        data = yaml.safe_load(yaml_str) or {}
+    except yaml.YAMLError as e:
+        raise ValueError(f"Invalid YAML metadata: {e}")
+
+    # Update the field
+    data[field] = value
+    data["updated_at"] = current_time()
+
+    # Serialize back directly (not through model) to preserve all fields
+    yaml_header = yaml.dump(
+        data, sort_keys=False, allow_unicode=True, default_flow_style=False
+    )
+
+    # Reconstruct File
+    new_content = f"---\n{yaml_header}---\n{body}"
+    issue_path.write_text(new_content)
+
+    # Re-hydrate through Model to validate and return
+    updated_meta = IssueMetadata(**data)
+    return updated_meta
+
+
 def generate_delivery_report(
     issues_root: Path, issue_id: str, project_root: Path
 ) -> IssueMetadata:
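A usage sketch (the path and field are illustrative). The design choice worth noting: the frontmatter dict is dumped back directly rather than round-tripped through IssueMetadata, so unknown or extra fields survive the write; the model is only instantiated afterwards, as validation:

```python
# Hypothetical issue file path.
meta = update_issue_field(
    Path("issues/feature/open/FEAT-042-demo.md"),
    "solution",
    "implemented",
)
print(meta.solution)  # "implemented", assuming IssueMetadata exposes the field
```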
@@ -1397,10 +1641,15 @@ def check_issue_match(
         return True


-def search_issues(issues_root: Path, query: str) -> List[IssueMetadata]:
+def search_issues(issues_root: Path, query: str, include_archived: bool = False) -> List[IssueMetadata]:
     """
     Search issues using advanced query syntax.
     Returns list of matching IssueMetadata.
+
+    Args:
+        issues_root: Root directory of issues
+        query: Search query string
+        include_archived: Include archived issues in search (default: False)
     """
     explicit_positives, terms, negatives = parse_search_query(query)

@@ -1410,7 +1659,7 @@ def search_issues(issues_root: Path, query: str) -> List[IssueMetadata]:
     # Let's align with "grep": empty pattern matches everything?
     # Or strict: empty query -> all.
     if not explicit_positives and not terms and not negatives:
-        return list_issues(issues_root)
+        return list_issues(issues_root, include_archived=include_archived)

     matches = []
     all_files = []
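Grounded in the delegation above: the empty-query fast path now forwards the flag, so `search_issues(issues_root, "", include_archived=True)` behaves exactly like `list_issues(issues_root, include_archived=True)`, and that is the one place an "empty pattern matches everything" query can surface archived issues.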
@@ -1430,7 +1679,11 @@ def search_issues(issues_root: Path, query: str) -> List[IssueMetadata]:

     for issue_type in all_types:
         base_dir = get_issue_dir(issue_type, issues_root)
-        for status_dir in ["open", "backlog", "closed"]:
+        status_dirs = ["open", "backlog", "closed"]
+        if include_archived:
+            status_dirs.append("archived")
+
+        for status_dir in status_dirs:
             d = base_dir / status_dir
             if d.exists():
                 for f in d.rglob("*.md"):
monoco/features/issue/engine/machine.py
CHANGED

@@ -79,6 +79,113 @@ class StateMachine:
                 allowed.append(t)
         return allowed

+    def get_available_solutions(self, from_status: str, from_stage: Optional[str]) -> List[str]:
+        """Get all valid solutions for transitions from the current state."""
+        solutions = set()
+        for t in self.transitions:
+            # Skip non-transitions (agent actions with same status/stage)
+            if t.from_status is None and t.from_stage is None:
+                continue
+
+            if t.from_status and t.from_status != from_status:
+                continue
+            if t.from_stage and t.from_stage != from_stage:
+                continue
+
+            if t.required_solution:
+                solutions.add(t.required_solution)
+        return sorted(list(solutions))
+
+    def get_valid_transitions_from_state(
+        self, from_status: str, from_stage: Optional[str]
+    ) -> List[TransitionConfig]:
+        """Get all valid transitions from a given state."""
+        valid = []
+        for t in self.transitions:
+            # Skip non-transitions (agent actions with same status/stage)
+            if t.from_status is None and t.from_stage is None:
+                continue
+
+            if t.from_status and t.from_status != from_status:
+                continue
+            if t.from_stage and t.from_stage != from_stage:
+                continue
+
+            valid.append(t)
+        return valid
+
+    def _format_state(self, status: str, stage: Optional[str]) -> str:
+        """Format a state for display in error messages."""
+        # Handle Enum values
+        if hasattr(status, 'value'):
+            status = status.value
+        if stage and hasattr(stage, 'value'):
+            stage = stage.value
+
+        if stage:
+            return f"{status}({stage})"
+        return status
+
+    def _build_transition_not_found_error(
+        self,
+        from_status: str,
+        from_stage: Optional[str],
+        to_status: str,
+        to_stage: Optional[str],
+    ) -> str:
+        """Build a descriptive error message when no transition is found."""
+        current_state = self._format_state(from_status, from_stage)
+        target_state = self._format_state(to_status, to_stage)
+
+        error_msg = (
+            f"Lifecycle Policy: Transition from '{current_state}' "
+            f"to '{target_state}' is not defined."
+        )
+
+        # Add available transitions hint
+        valid_transitions = self.get_valid_transitions_from_state(from_status, from_stage)
+        if valid_transitions:
+            error_msg += " Available transitions from this state:"
+            for t in valid_transitions:
+                target = self._format_state(t.to_status, t.to_stage)
+                if t.required_solution:
+                    error_msg += f"\n - {t.name}: '{current_state}' -> '{target}' (requires --solution {t.required_solution})"
+                else:
+                    error_msg += f"\n - {t.name}: '{current_state}' -> '{target}'"
+        else:
+            error_msg += " No transitions are available from this state."
+
+        return error_msg
+
+    def _build_invalid_solution_error(
+        self,
+        transition: TransitionConfig,
+        provided_solution: Optional[str],
+        from_status: str,
+        from_stage: Optional[str],
+    ) -> str:
+        """Build a descriptive error message when solution is invalid or missing."""
+        current_state = self._format_state(from_status, from_stage)
+        target_state = self._format_state(transition.to_status, transition.to_stage)
+
+        if provided_solution:
+            error_msg = (
+                f"Lifecycle Policy: Transition '{transition.label}' from '{current_state}' "
+                f"to '{target_state}' does not accept solution '{provided_solution}'."
+            )
+        else:
+            error_msg = (
+                f"Lifecycle Policy: Transition '{transition.label}' from '{current_state}' "
+                f"to '{target_state}' requires a solution."
+            )
+
+        # Get valid solutions for this transition
+        valid_solutions = self.get_available_solutions(from_status, from_stage)
+        if valid_solutions:
+            error_msg += f" Valid solutions are: {', '.join(valid_solutions)}."
+
+        return error_msg
+
     def find_transition(
         self,
         from_status: str,
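Illustrative renderings of the two builders; every state, transition, and solution name below is made up:

```python
# _build_transition_not_found_error:
#   Lifecycle Policy: Transition from 'open(doing)' to 'closed' is not defined.
#   Available transitions from this state:
#    - review: 'open(doing)' -> 'open(review)'
#    - cancel: 'open(doing)' -> 'closed' (requires --solution cancelled)
#
# _build_invalid_solution_error:
#   Lifecycle Policy: Transition 'Close' from 'open(review)' to 'closed'
#   requires a solution. Valid solutions are: cancelled, implemented, wontfix.
```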
@@ -134,13 +241,14 @@ class StateMachine:
         to_stage: Optional[str],
         solution: Optional[str] = None,
         meta: Optional[IssueMetadata] = None,
-    ) -> None:
+    ) -> TransitionConfig:
         """
         Validate if a transition is allowed. Raises ValueError if not.
         If meta is provided, also validates criticality-based policies.
+        Returns the TransitionConfig if a transition occurred, None if no change.
         """
         if from_status == to_status and from_stage == to_stage:
-            return  # No change is always allowed (unless we want to enforce specific updates)
+            return None  # No change is always allowed (unless we want to enforce specific updates)

         transition = self.find_transition(
             from_status, from_stage, to_status, to_stage, solution
@@ -148,18 +256,23 @@ class StateMachine:

         if not transition:
             raise ValueError(
-
-
+                self._build_transition_not_found_error(
+                    from_status, from_stage, to_status, to_stage
+                )
             )

         if transition.required_solution and solution != transition.required_solution:
             raise ValueError(
-
+                self._build_invalid_solution_error(
+                    transition, solution, from_status, from_stage
+                )
             )

         # Criticality-based policy checks
         if meta and meta.criticality:
             self._validate_criticality_policy(meta, from_stage, to_stage)
+
+        return transition

     def _validate_criticality_policy(
         self,