monoco-toolkit 0.3.10__py3-none-any.whl → 0.3.12__py3-none-any.whl
This diff shows the changes between two publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
- monoco/__main__.py +8 -0
- monoco/core/artifacts/__init__.py +16 -0
- monoco/core/artifacts/manager.py +575 -0
- monoco/core/artifacts/models.py +161 -0
- monoco/core/automation/__init__.py +51 -0
- monoco/core/automation/config.py +338 -0
- monoco/core/automation/field_watcher.py +296 -0
- monoco/core/automation/handlers.py +723 -0
- monoco/core/config.py +31 -4
- monoco/core/executor/__init__.py +38 -0
- monoco/core/executor/agent_action.py +254 -0
- monoco/core/executor/git_action.py +303 -0
- monoco/core/executor/im_action.py +309 -0
- monoco/core/executor/pytest_action.py +218 -0
- monoco/core/git.py +38 -0
- monoco/core/hooks/context.py +74 -13
- monoco/core/ingestion/__init__.py +20 -0
- monoco/core/ingestion/discovery.py +248 -0
- monoco/core/ingestion/watcher.py +343 -0
- monoco/core/ingestion/worker.py +436 -0
- monoco/core/loader.py +633 -0
- monoco/core/registry.py +34 -25
- monoco/core/router/__init__.py +55 -0
- monoco/core/router/action.py +341 -0
- monoco/core/router/router.py +392 -0
- monoco/core/scheduler/__init__.py +63 -0
- monoco/core/scheduler/base.py +152 -0
- monoco/core/scheduler/engines.py +175 -0
- monoco/core/scheduler/events.py +171 -0
- monoco/core/scheduler/local.py +377 -0
- monoco/core/skills.py +119 -80
- monoco/core/watcher/__init__.py +57 -0
- monoco/core/watcher/base.py +365 -0
- monoco/core/watcher/dropzone.py +152 -0
- monoco/core/watcher/issue.py +303 -0
- monoco/core/watcher/memo.py +200 -0
- monoco/core/watcher/task.py +238 -0
- monoco/daemon/app.py +77 -1
- monoco/daemon/commands.py +10 -0
- monoco/daemon/events.py +34 -0
- monoco/daemon/mailroom_service.py +196 -0
- monoco/daemon/models.py +1 -0
- monoco/daemon/scheduler.py +207 -0
- monoco/daemon/services.py +27 -58
- monoco/daemon/triggers.py +55 -0
- monoco/features/agent/__init__.py +25 -7
- monoco/features/agent/adapter.py +17 -7
- monoco/features/agent/cli.py +91 -57
- monoco/features/agent/engines.py +31 -170
- monoco/{core/resources/en/skills/monoco_core → features/agent/resources/en/skills/monoco_atom_core}/SKILL.md +2 -2
- monoco/features/agent/resources/en/skills/{flow_engineer → monoco_workflow_agent_engineer}/SKILL.md +2 -2
- monoco/features/agent/resources/en/skills/{flow_manager → monoco_workflow_agent_manager}/SKILL.md +2 -2
- monoco/features/agent/resources/en/skills/{flow_planner → monoco_workflow_agent_planner}/SKILL.md +2 -2
- monoco/features/agent/resources/en/skills/{flow_reviewer → monoco_workflow_agent_reviewer}/SKILL.md +2 -2
- monoco/features/agent/resources/{roles/role-engineer.yaml → zh/roles/monoco_role_engineer.yaml} +3 -3
- monoco/features/agent/resources/{roles/role-manager.yaml → zh/roles/monoco_role_manager.yaml} +8 -8
- monoco/features/agent/resources/{roles/role-planner.yaml → zh/roles/monoco_role_planner.yaml} +8 -8
- monoco/features/agent/resources/{roles/role-reviewer.yaml → zh/roles/monoco_role_reviewer.yaml} +8 -8
- monoco/{core/resources/zh/skills/monoco_core → features/agent/resources/zh/skills/monoco_atom_core}/SKILL.md +2 -2
- monoco/features/agent/resources/zh/skills/{flow_engineer → monoco_workflow_agent_engineer}/SKILL.md +2 -2
- monoco/features/agent/resources/zh/skills/{flow_manager → monoco_workflow_agent_manager}/SKILL.md +2 -2
- monoco/features/agent/resources/zh/skills/{flow_planner → monoco_workflow_agent_planner}/SKILL.md +2 -2
- monoco/features/agent/resources/zh/skills/{flow_reviewer → monoco_workflow_agent_reviewer}/SKILL.md +2 -2
- monoco/features/agent/worker.py +1 -1
- monoco/features/artifact/__init__.py +0 -0
- monoco/features/artifact/adapter.py +33 -0
- monoco/features/artifact/resources/zh/AGENTS.md +14 -0
- monoco/features/artifact/resources/zh/skills/monoco_atom_artifact/SKILL.md +278 -0
- monoco/features/glossary/adapter.py +18 -7
- monoco/features/glossary/resources/en/skills/{monoco_glossary → monoco_atom_glossary}/SKILL.md +2 -2
- monoco/features/glossary/resources/zh/skills/{monoco_glossary → monoco_atom_glossary}/SKILL.md +2 -2
- monoco/features/hooks/__init__.py +11 -0
- monoco/features/hooks/adapter.py +67 -0
- monoco/features/hooks/commands.py +309 -0
- monoco/features/hooks/core.py +441 -0
- monoco/features/hooks/resources/ADDING_HOOKS.md +234 -0
- monoco/features/i18n/adapter.py +18 -5
- monoco/features/i18n/core.py +482 -17
- monoco/features/i18n/resources/en/skills/{monoco_i18n → monoco_atom_i18n}/SKILL.md +2 -2
- monoco/features/i18n/resources/en/skills/{i18n_scan_workflow → monoco_workflow_i18n_scan}/SKILL.md +2 -2
- monoco/features/i18n/resources/zh/skills/{monoco_i18n → monoco_atom_i18n}/SKILL.md +2 -2
- monoco/features/i18n/resources/zh/skills/{i18n_scan_workflow → monoco_workflow_i18n_scan}/SKILL.md +2 -2
- monoco/features/issue/adapter.py +19 -6
- monoco/features/issue/commands.py +352 -20
- monoco/features/issue/core.py +475 -16
- monoco/features/issue/engine/machine.py +114 -4
- monoco/features/issue/linter.py +60 -5
- monoco/features/issue/models.py +2 -2
- monoco/features/issue/resources/en/AGENTS.md +109 -0
- monoco/features/issue/resources/en/skills/{monoco_issue → monoco_atom_issue}/SKILL.md +2 -2
- monoco/features/issue/resources/en/skills/{issue_create_workflow → monoco_workflow_issue_creation}/SKILL.md +2 -2
- monoco/features/issue/resources/en/skills/{issue_develop_workflow → monoco_workflow_issue_development}/SKILL.md +2 -2
- monoco/features/issue/resources/en/skills/{issue_lifecycle_workflow → monoco_workflow_issue_management}/SKILL.md +2 -2
- monoco/features/issue/resources/en/skills/{issue_refine_workflow → monoco_workflow_issue_refinement}/SKILL.md +2 -2
- monoco/features/issue/resources/hooks/post-checkout.sh +39 -0
- monoco/features/issue/resources/hooks/pre-commit.sh +41 -0
- monoco/features/issue/resources/hooks/pre-push.sh +35 -0
- monoco/features/issue/resources/zh/AGENTS.md +109 -0
- monoco/features/issue/resources/zh/skills/{monoco_issue → monoco_atom_issue_lifecycle}/SKILL.md +2 -2
- monoco/features/issue/resources/zh/skills/{issue_create_workflow → monoco_workflow_issue_creation}/SKILL.md +2 -2
- monoco/features/issue/resources/zh/skills/{issue_develop_workflow → monoco_workflow_issue_development}/SKILL.md +2 -2
- monoco/features/issue/resources/zh/skills/{issue_lifecycle_workflow → monoco_workflow_issue_management}/SKILL.md +2 -2
- monoco/features/issue/resources/zh/skills/{issue_refine_workflow → monoco_workflow_issue_refinement}/SKILL.md +2 -2
- monoco/features/issue/validator.py +101 -1
- monoco/features/memo/adapter.py +21 -8
- monoco/features/memo/cli.py +103 -10
- monoco/features/memo/core.py +178 -92
- monoco/features/memo/models.py +53 -0
- monoco/features/memo/resources/en/skills/{monoco_memo → monoco_atom_memo}/SKILL.md +2 -2
- monoco/features/memo/resources/en/skills/{note_processing_workflow → monoco_workflow_note_processing}/SKILL.md +2 -2
- monoco/features/memo/resources/zh/skills/{monoco_memo → monoco_atom_memo}/SKILL.md +2 -2
- monoco/features/memo/resources/zh/skills/{note_processing_workflow → monoco_workflow_note_processing}/SKILL.md +2 -2
- monoco/features/spike/adapter.py +18 -5
- monoco/features/spike/commands.py +5 -3
- monoco/features/spike/resources/en/skills/{monoco_spike → monoco_atom_spike}/SKILL.md +2 -2
- monoco/features/spike/resources/en/skills/{research_workflow → monoco_workflow_research}/SKILL.md +2 -2
- monoco/features/spike/resources/zh/skills/{monoco_spike → monoco_atom_spike}/SKILL.md +2 -2
- monoco/features/spike/resources/zh/skills/{research_workflow → monoco_workflow_research}/SKILL.md +2 -2
- monoco/main.py +38 -1
- {monoco_toolkit-0.3.10.dist-info → monoco_toolkit-0.3.12.dist-info}/METADATA +7 -1
- monoco_toolkit-0.3.12.dist-info/RECORD +202 -0
- monoco/features/agent/apoptosis.py +0 -44
- monoco/features/agent/manager.py +0 -91
- monoco/features/agent/session.py +0 -121
- monoco_toolkit-0.3.10.dist-info/RECORD +0 -156
- /monoco/{core → features/agent}/resources/en/AGENTS.md +0 -0
- /monoco/{core → features/agent}/resources/zh/AGENTS.md +0 -0
- {monoco_toolkit-0.3.10.dist-info → monoco_toolkit-0.3.12.dist-info}/WHEEL +0 -0
- {monoco_toolkit-0.3.10.dist-info → monoco_toolkit-0.3.12.dist-info}/entry_points.txt +0 -0
- {monoco_toolkit-0.3.10.dist-info → monoco_toolkit-0.3.12.dist-info}/licenses/LICENSE +0 -0
monoco/features/issue/core.py
CHANGED
```diff
@@ -46,6 +46,30 @@ def get_issue_dir(issue_type: str, issues_root: Path) -> Path:
     return issues_root / folder
 
 
+def _parse_isolation_ref(ref: str) -> str:
+    """
+    Parse isolation ref and strip any 'branch:' or 'worktree:' prefix.
+
+    The isolation.ref field may contain prefixes like 'branch:feat/xxx' or 'worktree:xxx'
+    for display/logging purposes, but git commands need the raw branch name.
+
+    Args:
+        ref: The isolation ref string, may contain prefix
+
+    Returns:
+        The clean branch name without prefix
+    """
+    if not ref:
+        return ref
+
+    # Strip known prefixes
+    for prefix in ("branch:", "worktree:"):
+        if ref.startswith(prefix):
+            return ref[len(prefix):]
+
+    return ref
+
+
 def _get_slug(title: str) -> str:
     slug = title.lower()
     # Replace non-word characters (including punctuation, spaces) with hyphens
```
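The helper is small enough to exercise in isolation. A minimal sketch of its behaviour; the ref values below are made up for illustration, not taken from the package:

```python
def _parse_isolation_ref(ref: str) -> str:
    # Same logic as the helper added above.
    if not ref:
        return ref
    for prefix in ("branch:", "worktree:"):
        if ref.startswith(prefix):
            return ref[len(prefix):]
    return ref

assert _parse_isolation_ref("branch:feat/FEAT-0042-archive") == "feat/FEAT-0042-archive"
assert _parse_isolation_ref("worktree:spike/scheduler") == "spike/scheduler"
assert _parse_isolation_ref("feat/plain-ref") == "feat/plain-ref"  # no prefix: returned unchanged
```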
```diff
@@ -108,6 +132,8 @@ def _serialize_metadata(metadata: IssueMetadata) -> str:
         data["domains"] = []
     if "files" not in data:
         data["files"] = []
+    if "solution" not in data:
+        data["solution"] = None
 
     # Custom YAML Dumper to preserve None as 'null' and order
     # Helper to order keys: id, uid, type, status, stage, title, ... graph ...
@@ -147,6 +173,10 @@ def _serialize_metadata(metadata: IssueMetadata) -> str:
     if "criticality" in data:
         ordered_data["criticality"] = data["criticality"]
 
+    # Add solution if present (for template generation)
+    if "solution" in data:
+        ordered_data["solution"] = data["solution"]
+
     # Add remaining
     for k, v in data.items():
         if k not in ordered_data:
@@ -161,6 +191,10 @@ def _serialize_metadata(metadata: IssueMetadata) -> str:
         yaml_header = yaml_header.replace(
             "parent: null", "parent: null # <EPIC-ID> Optional"
         )
+    if "solution" in ordered_data and ordered_data["solution"] is None:
+        yaml_header = yaml_header.replace(
+            "solution: null", "solution: null # implemented, cancelled, wontfix, duplicate"
+        )
 
     return yaml_header
 
```
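The `solution: null # implemented, cancelled, wontfix, duplicate` substitution works because PyYAML cannot emit comments: the serializer dumps plain YAML first, then patches the dumped string. A sketch of the same technique, using an invented record rather than a real issue:

```python
import yaml

# Illustrative metadata record; field values are made up for the example.
data = {"id": "FEAT-0001", "solution": None}

# PyYAML renders None as 'null', so the marker string is predictable.
header = yaml.dump(data, sort_keys=False, allow_unicode=True, default_flow_style=False)
header = header.replace(
    "solution: null", "solution: null # implemented, cancelled, wontfix, duplicate"
)
print(header)
# id: FEAT-0001
# solution: null # implemented, cancelled, wontfix, duplicate
```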
```diff
@@ -189,15 +223,19 @@ def parse_issue_detail(file_path: Path) -> Optional[IssueDetail]:
     return None
 
 
-def find_next_id(issue_type: str, issues_root: Path) -> str:
+def find_next_id(issue_type: str, issues_root: Path, include_archived: bool = False) -> str:
     prefix_map = get_prefix_map(issues_root)
     prefix = prefix_map.get(issue_type, "ISSUE")
     pattern = re.compile(rf"{prefix}-(\d+)")
     max_id = 0
 
     base_dir = get_issue_dir(issue_type, issues_root)
-    # Scan all subdirs: open, backlog, closed
-    for status_dir in ["open", "backlog", "closed"]:
+    # Scan all subdirs: open, backlog, closed, archived (optional)
+    status_dirs = ["open", "backlog", "closed"]
+    if include_archived:
+        status_dirs.append("archived")
+
+    for status_dir in status_dirs:
         d = base_dir / status_dir
         if d.exists():
             for f in d.rglob("*.md"):
```
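The surrounding `find_next_id` logic (regex over filenames, max + 1) can be sketched standalone. Two assumptions here that the diff does not confirm: the `Issues/<Type>/<status>/<PREFIX>-<NNNN>-<slug>.md` layout implied elsewhere in the diff, and zero-padded four-digit IDs:

```python
import re
from pathlib import Path

def next_id(base_dir: Path, prefix: str, include_archived: bool = False) -> str:
    """Sketch of the next-ID scan shown above, under the assumed layout."""
    pattern = re.compile(rf"{prefix}-(\d+)")
    status_dirs = ["open", "backlog", "closed"]
    if include_archived:
        status_dirs.append("archived")

    max_id = 0
    for status_dir in status_dirs:
        d = base_dir / status_dir
        if d.exists():
            for f in d.rglob("*.md"):
                m = pattern.search(f.name)
                if m:
                    max_id = max(max_id, int(m.group(1)))
    # Four-digit zero padding is an assumption, not confirmed by the diff.
    return f"{prefix}-{max_id + 1:04d}"
```

Skipping `archived/` by default keeps IDs monotonically increasing for live issues, while `include_archived=True` avoids reusing an ID that only an archived issue still holds.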
```diff
@@ -388,7 +426,15 @@ def get_available_actions(meta: IssueMetadata) -> List[Any]:
     return actions
 
 
-def find_issue_path(issues_root: Path, issue_id: str) -> Optional[Path]:
+def find_issue_path(issues_root: Path, issue_id: str, include_archived: bool = True) -> Optional[Path]:
+    """
+    Find the path of an issue file.
+
+    Args:
+        issues_root: Root directory of issues
+        issue_id: Issue ID to find
+        include_archived: Whether to search in archived directory (default: True for find operations)
+    """
     parsed = IssueID(issue_id)
 
     if not parsed.is_local:
@@ -414,7 +460,7 @@ def find_issue_path(issues_root: Path, issue_id: str) -> Optional[Path]:
         return None
 
         # Recursively search in member project
-        return find_issue_path(member_issues, parsed.local_id)
+        return find_issue_path(member_issues, parsed.local_id, include_archived)
 
     # Local Search
     try:
@@ -428,12 +474,243 @@ def find_issue_path(issues_root: Path, issue_id: str) -> Optional[Path]:
         return None
 
     base_dir = get_issue_dir(issue_type, issues_root)
-    # Search in
-    for
-
+    # Search in standard status subdirs first
+    for status_dir in ["open", "backlog", "closed"]:
+        d = base_dir / status_dir
+        if d.exists():
+            for f in d.rglob(f"{parsed.local_id}-*.md"):
+                return f
+
+    # Search in archived if enabled
+    if include_archived:
+        archived_dir = base_dir / "archived"
+        if archived_dir.exists():
+            for f in archived_dir.rglob(f"{parsed.local_id}-*.md"):
+                return f
     return None
 
 
+def find_issue_path_across_branches(
+    issues_root: Path,
+    issue_id: str,
+    project_root: Optional[Path] = None,
+    include_archived: bool = True
+) -> Tuple[Optional[Path], Optional[str]]:
+    """
+    Find issue path across all local git branches.
+
+    Implements the "Golden Path" logic:
+    - If issue found in exactly one branch -> return it silently
+    - If issue found in multiple branches -> raise error (conflict)
+    - If issue not found in any branch -> return None
+
+    Args:
+        issues_root: Root directory of issues
+        issue_id: Issue ID to find
+        project_root: Project root (defaults to issues_root.parent)
+        include_archived: Whether to search in archived directory
+
+    Returns:
+        Tuple of (file_path, branch_name) or (None, None) if not found
+
+    Raises:
+        RuntimeError: If issue found in multiple branches (conflict)
+    """
+    # First, try to find in current working tree
+    local_path = find_issue_path(issues_root, issue_id, include_archived)
+
+    # Determine project root
+    if project_root is None:
+        project_root = issues_root.parent
+
+    # If not a git repo, just return local result
+    if not git.is_git_repo(project_root):
+        return (local_path, None) if local_path else (None, None)
+
+    # Get current branch
+    current_branch = git.get_current_branch(project_root)
+
+    # Search in all branches to detect conflicts (FIX-0006)
+    # If found locally, we still check other branches to detect potential duplicate files (conflict)
+    if local_path:
+        # Get relative path for git checking
+        try:
+            rel_path = local_path.relative_to(project_root)
+        except ValueError:
+            # If path is outside project root, we can't search other branches anyway
+            return local_path, current_branch
+
+        conflicting_branches = _find_branches_with_file(project_root, str(rel_path), current_branch)
+
+        # SPECIAL CASE (FIX-0006): During 'issue close' or similar operations,
+        # it is expected that the file exists in the feature branch (isolation branch)
+        # and now in the target branch (e.g. main). This is NOT a conflict.
+        if conflicting_branches:
+            # Try to parse metadata to see if these "conflicts" are actually the isolation branch
+            try:
+                meta = parse_issue(local_path)
+                if meta and meta.isolation:
+                    iso_type = getattr(meta.isolation, "type", None)
+                    iso_ref = getattr(meta.isolation, "ref", None)
+
+                    source_branch = None
+                    if iso_type == "branch":
+                        source_branch = iso_ref
+                    elif iso_ref and iso_ref.startswith("branch:"):
+                        source_branch = iso_ref[7:]
+
+                    if source_branch:
+                        # Filter out the source branch from conflicts
+                        conflicting_branches = [b for b in conflicting_branches if b != source_branch]
+            except Exception:
+                # If parsing fails, stick with raw conflicts to be safe
+                pass
+
+        if conflicting_branches:
+            raise RuntimeError(
+                f"Issue {issue_id} found in multiple branches: {current_branch}, {', '.join(conflicting_branches)}. "
+                f"Please resolve the conflict by merging branches or deleting duplicate issue files."
+            )
+        return local_path, current_branch
+
+    # Not found locally, search in all branches
+    return _search_issue_in_branches(issues_root, issue_id, project_root, include_archived)
+
+
+def _find_branches_with_file(project_root: Path, rel_path: str, exclude_branch: str) -> List[str]:
+    """
+    Find all branches (except excluded) that contain the given file path.
+
+    Args:
+        project_root: Project root path
+        rel_path: Relative path from project root
+        exclude_branch: Branch to exclude from search
+
+    Returns:
+        List of branch names containing the file
+    """
+    branches_with_file = []
+
+    # Get all local branches
+    code, stdout, _ = git._run_git(["branch", "--format=%(refname:short)"], project_root)
+    if code != 0:
+        return []
+
+    all_branches = [b.strip() for b in stdout.splitlines() if b.strip()]
+
+    for branch in all_branches:
+        if branch == exclude_branch:
+            continue
+
+        # Check if file exists in this branch
+        code, _, _ = git._run_git(["show", f"{branch}:{rel_path}"], project_root)
+        if code == 0:
+            branches_with_file.append(branch)
+
+    return branches_with_file
+
+
+def _search_issue_in_branches(
+    issues_root: Path,
+    issue_id: str,
+    project_root: Path,
+    include_archived: bool = True
+) -> Tuple[Optional[Path], Optional[str]]:
+    """
+    Search for an issue file across all branches.
+
+    Returns:
+        Tuple of (file_path, branch_name) or (None, None)
+
+    Raises:
+        RuntimeError: If issue found in multiple branches
+    """
+    parsed = IssueID(issue_id)
+
+    if not parsed.is_local:
+        # For workspace issues, just use standard find
+        path = find_issue_path(issues_root, issue_id, include_archived)
+        return (path, None) if path else (None, None)
+
+    # Get issue type from prefix
+    try:
+        prefix = parsed.local_id.split("-")[0].upper()
+    except IndexError:
+        return None, None
+
+    reverse_prefix_map = get_reverse_prefix_map(issues_root)
+    issue_type = reverse_prefix_map.get(prefix)
+    if not issue_type:
+        return None, None
+
+    # Build possible paths to search
+    base_dir = get_issue_dir(issue_type, issues_root)
+    rel_base = base_dir.relative_to(project_root)
+
+    status_dirs = ["open", "backlog", "closed"]
+    if include_archived:
+        status_dirs.append("archived")
+
+    # Search pattern: Issues/{Type}/{status}/{issue_id}-*.md
+    found_in_branches: List[Tuple[str, str]] = []  # (branch, file_path)
+
+    # Get all local branches
+    code, stdout, _ = git._run_git(["branch", "--format=%(refname:short)"], project_root)
+    if code != 0:
+        return None, None
+
+    all_branches = [b.strip() for b in stdout.splitlines() if b.strip()]
+
+    for branch in all_branches:
+        # List files in each status directory for this branch
+        for status_dir in status_dirs:
+            dir_path = f"{rel_base}/{status_dir}"
+
+            # List all files in this directory on the branch
+            code, stdout, _ = git._run_git(
+                ["ls-tree", "-r", "--name-only", branch, dir_path],
+                project_root
+            )
+
+            if code != 0 or not stdout.strip():
+                continue
+
+            # Find matching issue file (Handling quoted paths from git ls-tree)
+            pattern = f"{parsed.local_id}-"
+            for line in stdout.splitlines():
+                line = line.strip()
+                # Git quotes non-ASCII paths: "Issues/Chores/...md"
+                if line.startswith('"') and line.endswith('"'):
+                    line = line[1:-1]
+
+                if pattern in line and line.endswith(".md"):
+                    found_in_branches.append((branch, line))
+
+    if not found_in_branches:
+        return None, None
+
+    if len(found_in_branches) > 1:
+        # Found in multiple branches - conflict
+        branches = [b for b, _ in found_in_branches]
+        raise RuntimeError(
+            f"Issue {issue_id} found in multiple branches: {', '.join(branches)}. "
+            f"Please resolve the conflict by merging branches or deleting duplicate issue files."
+        )
+
+    # Golden path: exactly one match
+    branch, file_path = found_in_branches[0]
+
+    # Checkout the file to working tree
+    try:
+        git.git_checkout_files(project_root, branch, [file_path])
+        full_path = project_root / file_path
+        return full_path, branch
+    except Exception:
+        # If checkout fails, return the path anyway - caller can handle
+        full_path = project_root / file_path
+        return full_path, branch
+
+
 def update_issue(
     issues_root: Path,
     issue_id: str,
```
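`_find_branches_with_file` leans on the fact that `git show <branch>:<path>` exits non-zero when the path does not exist on that branch. A standalone sketch of the same probe using plain `subprocess`, since `git._run_git` is a private monoco helper whose exact signature is internal:

```python
import subprocess
from pathlib import Path

def branches_with_file(repo: Path, rel_path: str, exclude: str) -> list[str]:
    """Return local branches (except `exclude`) whose tree contains rel_path."""
    out = subprocess.run(
        ["git", "branch", "--format=%(refname:short)"],
        cwd=repo, capture_output=True, text=True, check=True,
    ).stdout
    hits = []
    for branch in filter(None, (b.strip() for b in out.splitlines())):
        if branch == exclude:
            continue
        # 'git show <branch>:<path>' exits non-zero when the path is absent.
        probe = subprocess.run(
            ["git", "show", f"{branch}:{rel_path}"],
            cwd=repo, capture_output=True, text=True,
        )
        if probe.returncode == 0:
            hits.append(branch)
    return hits
```

Probing with `git show` keeps the check read-only: no branch is checked out, so the working tree is untouched while duplicates are detected.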
```diff
@@ -867,7 +1144,7 @@ def prune_issue_resources(
         return []
 
     if issue.isolation.type == IsolationType.BRANCH:
-        branch = issue.isolation.ref
+        branch = _parse_isolation_ref(issue.isolation.ref)
         current = git.get_current_branch(project_root)
         if current == branch:
             raise RuntimeError(
@@ -893,7 +1170,7 @@ def prune_issue_resources(
         # Also delete the branch associated?
         # Worktree create makes a branch. When removing worktree, branch remains.
         # Usually we want to remove the branch too if it was created for this issue.
-        branch = issue.isolation.ref
+        branch = _parse_isolation_ref(issue.isolation.ref)
         if branch and git.branch_exists(project_root, branch):
             # We can't delete branch if it is checked out in the worktree we just removed?
             # git worktree remove unlocks the branch.
@@ -951,7 +1228,9 @@ def sync_issue_files(issues_root: Path, issue_id: str, project_root: Path) -> List[str]:
     target_ref = None
 
     if issue.isolation and issue.isolation.ref:
-        target_ref = issue.isolation.ref
+        target_ref = _parse_isolation_ref(issue.isolation.ref)
+        if target_ref == "current":
+            target_ref = git.get_current_branch(project_root)
     else:
         # Heuristic Search
         # 1. Is current branch related?
```
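The new `"current"` sentinel resolves to whatever branch is checked out. `git.get_current_branch` is internal to monoco; assuming it wraps something equivalent to `git rev-parse`, the resolution step looks like this sketch:

```python
import subprocess
from pathlib import Path

def current_branch(repo: Path) -> str:
    # Assumed equivalent of git.get_current_branch: ask git for the
    # short name of the branch HEAD points at.
    return subprocess.run(
        ["git", "rev-parse", "--abbrev-ref", "HEAD"],
        cwd=repo, capture_output=True, text=True, check=True,
    ).stdout.strip()

target_ref = "current"  # e.g. what remains after stripping a "branch:" prefix
if target_ref == "current":
    target_ref = current_branch(Path("."))
```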
```diff
@@ -1003,6 +1282,118 @@ def sync_issue_files(issues_root: Path, issue_id: str, project_root: Path) -> List[str]:
         return []
 
 
+def merge_issue_changes(
+    issues_root: Path, issue_id: str, project_root: Path
+) -> List[str]:
+    """
+    Perform Smart Atomic Merge or selection checkout of touched files.
+    Ensures safe mainline synchronization by only merging files tracked in 'files' field.
+    """
+    path = find_issue_path(issues_root, issue_id)
+    if not path:
+        raise FileNotFoundError(f"Issue {issue_id} not found.")
+
+    issue = parse_issue(path)
+    if not issue:
+        raise ValueError(f"Could not parse issue {issue_id}")
+
+    if not issue.files:
+        # If no files tracked, nothing to merge.
+        return []
+
+    # Determine Source (Feature Branch)
+    # We prioritize what's in the issue metadata on LOCAL (main).
+    # If not there, we try heuristic.
+    source_ref = None
+    if issue.isolation and issue.isolation.ref:
+        source_ref = _parse_isolation_ref(issue.isolation.ref)
+    else:
+        # Heuristic: Search for branch by convention
+        # We can't use 'current' here safely if we are on main,
+        # but let's assume we might be calling this from elsewhere?
+        # Actually, for 'close', we are likely on main.
+        # So we search for a branch named 'feat/{id}-*' or similar?
+        pass
+
+    # If local metadata doesn't have isolation ref, we might be stuck.
+    # But let's assume valid workflow.
+    if not source_ref:
+        # Try to find a branch starting with feat/{id} or {id}
+        # This is a bit weak, needs better implementation in 'git' or 'issue' module
+        # For now, if we can't find it, we error.
+        pass
+
+    if not source_ref or not git.branch_exists(project_root, source_ref):
+        # Fallback: maybe we are currently ON the feature branch?
+        # If so, source_ref should be current. But we expect to call this from MAIN.
+        pass
+
+    if not source_ref:
+        raise RuntimeError(f"Could not determine source branch for Issue {issue_id}. Ensure isolation ref is set.")
+
+    if not git.branch_exists(project_root, source_ref):
+        raise RuntimeError(f"Source branch {source_ref} does not exist.")
+
+    # RE-READ Issue from Source Branch
+    # The 'files' list on main might be outdated. We need the list from the feature branch.
+    relative_path = path.relative_to(project_root)
+
+    try:
+        # Read file content from git
+        code, access_content, _ = git._run_git(["show", f"{source_ref}:{relative_path}"], project_root)
+        if code == 0:
+            # Parse it
+            # We need to extract yaml frontmatter manually or use existing parser if it supported text input
+            import re
+            import yaml
+            match = re.search(r"^---(.*?)---", access_content, re.DOTALL | re.MULTILINE)
+            if match:
+                data = yaml.safe_load(match.group(1)) or {}
+                source_files = data.get("files", [])
+                if source_files:
+                    # Update issue object with latest files for this operation
+                    issue.files = source_files
+    except Exception as e:
+        # If reading fails (maybe path changed?), we fall back to local 'files'
+        pass
+
+    if not issue.files:
+        return []
+
+    # 1. Conflict Check
+    # A conflict occurs if a file in 'files' has changed on HEAD (main)
+    # since the common ancestor of HEAD and source_ref.
+
+    current_head = git.get_current_branch(project_root)
+    try:
+        base = git.get_merge_base(project_root, current_head, source_ref)
+    except Exception as e:
+        raise RuntimeError(f"Failed to determine merge base: {e}")
+
+    conflicts = []
+    for f in issue.files:
+        # Has main changed this file?
+        if git.has_diff(project_root, base, current_head, [f]):
+            # Has feature also changed this file?
+            if git.has_diff(project_root, base, source_ref, [f]):
+                conflicts.append(f)
+
+    if conflicts:
+        raise RuntimeError(
+            f"Atomic Merge Conflict: The following files changed on both mainline and {issue_id}:\n"
+            + "\n".join([f"  - {f}" for f in conflicts])
+            + "\n\nPlease resolve manually using Cherry-Pick as per AGENTS.md policy."
+        )
+
+    # 2. Perform Atomic Merge (Selective Checkout)
+    try:
+        git.git_checkout_files(project_root, source_ref, issue.files)
+    except Exception as e:
+        raise RuntimeError(f"Selective checkout failed: {e}")
+
+    return issue.files
+
+
 # Resources
 SKILL_CONTENT = """
 ---
```
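The conflict rule in `merge_issue_changes` is: a tracked file conflicts when it changed on both the mainline and the feature branch since their merge base. A sketch of that check with plain git standing in for monoco's internal `git.get_merge_base`/`git.has_diff` helpers, whose exact signatures are not public:

```python
import subprocess
from pathlib import Path

def changed(repo: Path, a: str, b: str, path: str) -> bool:
    """True when `path` differs between revisions a and b."""
    # --quiet suppresses output and makes git exit 1 when there are differences.
    r = subprocess.run(["git", "diff", "--quiet", a, b, "--", path], cwd=repo)
    return r.returncode != 0

def conflicting_files(repo: Path, mainline: str, feature: str, files: list[str]) -> list[str]:
    """Files changed on BOTH sides since the common ancestor -> true conflicts."""
    base = subprocess.run(
        ["git", "merge-base", mainline, feature],
        cwd=repo, capture_output=True, text=True, check=True,
    ).stdout.strip()
    return [
        f for f in files
        if changed(repo, base, mainline, f) and changed(repo, base, feature, f)
    ]
```

A file touched only on the feature branch can be checked out onto the mainline safely; only two-sided edits force the manual cherry-pick path the error message points to.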
```diff
@@ -1068,10 +1459,15 @@ def get_resources() -> Dict[str, Any]:
 
 
 def list_issues(
-    issues_root: Path, recursive_workspace: bool = False
+    issues_root: Path, recursive_workspace: bool = False, include_archived: bool = False
 ) -> List[IssueMetadata]:
     """
     List all issues in the project.
+
+    Args:
+        issues_root: Root directory of issues
+        recursive_workspace: Include issues from workspace members
+        include_archived: Include archived issues (default: False)
     """
     issues = []
     engine = get_engine(str(issues_root.parent))
@@ -1079,7 +1475,11 @@ def list_issues(
 
     for issue_type in all_types:
         base_dir = get_issue_dir(issue_type, issues_root)
-        for status_dir in ["open", "backlog", "closed"]:
+        status_dirs = ["open", "backlog", "closed"]
+        if include_archived:
+            status_dirs.append("archived")
+
+        for status_dir in status_dirs:
             d = base_dir / status_dir
             if d.exists():
                 for f in d.rglob("*.md"):
@@ -1249,6 +1649,56 @@ def update_issue_content(
         os.unlink(tmp_path)
 
 
+def update_issue_field(
+    issue_path: Path,
+    field: str,
+    value: Any,
+) -> IssueMetadata:
+    """
+    Update a specific field in an issue's frontmatter.
+
+    Args:
+        issue_path: Path to the issue file
+        field: Field name to update
+        value: New value for the field
+
+    Returns:
+        Updated IssueMetadata
+    """
+    # Read full content
+    content = issue_path.read_text()
+
+    # Split Frontmatter and Body
+    match = re.search(r"^---(.*?)---\n(.*)", content, re.DOTALL | re.MULTILINE)
+    if not match:
+        raise ValueError(f"Could not parse frontmatter for {issue_path}")
+
+    yaml_str = match.group(1)
+    body = match.group(2)
+
+    try:
+        data = yaml.safe_load(yaml_str) or {}
+    except yaml.YAMLError as e:
+        raise ValueError(f"Invalid YAML metadata: {e}")
+
+    # Update the field
+    data[field] = value
+    data["updated_at"] = current_time()
+
+    # Serialize back directly (not through model) to preserve all fields
+    yaml_header = yaml.dump(
+        data, sort_keys=False, allow_unicode=True, default_flow_style=False
+    )
+
+    # Reconstruct File
+    new_content = f"---\n{yaml_header}---\n{body}"
+    issue_path.write_text(new_content)
+
+    # Re-hydrate through Model to validate and return
+    updated_meta = IssueMetadata(**data)
+    return updated_meta
+
+
 def generate_delivery_report(
     issues_root: Path, issue_id: str, project_root: Path
 ) -> IssueMetadata:
```
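A hypothetical call of the new helper; the issue path and the chosen `solution` value are invented for illustration, and the module path follows the file shown in this diff:

```python
from pathlib import Path

from monoco.features.issue.core import update_issue_field

# Hypothetical issue file; "solution" matches the new frontmatter field,
# with one of the values hinted at in the serializer comment above.
meta = update_issue_field(
    Path("Issues/Features/closed/FEAT-0042-add-archive.md"),
    "solution",
    "implemented",
)
print(meta.status, meta.updated_at)  # updated_at is refreshed on every write
```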
```diff
@@ -1436,10 +1886,15 @@ def check_issue_match(
     return True
 
 
-def search_issues(issues_root: Path, query: str) -> List[IssueMetadata]:
+def search_issues(issues_root: Path, query: str, include_archived: bool = False) -> List[IssueMetadata]:
     """
     Search issues using advanced query syntax.
     Returns list of matching IssueMetadata.
+
+    Args:
+        issues_root: Root directory of issues
+        query: Search query string
+        include_archived: Include archived issues in search (default: False)
     """
     explicit_positives, terms, negatives = parse_search_query(query)
 
@@ -1449,7 +1904,7 @@ def search_issues(issues_root: Path, query: str) -> List[IssueMetadata]:
     # Let's align with "grep": empty pattern matches everything?
     # Or strict: empty query -> all.
     if not explicit_positives and not terms and not negatives:
-        return list_issues(issues_root)
+        return list_issues(issues_root, include_archived=include_archived)
 
     matches = []
     all_files = []
@@ -1469,7 +1924,11 @@ def search_issues(issues_root: Path, query: str) -> List[IssueMetadata]:
 
     for issue_type in all_types:
         base_dir = get_issue_dir(issue_type, issues_root)
-        for status_dir in ["open", "backlog", "closed"]:
+        status_dirs = ["open", "backlog", "closed"]
+        if include_archived:
+            status_dirs.append("archived")
+
+        for status_dir in status_dirs:
             d = base_dir / status_dir
             if d.exists():
                 for f in d.rglob("*.md"):
```
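Usage of the widened search surface might look like this; the query string is illustrative (the full query syntax is not shown in this diff) and the issues root is assumed:

```python
from pathlib import Path

from monoco.features.issue.core import search_issues

issues_root = Path("Issues")  # assumed issues root for the example

open_only = search_issues(issues_root, "archive")                          # default: skips archived/
everything = search_issues(issues_root, "archive", include_archived=True)  # also scans archived/
```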