monoco-toolkit 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1174 @@
+ import os
+ import re
+ import yaml
+ from pathlib import Path
+ from typing import List, Dict, Optional, Tuple, Any, Set
+ from .models import IssueMetadata, IssueType, IssueStatus, IssueSolution, IssueStage, IssueDetail, IsolationType, IssueIsolation, IssueID, current_time, generate_uid
+ from monoco.core import git
+ from monoco.core.config import get_config
+
+ PREFIX_MAP = {
+     IssueType.EPIC: "EPIC",
+     IssueType.FEATURE: "FEAT",
+     IssueType.CHORE: "CHORE",
+     IssueType.FIX: "FIX"
+ }
+
+ REVERSE_PREFIX_MAP = {v: k for k, v in PREFIX_MAP.items()}
+
+ def _get_slug(title: str) -> str:
+     slug = title.lower()
+     # Replace runs of non-word characters (punctuation, spaces) with hyphens.
+     # \w matches Unicode word characters (letters, numbers, underscores).
+     slug = re.sub(r"[^\w]+", "-", slug)
+     slug = slug.strip("-")[:50]
+
+     if not slug:
+         slug = "issue"
+
+     return slug
+
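+ # Illustrative examples: _get_slug("Add OAuth2 Login!") -> "add-oauth2-login";
+ # a title with no word characters at all falls back to "issue".
+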
+ def get_issue_dir(issue_type: IssueType, issues_root: Path) -> Path:
+     mapping = {
+         IssueType.EPIC: "Epics",
+         IssueType.FEATURE: "Features",
+         IssueType.CHORE: "Chores",
+         IssueType.FIX: "Fixes",
+     }
+     return issues_root / mapping[issue_type]
+
+ def parse_issue(file_path: Path) -> Optional[IssueMetadata]:
+     if file_path.suffix != ".md":
+         return None
+
+     content = file_path.read_text()
+     match = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
+     if not match:
+         return None
+
+     try:
+         data = yaml.safe_load(match.group(1))
+         if not isinstance(data, dict):
+             return None
+         return IssueMetadata(**data)
+     except Exception:
+         return None
+
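+ # Illustrative file shape expected by parse_issue (field names follow
+ # IssueMetadata; the values here are hypothetical):
+ #
+ #     ---
+ #     id: FEAT-0001
+ #     type: feature
+ #     status: open
+ #     title: Add OAuth2 login
+ #     ---
+ #     ## FEAT-0001: Add OAuth2 login
+ #     ...
+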
+ def parse_issue_detail(file_path: Path) -> Optional[IssueDetail]:
+     if file_path.suffix != ".md":
+         return None
+
+     content = file_path.read_text()
+     # Robust splitting of frontmatter from body
+     match = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
+     if not match:
+         return None
+
+     yaml_str = match.group(1)
+     body = content[match.end():].lstrip()
+
+     try:
+         data = yaml.safe_load(yaml_str)
+         if not isinstance(data, dict):
+             return None
+         return IssueDetail(**data, body=body, raw_content=content)
+     except Exception:
+         return None
+
+ def find_next_id(issue_type: IssueType, issues_root: Path) -> str:
+     prefix = PREFIX_MAP[issue_type]
+     pattern = re.compile(rf"{prefix}-(\d+)")
+     max_id = 0
+
+     base_dir = get_issue_dir(issue_type, issues_root)
+     # Scan all status subdirs: open, backlog, closed
+     for status_dir in ["open", "backlog", "closed"]:
+         d = base_dir / status_dir
+         if d.exists():
+             for f in d.rglob("*.md"):
+                 match = pattern.search(f.name)
+                 if match:
+                     max_id = max(max_id, int(match.group(1)))
+
+     return f"{prefix}-{max_id + 1:04d}"
+
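+ # Numbering sketch: if Features/open holds FEAT-0007-*.md and Features/closed
+ # holds FEAT-0012-*.md, the next feature ID is "FEAT-0013" (max across all
+ # status dirs, zero-padded to four digits).
+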
+ def create_issue_file(
+     issues_root: Path,
+     issue_type: IssueType,
+     title: str,
+     parent: Optional[str] = None,
+     status: IssueStatus = IssueStatus.OPEN,
+     stage: Optional[IssueStage] = None,
+     dependencies: Optional[List[str]] = None,
+     related: Optional[List[str]] = None,
+     subdir: Optional[str] = None,
+     sprint: Optional[str] = None,
+     tags: Optional[List[str]] = None
+ ) -> Tuple[IssueMetadata, Path]:
+     # Avoid mutable default arguments
+     dependencies = dependencies or []
+     related = related or []
+     tags = tags or []
+
+     # Validation
+     for dep_id in dependencies:
+         if not find_issue_path(issues_root, dep_id):
+             raise ValueError(f"Dependency issue {dep_id} not found.")
+
+     for rel_id in related:
+         if not find_issue_path(issues_root, rel_id):
+             raise ValueError(f"Related issue {rel_id} not found.")
+
+     issue_id = find_next_id(issue_type, issues_root)
+     base_type_dir = get_issue_dir(issue_type, issues_root)
+     target_dir = base_type_dir / status.value
+
+     if subdir:
+         target_dir = target_dir / subdir
+
+     target_dir.mkdir(parents=True, exist_ok=True)
+
+     metadata = IssueMetadata(
+         id=issue_id,
+         uid=generate_uid(),  # Generate global unique identifier
+         type=issue_type,
+         status=status,
+         stage=stage,
+         title=title,
+         parent=parent,
+         dependencies=dependencies,
+         related=related,
+         sprint=sprint,
+         tags=tags,
+         opened_at=current_time() if status == IssueStatus.OPEN else None
+     )
+
+     yaml_header = yaml.dump(metadata.model_dump(exclude_none=True, mode='json'), sort_keys=False, allow_unicode=True)
+     slug = _get_slug(title)
+     filename = f"{issue_id}-{slug}.md"
+
+     file_content = f"""---
+ {yaml_header}---
+
+ ## {issue_id}: {title}
+
+ ## Objective
+
+ ## Acceptance Criteria
+
+ ## Technical Tasks
+
+ - [ ]
+ """
+     file_path = target_dir / filename
+     file_path.write_text(file_content)
+
+     return metadata, file_path
+
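+ # Usage sketch (hypothetical values, assuming an `Issues/` root):
+ #
+ #     meta, path = create_issue_file(
+ #         issues_root=Path("Issues"),
+ #         issue_type=IssueType.FEATURE,
+ #         title="Add OAuth2 login",
+ #     )
+ #     # On a fresh project: meta.id == "FEAT-0001" and the file lands at
+ #     # Issues/Features/open/FEAT-0001-add-oauth2-login.md
+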
+ def validate_transition(
+     current_status: IssueStatus,
+     current_stage: Optional[IssueStage],
+     target_status: IssueStatus,
+     target_stage: Optional[IssueStage],
+     target_solution: Optional[str],
+     issue_dependencies: List[str],
+     issues_root: Path,
+     issue_id: str
+ ):
+     """
+     Centralized validation logic for state transitions.
+     """
+     # Policy: Prevent Backlog -> Review
+     if target_stage == IssueStage.REVIEW and current_status == IssueStatus.BACKLOG:
+         raise ValueError(f"Lifecycle Policy: Cannot submit a Backlog issue directly. Run `monoco issue pull {issue_id}` first.")
+
+     if target_status == IssueStatus.CLOSED:
+         if not target_solution:
+             raise ValueError("Closing an issue requires a solution. Please provide --solution or edit the file metadata.")
+
+         # Policy: closing as IMPLEMENTED requires the issue to currently be in REVIEW
+         if target_solution == IssueSolution.IMPLEMENTED.value:
+             if current_stage != IssueStage.REVIEW:
+                 raise ValueError(f"Lifecycle Policy: 'Implemented' issues must be submitted for review first.\nCurrent stage: {current_stage}\nAction: Run `monoco issue submit {issue_id}`.")
+
+         # Policy: no closing from DOING (general safety)
+         if current_stage == IssueStage.DOING:
+             raise ValueError("Cannot close issue in progress (Doing). Please review (`monoco issue submit`) or stop (`monoco issue open`) first.")
+
+         # Policy: dependencies must be closed
+         if issue_dependencies:
+             for dep_id in issue_dependencies:
+                 dep_path = find_issue_path(issues_root, dep_id)
+                 if dep_path:
+                     dep_meta = parse_issue(dep_path)
+                     if dep_meta and dep_meta.status != IssueStatus.CLOSED:
+                         raise ValueError(f"Dependency Block: Cannot close {issue_id} because dependency {dep_id} is not closed.")
+
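+ # Illustrative rejections: closing while the current stage is DOING raises a
+ # ValueError pointing at `monoco issue submit` / `monoco issue open`, and
+ # closing with IssueSolution.IMPLEMENTED from any stage other than REVIEW
+ # is refused the same way.
+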
+ def find_issue_path(issues_root: Path, issue_id: str) -> Optional[Path]:
+     parsed = IssueID(issue_id)
+
+     if not parsed.is_local:
+         # Resolve workspace member.
+         # Assumption: issues_root is a direct child of project_root.
+         # This is a weak assumption but fits the current architecture.
+         project_root = issues_root.parent
+         conf = get_config(str(project_root))
+
+         member_rel_path = conf.project.members.get(parsed.namespace)
+         if not member_rel_path:
+             return None
+
+         member_root = (project_root / member_rel_path).resolve()
+         # Assume the standard "Issues" directory for members to avoid loading their full config
+         member_issues = member_root / "Issues"
+
+         if not member_issues.exists():
+             return None
+
+         # Recursively search in the member project
+         return find_issue_path(member_issues, parsed.local_id)
+
+     # Local search
+     try:
+         prefix = parsed.local_id.split("-")[0].upper()
+     except IndexError:
+         return None
+
+     issue_type = REVERSE_PREFIX_MAP.get(prefix)
+     if not issue_type:
+         return None
+
+     base_dir = get_issue_dir(issue_type, issues_root)
+     # Search all status subdirs recursively; return the first match
+     for f in base_dir.rglob(f"{parsed.local_id}-*.md"):
+         return f
+     return None
+
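+ # Resolution sketch: "FEAT-0001" is looked up locally under Issues/Features/**;
+ # a namespaced ID such as "backend::FIX-0002" (hypothetical member name)
+ # resolves the `backend` member from project config and recurses into
+ # <member>/Issues with the local ID "FIX-0002".
+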
+ def update_issue(issues_root: Path, issue_id: str, status: Optional[IssueStatus] = None, stage: Optional[IssueStage] = None, solution: Optional[IssueSolution] = None) -> IssueMetadata:
+     path = find_issue_path(issues_root, issue_id)
+     if not path:
+         raise FileNotFoundError(f"Issue {issue_id} not found.")
+
+     # Read full content
+     content = path.read_text()
+
+     # Split frontmatter and body
+     match = re.search(r"^---(.*?)---\n(.*)$\n", content, re.DOTALL | re.MULTILINE)
+     if not match:
+         # Fallback: header-only match
+         match_simple = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
+         if match_simple:
+             yaml_str = match_simple.group(1)
+             body = content[match_simple.end():]
+         else:
+             raise ValueError(f"Could not parse frontmatter for {issue_id}")
+     else:
+         yaml_str = match.group(1)
+         body = match.group(2)
+
+     try:
+         data = yaml.safe_load(yaml_str) or {}
+     except yaml.YAMLError:
+         raise ValueError(f"Invalid YAML metadata in {issue_id}")
+
+     current_status_str = data.get("status", "open")  # default to open if missing
+     # Normalize current status to the enum for comparison
+     try:
+         current_status = IssueStatus(current_status_str.lower())
+     except ValueError:
+         current_status = IssueStatus.OPEN
+
+     # Logic: status update
+     target_status = status if status else current_status
+
+     # Validation for closing
+     effective_solution = solution.value if solution else data.get("solution")
+
+     # Policy: Prevent Backlog -> Review
+     if stage == IssueStage.REVIEW and current_status == IssueStatus.BACKLOG:
+         raise ValueError(f"Lifecycle Policy: Cannot submit a Backlog issue directly. Run `monoco issue pull {issue_id}` first.")
+
+     if target_status == IssueStatus.CLOSED:
+         if not effective_solution:
+             raise ValueError("Closing an issue requires a solution. Please provide --solution or edit the file metadata.")
+
+         current_data_stage = data.get('stage')
+
+         # Policy: closing as IMPLEMENTED requires the REVIEW stage
+         if effective_solution == IssueSolution.IMPLEMENTED.value:
+             if current_data_stage != IssueStage.REVIEW.value:
+                 raise ValueError(f"Lifecycle Policy: 'Implemented' issues must be submitted for review first.\nCurrent stage: {current_data_stage}\nAction: Run `monoco issue submit {issue_id}`.")
+
+         # Policy: no closing from DOING (general safety)
+         if current_data_stage == IssueStage.DOING.value:
+             raise ValueError("Cannot close issue in progress (Doing). Please review (`monoco issue submit`) or stop (`monoco issue open`) first.")
+
+         # Policy: dependencies must be closed
+         dependencies = data.get('dependencies', [])
+         if dependencies:
+             for dep_id in dependencies:
+                 dep_path = find_issue_path(issues_root, dep_id)
+                 if dep_path:
+                     dep_meta = parse_issue(dep_path)
+                     if dep_meta and dep_meta.status != IssueStatus.CLOSED:
+                         raise ValueError(f"Dependency Block: Cannot close {issue_id} because dependency {dep_id} is [Status: {dep_meta.status.value}].")
+
+     # Update data
+     if status:
+         data['status'] = status.value
+     if stage:
+         data['stage'] = stage.value
+     if solution:
+         data['solution'] = solution.value
+
+     # Lifecycle hooks
+     # 1. opened_at: if transitioning to OPEN, update it to reflect the latest
+     #    activation (per FEAT-0012: "update opened_at to now").
+     if target_status == IssueStatus.OPEN and current_status != IssueStatus.OPEN:
+         data['opened_at'] = current_time()
+
+     # 2. Backlog push: handled by IssueMetadata.validate_lifecycle (Status=Backlog -> Stage=None)
+     # 3. Closed: handled by IssueMetadata.validate_lifecycle (Status=Closed -> Stage=Done, ClosedAt=Now)
+
+     # Touch updated_at
+     data['updated_at'] = current_time()
+
+     # Re-hydrate through the model to trigger lifecycle logic (Stage, ClosedAt defaults)
+     try:
+         updated_meta = IssueMetadata(**data)
+     except Exception as e:
+         raise ValueError(f"Failed to validate updated metadata: {e}")
+
+     # Serialize back
+     new_yaml = yaml.dump(updated_meta.model_dump(exclude_none=True, mode='json'), sort_keys=False, allow_unicode=True)
+
+     # Reconstruct the file
+     match_header = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
+     if not match_header:
+         body_content = body
+     else:
+         body_content = content[match_header.end():]
+
+     if body_content.startswith('\n'):
+         body_content = body_content[1:]
+
+     new_content = f"---\n{new_yaml}---\n{body_content}"
+
+     path.write_text(new_content)
+
+     # Handle the physical move if the status changed
+     if status and status != current_status:
+         prefix = issue_id.split("-")[0].upper()
+         base_type_dir = get_issue_dir(REVERSE_PREFIX_MAP[prefix], issues_root)
+
+         try:
+             rel_path = path.relative_to(base_type_dir)
+             # Drop the leading status directory, keep any subdir structure
+             structure_path = Path(*rel_path.parts[1:]) if len(rel_path.parts) > 1 else Path(path.name)
+         except ValueError:
+             structure_path = Path(path.name)
+
+         target_path = base_type_dir / target_status.value / structure_path
+
+         if path != target_path:
+             target_path.parent.mkdir(parents=True, exist_ok=True)
+             path.rename(target_path)
+
+     # Hook: recursive aggregation (FEAT-0003)
+     if updated_meta.parent:
+         recalculate_parent(issues_root, updated_meta.parent)
+
+     return updated_meta
+
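+ # Usage sketch: closing a reviewed feature (assumes its stage is already REVIEW):
+ #
+ #     update_issue(Path("Issues"), "FEAT-0001",
+ #                  status=IssueStatus.CLOSED,
+ #                  solution=IssueSolution.IMPLEMENTED)
+ #
+ # The frontmatter is rewritten and, because the status changed, the file is
+ # moved from Features/open/... to Features/closed/....
+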
+ def start_issue_isolation(issues_root: Path, issue_id: str, mode: IsolationType, project_root: Path) -> IssueMetadata:
+     """
+     Start physical isolation for an issue (Branch or Worktree).
+     """
+     path = find_issue_path(issues_root, issue_id)
+     if not path:
+         raise FileNotFoundError(f"Issue {issue_id} not found.")
+
+     issue = parse_issue(path)
+     if not issue:
+         raise ValueError(f"Failed to parse issue {issue_id}.")
+
+     # Idempotency / conflict check
+     if issue.isolation:
+         if issue.isolation.type == mode:
+             # Already isolated in the same mode; fall through and refresh the
+             # metadata. (Checking out the existing branch is left to the CLI layer.)
+             pass
+         else:
+             raise ValueError(f"Issue {issue_id} is already isolated as '{issue.isolation.type}'. Please cleanup first.")
+
+     slug = _get_slug(issue.title)
+     branch_name = f"feat/{issue_id.lower()}-{slug}"
+
+     isolation_meta = None
+
+     if mode == IsolationType.BRANCH:
+         if not git.branch_exists(project_root, branch_name):
+             git.create_branch(project_root, branch_name, checkout=True)
+         else:
+             # Branch exists: check it out if we are not already on it
+             current = git.get_current_branch(project_root)
+             if current != branch_name:
+                 git.checkout_branch(project_root, branch_name)
+
+         isolation_meta = IssueIsolation(type=IsolationType.BRANCH, ref=branch_name)
+
+     elif mode == IsolationType.WORKTREE:
+         wt_path = project_root / ".monoco" / "worktrees" / f"{issue_id.lower()}-{slug}"
+
+         # Reuse the worktree if it already exists physically
+         if not wt_path.exists():
+             wt_path.parent.mkdir(parents=True, exist_ok=True)
+             git.worktree_add(project_root, branch_name, wt_path)
+
+         isolation_meta = IssueIsolation(type=IsolationType.WORKTREE, ref=branch_name, path=str(wt_path))
+
+     # Persist metadata: load raw frontmatter, update the isolation field, save
+     content = path.read_text()
+     match = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
+     if match:
+         yaml_str = match.group(1)
+         data = yaml.safe_load(yaml_str) or {}
+
+         data['isolation'] = isolation_meta.model_dump(mode='json')
+         # Also ensure stage is DOING (logic link)
+         data['stage'] = IssueStage.DOING.value
+         data['updated_at'] = current_time()
+
+         new_yaml = yaml.dump(data, sort_keys=False, allow_unicode=True)
+         new_content = content.replace(match.group(1), "\n" + new_yaml)
+         path.write_text(new_content)
+
+         return IssueMetadata(**data)
+
+     return issue
+
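+ # Sketch: start_issue_isolation(Path("Issues"), "FEAT-0001",
+ # IsolationType.BRANCH, Path(".")) creates (or checks out)
+ # `feat/feat-0001-<slug>` and records the isolation plus stage=doing in the
+ # issue's frontmatter; WORKTREE mode instead adds a worktree under
+ # .monoco/worktrees/feat-0001-<slug>.
+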
+ def prune_issue_resources(issues_root: Path, issue_id: str, force: bool, project_root: Path) -> List[str]:
+     """
+     Cleanup physical resources. Returns the list of actions taken.
+     """
+     path = find_issue_path(issues_root, issue_id)
+     if not path:
+         # Without the issue file we cannot read metadata to know what to prune.
+         raise FileNotFoundError(f"Issue {issue_id} not found.")
+
+     issue = parse_issue(path)
+     if not issue:
+         raise ValueError(f"Failed to parse issue {issue_id}.")
+
+     deleted_items = []
+
+     if not issue.isolation:
+         return []
+
+     if issue.isolation.type == IsolationType.BRANCH:
+         branch = issue.isolation.ref
+         current = git.get_current_branch(project_root)
+         if current == branch:
+             raise RuntimeError(f"Cannot delete active branch '{branch}'. Please checkout 'main' first.")
+
+         if git.branch_exists(project_root, branch):
+             git.delete_branch(project_root, branch, force=force)
+             deleted_items.append(f"branch:{branch}")
+
+     elif issue.isolation.type == IsolationType.WORKTREE:
+         wt_path_str = issue.isolation.path
+         if wt_path_str:
+             wt_path = Path(wt_path_str)
+             # Normalize path if relative
+             if not wt_path.is_absolute():
+                 wt_path = project_root / wt_path
+
+             if wt_path.exists():
+                 git.worktree_remove(project_root, wt_path, force=force)
+                 deleted_items.append(f"worktree:{wt_path.name}")
+
+         # Also delete the associated branch: worktree creation makes a branch,
+         # and removing the worktree leaves it behind. `git worktree remove`
+         # unlocks the branch, so it can be deleted afterwards.
+         branch = issue.isolation.ref
+         if branch and git.branch_exists(project_root, branch):
+             git.delete_branch(project_root, branch, force=force)
+             deleted_items.append(f"branch:{branch}")
+
+     # Clear metadata
+     content = path.read_text()
+     match = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
+     if match:
+         yaml_str = match.group(1)
+         data = yaml.safe_load(yaml_str) or {}
+
+         if 'isolation' in data:
+             del data['isolation']
+             data['updated_at'] = current_time()
+
+             new_yaml = yaml.dump(data, sort_keys=False, allow_unicode=True)
+             new_content = content.replace(match.group(1), "\n" + new_yaml)
+             path.write_text(new_content)
+
+     return deleted_items
+
+ def delete_issue_file(issues_root: Path, issue_id: str):
+     """
+     Physical removal of an issue file.
+     """
+     path = find_issue_path(issues_root, issue_id)
+     if not path:
+         raise FileNotFoundError(f"Issue {issue_id} not found.")
+
+     path.unlink()
+
+ # Resources
+ SKILL_CONTENT = """
+ ---
+ name: issues-management
+ description: Official skill definition for the Monoco Issue System. Treats every Issue as a Universal Atom and manages the lifecycle of Epics/Features/Chores/Fixes.
+ ---
+
+ # Self-Management (Monoco Issue System)
+
+ Use this skill to create and manage **Issues** (universal atoms) in a Monoco project.
+
+ ## Core Ontology
+
+ ### 1. Strategy Layer
+ - **🏆 EPIC**: A grand goal; a container for the vision. Mindset: Architect.
+
+ ### 2. Value Layer
+ - **✨ FEATURE**: A value increment from the user's perspective. Mindset: Product Owner.
+ - **Atomicity Principle**: Feature = Design + Dev + Test + Doc + i18n. They are one unit.
+
+ ### 3. Execution Layer
+ - **🧹 CHORE**: Engineering maintenance with no direct user value. Mindset: Builder.
+ - **🐞 FIX**: Correcting a deviation. Mindset: Debugger.
+
+ ## Guidelines
+
+ ### Directory Structure (Strictly Enforced)
+ `Issues/{Type}/{status}/`
+
+ - **Type Level (Capitalized Plural)**: `Epics`, `Features`, `Chores`, `Fixes`
+ - **Status Level (Lowercase)**: `open`, `backlog`, `closed`
+
+ ### Workflow
+ Use `monoco issue`:
+ 1. **Create**: `monoco issue create <type> --title "..."`
+ 2. **Transition**: `monoco issue open/close/backlog <id>`
+ 3. **View**: `monoco issue scope`
+ 4. **Validation**: `monoco issue lint`
+ 5. **Modification**: `monoco issue start/submit/delete <id>`
+ """
+
+ PROMPT_CONTENT = """
+ ### Issue Management
+ System for managing tasks using `monoco issue`.
+ - **Create**: `monoco issue create <type> -t "Title"` (types: epic, feature, chore, fix)
+ - **Status**: `monoco issue open|close|backlog <id>`
+ - **Check**: `monoco issue lint` (must run after manual edits)
+ - **Lifecycle**: `monoco issue start|submit|delete <id>`
+ - **Structure**: `Issues/{CapitalizedPluralType}/{lowercase_status}/` (e.g. `Issues/Features/open/`). Do not deviate.
+ """
+
+ def init(issues_root: Path):
+     """Initialize the Issues directory structure."""
+     issues_root.mkdir(parents=True, exist_ok=True)
+
+     # Standard directories based on the current terminology
+     for subdir in ["Epics", "Features", "Chores", "Fixes"]:
+         (issues_root / subdir).mkdir(exist_ok=True)
+         # Status subdirs are normally created on demand by open/backlog,
+         # but creating them up front is good for guidance.
+         for status in ["open", "backlog", "closed"]:
+             (issues_root / subdir / status).mkdir(exist_ok=True)
+
+     # Optionally, .gitkeep files could be added so empty dirs stay tracked.
+
+ def get_resources() -> Dict[str, Any]:
+     return {
+         "skills": {
+             "issues-management": SKILL_CONTENT
+         },
+         "prompts": {
+             "issues-management": PROMPT_CONTENT
+         }
+     }
+
+ def list_issues(issues_root: Path, recursive_workspace: bool = False) -> List[IssueMetadata]:
+     """
+     List all issues in the project.
+     """
+     issues = []
+     for issue_type in IssueType:
+         base_dir = get_issue_dir(issue_type, issues_root)
+         for status_dir in ["open", "backlog", "closed"]:
+             d = base_dir / status_dir
+             if d.exists():
+                 for f in d.rglob("*.md"):
+                     meta = parse_issue(f)
+                     if meta:
+                         issues.append(meta)
+
+     if recursive_workspace:
+         # Resolve workspace members
+         try:
+             # Weak assumption: issues_root.parent is project_root
+             project_root = issues_root.parent
+             conf = get_config(str(project_root))
+
+             for name, rel_path in conf.project.members.items():
+                 member_root = (project_root / rel_path).resolve()
+                 member_issues_dir = member_root / "Issues"  # Standard convention
+
+                 if member_issues_dir.exists():
+                     # Fetch member issues (non-recursive to avoid loops)
+                     member_issues = list_issues(member_issues_dir, False)
+                     for m in member_issues:
+                         # Namespace the ID to avoid collisions and indicate origin
+                         m.id = f"{name}::{m.id}"
+                         issues.append(m)
+         except Exception:
+             # Fail silently on workspace resolution errors (e.g. missing config)
+             pass
+
+     return issues
+
+ def get_board_data(issues_root: Path) -> Dict[str, List[IssueMetadata]]:
+     """
+     Get open issues grouped by their stage for the Kanban view.
+     """
+     board = {
+         IssueStage.TODO.value: [],
+         IssueStage.DOING.value: [],
+         IssueStage.REVIEW.value: [],
+         IssueStage.DONE.value: []
+     }
+
+     issues = list_issues(issues_root)
+     for issue in issues:
+         if issue.status == IssueStatus.OPEN and issue.stage:
+             stage_val = issue.stage.value
+             if stage_val in board:
+                 board[stage_val].append(issue)
+         elif issue.status == IssueStatus.CLOSED:
+             # Show closed items in the DONE column
+             board[IssueStage.DONE.value].append(issue)
+
+     return board
+
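+ # Board shape (illustrative, assuming the lowercase stage values used elsewhere
+ # in this module): {"todo": [...], "doing": [...], "review": [...], "done": [...]};
+ # closed issues are appended to the "done" column.
+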
+ def validate_issue_integrity(meta: IssueMetadata, all_issue_ids: Optional[Set[str]] = None) -> List[str]:
+     """
+     Validate metadata integrity (solution, lifecycle, links).
+     UI-agnostic.
+     """
+     errors = []
+     if meta.status == IssueStatus.CLOSED and not meta.solution:
+         errors.append(f"Solution Missing: {meta.id} is closed but has no solution field.")
+
+     if meta.parent:
+         if all_issue_ids and meta.parent not in all_issue_ids:
+             errors.append(f"Broken Link: {meta.id} refers to non-existent parent {meta.parent}.")
+
+     if meta.status == IssueStatus.BACKLOG and meta.stage != IssueStage.FREEZED:
+         errors.append(f"Lifecycle Error: {meta.id} is backlog but stage is not freezed (found: {meta.stage}).")
+
+     return errors
+
+ def update_issue_content(issues_root: Path, issue_id: str, new_content: str) -> IssueMetadata:
+     """
+     Update the raw content of an issue file.
+     Validates integrity before saving.
+     Handles file moves if the status changes.
+     """
+     path = find_issue_path(issues_root, issue_id)
+     if not path:
+         raise FileNotFoundError(f"Issue {issue_id} not found.")
+
+     # 1. Parse the new content (via a temp file, to reuse parse_issue)
+     import tempfile
+
+     with tempfile.NamedTemporaryFile(mode='w+', suffix='.md', delete=False) as tmp:
+         tmp.write(new_content)
+         tmp_path = Path(tmp.name)
+
+     try:
+         meta = parse_issue(tmp_path)
+         if not meta:
+             raise ValueError("Invalid Issue Content: Frontmatter missing or invalid.")
+
+         if meta.id != issue_id:
+             raise ValueError(f"Cannot change Issue ID (Original: {issue_id}, New: {meta.id})")
+
+         # 2. Integrity check
+         errors = validate_issue_integrity(meta)
+         if errors:
+             raise ValueError(f"Validation Failed: {'; '.join(errors)}")
+
+         # 3. Write, then move if the status changed.
+         # Overwrite the *current* path first.
+         path.write_text(new_content)
+
+         # Re-derive the expected path from the new status (same logic as update_issue)
+         prefix = issue_id.split("-")[0].upper()
+         base_type_dir = get_issue_dir(REVERSE_PREFIX_MAP[prefix], issues_root)
+
+         # Calculate the structure path, preserving subdirs. rel_path includes the
+         # status dir, e.g. open/Backend/Auth/FEAT-123.md -> parts=('open', 'Backend', 'Auth', 'FEAT-123.md')
+         try:
+             rel_path = path.relative_to(base_type_dir)
+             structure_path = Path(*rel_path.parts[1:]) if len(rel_path.parts) > 1 else Path(path.name)
+         except ValueError:
+             # Fallback for unexpected locations
+             structure_path = Path(path.name)
+
+         target_path = base_type_dir / meta.status.value / structure_path
+
+         if path != target_path:
+             target_path.parent.mkdir(parents=True, exist_ok=True)
+             path.rename(target_path)
+
+         return meta
+
+     finally:
+         if os.path.exists(tmp_path):
+             os.unlink(tmp_path)
+
+ def generate_delivery_report(issues_root: Path, issue_id: str, project_root: Path) -> IssueMetadata:
+     """
+     Scan git history for commits related to this issue (Ref: ID),
+     aggregate touched files, and append/update the '## Delivery' section in the issue body.
+     """
+     path = find_issue_path(issues_root, issue_id)
+     if not path:
+         raise FileNotFoundError(f"Issue {issue_id} not found.")
+
+     # 1. Scan git
+     commits = git.search_commits_by_message(project_root, f"Ref: {issue_id}")
+
+     if not commits:
+         return parse_issue(path)
+
+     # 2. Aggregate data
+     all_files = set()
+     commit_list_md = []
+
+     for c in commits:
+         short_hash = c['hash'][:7]
+         commit_list_md.append(f"- `{short_hash}` {c['subject']}")
+         for f in c['files']:
+             all_files.add(f)
+
+     sorted_files = sorted(all_files)
+
+     # 3. Format the report
+     delivery_section = f"""
+ ## Delivery
+ <!-- Monoco Auto Generated -->
+ **Commits ({len(commits)})**:
+ {chr(10).join(commit_list_md)}
+
+ **Touched Files ({len(sorted_files)})**:
+ """ + "\n".join([f"- `{f}`" for f in sorted_files])
+
+     # 4. Update the file content
+     content = path.read_text()
+
+     if "## Delivery" in content:
+         # Replace the existing section. The report is appended at the end of
+         # the file, so with DOTALL this replaces from "## Delivery" to EOF.
+         pattern = r"## Delivery.*"
+         content = re.sub(pattern, delivery_section.strip(), content, flags=re.DOTALL)
+     else:
+         # Append
+         if not content.endswith("\n"):
+             content += "\n"
+         content += "\n" + delivery_section.strip() + "\n"
+
+     path.write_text(content)
+
+     # 5. Delivery stats (e.g. files_count) could be persisted in metadata for
+     # recursive aggregation (FEAT-0003), but IssueMetadata has no 'delivery'
+     # field yet; persisting the text is enough for FEAT-0002.
+
+     return parse_issue(path)
+
+ def get_children(issues_root: Path, parent_id: str) -> List[IssueMetadata]:
+     """Find all direct children of an issue."""
+     all_issues = list_issues(issues_root)
+     return [i for i in all_issues if i.parent == parent_id]
+
+ def count_files_in_delivery(issue_path: Path) -> int:
+     """Parse the ## Delivery section to count touched files."""
+     try:
+         content = issue_path.read_text()
+         match = re.search(r"\*\*Touched Files \((\d+)\)\*\*", content)
+         if match:
+             return int(match.group(1))
+     except Exception:
+         pass
+     return 0
+
+ def parse_search_query(query: str) -> Tuple[List[str], List[str], List[str]]:
+     """
+     Parse a search query string into explicit positives, optional terms, and negatives.
+     Supported syntax:
+     - `+term`: Must include (AND)
+     - `-term`: Must not include (NOT)
+     - `term`: Optional (nice to have) - OR logic if no +term exists
+     - `"phrase with space"`: Quoted match
+     """
+     if not query:
+         return [], [], []
+
+     import shlex
+     try:
+         tokens = shlex.split(query)
+     except ValueError:
+         # Fallback for unbalanced quotes
+         tokens = query.split()
+
+     explicit_positives = []
+     terms = []
+     negatives = []
+
+     for token in tokens:
+         token_lower = token.lower()
+         if token_lower.startswith("-") and len(token_lower) > 1:
+             negatives.append(token_lower[1:])
+         elif token_lower.startswith("+") and len(token_lower) > 1:
+             explicit_positives.append(token_lower[1:])
+         else:
+             terms.append(token_lower)
+
+     return explicit_positives, terms, negatives
+
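+ # Example: parse_search_query('+auth -legacy "login page"') returns
+ # (["auth"], ["login page"], ["legacy"]): "auth" is required, "legacy" is
+ # excluded, and the quoted phrase is an optional term.
+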
+ def check_issue_match(issue: IssueMetadata, explicit_positives: List[str], terms: List[str], negatives: List[str], full_content: str = "") -> bool:
+     """
+     Check if an issue matches the search criteria.
+     Considered fields: id, title, status, stage, type, tags, dependencies, related.
+     Optional: full_content (body) if available.
+     """
+     # 1. Aggregate searchable text: join all fields into one searchable blob
+     searchable_parts = [
+         issue.id,
+         issue.title,
+         issue.status.value,
+         issue.type.value,
+         str(issue.stage.value) if issue.stage else "",
+         *(issue.tags or []),
+         *(issue.dependencies or []),
+         *(issue.related or []),
+         full_content
+     ]
+
+     # Normalize the blob
+     blob = " ".join(filter(None, searchable_parts)).lower()
+
+     # 2. Check negatives (fast fail)
+     for term in negatives:
+         if term in blob:
+             return False
+
+     # 3. Check explicit positives (must match ALL)
+     for term in explicit_positives:
+         if term not in blob:
+             return False
+
+     # 4. Check plain terms (nice to have): if explicit positives exist, plain
+     # terms are optional; otherwise the issue must match at least one (implicit OR).
+     if terms and not explicit_positives:
+         if not any(term in blob for term in terms):
+             return False
+
+     return True
+
+ def search_issues(issues_root: Path, query: str) -> List[IssueMetadata]:
+     """
+     Search issues using the advanced query syntax.
+     Returns a list of matching IssueMetadata.
+     """
+     explicit_positives, terms, negatives = parse_search_query(query)
+
+     # An empty query matches everything (aligned with `grep`-like behavior);
+     # the CLI `list` command remains the canonical way to see all issues.
+     if not explicit_positives and not terms and not negatives:
+         return list_issues(issues_root)
+
+     matches = []
+     all_files = []
+
+     # Gather all files first. list_issues/parse_issue only parse frontmatter
+     # and discard the body, so for deep (body) search we iterate the files
+     # directly and read the full text.
+     for issue_type in IssueType:
+         base_dir = get_issue_dir(issue_type, issues_root)
+         for status_dir in ["open", "backlog", "closed"]:
+             d = base_dir / status_dir
+             if d.exists():
+                 for f in d.rglob("*.md"):
+                     all_files.append(f)
+
+     for f in all_files:
+         # Full content is needed for body search
+         try:
+             content = f.read_text()
+             # Parse metadata
+             match = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
+             if not match:
+                 continue
+
+             yaml_str = match.group(1)
+             data = yaml.safe_load(yaml_str)
+             if not isinstance(data, dict):
+                 continue
+
+             meta = IssueMetadata(**data)
+
+             # Match against the query
+             if check_issue_match(meta, explicit_positives, terms, negatives, full_content=content):
+                 matches.append(meta)
+
+         except Exception:
+             continue
+
+     return matches
+
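+ # Matching sketch: with the query above, an issue matches only if its blob
+ # (frontmatter fields + full file text, lowercased) contains "auth" and does
+ # not contain "legacy"; "login page" is optional because an explicit +term
+ # is present.
+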
+ def recalculate_parent(issues_root: Path, parent_id: str):
+     """
+     Update parent Epic/Feature stats based on its children:
+     - Progress (closed/total)
+     - Total files touched (sum of children's deliveries)
+     """
+     parent_path = find_issue_path(issues_root, parent_id)
+     if not parent_path:
+         return
+
+     children = get_children(issues_root, parent_id)
+     if not children:
+         return
+
+     total = len(children)
+     closed = len([c for c in children if c.status == IssueStatus.CLOSED])
+     # Progress string, e.g. "3/5"
+     progress_str = f"{closed}/{total}"
+
+     # Files count
+     total_files = 0
+     for child in children:
+         child_path = find_issue_path(issues_root, child.id)
+         if child_path:
+             total_files += count_files_in_delivery(child_path)
+
+     # Patch the parent's metadata directly; update_issue is too heavy/strict
+     # for a generic stats update.
+     content = parent_path.read_text()
+     match = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
+     if match:
+         yaml_str = match.group(1)
+         data = yaml.safe_load(yaml_str) or {}
+
+         # Skip the write if nothing changed, to avoid churn
+         old_progress = data.get("progress")
+         old_files = data.get("files_count")
+
+         if old_progress == progress_str and old_files == total_files:
+             return
+
+         data["progress"] = progress_str
+         data["files_count"] = total_files
+
+         # Auto-start (FEAT-0003): if the parent is still OPEN/TODO and any
+         # child is active or closed, move the parent's stage to DOING.
+         current_status = data.get("status", "open").lower()
+         current_stage = data.get("stage", "todo").lower()
+
+         if current_status == "open" and current_stage == "todo":
+             active_children = [c for c in children if c.status == IssueStatus.OPEN and c.stage != IssueStage.TODO]
+             closed_children = [c for c in children if c.status == IssueStatus.CLOSED]
+
+             if active_children or closed_children:
+                 data["stage"] = "doing"
+
+         # Serialize and replace the header
+         new_yaml = yaml.dump(data, sort_keys=False, allow_unicode=True)
+         new_content = content.replace(match.group(1), "\n" + new_yaml)
+         parent_path.write_text(new_content)
+
+         # Recurse upwards so grandparents are updated too
+         parent_parent = data.get("parent")
+         if parent_parent:
+             recalculate_parent(issues_root, parent_parent)
+
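+ # Aggregation sketch: an epic with 5 children of which 3 are closed gets
+ # progress: "3/5", files_count summed from each child's "**Touched Files (N)**"
+ # line, and the change propagates recursively to its own parent.
+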
+ def move_issue(
+     source_issues_root: Path,
+     issue_id: str,
+     target_issues_root: Path,
+     renumber: bool = False
+ ) -> Tuple[IssueMetadata, Path]:
+     """
+     Move an issue from one project to another.
+
+     Args:
+         source_issues_root: Source project's Issues directory
+         issue_id: ID of the issue to move
+         target_issues_root: Target project's Issues directory
+         renumber: If True, automatically renumber on ID conflict
+
+     Returns:
+         Tuple of (updated metadata, new file path)
+
+     Raises:
+         FileNotFoundError: If the source issue doesn't exist
+         ValueError: If an ID conflict exists and renumber=False
+     """
+     # 1. Find the source issue
+     source_path = find_issue_path(source_issues_root, issue_id)
+     if not source_path:
+         raise FileNotFoundError(f"Issue {issue_id} not found in source project.")
+
+     # 2. Parse the issue (metadata + body)
+     issue = parse_issue_detail(source_path)
+     if not issue:
+         raise ValueError(f"Failed to parse issue {issue_id}.")
+
+     # 3. Check for an ID conflict in the target
+     target_conflict_path = find_issue_path(target_issues_root, issue_id)
+
+     if target_conflict_path:
+         conflict_issue = parse_issue(target_conflict_path)
+
+         # Same UID means it is the same issue, i.e. a duplicate
+         if issue.uid and conflict_issue and conflict_issue.uid == issue.uid:
+             raise ValueError(
+                 f"Issue {issue_id} (uid: {issue.uid}) already exists in target project. "
+                 "This appears to be a duplicate."
+             )
+
+         # Different issues with the same ID
+         if not renumber:
+             conflict_info = ""
+             if conflict_issue:
+                 conflict_info = f" (uid: {conflict_issue.uid}, created: {conflict_issue.created_at}, stage: {conflict_issue.stage})"
+             raise ValueError(
+                 f"ID conflict: Target project already has {issue_id}{conflict_info}.\n"
+                 f"Use --renumber to automatically assign a new ID."
+             )
+
+         # Auto-renumber
+         new_id = find_next_id(issue.type, target_issues_root)
+         old_id = issue.id
+         issue.id = new_id
+     else:
+         new_id = issue.id
+         old_id = issue.id
+
+     # 4. Construct the target path
+     target_type_dir = get_issue_dir(issue.type, target_issues_root)
+     target_status_dir = target_type_dir / issue.status.value
+
+     # Preserve any subdirectory structure
+     try:
+         source_type_dir = get_issue_dir(issue.type, source_issues_root)
+         rel_path = source_path.relative_to(source_type_dir)
+         # Remove the status directory component
+         structure_path = Path(*rel_path.parts[1:]) if len(rel_path.parts) > 1 else Path(source_path.name)
+     except ValueError:
+         structure_path = Path(source_path.name)
+
+     # Update the filename if the ID changed
+     if new_id != old_id:
+         old_filename = source_path.name
+         new_filename = old_filename.replace(old_id, new_id, 1)
+         structure_path = structure_path.parent / new_filename if structure_path.parent != Path('.') else Path(new_filename)
+
+     target_path = target_status_dir / structure_path
+     target_path.parent.mkdir(parents=True, exist_ok=True)
+
+     # 5. Update the content if the ID changed
+     if new_id != old_id:
+         # Update frontmatter
+         content = issue.raw_content
+         match = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
+         if match:
+             yaml_str = match.group(1)
+             data = yaml.safe_load(yaml_str) or {}
+             data['id'] = new_id
+             data['updated_at'] = current_time()
+
+             new_yaml = yaml.dump(data, sort_keys=False, allow_unicode=True)
+
+             # Update the body (replace the old ID in the heading)
+             body = content[match.end():]
+             body = body.replace(f"## {old_id}:", f"## {new_id}:", 1)
+
+             new_content = f"---\n{new_yaml}---{body}"
+         else:
+             new_content = issue.raw_content
+     else:
+         new_content = issue.raw_content
+
+     # 6. Write to the target
+     target_path.write_text(new_content)
+
+     # 7. Remove the source
+     source_path.unlink()
+
+     # 8. Return the updated metadata
+     final_meta = parse_issue(target_path)
+     return final_meta, target_path
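+
+ # Move sketch (hypothetical IDs): move_issue(Path("a/Issues"), "FEAT-0003",
+ # Path("b/Issues"), renumber=True) assigns the target's next free ID on
+ # conflict, rewrites the frontmatter `id` and the `## FEAT-...:` heading,
+ # writes the file under the target's matching status directory, and removes
+ # the source file.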