monoco-toolkit 0.1.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- monoco/core/__init__.py +0 -0
- monoco/core/config.py +113 -0
- monoco/core/git.py +184 -0
- monoco/core/output.py +97 -0
- monoco/core/setup.py +285 -0
- monoco/core/telemetry.py +89 -0
- monoco/core/workspace.py +40 -0
- monoco/daemon/__init__.py +0 -0
- monoco/daemon/app.py +378 -0
- monoco/daemon/commands.py +36 -0
- monoco/daemon/models.py +24 -0
- monoco/daemon/reproduce_stats.py +41 -0
- monoco/daemon/services.py +265 -0
- monoco/daemon/stats.py +124 -0
- monoco/features/__init__.py +0 -0
- monoco/features/config/commands.py +70 -0
- monoco/features/i18n/__init__.py +0 -0
- monoco/features/i18n/commands.py +121 -0
- monoco/features/i18n/core.py +178 -0
- monoco/features/issue/commands.py +710 -0
- monoco/features/issue/core.py +1183 -0
- monoco/features/issue/linter.py +172 -0
- monoco/features/issue/models.py +157 -0
- monoco/features/pty/core.py +185 -0
- monoco/features/pty/router.py +138 -0
- monoco/features/pty/server.py +56 -0
- monoco/features/skills/__init__.py +1 -0
- monoco/features/skills/core.py +96 -0
- monoco/features/spike/commands.py +110 -0
- monoco/features/spike/core.py +154 -0
- monoco/main.py +110 -0
- monoco_toolkit-0.1.5.dist-info/METADATA +93 -0
- monoco_toolkit-0.1.5.dist-info/RECORD +36 -0
- monoco_toolkit-0.1.5.dist-info/WHEEL +4 -0
- monoco_toolkit-0.1.5.dist-info/entry_points.txt +2 -0
- monoco_toolkit-0.1.5.dist-info/licenses/LICENSE +21 -0
monoco/features/issue/core.py
@@ -0,0 +1,1183 @@
import os
import re
import yaml
from pathlib import Path
from typing import List, Dict, Optional, Tuple, Any, Set
from datetime import datetime
from .models import IssueMetadata, IssueType, IssueStatus, IssueSolution, IssueStage, IssueDetail, IsolationType, IssueIsolation, IssueID, current_time, generate_uid
from monoco.core import git
from monoco.core.config import get_config

PREFIX_MAP = {
    IssueType.EPIC: "EPIC",
    IssueType.FEATURE: "FEAT",
    IssueType.CHORE: "CHORE",
    IssueType.FIX: "FIX"
}

REVERSE_PREFIX_MAP = {v: k for k, v in PREFIX_MAP.items()}

def _get_slug(title: str) -> str:
    slug = title.lower()
    # Replace non-word characters (including punctuation, spaces) with hyphens
    # \w matches Unicode word characters (letters, numbers, underscores)
    slug = re.sub(r"[^\w]+", "-", slug)
    slug = slug.strip("-")[:50]

    if not slug:
        slug = "issue"

    return slug
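
# Illustrative only (not part of the module): _get_slug normalizes a title
# into a filesystem-safe fragment. Example values are hypothetical:
#
#   _get_slug("Add OAuth2 Login!")  -> "add-oauth2-login"
#   _get_slug("!!!")                -> "issue"  (empty slugs fall back)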

def get_issue_dir(issue_type: IssueType, issues_root: Path) -> Path:
    mapping = {
        IssueType.EPIC: "Epics",
        IssueType.FEATURE: "Features",
        IssueType.CHORE: "Chores",
        IssueType.FIX: "Fixes",
    }
    return issues_root / mapping[issue_type]

def parse_issue(file_path: Path) -> Optional[IssueMetadata]:
    if not file_path.suffix == ".md":
        return None

    content = file_path.read_text()
    match = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
    if not match:
        return None

    try:
        data = yaml.safe_load(match.group(1))
        if not isinstance(data, dict):
            return None

        # Inject path before validation to ensure it persists
        data['path'] = str(file_path.absolute())
        meta = IssueMetadata(**data)
        return meta
    except Exception:
        return None

def parse_issue_detail(file_path: Path) -> Optional[IssueDetail]:
    if not file_path.suffix == ".md":
        return None

    content = file_path.read_text()
    # Robust splitting
    match = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
    if not match:
        return None

    yaml_str = match.group(1)
    body = content[match.end():].lstrip()

    try:
        data = yaml.safe_load(yaml_str)
        if not isinstance(data, dict):
            return None

        data['path'] = str(file_path.absolute())
        return IssueDetail(**data, body=body, raw_content=content)
    except Exception:
        return None
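
# Sketch of the file shape these parsers expect (field values are
# hypothetical; the authoritative schema lives in .models):
#
#   ---
#   id: FEAT-0001
#   type: feature
#   status: open
#   title: Example title
#   ---
#   ## FEAT-0001: Example title
#
# parse_issue returns frontmatter-only IssueMetadata; parse_issue_detail
# additionally captures the markdown body and the raw file content.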

def find_next_id(issue_type: IssueType, issues_root: Path) -> str:
    prefix = PREFIX_MAP[issue_type]
    pattern = re.compile(rf"{prefix}-(\d+)")
    max_id = 0

    base_dir = get_issue_dir(issue_type, issues_root)
    # Scan all subdirs: open, backlog, closed
    for status_dir in ["open", "backlog", "closed"]:
        d = base_dir / status_dir
        if d.exists():
            for f in d.rglob("*.md"):
                match = pattern.search(f.name)
                if match:
                    max_id = max(max_id, int(match.group(1)))

    return f"{prefix}-{max_id + 1:04d}"

def create_issue_file(
    issues_root: Path,
    issue_type: IssueType,
    title: str,
    parent: Optional[str] = None,
    status: IssueStatus = IssueStatus.OPEN,
    stage: Optional[IssueStage] = None,
    dependencies: List[str] = [],
    related: List[str] = [],
    subdir: Optional[str] = None,
    sprint: Optional[str] = None,
    tags: List[str] = []
) -> Tuple[IssueMetadata, Path]:

    # Validation
    for dep_id in dependencies:
        if not find_issue_path(issues_root, dep_id):
            raise ValueError(f"Dependency issue {dep_id} not found.")

    for rel_id in related:
        if not find_issue_path(issues_root, rel_id):
            raise ValueError(f"Related issue {rel_id} not found.")

    issue_id = find_next_id(issue_type, issues_root)
    base_type_dir = get_issue_dir(issue_type, issues_root)
    target_dir = base_type_dir / status.value

    if subdir:
        target_dir = target_dir / subdir

    target_dir.mkdir(parents=True, exist_ok=True)

    metadata = IssueMetadata(
        id=issue_id,
        uid=generate_uid(),  # Generate global unique identifier
        type=issue_type,
        status=status,
        stage=stage,
        title=title,
        parent=parent,
        dependencies=dependencies,
        related=related,
        sprint=sprint,
        tags=tags,
        opened_at=current_time() if status == IssueStatus.OPEN else None
    )

    yaml_header = yaml.dump(metadata.model_dump(exclude_none=True, mode='json'), sort_keys=False, allow_unicode=True)
    slug = _get_slug(title)
    filename = f"{issue_id}-{slug}.md"

    file_content = f"""---
{yaml_header}---

## {issue_id}: {title}

## Objective

## Acceptance Criteria

## Technical Tasks

- [ ]
"""
    file_path = target_dir / filename
    file_path.write_text(file_content)

    # Inject path into returned metadata
    metadata.path = str(file_path.absolute())

    return metadata, file_path
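
# Usage sketch (hypothetical values), e.g. from the CLI layer:
#
#   meta, path = create_issue_file(
#       issues_root=Path("Issues"),
#       issue_type=IssueType.FEATURE,
#       title="Add OAuth2 login",
#       tags=["auth"],
#   )
#   # -> writes Issues/Features/open/FEAT-0001-add-oauth2-login.md and
#   #    returns the hydrated IssueMetadata plus the file path.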

def validate_transition(
    current_status: IssueStatus,
    current_stage: Optional[IssueStage],
    target_status: IssueStatus,
    target_stage: Optional[IssueStage],
    target_solution: Optional[str],
    issue_dependencies: List[str],
    issues_root: Path,
    issue_id: str
):
    """
    Centralized validation logic for state transitions.
    """
    # Policy: Prevent Backlog -> Review
    if target_stage == IssueStage.REVIEW and current_status == IssueStatus.BACKLOG:
        raise ValueError(f"Lifecycle Policy: Cannot submit Backlog issue directly. Run `monoco issue pull {issue_id}` first.")

    if target_status == IssueStatus.CLOSED:
        if not target_solution:
            raise ValueError("Closing an issue requires a solution. Please provide --solution or edit the file metadata.")

        # Policy: IMPLEMENTED requires REVIEW stage.
        # Transitioning FROM Review is fine; transitioning TO Closed
        # requires the current stage to be Review.
        if target_solution == IssueSolution.IMPLEMENTED.value:
            if current_stage != IssueStage.REVIEW:
                raise ValueError(f"Lifecycle Policy: 'Implemented' issues must be submitted for review first.\nCurrent stage: {current_stage}\nAction: Run `monoco issue submit {issue_id}`.")

        # Policy: No closing from DOING (General Safety)
        if current_stage == IssueStage.DOING:
            raise ValueError("Cannot close issue in progress (Doing). Please review (`monoco issue submit`) or stop (`monoco issue open`) first.")

        # Policy: Dependencies must be closed
        if issue_dependencies:
            for dep_id in issue_dependencies:
                dep_path = find_issue_path(issues_root, dep_id)
                if dep_path:
                    dep_meta = parse_issue(dep_path)
                    if dep_meta and dep_meta.status != IssueStatus.CLOSED:
                        raise ValueError(f"Dependency Block: Cannot close {issue_id} because dependency {dep_id} is not closed.")

def find_issue_path(issues_root: Path, issue_id: str) -> Optional[Path]:
    parsed = IssueID(issue_id)

    if not parsed.is_local:
        # Resolve Workspace
        # Assumption: issues_root is a direct child of project_root.
        # This is a weak assumption but fits the current architecture.
        project_root = issues_root.parent
        conf = get_config(str(project_root))

        member_rel_path = conf.project.members.get(parsed.namespace)
        if not member_rel_path:
            return None

        member_root = (project_root / member_rel_path).resolve()
        # Assume standard "Issues" directory for members to avoid loading full config
        member_issues = member_root / "Issues"

        if not member_issues.exists():
            return None

        # Recursively search in member project
        return find_issue_path(member_issues, parsed.local_id)

    # Local Search
    try:
        prefix = parsed.local_id.split("-")[0].upper()
    except IndexError:
        return None

    issue_type = REVERSE_PREFIX_MAP.get(prefix)
    if not issue_type:
        return None

    base_dir = get_issue_dir(issue_type, issues_root)
    # Search in all status subdirs recursively
    for f in base_dir.rglob(f"{parsed.local_id}-*.md"):
        return f
    return None
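
# Resolution sketch: local IDs are located by scanning the type directory;
# namespaced IDs resolve through workspace members first. Assuming a member
# "webapp" registered in the project config:
#
#   find_issue_path(Path("Issues"), "FEAT-0001")          # local search
#   find_issue_path(Path("Issues"), "webapp::FEAT-0001")  # member search
#
# (The "namespace::local" spelling is inferred from how list_issues
# namespaces member IDs; the exact parsing lives in IssueID in .models.)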

def update_issue(issues_root: Path, issue_id: str, status: Optional[IssueStatus] = None, stage: Optional[IssueStage] = None, solution: Optional[IssueSolution] = None) -> IssueMetadata:
    path = find_issue_path(issues_root, issue_id)
    if not path:
        raise FileNotFoundError(f"Issue {issue_id} not found.")

    # Read full content
    content = path.read_text()

    # Split Frontmatter and Body
    match = re.search(r"^---(.*?)---\n(.*)$\n", content, re.DOTALL | re.MULTILINE)
    if not match:
        # Fallback
        match_simple = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
        if match_simple:
            yaml_str = match_simple.group(1)
            body = content[match_simple.end():]
        else:
            raise ValueError(f"Could not parse frontmatter for {issue_id}")
    else:
        yaml_str = match.group(1)
        body = match.group(2)

    try:
        data = yaml.safe_load(yaml_str) or {}
    except yaml.YAMLError:
        raise ValueError(f"Invalid YAML metadata in {issue_id}")

    current_status_str = data.get("status", "open")  # default to open if missing
    # Normalize current status to Enum for comparison
    try:
        current_status = IssueStatus(current_status_str.lower())
    except ValueError:
        current_status = IssueStatus.OPEN

    # Logic: Status Update
    target_status = status if status else current_status

    # Validation: For closing
    effective_solution = solution.value if solution else data.get("solution")

    # Policy: Prevent Backlog -> Review
    if stage == IssueStage.REVIEW and current_status == IssueStatus.BACKLOG:
        raise ValueError(f"Lifecycle Policy: Cannot submit Backlog issue directly. Run `monoco issue pull {issue_id}` first.")

    if target_status == IssueStatus.CLOSED:
        if not effective_solution:
            raise ValueError("Closing an issue requires a solution. Please provide --solution or edit the file metadata.")

        current_data_stage = data.get('stage')

        # Policy: IMPLEMENTED requires REVIEW stage
        if effective_solution == IssueSolution.IMPLEMENTED.value:
            if current_data_stage != IssueStage.REVIEW.value:
                raise ValueError(f"Lifecycle Policy: 'Implemented' issues must be submitted for review first.\nCurrent stage: {current_data_stage}\nAction: Run `monoco issue submit {issue_id}`.")

        # Policy: No closing from DOING (General Safety)
        if current_data_stage == IssueStage.DOING.value:
            raise ValueError("Cannot close issue in progress (Doing). Please review (`monoco issue submit`) or stop (`monoco issue open`) first.")

        # Policy: Dependencies must be closed
        dependencies = data.get('dependencies', [])
        if dependencies:
            for dep_id in dependencies:
                dep_path = find_issue_path(issues_root, dep_id)
                if dep_path:
                    dep_meta = parse_issue(dep_path)
                    if dep_meta and dep_meta.status != IssueStatus.CLOSED:
                        raise ValueError(f"Dependency Block: Cannot close {issue_id} because dependency {dep_id} is [Status: {dep_meta.status.value}].")

    # Update Data
    if status:
        data['status'] = status.value
    if stage:
        data['stage'] = stage.value
    if solution:
        data['solution'] = solution.value

    # Lifecycle Hooks
    # 1. Opened At: If transitioning to OPEN.
    #    FEAT-0012: "update opened_at to now" (the latest activation wins).
    if target_status == IssueStatus.OPEN and current_status != IssueStatus.OPEN:
        data['opened_at'] = current_time()

    # 2. Backlog Push: Handled by IssueMetadata.validate_lifecycle (Status=Backlog -> Stage=None)
    # 3. Closed: Handled by IssueMetadata.validate_lifecycle (Status=Closed -> Stage=Done, ClosedAt=Now)

    # Touch updated_at
    data['updated_at'] = current_time()

    # Re-hydrate through Model to trigger Logic (Stage, ClosedAt defaults)
    try:
        updated_meta = IssueMetadata(**data)
    except Exception as e:
        raise ValueError(f"Failed to validate updated metadata: {e}")

    # Serialize back
    new_yaml = yaml.dump(updated_meta.model_dump(exclude_none=True, mode='json'), sort_keys=False, allow_unicode=True)

    # Reconstruct File
    match_header = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
    if not match_header:
        body_content = body
    else:
        body_content = content[match_header.end():]

    if body_content.startswith('\n'):
        body_content = body_content[1:]

    new_content = f"---\n{new_yaml}---\n{body_content}"

    path.write_text(new_content)

    # Handle physical move if status changed
    if status and status != current_status:
        # Move file
        prefix = issue_id.split("-")[0].upper()
        base_type_dir = get_issue_dir(REVERSE_PREFIX_MAP[prefix], issues_root)

        try:
            rel_path = path.relative_to(base_type_dir)
            structure_path = Path(*rel_path.parts[1:]) if len(rel_path.parts) > 1 else Path(path.name)
        except ValueError:
            structure_path = Path(path.name)

        target_path = base_type_dir / target_status.value / structure_path

        if path != target_path:
            target_path.parent.mkdir(parents=True, exist_ok=True)
            path.rename(target_path)

    # Hook: Recursive Aggregation (FEAT-0003)
    if updated_meta.parent:
        recalculate_parent(issues_root, updated_meta.parent)

    return updated_meta
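
# Usage sketch (hypothetical): closing a reviewed feature moves the file
# from .../open/ to .../closed/ and re-triggers parent aggregation:
#
#   update_issue(
#       Path("Issues"), "FEAT-0001",
#       status=IssueStatus.CLOSED,
#       solution=IssueSolution.IMPLEMENTED,
#   )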

def start_issue_isolation(issues_root: Path, issue_id: str, mode: IsolationType, project_root: Path) -> IssueMetadata:
    """
    Start physical isolation for an issue (Branch or Worktree).
    """
    path = find_issue_path(issues_root, issue_id)
    if not path:
        raise FileNotFoundError(f"Issue {issue_id} not found.")

    issue = parse_issue(path)

    # Idempotency / Conflict Check
    if issue.isolation:
        if issue.isolation.type == mode:
            # Already isolated in the same mode; the metadata setup below is
            # idempotent, and checking out an existing branch is the CLI
            # layer's concern.
            pass
        else:
            raise ValueError(f"Issue {issue_id} is already isolated as '{issue.isolation.type}'. Please cleanup first.")

    slug = _get_slug(issue.title)
    branch_name = f"feat/{issue_id.lower()}-{slug}"

    isolation_meta = None

    if mode == IsolationType.BRANCH:
        if not git.branch_exists(project_root, branch_name):
            git.create_branch(project_root, branch_name, checkout=True)
        else:
            # Branch already exists: check it out if we are not already on it.
            current = git.get_current_branch(project_root)
            if current != branch_name:
                git.checkout_branch(project_root, branch_name)

        isolation_meta = IssueIsolation(type=IsolationType.BRANCH, ref=branch_name)

    elif mode == IsolationType.WORKTREE:
        wt_path = project_root / ".monoco" / "worktrees" / f"{issue_id.lower()}-{slug}"

        # Check if the worktree exists physically
        if wt_path.exists():
            # Assume it is a valid git worktree and reuse it.
            pass
        else:
            wt_path.parent.mkdir(parents=True, exist_ok=True)
            git.worktree_add(project_root, branch_name, wt_path)

        isolation_meta = IssueIsolation(type=IsolationType.WORKTREE, ref=branch_name, path=str(wt_path))

    # Persist Metadata: load raw, update the isolation field, save.
    content = path.read_text()
    match = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
    if match:
        yaml_str = match.group(1)
        data = yaml.safe_load(yaml_str) or {}

        data['isolation'] = isolation_meta.model_dump(mode='json')
        # Also ensure stage is DOING (logic link)
        data['stage'] = IssueStage.DOING.value
        data['updated_at'] = current_time()

        new_yaml = yaml.dump(data, sort_keys=False, allow_unicode=True)
        new_content = content.replace(match.group(1), "\n" + new_yaml)
        path.write_text(new_content)

        return IssueMetadata(**data)

    return issue
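
# Usage sketch (hypothetical): isolating FEAT-0001 in a worktree creates a
# feat/feat-0001-<slug> branch plus a checkout under .monoco/worktrees/, then
# stamps the issue with isolation metadata and stage=doing:
#
#   start_issue_isolation(Path("Issues"), "FEAT-0001",
#                         IsolationType.WORKTREE, project_root=Path("."))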

def prune_issue_resources(issues_root: Path, issue_id: str, force: bool, project_root: Path) -> List[str]:
    """
    Cleanup physical resources. Returns list of actions taken.
    """
    path = find_issue_path(issues_root, issue_id)
    if not path:
        # If we can't find the issue, we can't read its metadata to know
        # what to prune, so we fail rather than guess.
        raise FileNotFoundError(f"Issue {issue_id} not found.")

    issue = parse_issue(path)
    deleted_items = []

    if not issue.isolation:
        return []

    if issue.isolation.type == IsolationType.BRANCH:
        branch = issue.isolation.ref
        current = git.get_current_branch(project_root)
        if current == branch:
            raise RuntimeError(f"Cannot delete active branch '{branch}'. Please checkout 'main' first.")

        if git.branch_exists(project_root, branch):
            git.delete_branch(project_root, branch, force=force)
            deleted_items.append(f"branch:{branch}")

    elif issue.isolation.type == IsolationType.WORKTREE:
        wt_path_str = issue.isolation.path
        if wt_path_str:
            wt_path = Path(wt_path_str)
            # Normalize path if relative
            if not wt_path.is_absolute():
                wt_path = project_root / wt_path

            if wt_path.exists():
                git.worktree_remove(project_root, wt_path, force=force)
                deleted_items.append(f"worktree:{wt_path.name}")

        # Also delete the associated branch: worktree creation makes a
        # branch, and `git worktree remove` leaves it behind. Since it was
        # created for this issue, remove it too (removing the worktree
        # unlocks the branch).
        branch = issue.isolation.ref
        if branch and git.branch_exists(project_root, branch):
            git.delete_branch(project_root, branch, force=force)
            deleted_items.append(f"branch:{branch}")

    # Clear Metadata
    content = path.read_text()
    match = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
    if match:
        yaml_str = match.group(1)
        data = yaml.safe_load(yaml_str) or {}

        if 'isolation' in data:
            del data['isolation']
            data['updated_at'] = current_time()

            new_yaml = yaml.dump(data, sort_keys=False, allow_unicode=True)
            new_content = content.replace(match.group(1), "\n" + new_yaml)
            path.write_text(new_content)

    return deleted_items

def delete_issue_file(issues_root: Path, issue_id: str):
    """
    Physical removal of an issue file.
    """
    path = find_issue_path(issues_root, issue_id)
    if not path:
        raise FileNotFoundError(f"Issue {issue_id} not found.")

    path.unlink()

# Resources
SKILL_CONTENT = """
---
name: issues-management
description: Official skill definition for the Monoco Issue System. Treats the Issue as the Universal Atom and manages the lifecycle of Epic/Feature/Chore/Fix.
---

# Self-Management (Monoco Issue System)

Use this skill to create and manage **Issues** (universal atoms) in a Monoco project.

## Core Ontology

### 1. Strategy Layer
- **🏆 EPIC**: A grand goal; the container for a vision. Mindset: Architect.

### 2. Value Layer
- **✨ FEATURE**: A value increment from the user's perspective. Mindset: Product Owner.
- **Atomicity Principle**: Feature = Design + Dev + Test + Doc + i18n. They are one unit.

### 3. Execution Layer
- **🧹 CHORE**: Engineering maintenance that produces no direct user value. Mindset: Builder.
- **🐞 FIX**: Correcting a deviation. Mindset: Debugger.

## Guidelines

### Directory Structure (Strictly Enforced)
`Issues/{Type}/{status}/`

- **Type Level (Capitalized Plural)**: `Epics`, `Features`, `Chores`, `Fixes`
- **Status Level (Lowercase)**: `open`, `backlog`, `closed`

### Lifecycle Flow
Use `monoco issue`:
1. **Create**: `monoco issue create <type> --title "..."`
2. **Transition**: `monoco issue open/close/backlog <id>`
3. **View**: `monoco issue scope`
4. **Validation**: `monoco issue lint`
5. **Modification**: `monoco issue start/submit/delete <id>`
"""

PROMPT_CONTENT = """
### Issue Management
System for managing tasks using `monoco issue`.
- **Create**: `monoco issue create <type> -t "Title"` (types: epic, feature, chore, fix)
- **Status**: `monoco issue open|close|backlog <id>`
- **Check**: `monoco issue lint` (Must run after manual edits)
- **Lifecycle**: `monoco issue start|submit|delete <id>`
- **Structure**: `Issues/{CapitalizedPluralType}/{lowercase_status}/` (e.g. `Issues/Features/open/`). Do not deviate.
"""

def init(issues_root: Path):
    """Initialize the Issues directory structure."""
    issues_root.mkdir(parents=True, exist_ok=True)

    # Standard directories based on the current terminology
    for subdir in ["Epics", "Features", "Chores", "Fixes"]:
        (issues_root / subdir).mkdir(exist_ok=True)
        # Status subdirs are usually created lazily by open/backlog,
        # but creating them up front is good for guidance.
        for status in ["open", "backlog", "closed"]:
            (issues_root / subdir / status).mkdir(exist_ok=True)

    # Optionally add .gitkeep files so the empty directories are tracked.

def get_resources() -> Dict[str, Any]:
    return {
        "skills": {
            "issues-management": SKILL_CONTENT
        },
        "prompts": {
            "issues-management": PROMPT_CONTENT
        }
    }

def list_issues(issues_root: Path, recursive_workspace: bool = False) -> List[IssueMetadata]:
    """
    List all issues in the project.
    """
    issues = []
    for issue_type in IssueType:
        base_dir = get_issue_dir(issue_type, issues_root)
        for status_dir in ["open", "backlog", "closed"]:
            d = base_dir / status_dir
            if d.exists():
                for f in d.rglob("*.md"):
                    meta = parse_issue(f)
                    if meta:
                        issues.append(meta)

    if recursive_workspace:
        # Resolve Workspace Members
        try:
            # Weak assumption: issues_root.parent is project_root
            project_root = issues_root.parent
            conf = get_config(str(project_root))

            for name, rel_path in conf.project.members.items():
                member_root = (project_root / rel_path).resolve()
                member_issues_dir = member_root / "Issues"  # Standard convention

                if member_issues_dir.exists():
                    # Fetch member issues (non-recursive to avoid loops)
                    member_issues = list_issues(member_issues_dir, False)
                    for m in member_issues:
                        # Namespace the ID to avoid collisions and indicate origin
                        m.id = f"{name}::{m.id}"
                        issues.append(m)
        except Exception:
            # Fail silently on workspace resolution errors (config missing etc.)
            pass

    return issues

def get_board_data(issues_root: Path) -> Dict[str, List[IssueMetadata]]:
    """
    Get open issues grouped by their stage for the Kanban view.
    """
    board = {
        IssueStage.TODO.value: [],
        IssueStage.DOING.value: [],
        IssueStage.REVIEW.value: [],
        IssueStage.DONE.value: []
    }

    issues = list_issues(issues_root)
    for issue in issues:
        if issue.status == IssueStatus.OPEN and issue.stage:
            stage_val = issue.stage.value
            if stage_val in board:
                board[stage_val].append(issue)
        elif issue.status == IssueStatus.CLOSED:
            # Optionally show recently closed items in the DONE column
            board[IssueStage.DONE.value].append(issue)

    return board
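
# Shape sketch: get_board_data returns stage-keyed lists, e.g. (keys shown
# assuming the IssueStage enum values defined in .models; "todo" and "doing"
# appear as literals elsewhere in this module, the others are inferred):
#
#   {"todo": [...], "doing": [...], "review": [...], "done": [...]}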

def validate_issue_integrity(meta: IssueMetadata, all_issue_ids: Set[str] = set()) -> List[str]:
    """
    Validate metadata integrity (Solution, Lifecycle, etc.).
    UI-agnostic.
    """
    errors = []
    if meta.status == IssueStatus.CLOSED and not meta.solution:
        errors.append(f"Solution Missing: {meta.id} is closed but has no solution field.")

    if meta.parent:
        if all_issue_ids and meta.parent not in all_issue_ids:
            errors.append(f"Broken Link: {meta.id} refers to non-existent parent {meta.parent}.")

    if meta.status == IssueStatus.BACKLOG and meta.stage != IssueStage.FREEZED:
        errors.append(f"Lifecycle Error: {meta.id} is backlog but stage is not freezed (found: {meta.stage}).")

    return errors

def update_issue_content(issues_root: Path, issue_id: str, new_content: str) -> IssueMetadata:
    """
    Update the raw content of an issue file.
    Validates integrity before saving.
    Handles file moves if status changes.
    """
    path = find_issue_path(issues_root, issue_id)
    if not path:
        raise FileNotFoundError(f"Issue {issue_id} not found.")

    # 1. Parse New Content (using a temp file to reuse parse_issue logic)
    import tempfile

    with tempfile.NamedTemporaryFile(mode='w+', suffix='.md', delete=False) as tmp:
        tmp.write(new_content)
        tmp_path = Path(tmp.name)

    try:
        meta = parse_issue(tmp_path)
        if not meta:
            raise ValueError("Invalid Issue Content: Frontmatter missing or invalid.")

        if meta.id != issue_id:
            raise ValueError(f"Cannot change Issue ID (Original: {issue_id}, New: {meta.id})")

        # 2. Integrity Check
        errors = validate_issue_integrity(meta)
        if errors:
            raise ValueError(f"Validation Failed: {'; '.join(errors)}")

        # 3. Write and Move
        # We overwrite the *current* path first
        path.write_text(new_content)

        # Check if we need to move (status change): re-derive the expected
        # path based on the new status (same logic as update_issue).
        prefix = issue_id.split("-")[0].upper()
        base_type_dir = get_issue_dir(REVERSE_PREFIX_MAP[prefix], issues_root)

        # Calculate structure path (preserve subdir)
        try:
            rel_path = path.relative_to(base_type_dir)
            # rel_path includes the status dir found by find_issue_path,
            # e.g. open/Backend/Auth/FEAT-123.md -> parts=('open', 'Backend', 'Auth', 'FEAT-123.md').
            # Drop the first component (the current status directory).
            structure_path = Path(*rel_path.parts[1:]) if len(rel_path.parts) > 1 else Path(path.name)
        except ValueError:
            # Fallback if the path is outside the expected tree
            structure_path = Path(path.name)

        target_path = base_type_dir / meta.status.value / structure_path

        if path != target_path:
            target_path.parent.mkdir(parents=True, exist_ok=True)
            path.rename(target_path)

        return meta

    finally:
        if os.path.exists(tmp_path):
            os.unlink(tmp_path)

def generate_delivery_report(issues_root: Path, issue_id: str, project_root: Path) -> IssueMetadata:
    """
    Scan git history for commits related to this issue (Ref: ID),
    aggregate touched files, and append/update the '## Delivery' section
    in the issue body.
    """
    path = find_issue_path(issues_root, issue_id)
    if not path:
        raise FileNotFoundError(f"Issue {issue_id} not found.")

    # 1. Scan Git
    commits = git.search_commits_by_message(project_root, f"Ref: {issue_id}")

    if not commits:
        return parse_issue(path)

    # 2. Aggregate Data
    all_files = set()
    commit_list_md = []

    for c in commits:
        short_hash = c['hash'][:7]
        commit_list_md.append(f"- `{short_hash}` {c['subject']}")
        for f in c['files']:
            all_files.add(f)

    sorted_files = sorted(all_files)

    # 3. Format Report
    delivery_section = f"""
## Delivery
<!-- Monoco Auto Generated -->
**Commits ({len(commits)})**:
{chr(10).join(commit_list_md)}

**Touched Files ({len(sorted_files)})**:
""" + "\n".join([f"- `{f}`" for f in sorted_files])

    # 4. Update File Content
    content = path.read_text()

    if "## Delivery" in content:
        # Replace the existing section. The report is assumed to be the last
        # section of the file, so with DOTALL the pattern consumes everything
        # from the heading to the end of the string.
        pattern = r"## Delivery.*"
        content = re.sub(pattern, delivery_section.strip(), content, flags=re.DOTALL)
    else:
        # Append
        if not content.endswith("\n"):
            content += "\n"
        content += "\n" + delivery_section.strip() + "\n"

    path.write_text(content)

    # 5. Metadata (delivery stats): a 'files_count' field could feed the
    # recursive aggregation (FEAT-0003), but IssueMetadata has no 'delivery'
    # field yet; persisting the text section is enough for FEAT-0002.

    return parse_issue(path)
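
# Convention sketch: the scan keys off commit messages carrying the issue
# reference, e.g. (hypothetical commit)
#
#   git commit -m "feat: add login flow" -m "Ref: FEAT-0001"
#
# Any commit whose message contains "Ref: FEAT-0001" is then aggregated
# into that issue's ## Delivery section.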

def get_children(issues_root: Path, parent_id: str) -> List[IssueMetadata]:
    """Find all direct children of an issue."""
    all_issues = list_issues(issues_root)
    return [i for i in all_issues if i.parent == parent_id]

def count_files_in_delivery(issue_path: Path) -> int:
    """Parse the ## Delivery section to count files."""
    try:
        content = issue_path.read_text()
        match = re.search(r"\*\*Touched Files \((\d+)\)\*\*", content)
        if match:
            return int(match.group(1))
    except Exception:
        pass
    return 0

def parse_search_query(query: str) -> Tuple[List[str], List[str], List[str]]:
    """
    Parse a search query string into explicit positives, optional terms, and negatives.
    Supported syntax:
    - `+term`: Must include (AND)
    - `-term`: Must not include (NOT)
    - `term`: Optional (nice to have) - OR logic if no +term exists
    - `"phrase with space"`: Quoted match
    """
    if not query:
        return [], [], []

    import shlex
    try:
        tokens = shlex.split(query)
    except ValueError:
        # Fallback for unbalanced quotes
        tokens = query.split()

    explicit_positives = []
    terms = []
    negatives = []

    for token in tokens:
        token_lower = token.lower()
        if token_lower.startswith("-") and len(token_lower) > 1:
            negatives.append(token_lower[1:])
        elif token_lower.startswith("+") and len(token_lower) > 1:
            explicit_positives.append(token_lower[1:])
        else:
            terms.append(token_lower)

    return explicit_positives, terms, negatives
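
# Parsing sketch (illustrative):
#
#   parse_search_query('+auth login -legacy "oauth flow"')
#   # -> (["auth"], ["login", "oauth flow"], ["legacy"])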

def check_issue_match(issue: IssueMetadata, explicit_positives: List[str], terms: List[str], negatives: List[str], full_content: str = "") -> bool:
    """
    Check if an issue matches the search criteria.
    Considered fields: id, title, status, stage, type, tags, dependencies, related.
    Optional: full_content (body) if available.
    """
    # 1. Aggregate Searchable Text
    # We join all fields with spaces to create a searchable blob
    searchable_parts = [
        issue.id,
        issue.title,
        issue.status.value,
        issue.type.value,
        str(issue.stage.value) if issue.stage else "",
        *(issue.tags or []),
        *(issue.dependencies or []),
        *(issue.related or []),
        full_content
    ]

    # Normalize blob
    blob = " ".join(filter(None, searchable_parts)).lower()

    # 2. Check Negatives (Fast Fail)
    for term in negatives:
        if term in blob:
            return False

    # 3. Check Explicit Positives (Must match ALL)
    for term in explicit_positives:
        if term not in blob:
            return False

    # 4. Check Terms (Nice to Have)
    # If explicit_positives exist, terms are optional (implicit inclusion).
    # If NO explicit_positives, terms act as an implicit OR (must match at least one).
    if terms:
        if not explicit_positives:
            # Must match at least one term
            if not any(term in blob for term in terms):
                return False

    return True

def search_issues(issues_root: Path, query: str) -> List[IssueMetadata]:
    """
    Search issues using the advanced query syntax.
    Returns a list of matching IssueMetadata.
    """
    explicit_positives, terms, negatives = parse_search_query(query)

    # Align with grep: an empty query matches everything.
    if not explicit_positives and not terms and not negatives:
        return list_issues(issues_root)

    matches = []
    all_files = []

    # 1. Gather all files first. list_issues/parse_issue only parse the
    # frontmatter and discard the body, but deep search needs the body text,
    # so we iterate the files directly and read them in full.
    for issue_type in IssueType:
        base_dir = get_issue_dir(issue_type, issues_root)
        for status_dir in ["open", "backlog", "closed"]:
            d = base_dir / status_dir
            if d.exists():
                for f in d.rglob("*.md"):
                    all_files.append(f)

    for f in all_files:
        # We need full content for body search
        try:
            content = f.read_text()
            # Parse Metadata
            match = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
            if not match:
                continue

            yaml_str = match.group(1)
            data = yaml.safe_load(yaml_str)
            if not isinstance(data, dict):
                continue

            meta = IssueMetadata(**data)

            # Match
            if check_issue_match(meta, explicit_positives, terms, negatives, full_content=content):
                matches.append(meta)

        except Exception:
            continue

    return matches
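
# Usage sketch (hypothetical): full-text search across frontmatter and body:
#
#   for meta in search_issues(Path("Issues"), "+auth -legacy"):
#       print(meta.id, meta.title)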

def recalculate_parent(issues_root: Path, parent_id: str):
    """
    Update parent Epic/Feature stats based on children.
    - Progress (Closed/Total)
    - Total Files Touched (sum of children's delivery)
    """
    parent_path = find_issue_path(issues_root, parent_id)
    if not parent_path:
        return  # Silently skip missing parents

    children = get_children(issues_root, parent_id)
    if not children:
        return

    total = len(children)
    closed = len([c for c in children if c.status == IssueStatus.CLOSED])
    # Progress string: "3/5"
    progress_str = f"{closed}/{total}"

    # Files count
    total_files = 0
    for child in children:
        child_path = find_issue_path(issues_root, child.id)
        if child_path:
            total_files += count_files_in_delivery(child_path)

    # Update the parent with a light metadata patch; update_issue is too
    # heavy/strict for this (we want no validation or status moves here).
    content = parent_path.read_text()
    match = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
    if match:
        yaml_str = match.group(1)
        data = yaml.safe_load(yaml_str) or {}

        # Check if changed to avoid churn
        old_progress = data.get("progress")
        old_files = data.get("files_count")

        if old_progress == progress_str and old_files == total_files:
            return

        data["progress"] = progress_str
        data["files_count"] = total_files

        # FEAT-0003: if the parent is still OPEN/TODO and any child has
        # started (or already finished), auto-advance the parent to DOING.
        current_status = data.get("status", "open").lower()
        current_stage = data.get("stage", "todo").lower()

        if current_status == "open" and current_stage == "todo":
            # Check if any child is active
            active_children = [c for c in children if c.status == IssueStatus.OPEN and c.stage != IssueStage.TODO]
            closed_children = [c for c in children if c.status == IssueStatus.CLOSED]

            if active_children or closed_children:
                data["stage"] = "doing"

        # Serialize and replace the header
        new_yaml = yaml.dump(data, sort_keys=False, allow_unicode=True)
        new_content = content.replace(match.group(1), "\n" + new_yaml)
        parent_path.write_text(new_content)

        # Recurse upwards
        parent_parent = data.get("parent")
        if parent_parent:
            recalculate_parent(issues_root, parent_parent)

def move_issue(
    source_issues_root: Path,
    issue_id: str,
    target_issues_root: Path,
    renumber: bool = False
) -> Tuple[IssueMetadata, Path]:
    """
    Move an issue from one project to another.

    Args:
        source_issues_root: Source project's Issues directory
        issue_id: ID of the issue to move
        target_issues_root: Target project's Issues directory
        renumber: If True, automatically renumber on ID conflict

    Returns:
        Tuple of (updated metadata, new file path)

    Raises:
        FileNotFoundError: If source issue doesn't exist
        ValueError: If ID conflict exists and renumber=False
    """
    # 1. Find source issue
    source_path = find_issue_path(source_issues_root, issue_id)
    if not source_path:
        raise FileNotFoundError(f"Issue {issue_id} not found in source project.")

    # 2. Parse issue metadata
    issue = parse_issue_detail(source_path)
    if not issue:
        raise ValueError(f"Failed to parse issue {issue_id}.")

    # 3. Check for ID conflict in target
    target_conflict_path = find_issue_path(target_issues_root, issue_id)

    if target_conflict_path:
        # Conflict detected
        conflict_issue = parse_issue(target_conflict_path)

        # Check if it's the same issue (same UID)
        if issue.uid and conflict_issue and conflict_issue.uid == issue.uid:
            raise ValueError(
                f"Issue {issue_id} (uid: {issue.uid}) already exists in target project. "
                "This appears to be a duplicate."
            )

        # Different issues with the same ID
        if not renumber:
            conflict_info = ""
            if conflict_issue:
                conflict_info = f" (uid: {conflict_issue.uid}, created: {conflict_issue.created_at}, stage: {conflict_issue.stage})"
            raise ValueError(
                f"ID conflict: Target project already has {issue_id}{conflict_info}.\n"
                f"Use --renumber to automatically assign a new ID."
            )

        # Auto-renumber
        new_id = find_next_id(issue.type, target_issues_root)
        old_id = issue.id
        issue.id = new_id
    else:
        new_id = issue.id
        old_id = issue.id

    # 4. Construct target path
    target_type_dir = get_issue_dir(issue.type, target_issues_root)
    target_status_dir = target_type_dir / issue.status.value

    # Preserve subdirectory structure if any
    try:
        source_type_dir = get_issue_dir(issue.type, source_issues_root)
        rel_path = source_path.relative_to(source_type_dir)
        # Remove status directory component
        structure_path = Path(*rel_path.parts[1:]) if len(rel_path.parts) > 1 else Path(source_path.name)
    except ValueError:
        structure_path = Path(source_path.name)

    # Update filename if ID changed
    if new_id != old_id:
        old_filename = source_path.name
        new_filename = old_filename.replace(old_id, new_id, 1)
        structure_path = structure_path.parent / new_filename if structure_path.parent != Path('.') else Path(new_filename)

    target_path = target_status_dir / structure_path
    target_path.parent.mkdir(parents=True, exist_ok=True)

    # 5. Update content if ID changed
    if new_id != old_id:
        # Update frontmatter
        content = issue.raw_content
        match = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
        if match:
            yaml_str = match.group(1)
            data = yaml.safe_load(yaml_str) or {}
            data['id'] = new_id
            data['updated_at'] = current_time()

            new_yaml = yaml.dump(data, sort_keys=False, allow_unicode=True)

            # Update body (replace old ID in heading)
            body = content[match.end():]
            body = body.replace(f"## {old_id}:", f"## {new_id}:", 1)

            new_content = f"---\n{new_yaml}---{body}"
        else:
            new_content = issue.raw_content
    else:
        new_content = issue.raw_content

    # 6. Write to target
    target_path.write_text(new_content)

    # 7. Remove source
    source_path.unlink()

    # 8. Return updated metadata
    final_meta = parse_issue(target_path)
    return final_meta, target_path
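
# Usage sketch (hypothetical paths): moving FEAT-0003 between sibling
# projects, renumbering on ID collision in the target:
#
#   meta, new_path = move_issue(
#       source_issues_root=Path("projects/app/Issues"),
#       issue_id="FEAT-0003",
#       target_issues_root=Path("projects/lib/Issues"),
#       renumber=True,
#   )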