monoco-toolkit 0.2.7__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. monoco/cli/project.py +35 -31
  2. monoco/cli/workspace.py +26 -16
  3. monoco/core/agent/__init__.py +0 -2
  4. monoco/core/agent/action.py +44 -20
  5. monoco/core/agent/adapters.py +20 -16
  6. monoco/core/agent/protocol.py +5 -4
  7. monoco/core/agent/state.py +21 -21
  8. monoco/core/config.py +90 -33
  9. monoco/core/execution.py +21 -16
  10. monoco/core/feature.py +8 -5
  11. monoco/core/git.py +61 -30
  12. monoco/core/hooks.py +57 -0
  13. monoco/core/injection.py +47 -44
  14. monoco/core/integrations.py +50 -35
  15. monoco/core/lsp.py +12 -1
  16. monoco/core/output.py +35 -16
  17. monoco/core/registry.py +3 -2
  18. monoco/core/setup.py +190 -124
  19. monoco/core/skills.py +121 -107
  20. monoco/core/state.py +12 -10
  21. monoco/core/sync.py +85 -56
  22. monoco/core/telemetry.py +10 -6
  23. monoco/core/workspace.py +26 -19
  24. monoco/daemon/app.py +123 -79
  25. monoco/daemon/commands.py +14 -13
  26. monoco/daemon/models.py +11 -3
  27. monoco/daemon/reproduce_stats.py +8 -8
  28. monoco/daemon/services.py +32 -33
  29. monoco/daemon/stats.py +59 -40
  30. monoco/features/config/commands.py +38 -25
  31. monoco/features/i18n/adapter.py +4 -5
  32. monoco/features/i18n/commands.py +83 -49
  33. monoco/features/i18n/core.py +94 -54
  34. monoco/features/issue/adapter.py +6 -7
  35. monoco/features/issue/commands.py +500 -260
  36. monoco/features/issue/core.py +504 -293
  37. monoco/features/issue/domain/lifecycle.py +33 -23
  38. monoco/features/issue/domain/models.py +71 -38
  39. monoco/features/issue/domain/parser.py +92 -69
  40. monoco/features/issue/domain/workspace.py +19 -16
  41. monoco/features/issue/engine/__init__.py +3 -3
  42. monoco/features/issue/engine/config.py +18 -25
  43. monoco/features/issue/engine/machine.py +72 -39
  44. monoco/features/issue/engine/models.py +4 -2
  45. monoco/features/issue/linter.py +326 -111
  46. monoco/features/issue/lsp/definition.py +26 -19
  47. monoco/features/issue/migration.py +45 -34
  48. monoco/features/issue/models.py +30 -13
  49. monoco/features/issue/monitor.py +24 -8
  50. monoco/features/issue/resources/en/AGENTS.md +5 -0
  51. monoco/features/issue/resources/en/SKILL.md +30 -2
  52. monoco/features/issue/resources/zh/AGENTS.md +5 -0
  53. monoco/features/issue/resources/zh/SKILL.md +26 -1
  54. monoco/features/issue/validator.py +417 -172
  55. monoco/features/skills/__init__.py +0 -1
  56. monoco/features/skills/core.py +24 -18
  57. monoco/features/spike/adapter.py +4 -5
  58. monoco/features/spike/commands.py +51 -38
  59. monoco/features/spike/core.py +24 -16
  60. monoco/main.py +34 -21
  61. {monoco_toolkit-0.2.7.dist-info → monoco_toolkit-0.3.0.dist-info}/METADATA +10 -3
  62. monoco_toolkit-0.3.0.dist-info/RECORD +84 -0
  63. monoco_toolkit-0.2.7.dist-info/RECORD +0 -83
  64. {monoco_toolkit-0.2.7.dist-info → monoco_toolkit-0.3.0.dist-info}/WHEEL +0 -0
  65. {monoco_toolkit-0.2.7.dist-info → monoco_toolkit-0.3.0.dist-info}/entry_points.txt +0 -0
  66. {monoco_toolkit-0.2.7.dist-info → monoco_toolkit-0.3.0.dist-info}/licenses/LICENSE +0 -0
monoco/features/issue/core.py
@@ -1,10 +1,20 @@
-import os
 import re
 import yaml
 from pathlib import Path
 from typing import List, Dict, Optional, Tuple, Any, Set, Set
-from datetime import datetime
-from .models import IssueMetadata, IssueType, IssueStatus, IssueSolution, IssueStage, IssueDetail, IsolationType, IssueIsolation, IssueID, current_time, generate_uid
+from .models import (
+    IssueMetadata,
+    IssueType,
+    IssueStatus,
+    IssueSolution,
+    IssueStage,
+    IssueDetail,
+    IsolationType,
+    IssueIsolation,
+    IssueID,
+    current_time,
+    generate_uid,
+)
 from monoco.core import git
 from monoco.core.config import get_config, MonocoConfig
 from monoco.core.lsp import DiagnosticSeverity
@@ -12,82 +22,89 @@ from .validator import IssueValidator
 
 from .engine import get_engine
 
+
 def get_prefix_map(issues_root: Path) -> Dict[str, str]:
     engine = get_engine(str(issues_root.parent))
     return engine.get_prefix_map()
 
+
 def get_reverse_prefix_map(issues_root: Path) -> Dict[str, str]:
     prefix_map = get_prefix_map(issues_root)
     return {v: k for k, v in prefix_map.items()}
 
+
 def get_issue_dir(issue_type: str, issues_root: Path) -> Path:
     engine = get_engine(str(issues_root.parent))
     folder_map = engine.get_folder_map()
     folder = folder_map.get(issue_type, issue_type.capitalize() + "s")
     return issues_root / folder
 
+
 def _get_slug(title: str) -> str:
     slug = title.lower()
     # Replace non-word characters (including punctuation, spaces) with hyphens
     # \w matches Unicode word characters (letters, numbers, underscores)
     slug = re.sub(r"[^\w]+", "-", slug)
     slug = slug.strip("-")[:50]
-
+
     if not slug:
         slug = "issue"
-
+
     return slug
 
+
 def parse_issue(file_path: Path) -> Optional[IssueMetadata]:
     if not file_path.suffix == ".md":
         return None
-
+
     content = file_path.read_text()
     match = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
     if not match:
         return None
-
+
     try:
         data = yaml.safe_load(match.group(1))
         if not isinstance(data, dict):
-            return None
-
-        data['path'] = str(file_path.absolute())
+            return None
+
+        data["path"] = str(file_path.absolute())
         meta = IssueMetadata(**data)
         meta.actions = get_available_actions(meta)
         return meta
     except Exception:
         return None
 
+
 def parse_issue_detail(file_path: Path) -> Optional[IssueDetail]:
     if not file_path.suffix == ".md":
         return None
-
+
     content = file_path.read_text()
     # Robust splitting
     match = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
     if not match:
         return None
-
+
     yaml_str = match.group(1)
-    body = content[match.end():].lstrip()
-
+    body = content[match.end() :].lstrip()
+
     try:
         data = yaml.safe_load(yaml_str)
         if not isinstance(data, dict):
-            return None
-
-        data['path'] = str(file_path.absolute())
+            return None
+
+        data["path"] = str(file_path.absolute())
         return IssueDetail(**data, body=body, raw_content=content)
     except Exception:
         return None
 
+
 def find_next_id(issue_type: str, issues_root: Path) -> str:
     prefix_map = get_prefix_map(issues_root)
     prefix = prefix_map.get(issue_type, "ISSUE")
     pattern = re.compile(rf"{prefix}-(\d+)")
     max_id = 0
-
+
     base_dir = get_issue_dir(issue_type, issues_root)
     # Scan all subdirs: open, backlog, closed
     for status_dir in ["open", "backlog", "closed"]:
@@ -97,28 +114,28 @@ def find_next_id(issue_type: str, issues_root: Path) -> str:
             match = pattern.search(f.name)
             if match:
                 max_id = max(max_id, int(match.group(1)))
-
+
     return f"{prefix}-{max_id + 1:04d}"
 
+
 def create_issue_file(
-    issues_root: Path,
-    issue_type: IssueType,
-    title: str,
-    parent: Optional[str] = None,
-    status: IssueStatus = IssueStatus.OPEN,
+    issues_root: Path,
+    issue_type: IssueType,
+    title: str,
+    parent: Optional[str] = None,
+    status: IssueStatus = IssueStatus.OPEN,
     stage: Optional[IssueStage] = None,
-    dependencies: List[str] = [],
-    related: List[str] = [],
+    dependencies: List[str] = [],
+    related: List[str] = [],
     subdir: Optional[str] = None,
     sprint: Optional[str] = None,
-    tags: List[str] = []
+    tags: List[str] = [],
 ) -> Tuple[IssueMetadata, Path]:
-
     # Validation
     for dep_id in dependencies:
         if not find_issue_path(issues_root, dep_id):
             raise ValueError(f"Dependency issue {dep_id} not found.")
-
+
     for rel_id in related:
         if not find_issue_path(issues_root, rel_id):
             raise ValueError(f"Related issue {rel_id} not found.")
@@ -126,12 +143,34 @@ def create_issue_file(
     issue_id = find_next_id(issue_type, issues_root)
     base_type_dir = get_issue_dir(issue_type, issues_root)
     target_dir = base_type_dir / status
-
+
     if subdir:
         target_dir = target_dir / subdir
-
+
     target_dir.mkdir(parents=True, exist_ok=True)
-
+
+    # Auto-Populate Tags with required IDs (Requirement: Maintain tags field with parent/deps/related/self IDs)
+    # Ensure they are prefixed with '#' for tagging convention if not present (usually tags are just strings, but user asked for #ID)
+    auto_tags = set(tags) if tags else set()
+
+    # 1. Add Parent
+    if parent:
+        auto_tags.add(f"#{parent}")
+
+    # 2. Add Dependencies
+    for dep in dependencies:
+        auto_tags.add(f"#{dep}")
+
+    # 3. Add Related
+    for rel in related:
+        auto_tags.add(f"#{rel}")
+
+    # 4. Add Self (as per instruction "auto add this issue... number")
+    # Note: issue_id is generated just above
+    auto_tags.add(f"#{issue_id}")
+
+    final_tags = sorted(list(auto_tags))
+
     metadata = IssueMetadata(
         id=issue_id,
         uid=generate_uid(),  # Generate global unique identifier
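The auto-tagging block introduced above guarantees that every new issue's `tags` field carries `#`-prefixed references to its parent, dependencies, related issues, and itself. A small sketch of the resulting behavior (the IDs are hypothetical):

```python
# Sketch of the tag auto-population added in the hunk above.
tags = ["backend"]                       # user-supplied tags
parent, dependencies = "EPIC-0001", ["FEAT-0002"]
issue_id = "FEAT-0003"                   # freshly generated by find_next_id

auto_tags = set(tags)
if parent:
    auto_tags.add(f"#{parent}")
for dep in dependencies:
    auto_tags.add(f"#{dep}")
auto_tags.add(f"#{issue_id}")            # self-reference

print(sorted(auto_tags))
# ['#EPIC-0001', '#FEAT-0002', '#FEAT-0003', 'backend']
```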
@@ -143,27 +182,41 @@ def create_issue_file(
         dependencies=dependencies,
         related=related,
         sprint=sprint,
-        tags=tags,
-        opened_at=current_time() if status == IssueStatus.OPEN else None
+        tags=final_tags,
+        opened_at=current_time() if status == IssueStatus.OPEN else None,
     )
-
+
     # Enforce lifecycle policies (defaults, auto-corrections)
     from .engine import get_engine
+
     get_engine().enforce_policy(metadata)
 
     # Serialize metadata
     # Explicitly exclude actions and path from file persistence
-    yaml_header = yaml.dump(metadata.model_dump(exclude_none=True, mode='json', exclude={'actions', 'path'}), sort_keys=False, allow_unicode=True)
-
+    yaml_header = yaml.dump(
+        metadata.model_dump(
+            exclude_none=True, mode="json", exclude={"actions", "path"}
+        ),
+        sort_keys=False,
+        allow_unicode=True,
+    )
+
     # Inject Self-Documenting Hints (Interactive Frontmatter)
     if "parent:" not in yaml_header:
         yaml_header += "# parent: <EPIC-ID> # Optional: Parent Issue ID\n"
     if "solution:" not in yaml_header:
         yaml_header += "# solution: null # Required for Closed state (implemented, cancelled, etc.)\n"
 
+    if "dependencies:" not in yaml_header:
+        yaml_header += "# dependencies: [] # List of dependency IDs\n"
+    if "related:" not in yaml_header:
+        yaml_header += "# related: [] # List of related issue IDs\n"
+    if "files:" not in yaml_header:
+        yaml_header += "# files: [] # List of modified files\n"
+
     slug = _get_slug(title)
     filename = f"{issue_id}-{slug}.md"
-
+
     # Enhanced Template with Instructional Comments
     file_content = f"""---
 {yaml_header}---
@@ -195,33 +248,40 @@ def create_issue_file(
 """
     file_path = target_dir / filename
     file_path.write_text(file_content)
-
+
     # Inject path into returned metadata
     metadata.path = str(file_path.absolute())
-
+
     return metadata, file_path
+
+
 def get_available_actions(meta: IssueMetadata) -> List[Any]:
     from .models import IssueAction
     from .engine import get_engine
-
+
     engine = get_engine()
     transitions = engine.get_available_transitions(meta)
-
+
     actions = []
     for t in transitions:
         command = t.command_template.format(id=meta.id) if t.command_template else ""
-
-        actions.append(IssueAction(
-            label=t.label,
-            icon=t.icon,
-            target_status=t.to_status if t.to_status != meta.status or t.to_stage != meta.stage else None,
-            target_stage=t.to_stage if t.to_stage != meta.stage else None,
-            target_solution=t.required_solution,
-            command=command
-        ))
+
+        actions.append(
+            IssueAction(
+                label=t.label,
+                icon=t.icon,
+                target_status=t.to_status
+                if t.to_status != meta.status or t.to_stage != meta.stage
+                else None,
+                target_stage=t.to_stage if t.to_stage != meta.stage else None,
+                target_solution=t.required_solution,
+                command=command,
+            )
+        )
 
     return actions
 
+
 def find_issue_path(issues_root: Path, issue_id: str) -> Optional[Path]:
     parsed = IssueID(issue_id)
 
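The hunks above extend the self-documenting frontmatter: besides `parent` and `solution`, the generated header now also hints at `dependencies`, `related`, and the new `files` field when they are absent. A rough sketch of the injection step (the starting header is illustrative; real headers come from `IssueMetadata.model_dump()`):

```python
# Illustrative dump output, not a real IssueMetadata serialization.
yaml_header = "id: FEAT-0003\ntitle: Add OAuth2 login\nstatus: open\n"

hints = [
    ("dependencies:", "# dependencies: [] # List of dependency IDs\n"),
    ("related:", "# related: [] # List of related issue IDs\n"),
    ("files:", "# files: [] # List of modified files\n"),
]
for key, hint in hints:
    if key not in yaml_header:
        yaml_header += hint  # commented-out key documents the field in place

print(f"---\n{yaml_header}---")
```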
@@ -232,21 +292,21 @@ def find_issue_path(issues_root: Path, issue_id: str) -> Optional[Path]:
         # Resolve Workspace
         # Traverse up from issues_root to find a config that defines the namespace
         project_root = issues_root.parent
-
+
         # Try current root first
         conf = MonocoConfig.load(str(project_root))
         member_rel_path = conf.project.members.get(parsed.namespace)
-
+
         if not member_rel_path:
             return None
-
+
         member_root = (project_root / member_rel_path).resolve()
         # Assume standard "Issues" directory for members to avoid loading full config
         member_issues = member_root / "Issues"
-
+
         if not member_issues.exists():
-            return None
-
+            return None
+
         # Recursively search in member project
         return find_issue_path(member_issues, parsed.local_id)
 
@@ -255,38 +315,40 @@ def find_issue_path(issues_root: Path, issue_id: str) -> Optional[Path]:
         prefix = parsed.local_id.split("-")[0].upper()
     except IndexError:
         return None
-
+
     reverse_prefix_map = get_reverse_prefix_map(issues_root)
     issue_type = reverse_prefix_map.get(prefix)
     if not issue_type:
         return None
-
+
     base_dir = get_issue_dir(issue_type, issues_root)
     # Search in all status subdirs recursively
     for f in base_dir.rglob(f"{parsed.local_id}-*.md"):
         return f
     return None
 
+
 def update_issue(
-    issues_root: Path,
-    issue_id: str,
-    status: Optional[IssueStatus] = None,
-    stage: Optional[IssueStage] = None,
+    issues_root: Path,
+    issue_id: str,
+    status: Optional[IssueStatus] = None,
+    stage: Optional[IssueStage] = None,
     solution: Optional[IssueSolution] = None,
     title: Optional[str] = None,
     parent: Optional[str] = None,
     sprint: Optional[str] = None,
     dependencies: Optional[List[str]] = None,
     related: Optional[List[str]] = None,
-    tags: Optional[List[str]] = None
+    tags: Optional[List[str]] = None,
+    files: Optional[List[str]] = None,
 ) -> IssueMetadata:
     path = find_issue_path(issues_root, issue_id)
     if not path:
         raise FileNotFoundError(f"Issue {issue_id} not found.")
-
+
     # Read full content
     content = path.read_text()
-
+
     # Split Frontmatter and Body
     match = re.search(r"^---(.*?)---\n(.*)\n", content, re.DOTALL | re.MULTILINE)
     if not match:
@@ -294,7 +356,7 @@ def update_issue(
         match_simple = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
         if match_simple:
             yaml_str = match_simple.group(1)
-            body = content[match_simple.end():]
+            body = content[match_simple.end() :]
         else:
             raise ValueError(f"Could not parse frontmatter for {issue_id}")
     else:
@@ -306,19 +368,19 @@ def update_issue(
     except yaml.YAMLError:
         raise ValueError(f"Invalid YAML metadata in {issue_id}")
 
-    current_status_str = data.get("status", "open") # default to open if missing?
+    current_status_str = data.get("status", "open")  # default to open if missing?
     # Normalize current status to Enum for comparison
     try:
         current_status = IssueStatus(current_status_str.lower())
     except ValueError:
         current_status = IssueStatus.OPEN
-
+
     current_stage_str = data.get("stage")
     current_stage = IssueStage(current_stage_str.lower()) if current_stage_str else None
 
     # Logic: Status Update
     target_status = status if status else current_status
-
+
     # If status is changing, we don't default target_stage to current_stage
     # because the new status might have different allowed stages.
     # enforce_policy will handle setting the correct default stage for the new status.
@@ -326,11 +388,12 @@ def update_issue(
         target_stage = stage
     else:
         target_stage = stage if stage else current_stage
-
+
     # Engine Validation
     from .engine import get_engine
+
     engine = get_engine()
-
+
     # Map solution string to enum if present
     effective_solution = solution
     if not effective_solution and data.get("solution"):
@@ -345,86 +408,94 @@ def update_issue(
         from_stage=current_stage,
         to_status=target_status,
         to_stage=target_stage,
-        solution=effective_solution
+        solution=effective_solution,
     )
 
     if target_status == "closed":
         # Policy: Dependencies must be closed
-        dependencies_to_check = dependencies if dependencies is not None else data.get('dependencies', [])
+        dependencies_to_check = (
+            dependencies if dependencies is not None else data.get("dependencies", [])
+        )
         if dependencies_to_check:
             for dep_id in dependencies_to_check:
                 dep_path = find_issue_path(issues_root, dep_id)
                 if dep_path:
                     dep_meta = parse_issue(dep_path)
                     if dep_meta and dep_meta.status != "closed":
-                        raise ValueError(f"Dependency Block: Cannot close {issue_id} because dependency {dep_id} is [Status: {dep_meta.status}].")
-
+                        raise ValueError(
+                            f"Dependency Block: Cannot close {issue_id} because dependency {dep_id} is [Status: {dep_meta.status}]."
+                        )
+
     # Validate new parent/dependencies/related exist
     if parent is not None and parent != "":
         if not find_issue_path(issues_root, parent):
             raise ValueError(f"Parent issue {parent} not found.")
-
+
     if dependencies is not None:
         for dep_id in dependencies:
             if not find_issue_path(issues_root, dep_id):
                 raise ValueError(f"Dependency issue {dep_id} not found.")
-
+
     if related is not None:
         for rel_id in related:
             if not find_issue_path(issues_root, rel_id):
                 raise ValueError(f"Related issue {rel_id} not found.")
-
+
     # Update Data
     if status:
-        data['status'] = status
+        data["status"] = status
 
     if stage:
-        data['stage'] = stage
+        data["stage"] = stage
     if solution:
-        data['solution'] = solution
-
+        data["solution"] = solution
+
     if title:
-        data['title'] = title
+        data["title"] = title
 
     if parent is not None:
         if parent == "":
-            data.pop('parent', None) # Remove parent field
+            data.pop("parent", None)  # Remove parent field
         else:
-            data['parent'] = parent
+            data["parent"] = parent
 
     if sprint is not None:
-        data['sprint'] = sprint
-
+        data["sprint"] = sprint
+
     if dependencies is not None:
-        data['dependencies'] = dependencies
-
+        data["dependencies"] = dependencies
+
     if related is not None:
-        data['related'] = related
-
+        data["related"] = related
+
     if tags is not None:
-        data['tags'] = tags
-
+        data["tags"] = tags
+
+    if files is not None:
+        data["files"] = files
+
     # Lifecycle Hooks
     # 1. Opened At: If transitioning to OPEN
     if target_status == IssueStatus.OPEN and current_status != IssueStatus.OPEN:
         # Only set if not already set? Or always reset?
         # Let's set it if not present, or update it to reflect "Latest activation"
         # FEAT-0012 says: "update opened_at to now"
-        data['opened_at'] = current_time()
-
+        data["opened_at"] = current_time()
+
     # 2. Backlog Push: Handled by IssueMetadata.validate_lifecycle (Status=Backlog -> Stage=None)
     # 3. Closed: Handled by IssueMetadata.validate_lifecycle (Status=Closed -> Stage=Done, ClosedAt=Now)
 
     # Touch updated_at
-    data['updated_at'] = current_time()
-
+    data["updated_at"] = current_time()
+
     # Re-hydrate through Model
     try:
         updated_meta = IssueMetadata(**data)
-
+
         # Enforce lifecycle policies (defaults, auto-corrections)
         # This ensures that when we update, we also fix invalid states (like Closed but not Done)
         from .engine import get_engine
+
         get_engine().enforce_policy(updated_meta)
 
         # Delegate to IssueValidator for static state validation
@@ -435,59 +506,72 @@ def update_issue(
         diagnostics = validator.validate(updated_meta, body)
         errors = [d for d in diagnostics if d.severity == DiagnosticSeverity.Error]
         if errors:
-            raise ValueError(f"Validation Failed: {errors[0].message}")
+            raise ValueError(f"Validation Failed: {errors[0].message}")
 
     except Exception as e:
         raise ValueError(f"Failed to validate updated metadata: {e}")
-
+
     # Serialize back
     # Explicitly exclude actions and path from file persistence
-    new_yaml = yaml.dump(updated_meta.model_dump(exclude_none=True, mode='json', exclude={'actions', 'path'}), sort_keys=False, allow_unicode=True)
-
+    new_yaml = yaml.dump(
+        updated_meta.model_dump(
+            exclude_none=True, mode="json", exclude={"actions", "path"}
+        ),
+        sort_keys=False,
+        allow_unicode=True,
+    )
+
     # Reconstruct File
     match_header = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
     if not match_header:
         body_content = body
     else:
-        body_content = content[match_header.end():]
-
-    if body_content.startswith('\n'):
-        body_content = body_content[1:]
-
+        body_content = content[match_header.end() :]
+
+    if body_content.startswith("\n"):
+        body_content = body_content[1:]
+
     new_content = f"---\n{new_yaml}---\n{body_content}"
-
+
     path.write_text(new_content)
-
+
     # 3. Handle physical move if status changed
     if status and status != current_status:
         # Move file
         prefix = issue_id.split("-")[0].upper()
         reverse_prefix_map = get_reverse_prefix_map(issues_root)
         base_type_dir = get_issue_dir(reverse_prefix_map[prefix], issues_root)
-
+
         try:
             rel_path = path.relative_to(base_type_dir)
-            structure_path = Path(*rel_path.parts[1:]) if len(rel_path.parts) > 1 else Path(path.name)
+            structure_path = (
+                Path(*rel_path.parts[1:])
+                if len(rel_path.parts) > 1
+                else Path(path.name)
+            )
         except ValueError:
             structure_path = Path(path.name)
 
         target_path = base_type_dir / target_status / structure_path
-
+
         if path != target_path:
             target_path.parent.mkdir(parents=True, exist_ok=True)
             path.rename(target_path)
-            path = target_path # Update local path variable for returned meta
+            path = target_path  # Update local path variable for returned meta
 
     # Hook: Recursive Aggregation (FEAT-0003)
     if updated_meta.parent:
         recalculate_parent(issues_root, updated_meta.parent)
-
+
     # Update returned metadata with final absolute path
     updated_meta.path = str(path.absolute())
     updated_meta.actions = get_available_actions(updated_meta)
     return updated_meta
 
-def start_issue_isolation(issues_root: Path, issue_id: str, mode: str, project_root: Path) -> IssueMetadata:
+
+def start_issue_isolation(
+    issues_root: Path, issue_id: str, mode: str, project_root: Path
+) -> IssueMetadata:
     """
     Start physical isolation for an issue (Branch or Worktree).
     """
@@ -498,48 +582,52 @@ def start_issue_isolation(issues_root: Path, issue_id: str, mode: str, project_r
     issue = parse_issue(path)
     if not issue:
         raise ValueError(f"Could not parse metadata for issue {issue_id}")
-
+
     # Idempotency / Conflict Check
     if issue.isolation:
         if issue.isolation.type == mode:
-            # Already isolated in same mode, maybe just switch context?
-            # For now, we just warn or return.
-            # If branch exists, we make sure it's checked out in CLI layer maybe?
-            # But here we assume we want to setup metadata.
-            pass
+            # Already isolated in same mode, maybe just switch context?
+            # For now, we just warn or return.
+            # If branch exists, we make sure it's checked out in CLI layer maybe?
+            # But here we assume we want to setup metadata.
+            pass
         else:
-            raise ValueError(f"Issue {issue_id} is already isolated as '{issue.isolation.type}'. Please cleanup first.")
+            raise ValueError(
+                f"Issue {issue_id} is already isolated as '{issue.isolation.type}'. Please cleanup first."
+            )
 
     slug = _get_slug(issue.title)
-    branch_name = f"feat/{issue_id.lower()}-{slug}"
-
+    branch_name = f"feat/{issue_id.lower()}-{slug}"
+
     isolation_meta = None
 
     if mode == "branch":
         if not git.branch_exists(project_root, branch_name):
             git.create_branch(project_root, branch_name, checkout=True)
         else:
-            # Check if we are already on it?
+            # Check if we are already on it?
             # If not, checkout.
             current = git.get_current_branch(project_root)
             if current != branch_name:
                 git.checkout_branch(project_root, branch_name)
-
+
         isolation_meta = IssueIsolation(type="branch", ref=branch_name)
 
     elif mode == "worktree":
         wt_path = project_root / ".monoco" / "worktrees" / f"{issue_id.lower()}-{slug}"
-
+
         # Check if worktree exists physically
         if wt_path.exists():
-            # Check if valid git worktree?
-            pass
+            # Check if valid git worktree?
+            pass
         else:
-            wt_path.parent.mkdir(parents=True, exist_ok=True)
-            git.worktree_add(project_root, branch_name, wt_path)
-
-        isolation_meta = IssueIsolation(type="worktree", ref=branch_name, path=str(wt_path))
-
+            wt_path.parent.mkdir(parents=True, exist_ok=True)
+            git.worktree_add(project_root, branch_name, wt_path)
+
+        isolation_meta = IssueIsolation(
+            type="worktree", ref=branch_name, path=str(wt_path)
+        )
+
     # Persist Metadata
     # We load raw, update isolation field, save.
     content = path.read_text()
@@ -547,21 +635,24 @@ def start_issue_isolation(issues_root: Path, issue_id: str, mode: str, project_r
     if match:
         yaml_str = match.group(1)
         data = yaml.safe_load(yaml_str) or {}
-
-        data['isolation'] = isolation_meta.model_dump(mode='json')
+
+        data["isolation"] = isolation_meta.model_dump(mode="json")
         # Also ensure stage is DOING (logic link)
-        data['stage'] = "doing"
-        data['updated_at'] = current_time()
+        data["stage"] = "doing"
+        data["updated_at"] = current_time()
 
         new_yaml = yaml.dump(data, sort_keys=False, allow_unicode=True)
         new_content = content.replace(match.group(1), "\n" + new_yaml)
         path.write_text(new_content)
-
+
         return IssueMetadata(**data)
-
+
     return issue
 
-def prune_issue_resources(issues_root: Path, issue_id: str, force: bool, project_root: Path) -> List[str]:
+
+def prune_issue_resources(
+    issues_root: Path, issue_id: str, force: bool, project_root: Path
+) -> List[str]:
     """
     Cleanup physical resources. Returns list of actions taken.
     """
@@ -571,11 +662,11 @@ def prune_issue_resources(issues_root: Path, issue_id: str, force: bool, project
         # If we can't find issue, we can't read metadata to know what to prune.
         # We rely on CLI to pass context or we fail.
         raise FileNotFoundError(f"Issue {issue_id} not found.")
-
+
     issue = parse_issue(path)
     if not issue:
         raise ValueError(f"Could not parse metadata for issue {issue_id}")
-
+
     deleted_items = []
 
     if not issue.isolation:
@@ -585,12 +676,14 @@
         branch = issue.isolation.ref
         current = git.get_current_branch(project_root)
         if current == branch:
-            raise RuntimeError(f"Cannot delete active branch '{branch}'. Please checkout 'main' first.")
-
+            raise RuntimeError(
+                f"Cannot delete active branch '{branch}'. Please checkout 'main' first."
+            )
+
         if git.branch_exists(project_root, branch):
             git.delete_branch(project_root, branch, force=force)
             deleted_items.append(f"branch:{branch}")
-
+
     elif issue.isolation.type == IsolationType.WORKTREE:
         wt_path_str = issue.isolation.path
         if wt_path_str:
@@ -598,20 +691,20 @@
             # Normalize path if relative
             if not wt_path.is_absolute():
                 wt_path = project_root / wt_path
-
+
             if wt_path.exists():
                 git.worktree_remove(project_root, wt_path, force=force)
                 deleted_items.append(f"worktree:{wt_path.name}")
-
-        # Also delete the branch associated?
+
+        # Also delete the branch associated?
         # Worktree create makes a branch. When removing worktree, branch remains.
         # Usually we want to remove the branch too if it was created for this issue.
         branch = issue.isolation.ref
         if branch and git.branch_exists(project_root, branch):
-            # We can't delete branch if it is checked out in the worktree we just removed?
-            # git worktree remove unlocks the branch.
-            git.delete_branch(project_root, branch, force=force)
-            deleted_items.append(f"branch:{branch}")
+            # We can't delete branch if it is checked out in the worktree we just removed?
+            # git worktree remove unlocks the branch.
+            git.delete_branch(project_root, branch, force=force)
+            deleted_items.append(f"branch:{branch}")
 
     # Clear Metadata
     content = path.read_text()
@@ -619,17 +712,16 @@
     if match:
         yaml_str = match.group(1)
         data = yaml.safe_load(yaml_str) or {}
-
-        if 'isolation' in data:
-            del data['isolation']
-        data['updated_at'] = current_time()
-
+
+        if "isolation" in data:
+            del data["isolation"]
+        data["updated_at"] = current_time()
+
         new_yaml = yaml.dump(data, sort_keys=False, allow_unicode=True)
         new_content = content.replace(match.group(1), "\n" + new_yaml)
         path.write_text(new_content)
-
-    return deleted_items
 
+    return deleted_items
 
 
 def delete_issue_file(issues_root: Path, issue_id: str):
@@ -639,9 +731,84 @@ def delete_issue_file(issues_root: Path, issue_id: str):
     path = find_issue_path(issues_root, issue_id)
     if not path:
         raise FileNotFoundError(f"Issue {issue_id} not found.")
-
+
     path.unlink()
-
+
+
+def sync_issue_files(issues_root: Path, issue_id: str, project_root: Path) -> List[str]:
+    """
+    Sync 'files' field in issue metadata with actual changed files in git.
+    Strategies:
+    1. Isolation Ref: If issue has isolation (branch/worktree), use that ref.
+    2. Convention: If no isolation, look for branch `*/<id>-*`.
+    3. Current Branch: If current branch matches pattern.
+
+    Compares against default branch (usually 'main' or 'master').
+    """
+    path = find_issue_path(issues_root, issue_id)
+    if not path:
+        raise FileNotFoundError(f"Issue {issue_id} not found.")
+
+    issue = parse_issue(path)
+    if not issue:
+        raise ValueError(f"Could not parse issue {issue_id}")
+
+    # Determine Target Branch
+    target_ref = None
+
+    if issue.isolation and issue.isolation.ref:
+        target_ref = issue.isolation.ref
+    else:
+        # Heuristic Search
+        # 1. Is current branch related?
+        current = git.get_current_branch(project_root)
+        if issue_id.lower() in current.lower():
+            target_ref = current
+        else:
+            # 2. Search for branch
+            # Limitation: core.git doesn't list all branches yet.
+            # We skip this for now to avoid complexity, relying on isolation or current context.
+            pass
+
+    if not target_ref:
+        raise RuntimeError(
+            f"Could not determine git branch for Issue {issue_id}. Please ensure issue is started or you are on the feature branch."
+        )
+
+    # Determine Base Branch (assume main, or config?)
+    # For now hardcode main, eventually read from config
+    base_ref = "main"
+
+    # Check if base exists, if not try master
+    if not git.branch_exists(project_root, base_ref):
+        if git.branch_exists(project_root, "master"):
+            base_ref = "master"
+        else:
+            # Fallback: remote/main?
+            pass
+
+    # Git Diff
+    # git diff --name-only base...target
+    cmd = ["diff", "--name-only", f"{base_ref}...{target_ref}"]
+    code, stdout, stderr = git._run_git(cmd, project_root)
+
+    if code != 0:
+        raise RuntimeError(f"Git diff failed: {stderr}")
+
+    changed_files = [f.strip() for f in stdout.splitlines() if f.strip()]
+
+    # Sort for consistency
+    changed_files.sort()
+
+    # Update Issue
+    # Only update if changed
+    if changed_files != issue.files:
+        update_issue(issues_root, issue_id, files=changed_files)
+        return changed_files
+
+    return []
+
+
 # Resources
 SKILL_CONTENT = """
 ---
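The new `sync_issue_files` resolves a feature branch and diffs it against the default branch with git's three-dot syntax, which compares the branch tip against the merge base rather than the current tips of both branches. A standalone sketch of that diff step (plain `subprocess` instead of the package's `git._run_git` wrapper; the repo path and refs are hypothetical):

```python
import subprocess

def changed_files(repo: str, base_ref: str, target_ref: str) -> list[str]:
    # "base...target" diffs target against the merge base, so files that
    # changed on the base branch after the branch point are not counted.
    out = subprocess.run(
        ["git", "diff", "--name-only", f"{base_ref}...{target_ref}"],
        cwd=repo, capture_output=True, text=True, check=True,
    ).stdout
    return sorted(line.strip() for line in out.splitlines() if line.strip())

# e.g. changed_files(".", "main", "feat/feat-0003-add-oauth2-login")
```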
@@ -675,7 +842,7 @@ description: Monoco Issue System 的官方技能定义。将 Issue 视为通用
 - **Status Level (Lowercase)**: `open`, `backlog`, `closed`
 
 ### 路径流转
-使用 `monoco issue`:
+使用 `monoco issue`:
 1. **Create**: `monoco issue create <type> --title "..."`
 2. **Transition**: `monoco issue open/close/backlog <id>`
 3. **View**: `monoco issue scope`
@@ -687,34 +854,35 @@
 def init(issues_root: Path):
     """Initialize the Issues directory structure."""
     issues_root.mkdir(parents=True, exist_ok=True)
-
+
     # Standard Directories based on new Terminology
     for subdir in ["Epics", "Features", "Chores", "Fixes"]:
         (issues_root / subdir).mkdir(exist_ok=True)
-        # Create status subdirs? Usually handled by open/backlog,
+        # Create status subdirs? Usually handled by open/backlog,
         # but creating them initially is good for guidance.
         for status in ["open", "backlog", "closed"]:
             (issues_root / subdir / status).mkdir(exist_ok=True)
-
+
     # Create gitkeep to ensure they are tracked? Optional.
 
+
 def get_resources() -> Dict[str, Any]:
     return {
-        "skills": {
-            "issues-management": SKILL_CONTENT
-        },
-        "prompts": {} # Handled by adapter via resource files
+        "skills": {"issues-management": SKILL_CONTENT},
+        "prompts": {},  # Handled by adapter via resource files
     }
 
 
-def list_issues(issues_root: Path, recursive_workspace: bool = False) -> List[IssueMetadata]:
+def list_issues(
+    issues_root: Path, recursive_workspace: bool = False
+) -> List[IssueMetadata]:
     """
     List all issues in the project.
     """
     issues = []
     engine = get_engine(str(issues_root.parent))
     all_types = engine.get_all_types()
-
+
     for issue_type in all_types:
         base_dir = get_issue_dir(issue_type, issues_root)
         for status_dir in ["open", "backlog", "closed"]:
@@ -731,11 +899,11 @@ def list_issues(issues_root: Path, recursive_workspace: bool = False) -> List[Is
         # weak assumption: issues_root.parent is project_root
         project_root = issues_root.parent
         conf = get_config(str(project_root))
-
+
         for name, rel_path in conf.project.members.items():
             member_root = (project_root / rel_path).resolve()
-            member_issues_dir = member_root / "Issues" # Standard convention
-
+            member_issues_dir = member_root / "Issues"  # Standard convention
+
             if member_issues_dir.exists():
                 # Fetch member issues (non-recursive to avoid loops)
                 member_issues = list_issues(member_issues_dir, False)
@@ -744,16 +912,16 @@ def list_issues(issues_root: Path, recursive_workspace: bool = False) -> List[Is
                     # CRITICAL: Also namespace references to keep parent-child structure intact
                     if m.parent and "::" not in m.parent:
                         m.parent = f"{name}::{m.parent}"
-
+
                     if m.dependencies:
                         m.dependencies = [
-                            f"{name}::{d}" if d and "::" not in d else d
+                            f"{name}::{d}" if d and "::" not in d else d
                             for d in m.dependencies
                         ]
-
+
                     if m.related:
                         m.related = [
-                            f"{name}::{r}" if r and "::" not in r else r
+                            f"{name}::{r}" if r and "::" not in r else r
                             for r in m.related
                         ]
 
@@ -765,17 +933,13 @@ def list_issues(issues_root: Path, recursive_workspace: bool = False) -> List[Is
 
     return issues
 
+
 def get_board_data(issues_root: Path) -> Dict[str, List[IssueMetadata]]:
     """
     Get open issues grouped by their stage for Kanban view.
     """
-    board = {
-        "draft": [],
-        "doing": [],
-        "review": [],
-        "done": []
-    }
-
+    board = {"draft": [], "doing": [], "review": [], "done": []}
+
     issues = list_issues(issues_root)
     for issue in issues:
         if issue.status == "open" and issue.stage:
@@ -785,28 +949,40 @@ def get_board_data(issues_root: Path) -> Dict[str, List[IssueMetadata]]:
         elif issue.status == "closed":
             # Optionally show recently closed items in DONE column
             board["done"].append(issue)
-
+
     return board
 
-def validate_issue_integrity(meta: IssueMetadata, all_issue_ids: Set[str] = set()) -> List[str]:
+
+def validate_issue_integrity(
+    meta: IssueMetadata, all_issue_ids: Set[str] = set()
+) -> List[str]:
     """
     Validate metadata integrity (Solution, Lifecycle, etc.)
     UI-agnostic.
     """
     errors = []
     if meta.status == "closed" and not meta.solution:
-        errors.append(f"Solution Missing: {meta.id} is closed but has no solution field.")
-
+        errors.append(
+            f"Solution Missing: {meta.id} is closed but has no solution field."
+        )
+
     if meta.parent:
         if all_issue_ids and meta.parent not in all_issue_ids:
-            errors.append(f"Broken Link: {meta.id} refers to non-existent parent {meta.parent}.")
+            errors.append(
+                f"Broken Link: {meta.id} refers to non-existent parent {meta.parent}."
+            )
 
     if meta.status == "backlog" and meta.stage != "freezed":
-        errors.append(f"Lifecycle Error: {meta.id} is backlog but stage is not freezed (found: {meta.stage}).")
-
+        errors.append(
+            f"Lifecycle Error: {meta.id} is backlog but stage is not freezed (found: {meta.stage})."
+        )
+
     return errors
 
-def update_issue_content(issues_root: Path, issue_id: str, new_content: str) -> IssueMetadata:
+
+def update_issue_content(
+    issues_root: Path, issue_id: str, new_content: str
+) -> IssueMetadata:
     """
     Update the raw content of an issue file.
     Validates integrity before saving.
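The integrity rules in `validate_issue_integrity` above are pure functions of metadata, so they are easy to exercise in isolation. A sketch of the three checks with a hypothetical stand-in for `IssueMetadata`:

```python
from dataclasses import dataclass
from typing import Optional

@dataclass
class Meta:  # hypothetical stand-in for IssueMetadata
    id: str
    status: str
    stage: Optional[str] = None
    solution: Optional[str] = None
    parent: Optional[str] = None

def integrity_errors(meta: Meta, all_ids: set[str]) -> list[str]:
    errors = []
    if meta.status == "closed" and not meta.solution:
        errors.append(f"Solution Missing: {meta.id}")       # closed needs a solution
    if meta.parent and all_ids and meta.parent not in all_ids:
        errors.append(f"Broken Link: {meta.id} -> {meta.parent}")
    if meta.status == "backlog" and meta.stage != "freezed":
        errors.append(f"Lifecycle Error: {meta.id}")        # backlog implies freezed
    return errors

print(integrity_errors(Meta(id="FIX-0001", status="closed"), set()))
# ['Solution Missing: FIX-0001']
```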
@@ -819,91 +995,100 @@ def update_issue_content(issues_root: Path, issue_id: str, new_content: str) ->
     # 1. Parse New Content (using temp file to reuse parse_issue logic)
     import tempfile
     import os
-
-    with tempfile.NamedTemporaryFile(mode='w+', suffix='.md', delete=False) as tmp:
+
+    with tempfile.NamedTemporaryFile(mode="w+", suffix=".md", delete=False) as tmp:
         tmp.write(new_content)
         tmp_path = Path(tmp.name)
-
+
     try:
         meta = parse_issue(tmp_path)
         if not meta:
-            raise ValueError("Invalid Issue Content: Frontmatter missing or invalid.")
-
+            raise ValueError("Invalid Issue Content: Frontmatter missing or invalid.")
+
         if meta.id != issue_id:
-            raise ValueError(f"Cannot change Issue ID (Original: {issue_id}, New: {meta.id})")
+            raise ValueError(
+                f"Cannot change Issue ID (Original: {issue_id}, New: {meta.id})"
+            )
 
         # 2. Integrity Check
         errors = validate_issue_integrity(meta)
         if errors:
             raise ValueError(f"Validation Failed: {'; '.join(errors)}")
-
+
         # 3. Write and Move
         # We overwrite the *current* path first
         path.write_text(new_content)
-
+
         # Check if we need to move (Status Change)
         # We need to re-derive the expected path based on new status
         # Reuse logic from update_issue (simplified)
-
+
         prefix = issue_id.split("-")[0].upper()
         reverse_prefix_map = get_reverse_prefix_map(issues_root)
         base_type_dir = get_issue_dir(reverse_prefix_map[prefix], issues_root)
-
+
         # Calculate structure path (preserve subdir)
         try:
             rel_path = path.relative_to(base_type_dir)
             # Remove the first component (current status directory) which might be 'open', 'closed' etc.
             # But wait, find_issue_path found it. 'rel_path' includes status dir.
             # e.g. open/Backend/Auth/FEAT-123.md -> parts=('open', 'Backend', 'Auth', 'FEAT-123.md')
-            structure_path = Path(*rel_path.parts[1:]) if len(rel_path.parts) > 1 else Path(path.name)
+            structure_path = (
+                Path(*rel_path.parts[1:])
+                if len(rel_path.parts) > 1
+                else Path(path.name)
+            )
         except ValueError:
             # Fallback if path is weird
             structure_path = Path(path.name)
 
         target_path = base_type_dir / meta.status / structure_path
-
+
         if path != target_path:
             target_path.parent.mkdir(parents=True, exist_ok=True)
             path.rename(target_path)
-
+
         return meta
 
     finally:
         if os.path.exists(tmp_path):
             os.unlink(tmp_path)
 
-def generate_delivery_report(issues_root: Path, issue_id: str, project_root: Path) -> IssueMetadata:
+
+def generate_delivery_report(
+    issues_root: Path, issue_id: str, project_root: Path
+) -> IssueMetadata:
     """
     Scan git history for commits related to this issue (Ref: ID),
     aggregate touched files, and append/update '## Delivery' section in the issue body.
     """
     from monoco.core import git
-
+
     path = find_issue_path(issues_root, issue_id)
     if not path:
         raise FileNotFoundError(f"Issue {issue_id} not found.")
 
     # 1. Scan Git
     commits = git.search_commits_by_message(project_root, f"Ref: {issue_id}")
-
+
     if not commits:
         meta = parse_issue(path)
         if not meta:
             raise ValueError(f"Could not parse metadata for issue {issue_id}")
         return meta
-
+
     # 2. Aggregate Data
     all_files = set()
     commit_list_md = []
-
+
     for c in commits:
-        short_hash = c['hash'][:7]
+        short_hash = c["hash"][:7]
         commit_list_md.append(f"- `{short_hash}` {c['subject']}")
-        for f in c['files']:
+        for f in c["files"]:
             all_files.add(f)
-
+
     sorted_files = sorted(list(all_files))
-
+
     # 3. Format Report
     delivery_section = f"""
 ## Delivery
@@ -916,18 +1101,18 @@ def generate_delivery_report(issues_root: Path, issue_id: str, project_root: Pat
 
     # 4. Update File Content
     content = path.read_text()
-
+
     # Check if Delivery section exists
     if "## Delivery" in content:
         # Replace existing section
         # We assume Delivery is the last section or we replace until end or next H2?
-        # For simplicity, if ## Delivery exists, we regex replace it and everything after it
+        # For simplicity, if ## Delivery exists, we regex replace it and everything after it
         # OR we just replace the section block if we can identify it.
         # Let's assume it's at the end or we replace the specific block `## Delivery...`
         # But regex matching across newlines is tricky if we don't know where it ends.
         # Safe bet: If "## Delivery" exists, find it and replace everything after it?
         # Or look for "<!-- Monoco Auto Generated -->"
-
+
         pattern = r"## Delivery.*"
         # If we use DOTALL, it replaces everything until end of string?
         # Yes, usually Delivery report is appended at the end.
@@ -937,25 +1122,27 @@ def generate_delivery_report(issues_root: Path, issue_id: str, project_root: Pat
         if not content.endswith("\n"):
             content += "\n"
         content += "\n" + delivery_section.strip() + "\n"
-
+
     path.write_text(content)
-
+
     # 5. Update Metadata (delivery stats)
     # We might want to store 'files_count' in metadata for the recursive aggregation (FEAT-0003)
     # But IssueMetadata doesn't have a 'delivery' dict field yet.
     # We can add it to 'extra' or extend the model later.
     # For now, just persisting the text is enough for FEAT-0002.
-
+
     meta = parse_issue(path)
     if not meta:
-        raise ValueError(f"Could not parse metadata for issue {issue_id}")
+        raise ValueError(f"Could not parse metadata for issue {issue_id}")
     return meta
 
+
 def get_children(issues_root: Path, parent_id: str) -> List[IssueMetadata]:
     """Find all direct children of an issue."""
     all_issues = list_issues(issues_root)
     return [i for i in all_issues if i.parent == parent_id]
 
+
 def count_files_in_delivery(issue_path: Path) -> int:
     """Parse the ## Delivery section to count files."""
     try:
@@ -967,6 +1154,7 @@ def count_files_in_delivery(issue_path: Path) -> int:
         pass
     return 0
 
+
 def parse_search_query(query: str) -> Tuple[List[str], List[str], List[str]]:
     """
     Parse a search query string into explicit positives, optional terms, and negatives.
@@ -978,8 +1166,9 @@ def parse_search_query(query: str) -> Tuple[List[str], List[str], List[str]]:
     """
     if not query:
         return [], [], []
-
+
     import shlex
+
     try:
         tokens = shlex.split(query)
     except ValueError:
@@ -989,7 +1178,7 @@ def parse_search_query(query: str) -> Tuple[List[str], List[str], List[str]]:
     explicit_positives = []
     terms = []
     negatives = []
-
+
     for token in tokens:
         token_lower = token.lower()
         if token_lower.startswith("-") and len(token_lower) > 1:
@@ -998,10 +1187,17 @@ def parse_search_query(query: str) -> Tuple[List[str], List[str], List[str]]:
             explicit_positives.append(token_lower[1:])
         else:
             terms.append(token_lower)
-
+
     return explicit_positives, terms, negatives
 
-def check_issue_match(issue: IssueMetadata, explicit_positives: List[str], terms: List[str], negatives: List[str], full_content: str = "") -> bool:
+
+def check_issue_match(
+    issue: IssueMetadata,
+    explicit_positives: List[str],
+    terms: List[str],
+    negatives: List[str],
+    full_content: str = "",
+) -> bool:
     """
     Check if an issue matches the search criteria.
     Consider fields: id, title, status, stage, type, tags, dependencies, related.
@@ -1018,64 +1214,65 @@ def check_issue_match(issue: IssueMetadata, explicit_positives: List[str], terms
         *(issue.tags or []),
         *(issue.dependencies or []),
         *(issue.related or []),
-        full_content
+        full_content,
     ]
-
+
     # Normalize blob
     blob = " ".join(filter(None, searchable_parts)).lower()
-
+
     # 2. Check Negatives (Fast Fail)
     for term in negatives:
         if term in blob:
             return False
-
+
     # 3. Check Explicit Positives (Must match ALL)
     for term in explicit_positives:
         if term not in blob:
             return False
-
+
     # 4. Check Terms (Nice to Have)
     # If explicit_positives exist, terms are optional (implicit inclusion).
     # If NO explicit_positives, terms act as Implicit OR (must match at least one).
     if terms:
         if not explicit_positives:
-            # Must match at least one term
-            if not any(term in blob for term in terms):
-                return False
-
+            # Must match at least one term
+            if not any(term in blob for term in terms):
+                return False
+
     return True
 
+
 def search_issues(issues_root: Path, query: str) -> List[IssueMetadata]:
     """
     Search issues using advanced query syntax.
     Returns list of matching IssueMetadata.
     """
     explicit_positives, terms, negatives = parse_search_query(query)
-
+
     # Optimization: If no query, return empty? Or all?
     # Usually search implies input. CLI `list` is for all.
     # But if query is empty string, we return all?
-    # Let's align with "grep": empty pattern matches everything?
+    # Let's align with "grep": empty pattern matches everything?
     # Or strict: empty query -> all.
     if not explicit_positives and not terms and not negatives:
-        return list_issues(issues_root)
+        return list_issues(issues_root)
 
     matches = []
     all_files = []
-
+
     # 1. Gather all files first (we need to read content for deep search)
     # Using list_issues is inefficient if we need body content, as list_issues only parses frontmatter (usually).
     # But parse_issue uses `IssueMetadata` which ignores body.
     # We need a robust way. `parse_issue` reads full text but discards body in current implementation?
     # Wait, `parse_issue` in core.py *only* reads frontmatter via `yaml.safe_load(match.group(1))`.
     # It does NOT return body.
-
+
     # To support deep search (Body), we need to read files.
     # Let's iterate files directly.
-
+
     engine = get_engine(str(issues_root.parent))
     all_types = engine.get_all_types()
-
+
     for issue_type in all_types:
         base_dir = get_issue_dir(issue_type, issues_root)
         for status_dir in ["open", "backlog", "closed"]:
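The query grammar implemented by `parse_search_query` and `check_issue_match` above: `-term` excludes, `+term` requires, and bare terms form an implicit OR unless a `+term` is present. A condensed sketch of the matching rule (`blob` stands for the issue's concatenated searchable text):

```python
def matches(blob: str, positives: list[str], terms: list[str], negatives: list[str]) -> bool:
    blob = blob.lower()
    if any(t in blob for t in negatives):      # any negative kills the match
        return False
    if not all(t in blob for t in positives):  # every +term must appear
        return False
    if terms and not positives:                # bare terms: implicit OR
        return any(t in blob for t in terms)
    return True

# Query "auth -legacy" matches issues mentioning auth but not legacy:
print(matches("FEAT-0003 add oauth2 login", [], ["auth"], ["legacy"]))  # True
```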
@@ -1092,23 +1289,26 @@ def search_issues(issues_root: Path, query: str) -> List[IssueMetadata]:
                     match = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
                     if not match:
                         continue
-
+
                     yaml_str = match.group(1)
                     data = yaml.safe_load(yaml_str)
                     if not isinstance(data, dict):
-                        continue
-
+                        continue
+
                     meta = IssueMetadata(**data)
-
+
                     # Match
-                    if check_issue_match(meta, explicit_positives, terms, negatives, full_content=content):
+                    if check_issue_match(
+                        meta, explicit_positives, terms, negatives, full_content=content
+                    ):
                         matches.append(meta)
-
+
                 except Exception:
                     continue
-
+
     return matches
 
+
 def recalculate_parent(issues_root: Path, parent_id: str):
     """
     Update parent Epic/Feature stats based on children.
@@ -1117,8 +1317,8 @@ def recalculate_parent(issues_root: Path, parent_id: str):
     """
     parent_path = find_issue_path(issues_root, parent_id)
     if not parent_path:
-        return # Should we warn?
-
+        return  # Should we warn?
+
     children = get_children(issues_root, parent_id)
     if not children:
         return
1127
1327
  closed = len([c for c in children if c.status == "closed"])
1128
1328
  # Progress string: "3/5"
1129
1329
  progress_str = f"{closed}/{total}"
1130
-
1330
+
1131
1331
  # Files count
1132
1332
  total_files = 0
1133
1333
  for child in children:
@@ -1137,68 +1337,71 @@ def recalculate_parent(issues_root: Path, parent_id: str):
 
     # Update Parent # We need to reuse update logic but without validation/status change
     # Just generic metadata update.
-    # update_issue is too heavy/strict.
-    # Let's implement a lighter `patch_metadata` helper or reuse logic.
-
+    # update_issue is too heavy/strict.
+    # Let's implement a lighter `patch_metadata` helper or reuse logic.
+
     content = parent_path.read_text()
     match = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
     if match:
         yaml_str = match.group(1)
         data = yaml.safe_load(yaml_str) or {}
-
+
         # Check if changed to avoid churn
         old_progress = data.get("progress")
         old_files = data.get("files_count")
-
+
         if old_progress == progress_str and old_files == total_files:
             return
-
+
         data["progress"] = progress_str
         data["files_count"] = total_files
-
+
         # Also maybe update status?
         # FEAT-0003 Req: "If first child starts doing, auto-start Parent?"
        # If parent is OPEN/TODO and child is DOING/REVIEW/DONE, set parent to DOING?
         current_status = data.get("status", "open").lower()
         current_stage = data.get("stage", "draft").lower()
-
+
         if current_status == "open" and current_stage == "draft":
             # Check if any child is active
-            active_children = [c for c in children if c.status == "open" and c.stage != "draft"]
+            active_children = [
+                c for c in children if c.status == "open" and c.stage != "draft"
+            ]
             closed_children = [c for c in children if c.status == "closed"]
-
+
             if active_children or closed_children:
                 data["stage"] = "doing"
-
+
         # Serialize
         new_yaml = yaml.dump(data, sort_keys=False, allow_unicode=True)
         # Replace header
         new_content = content.replace(match.group(1), "\n" + new_yaml)
         parent_path.write_text(new_content)
-
+
     # Recurse up?
     parent_parent = data.get("parent")
     if parent_parent:
         recalculate_parent(issues_root, parent_parent)
 
+
 def move_issue(
     source_issues_root: Path,
     issue_id: str,
     target_issues_root: Path,
-    renumber: bool = False
+    renumber: bool = False,
 ) -> Tuple[IssueMetadata, Path]:
     """
     Move an issue from one project to another.
-
+
     Args:
         source_issues_root: Source project's Issues directory
         issue_id: ID of the issue to move
         target_issues_root: Target project's Issues directory
         renumber: If True, automatically renumber on ID conflict
-
+
     Returns:
         Tuple of (updated metadata, new file path)
-
+
     Raises:
         FileNotFoundError: If source issue doesn't exist
         ValueError: If ID conflict exists and renumber=False
@@ -1207,26 +1410,26 @@ def move_issue(
     source_path = find_issue_path(source_issues_root, issue_id)
     if not source_path:
         raise FileNotFoundError(f"Issue {issue_id} not found in source project.")
-
+
     # 2. Parse issue metadata
     issue = parse_issue_detail(source_path)
     if not issue:
         raise ValueError(f"Failed to parse issue {issue_id}.")
-
+
     # 3. Check for ID conflict in target
     target_conflict_path = find_issue_path(target_issues_root, issue_id)
-
+
     if target_conflict_path:
         # Conflict detected
         conflict_issue = parse_issue(target_conflict_path)
-
+
         # Check if it's the same issue (same UID)
         if issue.uid and conflict_issue and conflict_issue.uid == issue.uid:
             raise ValueError(
                 f"Issue {issue_id} (uid: {issue.uid}) already exists in target project. "
                 "This appears to be a duplicate."
             )
-
+
         # Different issues with same ID
         if not renumber:
             conflict_info = ""
@@ -1236,7 +1439,7 @@ def move_issue(
                 f"ID conflict: Target project already has {issue_id}{conflict_info}.\n"
                 f"Use --renumber to automatically assign a new ID."
             )
-
+
         # Auto-renumber
         new_id = find_next_id(issue.type, target_issues_root)
         old_id = issue.id
@@ -1244,29 +1447,37 @@ def move_issue(
     else:
         new_id = issue.id
         old_id = issue.id
-
+
     # 4. Construct target path
     target_type_dir = get_issue_dir(issue.type, target_issues_root)
     target_status_dir = target_type_dir / issue.status
-
+
     # Preserve subdirectory structure if any
     try:
         source_type_dir = get_issue_dir(issue.type, source_issues_root)
         rel_path = source_path.relative_to(source_type_dir)
         # Remove status directory component
-        structure_path = Path(*rel_path.parts[1:]) if len(rel_path.parts) > 1 else Path(source_path.name)
+        structure_path = (
+            Path(*rel_path.parts[1:])
+            if len(rel_path.parts) > 1
+            else Path(source_path.name)
+        )
     except ValueError:
         structure_path = Path(source_path.name)
-
+
     # Update filename if ID changed
     if new_id != old_id:
         old_filename = source_path.name
         new_filename = old_filename.replace(old_id, new_id, 1)
-        structure_path = structure_path.parent / new_filename if structure_path.parent != Path('.') else Path(new_filename)
-
+        structure_path = (
+            structure_path.parent / new_filename
+            if structure_path.parent != Path(".")
+            else Path(new_filename)
+        )
+
     target_path = target_status_dir / structure_path
     target_path.parent.mkdir(parents=True, exist_ok=True)
-
+
     # 5. Update content if ID changed
     if new_id != old_id:
         # Update frontmatter
@@ -1275,30 +1486,30 @@ def move_issue(
         if match:
             yaml_str = match.group(1)
             data = yaml.safe_load(yaml_str) or {}
-            data['id'] = new_id
-            data['updated_at'] = current_time()
-
+            data["id"] = new_id
+            data["updated_at"] = current_time()
+
             new_yaml = yaml.dump(data, sort_keys=False, allow_unicode=True)
-
+
             # Update body (replace old ID in heading)
-            body = content[match.end():]
+            body = content[match.end() :]
             body = body.replace(f"## {old_id}:", f"## {new_id}:", 1)
-
+
             new_content = f"---\n{new_yaml}---{body}"
         else:
             new_content = issue.raw_content or ""
     else:
         new_content = issue.raw_content or ""
-
+
     # 6. Write to target
     target_path.write_text(new_content)
-
+
     # 7. Remove source
     source_path.unlink()
-
+
     # 8. Return updated metadata
     final_meta = parse_issue(target_path)
     if not final_meta:
         raise ValueError(f"Failed to parse moved issue at {target_path}")
-
+
     return final_meta, target_path
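A usage sketch for the reworked `move_issue`, assuming both projects follow the standard `Issues/` layout (the paths and ID are hypothetical):

```python
from pathlib import Path
# from monoco.features.issue.core import move_issue

source = Path("backend/Issues")    # hypothetical source project
target = Path("frontend/Issues")   # hypothetical target project

# meta, new_path = move_issue(source, "FEAT-0007", target, renumber=True)
# On an ID collision in the target project, renumber=True assigns the next
# free ID there (via find_next_id) and rewrites the frontmatter id, the
# filename, and the "## <ID>:" heading in the body; renumber=False raises
# ValueError instead.
```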