monoco-toolkit 0.2.8__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as they appear in the supported public registries. It is provided for informational purposes only.
Files changed (63)
  1. monoco/cli/project.py +35 -31
  2. monoco/cli/workspace.py +26 -16
  3. monoco/core/agent/__init__.py +0 -2
  4. monoco/core/agent/action.py +44 -20
  5. monoco/core/agent/adapters.py +20 -16
  6. monoco/core/agent/protocol.py +5 -4
  7. monoco/core/agent/state.py +21 -21
  8. monoco/core/config.py +90 -33
  9. monoco/core/execution.py +21 -16
  10. monoco/core/feature.py +8 -5
  11. monoco/core/git.py +61 -30
  12. monoco/core/hooks.py +57 -0
  13. monoco/core/injection.py +47 -44
  14. monoco/core/integrations.py +50 -35
  15. monoco/core/lsp.py +12 -1
  16. monoco/core/output.py +35 -16
  17. monoco/core/registry.py +3 -2
  18. monoco/core/setup.py +190 -124
  19. monoco/core/skills.py +121 -107
  20. monoco/core/state.py +12 -10
  21. monoco/core/sync.py +85 -56
  22. monoco/core/telemetry.py +10 -6
  23. monoco/core/workspace.py +26 -19
  24. monoco/daemon/app.py +123 -79
  25. monoco/daemon/commands.py +14 -13
  26. monoco/daemon/models.py +11 -3
  27. monoco/daemon/reproduce_stats.py +8 -8
  28. monoco/daemon/services.py +32 -33
  29. monoco/daemon/stats.py +59 -40
  30. monoco/features/config/commands.py +38 -25
  31. monoco/features/i18n/adapter.py +4 -5
  32. monoco/features/i18n/commands.py +83 -49
  33. monoco/features/i18n/core.py +94 -54
  34. monoco/features/issue/adapter.py +6 -7
  35. monoco/features/issue/commands.py +468 -272
  36. monoco/features/issue/core.py +419 -312
  37. monoco/features/issue/domain/lifecycle.py +33 -23
  38. monoco/features/issue/domain/models.py +71 -38
  39. monoco/features/issue/domain/parser.py +92 -69
  40. monoco/features/issue/domain/workspace.py +19 -16
  41. monoco/features/issue/engine/__init__.py +3 -3
  42. monoco/features/issue/engine/config.py +18 -25
  43. monoco/features/issue/engine/machine.py +72 -39
  44. monoco/features/issue/engine/models.py +4 -2
  45. monoco/features/issue/linter.py +287 -157
  46. monoco/features/issue/lsp/definition.py +26 -19
  47. monoco/features/issue/migration.py +45 -34
  48. monoco/features/issue/models.py +29 -13
  49. monoco/features/issue/monitor.py +24 -8
  50. monoco/features/issue/resources/en/SKILL.md +6 -2
  51. monoco/features/issue/validator.py +383 -208
  52. monoco/features/skills/__init__.py +0 -1
  53. monoco/features/skills/core.py +24 -18
  54. monoco/features/spike/adapter.py +4 -5
  55. monoco/features/spike/commands.py +51 -38
  56. monoco/features/spike/core.py +24 -16
  57. monoco/main.py +34 -21
  58. {monoco_toolkit-0.2.8.dist-info → monoco_toolkit-0.3.0.dist-info}/METADATA +1 -1
  59. monoco_toolkit-0.3.0.dist-info/RECORD +84 -0
  60. monoco_toolkit-0.2.8.dist-info/RECORD +0 -83
  61. {monoco_toolkit-0.2.8.dist-info → monoco_toolkit-0.3.0.dist-info}/WHEEL +0 -0
  62. {monoco_toolkit-0.2.8.dist-info → monoco_toolkit-0.3.0.dist-info}/entry_points.txt +0 -0
  63. {monoco_toolkit-0.2.8.dist-info → monoco_toolkit-0.3.0.dist-info}/licenses/LICENSE +0 -0
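monoco/features/issue/core.py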
@@ -1,10 +1,20 @@
- import os
  import re
  import yaml
  from pathlib import Path
  from typing import List, Dict, Optional, Tuple, Any, Set, Set
- from datetime import datetime
- from .models import IssueMetadata, IssueType, IssueStatus, IssueSolution, IssueStage, IssueDetail, IsolationType, IssueIsolation, IssueID, current_time, generate_uid
+ from .models import (
+ IssueMetadata,
+ IssueType,
+ IssueStatus,
+ IssueSolution,
+ IssueStage,
+ IssueDetail,
+ IsolationType,
+ IssueIsolation,
+ IssueID,
+ current_time,
+ generate_uid,
+ )
  from monoco.core import git
  from monoco.core.config import get_config, MonocoConfig
  from monoco.core.lsp import DiagnosticSeverity
@@ -12,82 +22,89 @@ from .validator import IssueValidator

  from .engine import get_engine

+
  def get_prefix_map(issues_root: Path) -> Dict[str, str]:
  engine = get_engine(str(issues_root.parent))
  return engine.get_prefix_map()

+
  def get_reverse_prefix_map(issues_root: Path) -> Dict[str, str]:
  prefix_map = get_prefix_map(issues_root)
  return {v: k for k, v in prefix_map.items()}

+
  def get_issue_dir(issue_type: str, issues_root: Path) -> Path:
  engine = get_engine(str(issues_root.parent))
  folder_map = engine.get_folder_map()
  folder = folder_map.get(issue_type, issue_type.capitalize() + "s")
  return issues_root / folder

+
  def _get_slug(title: str) -> str:
  slug = title.lower()
  # Replace non-word characters (including punctuation, spaces) with hyphens
  # \w matches Unicode word characters (letters, numbers, underscores)
  slug = re.sub(r"[^\w]+", "-", slug)
  slug = slug.strip("-")[:50]
-
+
  if not slug:
  slug = "issue"
-
+
  return slug

+
  def parse_issue(file_path: Path) -> Optional[IssueMetadata]:
  if not file_path.suffix == ".md":
  return None
-
+
  content = file_path.read_text()
  match = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
  if not match:
  return None
-
+
  try:
  data = yaml.safe_load(match.group(1))
  if not isinstance(data, dict):
- return None
-
- data['path'] = str(file_path.absolute())
+ return None
+
+ data["path"] = str(file_path.absolute())
  meta = IssueMetadata(**data)
  meta.actions = get_available_actions(meta)
  return meta
  except Exception:
  return None

+
  def parse_issue_detail(file_path: Path) -> Optional[IssueDetail]:
  if not file_path.suffix == ".md":
  return None
-
+
  content = file_path.read_text()
  # Robust splitting
  match = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
  if not match:
  return None
-
+
  yaml_str = match.group(1)
- body = content[match.end():].lstrip()
-
+ body = content[match.end() :].lstrip()
+
  try:
  data = yaml.safe_load(yaml_str)
  if not isinstance(data, dict):
- return None
-
- data['path'] = str(file_path.absolute())
+ return None
+
+ data["path"] = str(file_path.absolute())
  return IssueDetail(**data, body=body, raw_content=content)
  except Exception:
  return None

+
  def find_next_id(issue_type: str, issues_root: Path) -> str:
  prefix_map = get_prefix_map(issues_root)
  prefix = prefix_map.get(issue_type, "ISSUE")
  pattern = re.compile(rf"{prefix}-(\d+)")
  max_id = 0
-
+
  base_dir = get_issue_dir(issue_type, issues_root)
  # Scan all subdirs: open, backlog, closed
  for status_dir in ["open", "backlog", "closed"]:
@@ -97,28 +114,28 @@ def find_next_id(issue_type: str, issues_root: Path) -> str:
  match = pattern.search(f.name)
  if match:
  max_id = max(max_id, int(match.group(1)))
-
+
  return f"{prefix}-{max_id + 1:04d}"

+
  def create_issue_file(
- issues_root: Path,
- issue_type: IssueType,
- title: str,
- parent: Optional[str] = None,
- status: IssueStatus = IssueStatus.OPEN,
+ issues_root: Path,
+ issue_type: IssueType,
+ title: str,
+ parent: Optional[str] = None,
+ status: IssueStatus = IssueStatus.OPEN,
  stage: Optional[IssueStage] = None,
- dependencies: List[str] = [],
- related: List[str] = [],
+ dependencies: List[str] = [],
+ related: List[str] = [],
  subdir: Optional[str] = None,
  sprint: Optional[str] = None,
- tags: List[str] = []
+ tags: List[str] = [],
  ) -> Tuple[IssueMetadata, Path]:
-
  # Validation
  for dep_id in dependencies:
  if not find_issue_path(issues_root, dep_id):
  raise ValueError(f"Dependency issue {dep_id} not found.")
-
+
  for rel_id in related:
  if not find_issue_path(issues_root, rel_id):
  raise ValueError(f"Related issue {rel_id} not found.")
@@ -126,32 +143,32 @@ def create_issue_file(
  issue_id = find_next_id(issue_type, issues_root)
  base_type_dir = get_issue_dir(issue_type, issues_root)
  target_dir = base_type_dir / status
-
+
  if subdir:
  target_dir = target_dir / subdir
-
+
  target_dir.mkdir(parents=True, exist_ok=True)
-
+
  # Auto-Populate Tags with required IDs (Requirement: Maintain tags field with parent/deps/related/self IDs)
  # Ensure they are prefixed with '#' for tagging convention if not present (usually tags are just strings, but user asked for #ID)
  auto_tags = set(tags) if tags else set()
-
+
  # 1. Add Parent
  if parent:
  auto_tags.add(f"#{parent}")
-
+
  # 2. Add Dependencies
  for dep in dependencies:
  auto_tags.add(f"#{dep}")
-
+
  # 3. Add Related
  for rel in related:
  auto_tags.add(f"#{rel}")
-
+
  # 4. Add Self (as per instruction "auto add this issue... number")
  # Note: issue_id is generated just above
  auto_tags.add(f"#{issue_id}")
-
+
  final_tags = sorted(list(auto_tags))

  metadata = IssueMetadata(
@@ -166,17 +183,24 @@ def create_issue_file(
  related=related,
  sprint=sprint,
  tags=final_tags,
- opened_at=current_time() if status == IssueStatus.OPEN else None
+ opened_at=current_time() if status == IssueStatus.OPEN else None,
  )
-
+
  # Enforce lifecycle policies (defaults, auto-corrections)
  from .engine import get_engine
+
  get_engine().enforce_policy(metadata)

  # Serialize metadata
  # Explicitly exclude actions and path from file persistence
- yaml_header = yaml.dump(metadata.model_dump(exclude_none=True, mode='json', exclude={'actions', 'path'}), sort_keys=False, allow_unicode=True)
-
+ yaml_header = yaml.dump(
+ metadata.model_dump(
+ exclude_none=True, mode="json", exclude={"actions", "path"}
+ ),
+ sort_keys=False,
+ allow_unicode=True,
+ )
+
  # Inject Self-Documenting Hints (Interactive Frontmatter)
  if "parent:" not in yaml_header:
  yaml_header += "# parent: <EPIC-ID> # Optional: Parent Issue ID\n"
@@ -192,7 +216,7 @@ def create_issue_file(

  slug = _get_slug(title)
  filename = f"{issue_id}-{slug}.md"
-
+
  # Enhanced Template with Instructional Comments
  file_content = f"""---
  {yaml_header}---
@@ -224,33 +248,40 @@ def create_issue_file(
  """
  file_path = target_dir / filename
  file_path.write_text(file_content)
-
+
  # Inject path into returned metadata
  metadata.path = str(file_path.absolute())
-
+
  return metadata, file_path
+
+
  def get_available_actions(meta: IssueMetadata) -> List[Any]:
  from .models import IssueAction
  from .engine import get_engine
-
+
  engine = get_engine()
  transitions = engine.get_available_transitions(meta)
-
+
  actions = []
  for t in transitions:
  command = t.command_template.format(id=meta.id) if t.command_template else ""
-
- actions.append(IssueAction(
- label=t.label,
- icon=t.icon,
- target_status=t.to_status if t.to_status != meta.status or t.to_stage != meta.stage else None,
- target_stage=t.to_stage if t.to_stage != meta.stage else None,
- target_solution=t.required_solution,
- command=command
- ))
+
+ actions.append(
+ IssueAction(
+ label=t.label,
+ icon=t.icon,
+ target_status=t.to_status
+ if t.to_status != meta.status or t.to_stage != meta.stage
+ else None,
+ target_stage=t.to_stage if t.to_stage != meta.stage else None,
+ target_solution=t.required_solution,
+ command=command,
+ )
+ )

  return actions

+
  def find_issue_path(issues_root: Path, issue_id: str) -> Optional[Path]:
  parsed = IssueID(issue_id)

@@ -261,21 +292,21 @@ def find_issue_path(issues_root: Path, issue_id: str) -> Optional[Path]:
  # Resolve Workspace
  # Traverse up from issues_root to find a config that defines the namespace
  project_root = issues_root.parent
-
+
  # Try current root first
  conf = MonocoConfig.load(str(project_root))
  member_rel_path = conf.project.members.get(parsed.namespace)
-
+
  if not member_rel_path:
  return None
-
+
  member_root = (project_root / member_rel_path).resolve()
  # Assume standard "Issues" directory for members to avoid loading full config
  member_issues = member_root / "Issues"
-
+
  if not member_issues.exists():
- return None
-
+ return None
+
  # Recursively search in member project
  return find_issue_path(member_issues, parsed.local_id)

@@ -284,23 +315,24 @@ def find_issue_path(issues_root: Path, issue_id: str) -> Optional[Path]:
  prefix = parsed.local_id.split("-")[0].upper()
  except IndexError:
  return None
-
+
  reverse_prefix_map = get_reverse_prefix_map(issues_root)
  issue_type = reverse_prefix_map.get(prefix)
  if not issue_type:
  return None
-
+
  base_dir = get_issue_dir(issue_type, issues_root)
  # Search in all status subdirs recursively
  for f in base_dir.rglob(f"{parsed.local_id}-*.md"):
  return f
  return None

+
  def update_issue(
- issues_root: Path,
- issue_id: str,
- status: Optional[IssueStatus] = None,
- stage: Optional[IssueStage] = None,
+ issues_root: Path,
+ issue_id: str,
+ status: Optional[IssueStatus] = None,
+ stage: Optional[IssueStage] = None,
  solution: Optional[IssueSolution] = None,
  title: Optional[str] = None,
  parent: Optional[str] = None,
@@ -308,15 +340,15 @@ def update_issue(
  dependencies: Optional[List[str]] = None,
  related: Optional[List[str]] = None,
  tags: Optional[List[str]] = None,
- files: Optional[List[str]] = None
+ files: Optional[List[str]] = None,
  ) -> IssueMetadata:
  path = find_issue_path(issues_root, issue_id)
  if not path:
  raise FileNotFoundError(f"Issue {issue_id} not found.")
-
+
  # Read full content
  content = path.read_text()
-
+
  # Split Frontmatter and Body
  match = re.search(r"^---(.*?)---\n(.*)\n", content, re.DOTALL | re.MULTILINE)
  if not match:
@@ -324,7 +356,7 @@ def update_issue(
  match_simple = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
  if match_simple:
  yaml_str = match_simple.group(1)
- body = content[match_simple.end():]
+ body = content[match_simple.end() :]
  else:
  raise ValueError(f"Could not parse frontmatter for {issue_id}")
  else:
@@ -336,19 +368,19 @@ def update_issue(
  except yaml.YAMLError:
  raise ValueError(f"Invalid YAML metadata in {issue_id}")

- current_status_str = data.get("status", "open") # default to open if missing?
+ current_status_str = data.get("status", "open")  # default to open if missing?
  # Normalize current status to Enum for comparison
  try:
  current_status = IssueStatus(current_status_str.lower())
  except ValueError:
  current_status = IssueStatus.OPEN
-
+
  current_stage_str = data.get("stage")
  current_stage = IssueStage(current_stage_str.lower()) if current_stage_str else None

  # Logic: Status Update
  target_status = status if status else current_status
-
+
  # If status is changing, we don't default target_stage to current_stage
  # because the new status might have different allowed stages.
  # enforce_policy will handle setting the correct default stage for the new status.
@@ -356,11 +388,12 @@ def update_issue(
  target_stage = stage
  else:
  target_stage = stage if stage else current_stage
-
+
  # Engine Validation
  from .engine import get_engine
+
  engine = get_engine()
-
+
  # Map solution string to enum if present
  effective_solution = solution
  if not effective_solution and data.get("solution"):
@@ -375,89 +408,94 @@ def update_issue(
  from_stage=current_stage,
  to_status=target_status,
  to_stage=target_stage,
- solution=effective_solution
+ solution=effective_solution,
  )

  if target_status == "closed":
  # Policy: Dependencies must be closed
- dependencies_to_check = dependencies if dependencies is not None else data.get('dependencies', [])
+ dependencies_to_check = (
+ dependencies if dependencies is not None else data.get("dependencies", [])
+ )
  if dependencies_to_check:
  for dep_id in dependencies_to_check:
  dep_path = find_issue_path(issues_root, dep_id)
  if dep_path:
  dep_meta = parse_issue(dep_path)
  if dep_meta and dep_meta.status != "closed":
- raise ValueError(f"Dependency Block: Cannot close {issue_id} because dependency {dep_id} is [Status: {dep_meta.status}].")
-
+ raise ValueError(
+ f"Dependency Block: Cannot close {issue_id} because dependency {dep_id} is [Status: {dep_meta.status}]."
+ )
+
  # Validate new parent/dependencies/related exist
  if parent is not None and parent != "":
  if not find_issue_path(issues_root, parent):
  raise ValueError(f"Parent issue {parent} not found.")
-
+
  if dependencies is not None:
  for dep_id in dependencies:
  if not find_issue_path(issues_root, dep_id):
  raise ValueError(f"Dependency issue {dep_id} not found.")
-
+
  if related is not None:
  for rel_id in related:
  if not find_issue_path(issues_root, rel_id):
  raise ValueError(f"Related issue {rel_id} not found.")
-
+
  # Update Data
  if status:
- data['status'] = status
+ data["status"] = status

  if stage:
- data['stage'] = stage
+ data["stage"] = stage
  if solution:
- data['solution'] = solution
-
+ data["solution"] = solution
+
  if title:
- data['title'] = title
+ data["title"] = title

  if parent is not None:
  if parent == "":
- data.pop('parent', None) # Remove parent field
+ data.pop("parent", None)  # Remove parent field
  else:
- data['parent'] = parent
+ data["parent"] = parent

  if sprint is not None:
- data['sprint'] = sprint
-
+ data["sprint"] = sprint
+
  if dependencies is not None:
- data['dependencies'] = dependencies
-
+ data["dependencies"] = dependencies
+
  if related is not None:
- data['related'] = related
-
+ data["related"] = related
+
  if tags is not None:
- data['tags'] = tags
+ data["tags"] = tags

  if files is not None:
- data['files'] = files
-
+ data["files"] = files
+
  # Lifecycle Hooks
  # 1. Opened At: If transitioning to OPEN
  if target_status == IssueStatus.OPEN and current_status != IssueStatus.OPEN:
  # Only set if not already set? Or always reset?
  # Let's set it if not present, or update it to reflect "Latest activation"
  # FEAT-0012 says: "update opened_at to now"
- data['opened_at'] = current_time()
-
+ data["opened_at"] = current_time()
+
  # 2. Backlog Push: Handled by IssueMetadata.validate_lifecycle (Status=Backlog -> Stage=None)
  # 3. Closed: Handled by IssueMetadata.validate_lifecycle (Status=Closed -> Stage=Done, ClosedAt=Now)

  # Touch updated_at
- data['updated_at'] = current_time()
-
+ data["updated_at"] = current_time()
+
  # Re-hydrate through Model
  try:
  updated_meta = IssueMetadata(**data)
-
+
  # Enforce lifecycle policies (defaults, auto-corrections)
  # This ensures that when we update, we also fix invalid states (like Closed but not Done)
  from .engine import get_engine
+
  get_engine().enforce_policy(updated_meta)

  # Delegate to IssueValidator for static state validation
@@ -468,59 +506,72 @@ def update_issue(
  diagnostics = validator.validate(updated_meta, body)
  errors = [d for d in diagnostics if d.severity == DiagnosticSeverity.Error]
  if errors:
- raise ValueError(f"Validation Failed: {errors[0].message}")
+ raise ValueError(f"Validation Failed: {errors[0].message}")

  except Exception as e:
  raise ValueError(f"Failed to validate updated metadata: {e}")
-
+
  # Serialize back
  # Explicitly exclude actions and path from file persistence
- new_yaml = yaml.dump(updated_meta.model_dump(exclude_none=True, mode='json', exclude={'actions', 'path'}), sort_keys=False, allow_unicode=True)
-
+ new_yaml = yaml.dump(
+ updated_meta.model_dump(
+ exclude_none=True, mode="json", exclude={"actions", "path"}
+ ),
+ sort_keys=False,
+ allow_unicode=True,
+ )
+
  # Reconstruct File
  match_header = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
  if not match_header:
  body_content = body
  else:
- body_content = content[match_header.end():]
-
- if body_content.startswith('\n'):
- body_content = body_content[1:]
-
+ body_content = content[match_header.end() :]
+
+ if body_content.startswith("\n"):
+ body_content = body_content[1:]
+
  new_content = f"---\n{new_yaml}---\n{body_content}"
-
+
  path.write_text(new_content)
-
+
  # 3. Handle physical move if status changed
  if status and status != current_status:
  # Move file
  prefix = issue_id.split("-")[0].upper()
  reverse_prefix_map = get_reverse_prefix_map(issues_root)
  base_type_dir = get_issue_dir(reverse_prefix_map[prefix], issues_root)
-
+
  try:
  rel_path = path.relative_to(base_type_dir)
- structure_path = Path(*rel_path.parts[1:]) if len(rel_path.parts) > 1 else Path(path.name)
+ structure_path = (
+ Path(*rel_path.parts[1:])
+ if len(rel_path.parts) > 1
+ else Path(path.name)
+ )
  except ValueError:
  structure_path = Path(path.name)

  target_path = base_type_dir / target_status / structure_path
-
+
  if path != target_path:
  target_path.parent.mkdir(parents=True, exist_ok=True)
  path.rename(target_path)
- path = target_path # Update local path variable for returned meta
+ path = target_path  # Update local path variable for returned meta

  # Hook: Recursive Aggregation (FEAT-0003)
  if updated_meta.parent:
  recalculate_parent(issues_root, updated_meta.parent)
-
+
  # Update returned metadata with final absolute path
  updated_meta.path = str(path.absolute())
  updated_meta.actions = get_available_actions(updated_meta)
  return updated_meta

- def start_issue_isolation(issues_root: Path, issue_id: str, mode: str, project_root: Path) -> IssueMetadata:
+
+ def start_issue_isolation(
+ issues_root: Path, issue_id: str, mode: str, project_root: Path
+ ) -> IssueMetadata:
  """
  Start physical isolation for an issue (Branch or Worktree).
  """
@@ -531,48 +582,52 @@ def start_issue_isolation(issues_root: Path, issue_id: str, mode: str, project_r
  issue = parse_issue(path)
  if not issue:
  raise ValueError(f"Could not parse metadata for issue {issue_id}")
-
+
  # Idempotency / Conflict Check
  if issue.isolation:
  if issue.isolation.type == mode:
- # Already isolated in same mode, maybe just switch context?
- # For now, we just warn or return.
- # If branch exists, we make sure it's checked out in CLI layer maybe?
- # But here we assume we want to setup metadata.
- pass
+ # Already isolated in same mode, maybe just switch context?
+ # For now, we just warn or return.
+ # If branch exists, we make sure it's checked out in CLI layer maybe?
+ # But here we assume we want to setup metadata.
+ pass
  else:
- raise ValueError(f"Issue {issue_id} is already isolated as '{issue.isolation.type}'. Please cleanup first.")
+ raise ValueError(
+ f"Issue {issue_id} is already isolated as '{issue.isolation.type}'. Please cleanup first."
+ )

  slug = _get_slug(issue.title)
- branch_name = f"feat/{issue_id.lower()}-{slug}"
-
+ branch_name = f"feat/{issue_id.lower()}-{slug}"
+
  isolation_meta = None

  if mode == "branch":
  if not git.branch_exists(project_root, branch_name):
  git.create_branch(project_root, branch_name, checkout=True)
  else:
- # Check if we are already on it?
+ # Check if we are already on it?
  # If not, checkout.
  current = git.get_current_branch(project_root)
  if current != branch_name:
  git.checkout_branch(project_root, branch_name)
-
+
  isolation_meta = IssueIsolation(type="branch", ref=branch_name)

  elif mode == "worktree":
  wt_path = project_root / ".monoco" / "worktrees" / f"{issue_id.lower()}-{slug}"
-
+
  # Check if worktree exists physically
  if wt_path.exists():
- # Check if valid git worktree?
- pass
+ # Check if valid git worktree?
+ pass
  else:
- wt_path.parent.mkdir(parents=True, exist_ok=True)
- git.worktree_add(project_root, branch_name, wt_path)
-
- isolation_meta = IssueIsolation(type="worktree", ref=branch_name, path=str(wt_path))
-
+ wt_path.parent.mkdir(parents=True, exist_ok=True)
+ git.worktree_add(project_root, branch_name, wt_path)
+
+ isolation_meta = IssueIsolation(
+ type="worktree", ref=branch_name, path=str(wt_path)
+ )
+
  # Persist Metadata
  # We load raw, update isolation field, save.
  content = path.read_text()
@@ -580,21 +635,24 @@ def start_issue_isolation(issues_root: Path, issue_id: str, mode: str, project_r
  if match:
  yaml_str = match.group(1)
  data = yaml.safe_load(yaml_str) or {}
-
- data['isolation'] = isolation_meta.model_dump(mode='json')
+
+ data["isolation"] = isolation_meta.model_dump(mode="json")
  # Also ensure stage is DOING (logic link)
- data['stage'] = "doing"
- data['updated_at'] = current_time()
+ data["stage"] = "doing"
+ data["updated_at"] = current_time()

  new_yaml = yaml.dump(data, sort_keys=False, allow_unicode=True)
  new_content = content.replace(match.group(1), "\n" + new_yaml)
  path.write_text(new_content)
-
+
  return IssueMetadata(**data)
-
+
  return issue

- def prune_issue_resources(issues_root: Path, issue_id: str, force: bool, project_root: Path) -> List[str]:
+
+ def prune_issue_resources(
+ issues_root: Path, issue_id: str, force: bool, project_root: Path
+ ) -> List[str]:
  """
  Cleanup physical resources. Returns list of actions taken.
  """
@@ -604,11 +662,11 @@ def prune_issue_resources(issues_root: Path, issue_id: str, force: bool, project
  # If we can't find issue, we can't read metadata to know what to prune.
  # We rely on CLI to pass context or we fail.
  raise FileNotFoundError(f"Issue {issue_id} not found.")
-
+
  issue = parse_issue(path)
  if not issue:
  raise ValueError(f"Could not parse metadata for issue {issue_id}")
-
+
  deleted_items = []

  if not issue.isolation:
@@ -618,12 +676,14 @@ def prune_issue_resources(issues_root: Path, issue_id: str, force: bool, project
  branch = issue.isolation.ref
  current = git.get_current_branch(project_root)
  if current == branch:
- raise RuntimeError(f"Cannot delete active branch '{branch}'. Please checkout 'main' first.")
-
+ raise RuntimeError(
+ f"Cannot delete active branch '{branch}'. Please checkout 'main' first."
+ )
+
  if git.branch_exists(project_root, branch):
  git.delete_branch(project_root, branch, force=force)
  deleted_items.append(f"branch:{branch}")
-
+
  elif issue.isolation.type == IsolationType.WORKTREE:
  wt_path_str = issue.isolation.path
  if wt_path_str:
@@ -631,20 +691,20 @@ def prune_issue_resources(issues_root: Path, issue_id: str, force: bool, project
  # Normalize path if relative
  if not wt_path.is_absolute():
  wt_path = project_root / wt_path
-
+
  if wt_path.exists():
  git.worktree_remove(project_root, wt_path, force=force)
  deleted_items.append(f"worktree:{wt_path.name}")
-
- # Also delete the branch associated?
+
+ # Also delete the branch associated?
  # Worktree create makes a branch. When removing worktree, branch remains.
  # Usually we want to remove the branch too if it was created for this issue.
  branch = issue.isolation.ref
  if branch and git.branch_exists(project_root, branch):
- # We can't delete branch if it is checked out in the worktree we just removed?
- # git worktree remove unlocks the branch.
- git.delete_branch(project_root, branch, force=force)
- deleted_items.append(f"branch:{branch}")
+ # We can't delete branch if it is checked out in the worktree we just removed?
+ # git worktree remove unlocks the branch.
+ git.delete_branch(project_root, branch, force=force)
+ deleted_items.append(f"branch:{branch}")

  # Clear Metadata
  content = path.read_text()
@@ -652,17 +712,16 @@ def prune_issue_resources(issues_root: Path, issue_id: str, force: bool, project
  if match:
  yaml_str = match.group(1)
  data = yaml.safe_load(yaml_str) or {}
-
- if 'isolation' in data:
- del data['isolation']
- data['updated_at'] = current_time()
-
+
+ if "isolation" in data:
+ del data["isolation"]
+ data["updated_at"] = current_time()
+
  new_yaml = yaml.dump(data, sort_keys=False, allow_unicode=True)
  new_content = content.replace(match.group(1), "\n" + new_yaml)
  path.write_text(new_content)
-
- return deleted_items

+ return deleted_items


  def delete_issue_file(issues_root: Path, issue_id: str):
@@ -672,9 +731,10 @@ def delete_issue_file(issues_root: Path, issue_id: str):
  path = find_issue_path(issues_root, issue_id)
  if not path:
  raise FileNotFoundError(f"Issue {issue_id} not found.")
-
+
  path.unlink()

+
  def sync_issue_files(issues_root: Path, issue_id: str, project_root: Path) -> List[str]:
  """
  Sync 'files' field in issue metadata with actual changed files in git.
@@ -682,20 +742,20 @@ def sync_issue_files(issues_root: Path, issue_id: str, project_root: Path) -> Li
  1. Isolation Ref: If issue has isolation (branch/worktree), use that ref.
  2. Convention: If no isolation, look for branch `*/<id>-*`.
  3. Current Branch: If current branch matches pattern.
-
+
  Compares against default branch (usually 'main' or 'master').
  """
  path = find_issue_path(issues_root, issue_id)
  if not path:
  raise FileNotFoundError(f"Issue {issue_id} not found.")
-
+
  issue = parse_issue(path)
  if not issue:
  raise ValueError(f"Could not parse issue {issue_id}")

  # Determine Target Branch
  target_ref = None
-
+
  if issue.isolation and issue.isolation.ref:
  target_ref = issue.isolation.ref
  else:
@@ -706,46 +766,49 @@ def sync_issue_files(issues_root: Path, issue_id: str, project_root: Path) -> Li
  target_ref = current
  else:
  # 2. Search for branch
- # Limitation: core.git doesn't list all branches yet.
+ # Limitation: core.git doesn't list all branches yet.
  # We skip this for now to avoid complexity, relying on isolation or current context.
  pass

  if not target_ref:
- raise RuntimeError(f"Could not determine git branch for Issue {issue_id}. Please ensure issue is started or you are on the feature branch.")
+ raise RuntimeError(
+ f"Could not determine git branch for Issue {issue_id}. Please ensure issue is started or you are on the feature branch."
+ )

  # Determine Base Branch (assume main, or config?)
  # For now hardcode main, eventually read from config
- base_ref = "main"
-
+ base_ref = "main"
+
  # Check if base exists, if not try master
  if not git.branch_exists(project_root, base_ref):
  if git.branch_exists(project_root, "master"):
  base_ref = "master"
  else:
- # Fallback: remote/main?
- pass
+ # Fallback: remote/main?
+ pass

  # Git Diff
  # git diff --name-only base...target
  cmd = ["diff", "--name-only", f"{base_ref}...{target_ref}"]
  code, stdout, stderr = git._run_git(cmd, project_root)
-
+
  if code != 0:
  raise RuntimeError(f"Git diff failed: {stderr}")
-
+
  changed_files = [f.strip() for f in stdout.splitlines() if f.strip()]
-
+
  # Sort for consistency
  changed_files.sort()
-
+
  # Update Issue
  # Only update if changed
  if changed_files != issue.files:
  update_issue(issues_root, issue_id, files=changed_files)
  return changed_files
-
+
  return []
-
+
+
  # Resources
  SKILL_CONTENT = """
  ---
@@ -779,7 +842,7 @@ description: Monoco Issue System 的官方技能定义。将 Issue 视为通用
  - **Status Level (Lowercase)**: `open`, `backlog`, `closed`

  ### 路径流转
- 使用 `monoco issue`:
+ 使用 `monoco issue`:
  1. **Create**: `monoco issue create <type> --title "..."`
  2. **Transition**: `monoco issue open/close/backlog <id>`
  3. **View**: `monoco issue scope`
@@ -791,34 +854,35 @@ description: Monoco Issue System 的官方技能定义。将 Issue 视为通用
  def init(issues_root: Path):
  """Initialize the Issues directory structure."""
  issues_root.mkdir(parents=True, exist_ok=True)
-
+
  # Standard Directories based on new Terminology
  for subdir in ["Epics", "Features", "Chores", "Fixes"]:
  (issues_root / subdir).mkdir(exist_ok=True)
- # Create status subdirs? Usually handled by open/backlog,
+ # Create status subdirs? Usually handled by open/backlog,
  # but creating them initially is good for guidance.
  for status in ["open", "backlog", "closed"]:
  (issues_root / subdir / status).mkdir(exist_ok=True)
-
+
  # Create gitkeep to ensure they are tracked? Optional.

+
  def get_resources() -> Dict[str, Any]:
  return {
- "skills": {
- "issues-management": SKILL_CONTENT
- },
- "prompts": {} # Handled by adapter via resource files
+ "skills": {"issues-management": SKILL_CONTENT},
+ "prompts": {},  # Handled by adapter via resource files
  }


- def list_issues(issues_root: Path, recursive_workspace: bool = False) -> List[IssueMetadata]:
+ def list_issues(
+ issues_root: Path, recursive_workspace: bool = False
+ ) -> List[IssueMetadata]:
  """
  List all issues in the project.
  """
  issues = []
  engine = get_engine(str(issues_root.parent))
  all_types = engine.get_all_types()
-
+
  for issue_type in all_types:
  base_dir = get_issue_dir(issue_type, issues_root)
  for status_dir in ["open", "backlog", "closed"]:
@@ -835,11 +899,11 @@ def list_issues(issues_root: Path, recursive_workspace: bool = False) -> List[Is
  # weak assumption: issues_root.parent is project_root
  project_root = issues_root.parent
  conf = get_config(str(project_root))
-
+
  for name, rel_path in conf.project.members.items():
  member_root = (project_root / rel_path).resolve()
- member_issues_dir = member_root / "Issues" # Standard convention
-
+ member_issues_dir = member_root / "Issues"  # Standard convention
+
  if member_issues_dir.exists():
  # Fetch member issues (non-recursive to avoid loops)
  member_issues = list_issues(member_issues_dir, False)
@@ -848,16 +912,16 @@ def list_issues(issues_root: Path, recursive_workspace: bool = False) -> List[Is
  # CRITICAL: Also namespace references to keep parent-child structure intact
  if m.parent and "::" not in m.parent:
  m.parent = f"{name}::{m.parent}"
-
+
  if m.dependencies:
  m.dependencies = [
- f"{name}::{d}" if d and "::" not in d else d
+ f"{name}::{d}" if d and "::" not in d else d
  for d in m.dependencies
  ]
-
+
  if m.related:
  m.related = [
- f"{name}::{r}" if r and "::" not in r else r
+ f"{name}::{r}" if r and "::" not in r else r
  for r in m.related
  ]

@@ -869,17 +933,13 @@ def list_issues(issues_root: Path, recursive_workspace: bool = False) -> List[Is

  return issues

+
  def get_board_data(issues_root: Path) -> Dict[str, List[IssueMetadata]]:
  """
  Get open issues grouped by their stage for Kanban view.
  """
- board = {
- "draft": [],
- "doing": [],
- "review": [],
- "done": []
- }
-
+ board = {"draft": [], "doing": [], "review": [], "done": []}
+
  issues = list_issues(issues_root)
  for issue in issues:
  if issue.status == "open" and issue.stage:
@@ -889,28 +949,40 @@ def get_board_data(issues_root: Path) -> Dict[str, List[IssueMetadata]]:
  elif issue.status == "closed":
  # Optionally show recently closed items in DONE column
  board["done"].append(issue)
-
+
  return board

- def validate_issue_integrity(meta: IssueMetadata, all_issue_ids: Set[str] = set()) -> List[str]:
+
+ def validate_issue_integrity(
+ meta: IssueMetadata, all_issue_ids: Set[str] = set()
+ ) -> List[str]:
  """
  Validate metadata integrity (Solution, Lifecycle, etc.)
  UI-agnostic.
  """
  errors = []
  if meta.status == "closed" and not meta.solution:
- errors.append(f"Solution Missing: {meta.id} is closed but has no solution field.")
-
+ errors.append(
+ f"Solution Missing: {meta.id} is closed but has no solution field."
+ )
+
  if meta.parent:
  if all_issue_ids and meta.parent not in all_issue_ids:
- errors.append(f"Broken Link: {meta.id} refers to non-existent parent {meta.parent}.")
+ errors.append(
+ f"Broken Link: {meta.id} refers to non-existent parent {meta.parent}."
+ )

  if meta.status == "backlog" and meta.stage != "freezed":
- errors.append(f"Lifecycle Error: {meta.id} is backlog but stage is not freezed (found: {meta.stage}).")
-
+ errors.append(
+ f"Lifecycle Error: {meta.id} is backlog but stage is not freezed (found: {meta.stage})."
+ )
+
  return errors

- def update_issue_content(issues_root: Path, issue_id: str, new_content: str) -> IssueMetadata:
+
+ def update_issue_content(
+ issues_root: Path, issue_id: str, new_content: str
+ ) -> IssueMetadata:
  """
  Update the raw content of an issue file.
  Validates integrity before saving.
@@ -923,91 +995,100 @@ def update_issue_content(issues_root: Path, issue_id: str, new_content: str) ->
  # 1. Parse New Content (using temp file to reuse parse_issue logic)
  import tempfile
  import os
-
- with tempfile.NamedTemporaryFile(mode='w+', suffix='.md', delete=False) as tmp:
+
+ with tempfile.NamedTemporaryFile(mode="w+", suffix=".md", delete=False) as tmp:
  tmp.write(new_content)
  tmp_path = Path(tmp.name)
-
+
  try:
  meta = parse_issue(tmp_path)
  if not meta:
- raise ValueError("Invalid Issue Content: Frontmatter missing or invalid.")
-
+ raise ValueError("Invalid Issue Content: Frontmatter missing or invalid.")
+
  if meta.id != issue_id:
- raise ValueError(f"Cannot change Issue ID (Original: {issue_id}, New: {meta.id})")
+ raise ValueError(
+ f"Cannot change Issue ID (Original: {issue_id}, New: {meta.id})"
+ )

  # 2. Integrity Check
  errors = validate_issue_integrity(meta)
  if errors:
  raise ValueError(f"Validation Failed: {'; '.join(errors)}")
-
+
  # 3. Write and Move
  # We overwrite the *current* path first
  path.write_text(new_content)
-
+
  # Check if we need to move (Status Change)
  # We need to re-derive the expected path based on new status
  # Reuse logic from update_issue (simplified)
-
+
  prefix = issue_id.split("-")[0].upper()
  reverse_prefix_map = get_reverse_prefix_map(issues_root)
  base_type_dir = get_issue_dir(reverse_prefix_map[prefix], issues_root)
-
+
  # Calculate structure path (preserve subdir)
  try:
  rel_path = path.relative_to(base_type_dir)
  # Remove the first component (current status directory) which might be 'open', 'closed' etc.
  # But wait, find_issue_path found it. 'rel_path' includes status dir.
  # e.g. open/Backend/Auth/FEAT-123.md -> parts=('open', 'Backend', 'Auth', 'FEAT-123.md')
- structure_path = Path(*rel_path.parts[1:]) if len(rel_path.parts) > 1 else Path(path.name)
+ structure_path = (
+ Path(*rel_path.parts[1:])
+ if len(rel_path.parts) > 1
+ else Path(path.name)
+ )
  except ValueError:
  # Fallback if path is weird
  structure_path = Path(path.name)

  target_path = base_type_dir / meta.status / structure_path
-
+
  if path != target_path:
  target_path.parent.mkdir(parents=True, exist_ok=True)
  path.rename(target_path)
-
+
  return meta

  finally:
  if os.path.exists(tmp_path):
  os.unlink(tmp_path)

- def generate_delivery_report(issues_root: Path, issue_id: str, project_root: Path) -> IssueMetadata:
+
+ def generate_delivery_report(
+ issues_root: Path, issue_id: str, project_root: Path
+ ) -> IssueMetadata:
  """
  Scan git history for commits related to this issue (Ref: ID),
  aggregate touched files, and append/update '## Delivery' section in the issue body.
  """
  from monoco.core import git
-
+
  path = find_issue_path(issues_root, issue_id)
  if not path:
  raise FileNotFoundError(f"Issue {issue_id} not found.")

  # 1. Scan Git
  commits = git.search_commits_by_message(project_root, f"Ref: {issue_id}")
-
+
  if not commits:
  meta = parse_issue(path)
  if not meta:
  raise ValueError(f"Could not parse metadata for issue {issue_id}")
  return meta
-
+
  # 2. Aggregate Data
  all_files = set()
  commit_list_md = []
-
+
  for c in commits:
- short_hash = c['hash'][:7]
+ short_hash = c["hash"][:7]
  commit_list_md.append(f"- `{short_hash}` {c['subject']}")
- for f in c['files']:
+ for f in c["files"]:
  all_files.add(f)
-
+
  sorted_files = sorted(list(all_files))
-
+
  # 3. Format Report
  delivery_section = f"""
  ## Delivery
@@ -1020,18 +1101,18 @@ def generate_delivery_report(issues_root: Path, issue_id: str, project_root: Pat

  # 4. Update File Content
  content = path.read_text()
-
+
  # Check if Delivery section exists
  if "## Delivery" in content:
  # Replace existing section
  # We assume Delivery is the last section or we replace until end or next H2?
- # For simplicity, if ## Delivery exists, we regex replace it and everything after it
+ # For simplicity, if ## Delivery exists, we regex replace it and everything after it
  # OR we just replace the section block if we can identify it.
  # Let's assume it's at the end or we replace the specific block `## Delivery...`
  # But regex matching across newlines is tricky if we don't know where it ends.
  # Safe bet: If "## Delivery" exists, find it and replace everything after it?
  # Or look for "<!-- Monoco Auto Generated -->"
-
+
  pattern = r"## Delivery.*"
  # If we use DOTALL, it replaces everything until end of string?
  # Yes, usually Delivery report is appended at the end.
@@ -1041,25 +1122,27 @@ def generate_delivery_report(issues_root: Path, issue_id: str, project_root: Pat
  if not content.endswith("\n"):
  content += "\n"
  content += "\n" + delivery_section.strip() + "\n"
-
+
  path.write_text(content)
-
+
  # 5. Update Metadata (delivery stats)
  # We might want to store 'files_count' in metadata for the recursive aggregation (FEAT-0003)
  # But IssueMetadata doesn't have a 'delivery' dict field yet.
  # We can add it to 'extra' or extend the model later.
  # For now, just persisting the text is enough for FEAT-0002.
-
+
  meta = parse_issue(path)
  if not meta:
- raise ValueError(f"Could not parse metadata for issue {issue_id}")
+ raise ValueError(f"Could not parse metadata for issue {issue_id}")
  return meta

+
  def get_children(issues_root: Path, parent_id: str) -> List[IssueMetadata]:
  """Find all direct children of an issue."""
  all_issues = list_issues(issues_root)
  return [i for i in all_issues if i.parent == parent_id]

+
  def count_files_in_delivery(issue_path: Path) -> int:
  """Parse the ## Delivery section to count files."""
  try:
@@ -1071,6 +1154,7 @@ def count_files_in_delivery(issue_path: Path) -> int:
  pass
  return 0

+
  def parse_search_query(query: str) -> Tuple[List[str], List[str], List[str]]:
  """
  Parse a search query string into explicit positives, optional terms, and negatives.
@@ -1082,8 +1166,9 @@ def parse_search_query(query: str) -> Tuple[List[str], List[str], List[str]]:
  """
  if not query:
  return [], [], []
-
+
  import shlex
+
  try:
  tokens = shlex.split(query)
  except ValueError:
@@ -1093,7 +1178,7 @@ def parse_search_query(query: str) -> Tuple[List[str], List[str], List[str]]:
  explicit_positives = []
  terms = []
  negatives = []
-
+
  for token in tokens:
  token_lower = token.lower()
  if token_lower.startswith("-") and len(token_lower) > 1:
@@ -1102,10 +1187,17 @@ def parse_search_query(query: str) -> Tuple[List[str], List[str], List[str]]:
  explicit_positives.append(token_lower[1:])
  else:
  terms.append(token_lower)
-
+
  return explicit_positives, terms, negatives

- def check_issue_match(issue: IssueMetadata, explicit_positives: List[str], terms: List[str], negatives: List[str], full_content: str = "") -> bool:
+
+ def check_issue_match(
+ issue: IssueMetadata,
+ explicit_positives: List[str],
+ terms: List[str],
+ negatives: List[str],
+ full_content: str = "",
+ ) -> bool:
  """
  Check if an issue matches the search criteria.
  Consider fields: id, title, status, stage, type, tags, dependencies, related.
@@ -1122,64 +1214,65 @@ def check_issue_match(issue: IssueMetadata, explicit_positives: List[str], terms
  *(issue.tags or []),
  *(issue.dependencies or []),
  *(issue.related or []),
- full_content
+ full_content,
  ]
-
+
  # Normalize blob
  blob = " ".join(filter(None, searchable_parts)).lower()
-
+
  # 2. Check Negatives (Fast Fail)
  for term in negatives:
  if term in blob:
  return False
-
+
  # 3. Check Explicit Positives (Must match ALL)
  for term in explicit_positives:
  if term not in blob:
  return False
-
+
  # 4. Check Terms (Nice to Have)
  # If explicit_positives exist, terms are optional (implicit inclusion).
  # If NO explicit_positives, terms act as Implicit OR (must match at least one).
  if terms:
  if not explicit_positives:
- # Must match at least one term
- if not any(term in blob for term in terms):
- return False
-
+ # Must match at least one term
+ if not any(term in blob for term in terms):
+ return False
+
  return True

+
  def search_issues(issues_root: Path, query: str) -> List[IssueMetadata]:
  """
  Search issues using advanced query syntax.
  Returns list of matching IssueMetadata.
  """
  explicit_positives, terms, negatives = parse_search_query(query)
-
+
  # Optimization: If no query, return empty? Or all?
  # Usually search implies input. CLI `list` is for all.
  # But if query is empty string, we return all?
- # Let's align with "grep": empty pattern matches everything?
+ # Let's align with "grep": empty pattern matches everything?
  # Or strict: empty query -> all.
  if not explicit_positives and not terms and not negatives:
- return list_issues(issues_root)
+ return list_issues(issues_root)

  matches = []
  all_files = []
-
+
  # 1. Gather all files first (we need to read content for deep search)
  # Using list_issues is inefficient if we need body content, as list_issues only parses frontmatter (usually).
  # But parse_issue uses `IssueMetadata` which ignores body.
  # We need a robust way. `parse_issue` reads full text but discards body in current implementation?
  # Wait, `parse_issue` in core.py *only* reads frontmatter via `yaml.safe_load(match.group(1))`.
  # It does NOT return body.
-
+
  # To support deep search (Body), we need to read files.
  # Let's iterate files directly.
-
+
  engine = get_engine(str(issues_root.parent))
  all_types = engine.get_all_types()
-
+
  for issue_type in all_types:
  base_dir = get_issue_dir(issue_type, issues_root)
  for status_dir in ["open", "backlog", "closed"]:
@@ -1196,23 +1289,26 @@ def search_issues(issues_root: Path, query: str) -> List[IssueMetadata]:
  match = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
  if not match:
  continue
-
+
  yaml_str = match.group(1)
  data = yaml.safe_load(yaml_str)
  if not isinstance(data, dict):
- continue
-
+ continue
+
  meta = IssueMetadata(**data)
-
+
  # Match
- if check_issue_match(meta, explicit_positives, terms, negatives, full_content=content):
+ if check_issue_match(
+ meta, explicit_positives, terms, negatives, full_content=content
+ ):
  matches.append(meta)
-
+
  except Exception:
  continue
-
+
  return matches

+
  def recalculate_parent(issues_root: Path, parent_id: str):
  """
  Update parent Epic/Feature stats based on children.
@@ -1221,8 +1317,8 @@ def recalculate_parent(issues_root: Path, parent_id: str):
  """
  parent_path = find_issue_path(issues_root, parent_id)
  if not parent_path:
- return # Should we warn?
-
+ return  # Should we warn?
+
  children = get_children(issues_root, parent_id)
  if not children:
  return
@@ -1231,7 +1327,7 @@ def recalculate_parent(issues_root: Path, parent_id: str):
  closed = len([c for c in children if c.status == "closed"])
  # Progress string: "3/5"
  progress_str = f"{closed}/{total}"
-
+
  # Files count
  total_files = 0
  for child in children:
@@ -1241,68 +1337,71 @@ def recalculate_parent(issues_root: Path, parent_id: str):

  # Update Parent # We need to reuse update logic but without validation/status change
  # Just generic metadata update.
- # update_issue is too heavy/strict.
- # Let's implement a lighter `patch_metadata` helper or reuse logic.
-
+ # update_issue is too heavy/strict.
+ # Let's implement a lighter `patch_metadata` helper or reuse logic.
+
  content = parent_path.read_text()
  match = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
  if match:
  yaml_str = match.group(1)
  data = yaml.safe_load(yaml_str) or {}
-
+
  # Check if changed to avoid churn
  old_progress = data.get("progress")
  old_files = data.get("files_count")
-
+
  if old_progress == progress_str and old_files == total_files:
  return
-
+
  data["progress"] = progress_str
  data["files_count"] = total_files
-
+
  # Also maybe update status?
  # FEAT-0003 Req: "If first child starts doing, auto-start Parent?"
  # If parent is OPEN/TODO and child is DOING/REVIEW/DONE, set parent to DOING?
  current_status = data.get("status", "open").lower()
  current_stage = data.get("stage", "draft").lower()
-
+
  if current_status == "open" and current_stage == "draft":
  # Check if any child is active
- active_children = [c for c in children if c.status == "open" and c.stage != "draft"]
+ active_children = [
+ c for c in children if c.status == "open" and c.stage != "draft"
+ ]
  closed_children = [c for c in children if c.status == "closed"]
-
+
  if active_children or closed_children:
  data["stage"] = "doing"
-
+
  # Serialize
  new_yaml = yaml.dump(data, sort_keys=False, allow_unicode=True)
  # Replace header
  new_content = content.replace(match.group(1), "\n" + new_yaml)
  parent_path.write_text(new_content)
-
+
  # Recurse up?
  parent_parent = data.get("parent")
  if parent_parent:
  recalculate_parent(issues_root, parent_parent)

+
  def move_issue(
  source_issues_root: Path,
  issue_id: str,
  target_issues_root: Path,
- renumber: bool = False
+ renumber: bool = False,
  ) -> Tuple[IssueMetadata, Path]:
  """
  Move an issue from one project to another.
-
+
  Args:
  source_issues_root: Source project's Issues directory
  issue_id: ID of the issue to move
  target_issues_root: Target project's Issues directory
  renumber: If True, automatically renumber on ID conflict
-
+
  Returns:
  Tuple of (updated metadata, new file path)
-
+
  Raises:
  FileNotFoundError: If source issue doesn't exist
  ValueError: If ID conflict exists and renumber=False
@@ -1311,26 +1410,26 @@ def move_issue(
  source_path = find_issue_path(source_issues_root, issue_id)
  if not source_path:
  raise FileNotFoundError(f"Issue {issue_id} not found in source project.")
-
+
  # 2. Parse issue metadata
  issue = parse_issue_detail(source_path)
  if not issue:
  raise ValueError(f"Failed to parse issue {issue_id}.")
-
+
  # 3. Check for ID conflict in target
  target_conflict_path = find_issue_path(target_issues_root, issue_id)
-
+
  if target_conflict_path:
  # Conflict detected
  conflict_issue = parse_issue(target_conflict_path)
-
+
  # Check if it's the same issue (same UID)
  if issue.uid and conflict_issue and conflict_issue.uid == issue.uid:
  raise ValueError(
  f"Issue {issue_id} (uid: {issue.uid}) already exists in target project. "
  "This appears to be a duplicate."
  )
-
+
  # Different issues with same ID
  if not renumber:
  conflict_info = ""
@@ -1340,7 +1439,7 @@ def move_issue(
  f"ID conflict: Target project already has {issue_id}{conflict_info}.\n"
  f"Use --renumber to automatically assign a new ID."
  )
-
+
  # Auto-renumber
  new_id = find_next_id(issue.type, target_issues_root)
  old_id = issue.id
@@ -1348,29 +1447,37 @@ def move_issue(
  else:
  new_id = issue.id
  old_id = issue.id
-
+
  # 4. Construct target path
  target_type_dir = get_issue_dir(issue.type, target_issues_root)
  target_status_dir = target_type_dir / issue.status
-
+
  # Preserve subdirectory structure if any
  try:
  source_type_dir = get_issue_dir(issue.type, source_issues_root)
  rel_path = source_path.relative_to(source_type_dir)
  # Remove status directory component
- structure_path = Path(*rel_path.parts[1:]) if len(rel_path.parts) > 1 else Path(source_path.name)
+ structure_path = (
+ Path(*rel_path.parts[1:])
+ if len(rel_path.parts) > 1
+ else Path(source_path.name)
+ )
  except ValueError:
  structure_path = Path(source_path.name)
-
+
  # Update filename if ID changed
  if new_id != old_id:
  old_filename = source_path.name
  new_filename = old_filename.replace(old_id, new_id, 1)
- structure_path = structure_path.parent / new_filename if structure_path.parent != Path('.') else Path(new_filename)
-
+ structure_path = (
+ structure_path.parent / new_filename
+ if structure_path.parent != Path(".")
+ else Path(new_filename)
+ )
+
  target_path = target_status_dir / structure_path
  target_path.parent.mkdir(parents=True, exist_ok=True)
-
+
  # 5. Update content if ID changed
  if new_id != old_id:
  # Update frontmatter
@@ -1379,30 +1486,30 @@ def move_issue(
  if match:
  yaml_str = match.group(1)
  data = yaml.safe_load(yaml_str) or {}
- data['id'] = new_id
- data['updated_at'] = current_time()
-
+ data["id"] = new_id
+ data["updated_at"] = current_time()
+
  new_yaml = yaml.dump(data, sort_keys=False, allow_unicode=True)
-
+
  # Update body (replace old ID in heading)
- body = content[match.end():]
+ body = content[match.end() :]
  body = body.replace(f"## {old_id}:", f"## {new_id}:", 1)
-
+
  new_content = f"---\n{new_yaml}---{body}"
  else:
  new_content = issue.raw_content or ""
  else:
  new_content = issue.raw_content or ""
-
+
  # 6. Write to target
  target_path.write_text(new_content)
-
+
  # 7. Remove source
  source_path.unlink()
-
+
  # 8. Return updated metadata
  final_meta = parse_issue(target_path)
  if not final_meta:
  raise ValueError(f"Failed to parse moved issue at {target_path}")
-
+
  return final_meta, target_path
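
Note on the diff above: the bulk of the changes to monoco/features/issue/core.py are formatter normalization (double-quoted strings, trailing commas, wrapped long calls, two blank lines between top-level definitions, and a parenthesized import block); the helper signatures themselves are unchanged. Below is a minimal usage sketch of those helpers. It assumes only the signatures visible in this diff; the enum member names (IssueType.FEATURE, IssueStatus.CLOSED), the models import path, and the "MyProject/Issues" layout are illustrative assumptions, not confirmed by the diff.

from pathlib import Path

from monoco.features.issue.core import create_issue_file, update_issue
from monoco.features.issue.models import IssueStatus, IssueType  # assumed module path

issues_root = Path("MyProject/Issues")  # hypothetical project layout

# Create an open feature issue. Per the code above, tags are auto-populated
# with "#<id>" entries for the parent, dependencies, related issues, and the
# new issue itself, and the file is written under <TypeDir>/open/.
meta, path = create_issue_file(
    issues_root,
    issue_type=IssueType.FEATURE,  # assumed member name; folders map to Epics/Features/Chores/Fixes
    title="Add OAuth login",
)

# Close it. update_issue raises a "Dependency Block" error if any dependency
# is still open, rewrites the YAML frontmatter, and physically moves the file
# into the closed/ subdirectory.
closed_meta = update_issue(issues_root, meta.id, status=IssueStatus.CLOSED)  # assumed member name

The search helpers encode a small query grammar: shlex tokenization, "-term" excludes, bare terms act as an implicit OR, and explicit positives must all match. The branch that collects explicit positives falls between the two search hunks shown above, so the "+" prefix used here is inferred rather than confirmed. A sketch under that assumption:

from monoco.features.issue.core import parse_search_query

positives, terms, negatives = parse_search_query('+auth login "rate limit" -closed')
# positives == ["auth"]             -> every one must appear in the issue blob
# terms == ["login", "rate limit"]  -> optional here, since a positive is present;
#                                      with no positives, at least one must match
# negatives == ["closed"]           -> none may appear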