monoco-toolkit 0.2.5__py3-none-any.whl → 0.2.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. monoco/core/agent/adapters.py +24 -1
  2. monoco/core/config.py +77 -17
  3. monoco/core/integrations.py +8 -0
  4. monoco/core/lsp.py +7 -0
  5. monoco/core/output.py +8 -1
  6. monoco/core/resources/zh/SKILL.md +6 -7
  7. monoco/core/setup.py +8 -0
  8. monoco/features/i18n/resources/zh/SKILL.md +5 -5
  9. monoco/features/issue/commands.py +179 -55
  10. monoco/features/issue/core.py +263 -124
  11. monoco/features/issue/domain/__init__.py +0 -0
  12. monoco/features/issue/domain/lifecycle.py +126 -0
  13. monoco/features/issue/domain/models.py +170 -0
  14. monoco/features/issue/domain/parser.py +223 -0
  15. monoco/features/issue/domain/workspace.py +104 -0
  16. monoco/features/issue/engine/__init__.py +22 -0
  17. monoco/features/issue/engine/config.py +172 -0
  18. monoco/features/issue/engine/machine.py +185 -0
  19. monoco/features/issue/engine/models.py +18 -0
  20. monoco/features/issue/linter.py +118 -12
  21. monoco/features/issue/lsp/__init__.py +3 -0
  22. monoco/features/issue/lsp/definition.py +72 -0
  23. monoco/features/issue/models.py +27 -9
  24. monoco/features/issue/resources/en/AGENTS.md +5 -0
  25. monoco/features/issue/resources/en/SKILL.md +26 -2
  26. monoco/features/issue/resources/zh/AGENTS.md +5 -0
  27. monoco/features/issue/resources/zh/SKILL.md +34 -10
  28. monoco/features/issue/validator.py +252 -66
  29. monoco/features/spike/core.py +5 -22
  30. monoco/features/spike/resources/zh/SKILL.md +2 -2
  31. monoco/main.py +2 -26
  32. monoco_toolkit-0.2.8.dist-info/METADATA +136 -0
  33. {monoco_toolkit-0.2.5.dist-info → monoco_toolkit-0.2.8.dist-info}/RECORD +36 -30
  34. monoco/features/agent/commands.py +0 -166
  35. monoco/features/agent/doctor.py +0 -30
  36. monoco/features/pty/core.py +0 -185
  37. monoco/features/pty/router.py +0 -138
  38. monoco/features/pty/server.py +0 -56
  39. monoco_toolkit-0.2.5.dist-info/METADATA +0 -93
  40. {monoco_toolkit-0.2.5.dist-info → monoco_toolkit-0.2.8.dist-info}/WHEEL +0 -0
  41. {monoco_toolkit-0.2.5.dist-info → monoco_toolkit-0.2.8.dist-info}/entry_points.txt +0 -0
  42. {monoco_toolkit-0.2.5.dist-info → monoco_toolkit-0.2.8.dist-info}/licenses/LICENSE +0 -0
@@ -10,35 +10,21 @@ from monoco.core.config import get_config, MonocoConfig
 from monoco.core.lsp import DiagnosticSeverity
 from .validator import IssueValidator
 
-PREFIX_MAP = {
-    IssueType.EPIC: "EPIC",
-    IssueType.FEATURE: "FEAT",
-    IssueType.CHORE: "CHORE",
-    IssueType.FIX: "FIX"
-}
+from .engine import get_engine
 
-REVERSE_PREFIX_MAP = {v: k for k, v in PREFIX_MAP.items()}
+def get_prefix_map(issues_root: Path) -> Dict[str, str]:
+    engine = get_engine(str(issues_root.parent))
+    return engine.get_prefix_map()
 
-def enforce_lifecycle_policy(meta: IssueMetadata) -> None:
-    """
-    Apply business rules to ensure IssueMetadata consistency.
-    Should be called during Create or Update (but NOT during Read/Lint).
-    """
-    if meta.status == IssueStatus.BACKLOG:
-        meta.stage = IssueStage.FREEZED
-
-    elif meta.status == IssueStatus.CLOSED:
-        # Enforce stage=done for closed issues
-        if meta.stage != IssueStage.DONE:
-            meta.stage = IssueStage.DONE
-        # Auto-fill closed_at if missing
-        if not meta.closed_at:
-            meta.closed_at = current_time()
-
-    elif meta.status == IssueStatus.OPEN:
-        # Ensure valid stage for open status
-        if meta.stage is None:
-            meta.stage = IssueStage.DRAFT
+def get_reverse_prefix_map(issues_root: Path) -> Dict[str, str]:
+    prefix_map = get_prefix_map(issues_root)
+    return {v: k for k, v in prefix_map.items()}
+
+def get_issue_dir(issue_type: str, issues_root: Path) -> Path:
+    engine = get_engine(str(issues_root.parent))
+    folder_map = engine.get_folder_map()
+    folder = folder_map.get(issue_type, issue_type.capitalize() + "s")
+    return issues_root / folder
 
 
 def _get_slug(title: str) -> str:
     slug = title.lower()
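
The hard-coded prefix and folder maps above give way to engine-backed lookups. A minimal sketch of how the new `get_issue_dir` helper behaves, assuming a folder map like the one the engine's workflow config would return (the stand-in `FOLDER_MAP` below is illustrative only; the real values come from the new `engine` package, which this hunk does not show):

```python
from pathlib import Path

# Stand-in for engine.get_folder_map(); hypothetical values for illustration.
FOLDER_MAP = {"epic": "Epics", "feature": "Features"}

def resolve_issue_dir(issue_type: str, issues_root: Path) -> Path:
    # Mirrors get_issue_dir(): use the configured folder if present,
    # otherwise fall back to "<Type>s" (e.g. "chore" -> "Chores").
    folder = FOLDER_MAP.get(issue_type, issue_type.capitalize() + "s")
    return issues_root / folder

print(resolve_issue_dir("feature", Path("Issues")))  # Issues/Features
print(resolve_issue_dir("chore", Path("Issues")))    # Issues/Chores (fallback)
```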
@@ -52,15 +38,6 @@ def _get_slug(title: str) -> str:
 
     return slug
 
-def get_issue_dir(issue_type: IssueType, issues_root: Path) -> Path:
-    mapping = {
-        IssueType.EPIC: "Epics",
-        IssueType.FEATURE: "Features",
-        IssueType.CHORE: "Chores",
-        IssueType.FIX: "Fixes",
-    }
-    return issues_root / mapping[issue_type]
-
 def parse_issue(file_path: Path) -> Optional[IssueMetadata]:
     if not file_path.suffix == ".md":
         return None
@@ -105,8 +82,9 @@ def parse_issue_detail(file_path: Path) -> Optional[IssueDetail]:
     except Exception:
         return None
 
-def find_next_id(issue_type: IssueType, issues_root: Path) -> str:
-    prefix = PREFIX_MAP[issue_type]
+def find_next_id(issue_type: str, issues_root: Path) -> str:
+    prefix_map = get_prefix_map(issues_root)
+    prefix = prefix_map.get(issue_type, "ISSUE")
     pattern = re.compile(rf"{prefix}-(\d+)")
     max_id = 0
 
@@ -147,13 +125,35 @@ def create_issue_file(
 
     issue_id = find_next_id(issue_type, issues_root)
     base_type_dir = get_issue_dir(issue_type, issues_root)
-    target_dir = base_type_dir / status.value
+    target_dir = base_type_dir / status
 
     if subdir:
         target_dir = target_dir / subdir
 
     target_dir.mkdir(parents=True, exist_ok=True)
 
+    # Auto-Populate Tags with required IDs (Requirement: Maintain tags field with parent/deps/related/self IDs)
+    # Ensure they are prefixed with '#' for tagging convention if not present (usually tags are just strings, but user asked for #ID)
+    auto_tags = set(tags) if tags else set()
+
+    # 1. Add Parent
+    if parent:
+        auto_tags.add(f"#{parent}")
+
+    # 2. Add Dependencies
+    for dep in dependencies:
+        auto_tags.add(f"#{dep}")
+
+    # 3. Add Related
+    for rel in related:
+        auto_tags.add(f"#{rel}")
+
+    # 4. Add Self (as per instruction "auto add this issue... number")
+    # Note: issue_id is generated just above
+    auto_tags.add(f"#{issue_id}")
+
+    final_tags = sorted(list(auto_tags))
+
     metadata = IssueMetadata(
         id=issue_id,
         uid=generate_uid(),  # Generate global unique identifier
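
The tag auto-population added above can be illustrated in isolation. A sketch with hypothetical issue IDs; the '#' prefix, deduplication via a set, and sorted output all come straight from the hunk:

```python
def build_tags(tags, parent, dependencies, related, issue_id):
    # Same shape as the create_issue_file logic: collect parent, deps,
    # related and the issue's own ID as '#'-prefixed tags, deduplicated.
    auto_tags = set(tags) if tags else set()
    if parent:
        auto_tags.add(f"#{parent}")
    for dep in dependencies:
        auto_tags.add(f"#{dep}")
    for rel in related:
        auto_tags.add(f"#{rel}")
    auto_tags.add(f"#{issue_id}")
    return sorted(auto_tags)

# Hypothetical IDs for illustration only.
print(build_tags(["ui"], "EPIC-001", ["FEAT-002"], [], "FEAT-010"))
# ['#EPIC-001', '#FEAT-002', '#FEAT-010', 'ui']
```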
@@ -165,29 +165,62 @@ def create_issue_file(
         dependencies=dependencies,
         related=related,
         sprint=sprint,
-        tags=tags,
+        tags=final_tags,
         opened_at=current_time() if status == IssueStatus.OPEN else None
     )
 
     # Enforce lifecycle policies (defaults, auto-corrections)
-    enforce_lifecycle_policy(metadata)
+    from .engine import get_engine
+    get_engine().enforce_policy(metadata)
+
+    # Serialize metadata
+    # Explicitly exclude actions and path from file persistence
+    yaml_header = yaml.dump(metadata.model_dump(exclude_none=True, mode='json', exclude={'actions', 'path'}), sort_keys=False, allow_unicode=True)
+
+    # Inject Self-Documenting Hints (Interactive Frontmatter)
+    if "parent:" not in yaml_header:
+        yaml_header += "# parent: <EPIC-ID> # Optional: Parent Issue ID\n"
+    if "solution:" not in yaml_header:
+        yaml_header += "# solution: null # Required for Closed state (implemented, cancelled, etc.)\n"
+
+    if "dependencies:" not in yaml_header:
+        yaml_header += "# dependencies: [] # List of dependency IDs\n"
+    if "related:" not in yaml_header:
+        yaml_header += "# related: [] # List of related issue IDs\n"
+    if "files:" not in yaml_header:
+        yaml_header += "# files: [] # List of modified files\n"
 
-    yaml_header = yaml.dump(metadata.model_dump(exclude_none=True, mode='json'), sort_keys=False, allow_unicode=True)
     slug = _get_slug(title)
     filename = f"{issue_id}-{slug}.md"
 
+    # Enhanced Template with Instructional Comments
     file_content = f"""---
 {yaml_header}---
 
 ## {issue_id}: {title}
 
 ## Objective
+<!-- Describe the "Why" and "What" clearly. Focus on value. -->
 
 ## Acceptance Criteria
+<!-- Define binary conditions for success. -->
+- [ ] Criteria 1
 
 ## Technical Tasks
+<!-- Breakdown into atomic steps. Use nested lists for sub-tasks. -->
+
+<!-- Status Syntax: -->
+<!-- [ ] To Do -->
+<!-- [/] Doing -->
+<!-- [x] Done -->
+<!-- [~] Cancelled -->
+<!-- - [ ] Parent Task -->
+<!-- - [ ] Sub Task -->
 
-- [ ]
+- [ ] Task 1
+
+## Review Comments
+<!-- Required for Review/Done stage. Record review feedback here. -->
 """
     file_path = target_dir / filename
     file_path.write_text(file_content)
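
The "self-documenting hints" are appended as YAML comments only when the corresponding key was not serialized (with `exclude_none=True`, unset fields are dropped from the dump). A minimal sketch of that pattern, using a made-up header string:

```python
# Hypothetical serialized frontmatter with no 'files' key (exclude_none dropped it).
yaml_header = "id: FEAT-010\nstatus: open\n"

# Same pattern as create_issue_file: append a commented-out hint so the
# frontmatter documents the optional field without actually setting it.
if "files:" not in yaml_header:
    yaml_header += "# files: [] # List of modified files\n"

print(yaml_header)
```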
@@ -198,32 +231,23 @@ def create_issue_file(
     return metadata, file_path
 def get_available_actions(meta: IssueMetadata) -> List[Any]:
     from .models import IssueAction
+    from .engine import get_engine
+
+    engine = get_engine()
+    transitions = engine.get_available_transitions(meta)
+
     actions = []
-
-    if meta.status == IssueStatus.OPEN:
-        # Stage-based movements
-        if meta.stage == IssueStage.DRAFT:
-            actions.append(IssueAction(label="Start", target_status=IssueStatus.OPEN, target_stage=IssueStage.DOING))
-            actions.append(IssueAction(label="Freeze", target_status=IssueStatus.BACKLOG))
-        elif meta.stage == IssueStage.DOING:
-            actions.append(IssueAction(label="Stop", target_status=IssueStatus.OPEN, target_stage=IssueStage.DRAFT))
-            actions.append(IssueAction(label="Submit", target_status=IssueStatus.OPEN, target_stage=IssueStage.REVIEW))
-        elif meta.stage == IssueStage.REVIEW:
-            actions.append(IssueAction(label="Approve", target_status=IssueStatus.CLOSED, target_stage=IssueStage.DONE, target_solution=IssueSolution.IMPLEMENTED))
-            actions.append(IssueAction(label="Reject", target_status=IssueStatus.OPEN, target_stage=IssueStage.DOING))
-        elif meta.stage == IssueStage.DONE:
-            actions.append(IssueAction(label="Reopen", target_status=IssueStatus.OPEN, target_stage=IssueStage.DRAFT))
-            actions.append(IssueAction(label="Close", target_status=IssueStatus.CLOSED, target_stage=IssueStage.DONE, target_solution=IssueSolution.IMPLEMENTED))
+    for t in transitions:
+        command = t.command_template.format(id=meta.id) if t.command_template else ""
 
-        # Generic cancel
-        actions.append(IssueAction(label="Cancel", target_status=IssueStatus.CLOSED, target_stage=IssueStage.DONE, target_solution=IssueSolution.CANCELLED))
-
-    elif meta.status == IssueStatus.BACKLOG:
-        actions.append(IssueAction(label="Pull", target_status=IssueStatus.OPEN, target_stage=IssueStage.DRAFT))
-        actions.append(IssueAction(label="Cancel", target_status=IssueStatus.CLOSED, target_stage=IssueStage.DONE, target_solution=IssueSolution.CANCELLED))
-
-    elif meta.status == IssueStatus.CLOSED:
-        actions.append(IssueAction(label="Reopen", target_status=IssueStatus.OPEN, target_stage=IssueStage.DRAFT))
+        actions.append(IssueAction(
+            label=t.label,
+            icon=t.icon,
+            target_status=t.to_status if t.to_status != meta.status or t.to_stage != meta.stage else None,
+            target_stage=t.to_stage if t.to_stage != meta.stage else None,
+            target_solution=t.required_solution,
+            command=command
+        ))
 
     return actions
 
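
`get_available_actions()` no longer hard-codes the state machine; it maps whatever transitions the engine reports onto `IssueAction` objects. A sketch of that mapping with a stand-in transition type; the attribute names follow the hunk above, but the class itself and the example values are hypothetical (the real model lives in the new engine package):

```python
from dataclasses import dataclass
from typing import Optional

@dataclass
class Transition:
    # Stand-in for the engine's transition model; field names taken from the hunk above.
    label: str
    icon: Optional[str]
    to_status: str
    to_stage: Optional[str]
    required_solution: Optional[str]
    command_template: Optional[str]

# Hypothetical transition for illustration.
t = Transition("Submit", None, "open", "review", None, "monoco issue submit {id}")

# Same substitution as in get_available_actions(): fill the command template with the issue ID.
command = t.command_template.format(id="FEAT-010") if t.command_template else ""
print(command)  # monoco issue submit FEAT-010
```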
@@ -261,7 +285,8 @@ def find_issue_path(issues_root: Path, issue_id: str) -> Optional[Path]:
     except IndexError:
         return None
 
-    issue_type = REVERSE_PREFIX_MAP.get(prefix)
+    reverse_prefix_map = get_reverse_prefix_map(issues_root)
+    issue_type = reverse_prefix_map.get(prefix)
     if not issue_type:
         return None
 
@@ -282,7 +307,8 @@ def update_issue(
     sprint: Optional[str] = None,
     dependencies: Optional[List[str]] = None,
     related: Optional[List[str]] = None,
-    tags: Optional[List[str]] = None
+    tags: Optional[List[str]] = None,
+    files: Optional[List[str]] = None
 ) -> IssueMetadata:
     path = find_issue_path(issues_root, issue_id)
     if not path:
@@ -316,30 +342,43 @@ def update_issue(
         current_status = IssueStatus(current_status_str.lower())
     except ValueError:
         current_status = IssueStatus.OPEN
+
+    current_stage_str = data.get("stage")
+    current_stage = IssueStage(current_stage_str.lower()) if current_stage_str else None
 
     # Logic: Status Update
     target_status = status if status else current_status
 
-    # Validation: For closing
-    effective_solution = solution.value if solution else data.get("solution")
+    # If status is changing, we don't default target_stage to current_stage
+    # because the new status might have different allowed stages.
+    # enforce_policy will handle setting the correct default stage for the new status.
+    if status and status != current_status:
+        target_stage = stage
+    else:
+        target_stage = stage if stage else current_stage
 
-    # Policy: Prevent Backlog -> Review
-    if stage == IssueStage.REVIEW and current_status == IssueStatus.BACKLOG:
-        raise ValueError(f"Lifecycle Policy: Cannot submit Backlog issue directly. Run `monoco issue pull {issue_id}` first.")
+    # Engine Validation
+    from .engine import get_engine
+    engine = get_engine()
+
+    # Map solution string to enum if present
+    effective_solution = solution
+    if not effective_solution and data.get("solution"):
+        try:
+            effective_solution = IssueSolution(data.get("solution").lower())
+        except ValueError:
+            pass
 
-    if target_status == IssueStatus.CLOSED:
-        # Validator will check solution presence
-        current_data_stage = data.get('stage')
-
-        # Policy: IMPLEMENTED requires REVIEW stage
-        if effective_solution == IssueSolution.IMPLEMENTED.value:
-            if current_data_stage != IssueStage.REVIEW.value:
-                raise ValueError(f"Lifecycle Policy: 'Implemented' issues must be submitted for review first.\nCurrent stage: {current_data_stage}\nAction: Run `monoco issue submit {issue_id}`.")
-
-        # Policy: No closing from DOING (General Safety)
-        if current_data_stage == IssueStage.DOING.value:
-            raise ValueError("Cannot close issue in progress (Doing). Please review (`monoco issue submit`) or stop (`monoco issue open`) first.")
-
+    # Use engine to validate the transition
+    engine.validate_transition(
+        from_status=current_status,
+        from_stage=current_stage,
+        to_status=target_status,
+        to_stage=target_stage,
+        solution=effective_solution
+    )
+
+    if target_status == "closed":
         # Policy: Dependencies must be closed
         dependencies_to_check = dependencies if dependencies is not None else data.get('dependencies', [])
         if dependencies_to_check:
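
The comment block in this hunk encodes a small but easy-to-miss rule: the current stage only carries over when the status is unchanged. A sketch of just that branch, assuming plain string values for status and stage as in the new code:

```python
def pick_target_stage(status, current_status, stage, current_stage):
    # Mirrors the update_issue logic: on a status change, do not inherit the
    # old stage; leave it to enforce_policy to pick the new status's default.
    if status and status != current_status:
        return stage
    return stage if stage else current_stage

print(pick_target_stage("closed", "open", None, "doing"))  # None -> policy decides the stage
print(pick_target_stage(None, "open", None, "doing"))      # 'doing' is kept
```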
@@ -347,8 +386,8 @@ def update_issue(
                 dep_path = find_issue_path(issues_root, dep_id)
                 if dep_path:
                     dep_meta = parse_issue(dep_path)
-                    if dep_meta and dep_meta.status != IssueStatus.CLOSED:
-                        raise ValueError(f"Dependency Block: Cannot close {issue_id} because dependency {dep_id} is [Status: {dep_meta.status.value}].")
+                    if dep_meta and dep_meta.status != "closed":
+                        raise ValueError(f"Dependency Block: Cannot close {issue_id} because dependency {dep_id} is [Status: {dep_meta.status}].")
 
     # Validate new parent/dependencies/related exist
     if parent is not None and parent != "":
@@ -367,12 +406,12 @@ def update_issue(
 
     # Update Data
     if status:
-        data['status'] = status.value
+        data['status'] = status
 
     if stage:
-        data['stage'] = stage.value
+        data['stage'] = stage
     if solution:
-        data['solution'] = solution.value
+        data['solution'] = solution
 
     if title:
         data['title'] = title
@@ -394,6 +433,9 @@
 
     if tags is not None:
         data['tags'] = tags
+
+    if files is not None:
+        data['files'] = files
 
     # Lifecycle Hooks
     # 1. Opened At: If transitioning to OPEN
@@ -415,7 +457,8 @@ def update_issue(
 
     # Enforce lifecycle policies (defaults, auto-corrections)
     # This ensures that when we update, we also fix invalid states (like Closed but not Done)
-    enforce_lifecycle_policy(updated_meta)
+    from .engine import get_engine
+    get_engine().enforce_policy(updated_meta)
 
     # Delegate to IssueValidator for static state validation
     # We need to construct the full content to validate body-dependent rules (like checkboxes)
@@ -431,7 +474,8 @@ def update_issue(
         raise ValueError(f"Failed to validate updated metadata: {e}")
 
     # Serialize back
-    new_yaml = yaml.dump(updated_meta.model_dump(exclude_none=True, mode='json'), sort_keys=False, allow_unicode=True)
+    # Explicitly exclude actions and path from file persistence
+    new_yaml = yaml.dump(updated_meta.model_dump(exclude_none=True, mode='json', exclude={'actions', 'path'}), sort_keys=False, allow_unicode=True)
 
     # Reconstruct File
     match_header = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
@@ -451,7 +495,8 @@ def update_issue(
     if status and status != current_status:
         # Move file
         prefix = issue_id.split("-")[0].upper()
-        base_type_dir = get_issue_dir(REVERSE_PREFIX_MAP[prefix], issues_root)
+        reverse_prefix_map = get_reverse_prefix_map(issues_root)
+        base_type_dir = get_issue_dir(reverse_prefix_map[prefix], issues_root)
 
         try:
             rel_path = path.relative_to(base_type_dir)
@@ -459,7 +504,7 @@ def update_issue(
         except ValueError:
             structure_path = Path(path.name)
 
-        target_path = base_type_dir / target_status.value / structure_path
+        target_path = base_type_dir / target_status / structure_path
 
         if path != target_path:
             target_path.parent.mkdir(parents=True, exist_ok=True)
@@ -475,7 +520,7 @@ def update_issue(
     updated_meta.actions = get_available_actions(updated_meta)
     return updated_meta
 
-def start_issue_isolation(issues_root: Path, issue_id: str, mode: IsolationType, project_root: Path) -> IssueMetadata:
+def start_issue_isolation(issues_root: Path, issue_id: str, mode: str, project_root: Path) -> IssueMetadata:
     """
     Start physical isolation for an issue (Branch or Worktree).
     """
@@ -503,7 +548,7 @@ def start_issue_isolation(issues_root: Path, issue_id: str, mode: IsolationType,
 
     isolation_meta = None
 
-    if mode == IsolationType.BRANCH:
+    if mode == "branch":
         if not git.branch_exists(project_root, branch_name):
             git.create_branch(project_root, branch_name, checkout=True)
         else:
@@ -513,9 +558,9 @@ def start_issue_isolation(issues_root: Path, issue_id: str, mode: IsolationType,
             if current != branch_name:
                 git.checkout_branch(project_root, branch_name)
 
-        isolation_meta = IssueIsolation(type=IsolationType.BRANCH, ref=branch_name)
+        isolation_meta = IssueIsolation(type="branch", ref=branch_name)
 
-    elif mode == IsolationType.WORKTREE:
+    elif mode == "worktree":
         wt_path = project_root / ".monoco" / "worktrees" / f"{issue_id.lower()}-{slug}"
 
         # Check if worktree exists physically
@@ -526,7 +571,7 @@ def start_issue_isolation(issues_root: Path, issue_id: str, mode: IsolationType,
             wt_path.parent.mkdir(parents=True, exist_ok=True)
             git.worktree_add(project_root, branch_name, wt_path)
 
-        isolation_meta = IssueIsolation(type=IsolationType.WORKTREE, ref=branch_name, path=str(wt_path))
+        isolation_meta = IssueIsolation(type="worktree", ref=branch_name, path=str(wt_path))
 
     # Persist Metadata
     # We load raw, update isolation field, save.
@@ -538,7 +583,7 @@ def start_issue_isolation(issues_root: Path, issue_id: str, mode: IsolationType,
 
     data['isolation'] = isolation_meta.model_dump(mode='json')
     # Also ensure stage is DOING (logic link)
-    data['stage'] = IssueStage.DOING.value
+    data['stage'] = "doing"
     data['updated_at'] = current_time()
 
     new_yaml = yaml.dump(data, sort_keys=False, allow_unicode=True)
@@ -629,6 +674,77 @@ def delete_issue_file(issues_root: Path, issue_id: str):
         raise FileNotFoundError(f"Issue {issue_id} not found.")
 
     path.unlink()
+
+def sync_issue_files(issues_root: Path, issue_id: str, project_root: Path) -> List[str]:
+    """
+    Sync 'files' field in issue metadata with actual changed files in git.
+    Strategies:
+    1. Isolation Ref: If issue has isolation (branch/worktree), use that ref.
+    2. Convention: If no isolation, look for branch `*/<id>-*`.
+    3. Current Branch: If current branch matches pattern.
+
+    Compares against default branch (usually 'main' or 'master').
+    """
+    path = find_issue_path(issues_root, issue_id)
+    if not path:
+        raise FileNotFoundError(f"Issue {issue_id} not found.")
+
+    issue = parse_issue(path)
+    if not issue:
+        raise ValueError(f"Could not parse issue {issue_id}")
+
+    # Determine Target Branch
+    target_ref = None
+
+    if issue.isolation and issue.isolation.ref:
+        target_ref = issue.isolation.ref
+    else:
+        # Heuristic Search
+        # 1. Is current branch related?
+        current = git.get_current_branch(project_root)
+        if issue_id.lower() in current.lower():
+            target_ref = current
+        else:
+            # 2. Search for branch
+            # Limitation: core.git doesn't list all branches yet.
+            # We skip this for now to avoid complexity, relying on isolation or current context.
+            pass
+
+    if not target_ref:
+        raise RuntimeError(f"Could not determine git branch for Issue {issue_id}. Please ensure issue is started or you are on the feature branch.")
+
+    # Determine Base Branch (assume main, or config?)
+    # For now hardcode main, eventually read from config
+    base_ref = "main"
+
+    # Check if base exists, if not try master
+    if not git.branch_exists(project_root, base_ref):
+        if git.branch_exists(project_root, "master"):
+            base_ref = "master"
+        else:
+            # Fallback: remote/main?
+            pass
+
+    # Git Diff
+    # git diff --name-only base...target
+    cmd = ["diff", "--name-only", f"{base_ref}...{target_ref}"]
+    code, stdout, stderr = git._run_git(cmd, project_root)
+
+    if code != 0:
+        raise RuntimeError(f"Git diff failed: {stderr}")
+
+    changed_files = [f.strip() for f in stdout.splitlines() if f.strip()]
+
+    # Sort for consistency
+    changed_files.sort()
+
+    # Update Issue
+    # Only update if changed
+    if changed_files != issue.files:
+        update_issue(issues_root, issue_id, files=changed_files)
+        return changed_files
+
+    return []
 
 # Resources
 SKILL_CONTENT = """
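
The new `sync_issue_files()` ultimately shells out to `git diff --name-only <base>...<target>` through monoco's git wrapper. A standalone sketch of the same comparison using plain subprocess, with hypothetical repo path and branch names:

```python
import subprocess

def changed_files(repo: str, base_ref: str = "main", target_ref: str = "feat/feat-010-demo") -> list[str]:
    # Triple-dot diff: files changed on target_ref since it diverged from base_ref,
    # matching the command assembled in sync_issue_files.
    result = subprocess.run(
        ["git", "-C", repo, "diff", "--name-only", f"{base_ref}...{target_ref}"],
        capture_output=True, text=True, check=True,
    )
    return sorted(line.strip() for line in result.stdout.splitlines() if line.strip())

# Example (hypothetical repo path and branch):
# print(changed_files("/path/to/repo"))
```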
@@ -663,7 +779,7 @@ description: Monoco Issue System 的官方技能定义。将 Issue 视为通用
 - **Status Level (Lowercase)**: `open`, `backlog`, `closed`
 
 ### 路径流转
-使用 `monoco issue`:
+使用 `monoco issue`:
 1. **Create**: `monoco issue create <type> --title "..."`
 2. **Transition**: `monoco issue open/close/backlog <id>`
 3. **View**: `monoco issue scope`
@@ -700,7 +816,10 @@ def list_issues(issues_root: Path, recursive_workspace: bool = False) -> List[Is
     List all issues in the project.
     """
    issues = []
-    for issue_type in IssueType:
+    engine = get_engine(str(issues_root.parent))
+    all_types = engine.get_all_types()
+
+    for issue_type in all_types:
         base_dir = get_issue_dir(issue_type, issues_root)
         for status_dir in ["open", "backlog", "closed"]:
             d = base_dir / status_dir
@@ -726,6 +845,22 @@ def list_issues(issues_root: Path, recursive_workspace: bool = False) -> List[Is
                 member_issues = list_issues(member_issues_dir, False)
                 for m in member_issues:
                     # Namespace the ID to avoid collisions and indicate origin
+                    # CRITICAL: Also namespace references to keep parent-child structure intact
+                    if m.parent and "::" not in m.parent:
+                        m.parent = f"{name}::{m.parent}"
+
+                    if m.dependencies:
+                        m.dependencies = [
+                            f"{name}::{d}" if d and "::" not in d else d
+                            for d in m.dependencies
+                        ]
+
+                    if m.related:
+                        m.related = [
+                            f"{name}::{r}" if r and "::" not in r else r
+                            for r in m.related
+                        ]
+
                     m.id = f"{name}::{m.id}"
                     issues.append(m)
             except Exception:
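
When aggregating member workspaces, both the issue ID and its references now get a `<workspace>::` namespace so parent and dependency links still resolve after merging. A compact sketch of the rewrite rule used above:

```python
def namespace_ref(ref: str, workspace: str) -> str:
    # Mirrors the list_issues logic: prefix once, never twice, and leave empty refs alone.
    return ref if not ref or "::" in ref else f"{workspace}::{ref}"

# Hypothetical workspace name and IDs.
print(namespace_ref("FEAT-010", "billing"))           # billing::FEAT-010
print(namespace_ref("billing::FEAT-010", "billing"))  # unchanged
```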
@@ -739,21 +874,21 @@ def get_board_data(issues_root: Path) -> Dict[str, List[IssueMetadata]]:
     Get open issues grouped by their stage for Kanban view.
     """
     board = {
-        IssueStage.DRAFT.value: [],
-        IssueStage.DOING.value: [],
-        IssueStage.REVIEW.value: [],
-        IssueStage.DONE.value: []
+        "draft": [],
+        "doing": [],
+        "review": [],
+        "done": []
     }
 
     issues = list_issues(issues_root)
     for issue in issues:
-        if issue.status == IssueStatus.OPEN and issue.stage:
-            stage_val = issue.stage.value
+        if issue.status == "open" and issue.stage:
+            stage_val = issue.stage
             if stage_val in board:
                 board[stage_val].append(issue)
-        elif issue.status == IssueStatus.CLOSED:
+        elif issue.status == "closed":
             # Optionally show recently closed items in DONE column
-            board[IssueStage.DONE.value].append(issue)
+            board["done"].append(issue)
 
     return board
 
@@ -763,14 +898,14 @@ def validate_issue_integrity(meta: IssueMetadata, all_issue_ids: Set[str] = set(
     UI-agnostic.
     """
     errors = []
-    if meta.status == IssueStatus.CLOSED and not meta.solution:
+    if meta.status == "closed" and not meta.solution:
         errors.append(f"Solution Missing: {meta.id} is closed but has no solution field.")
 
     if meta.parent:
         if all_issue_ids and meta.parent not in all_issue_ids:
             errors.append(f"Broken Link: {meta.id} refers to non-existent parent {meta.parent}.")
 
-    if meta.status == IssueStatus.BACKLOG and meta.stage != IssueStage.FREEZED:
+    if meta.status == "backlog" and meta.stage != "freezed":
         errors.append(f"Lifecycle Error: {meta.id} is backlog but stage is not freezed (found: {meta.stage}).")
 
     return errors
@@ -815,7 +950,8 @@ def update_issue_content(issues_root: Path, issue_id: str, new_content: str) ->
     # Reuse logic from update_issue (simplified)
 
     prefix = issue_id.split("-")[0].upper()
-    base_type_dir = get_issue_dir(REVERSE_PREFIX_MAP[prefix], issues_root)
+    reverse_prefix_map = get_reverse_prefix_map(issues_root)
+    base_type_dir = get_issue_dir(reverse_prefix_map[prefix], issues_root)
 
     # Calculate structure path (preserve subdir)
     try:
@@ -828,7 +964,7 @@ def update_issue_content(issues_root: Path, issue_id: str, new_content: str) ->
         # Fallback if path is weird
         structure_path = Path(path.name)
 
-    target_path = base_type_dir / meta.status.value / structure_path
+    target_path = base_type_dir / meta.status / structure_path
 
     if path != target_path:
         target_path.parent.mkdir(parents=True, exist_ok=True)
@@ -980,9 +1116,9 @@ def check_issue_match(issue: IssueMetadata, explicit_positives: List[str], terms
     searchable_parts = [
         issue.id,
         issue.title,
-        issue.status.value,
-        issue.type.value,
-        str(issue.stage.value) if issue.stage else "",
+        issue.status,
+        issue.type,
+        str(issue.stage) if issue.stage else "",
         *(issue.tags or []),
         *(issue.dependencies or []),
         *(issue.related or []),
@@ -1041,7 +1177,10 @@ def search_issues(issues_root: Path, query: str) -> List[IssueMetadata]:
     # To support deep search (Body), we need to read files.
     # Let's iterate files directly.
 
-    for issue_type in IssueType:
+    engine = get_engine(str(issues_root.parent))
+    all_types = engine.get_all_types()
+
+    for issue_type in all_types:
         base_dir = get_issue_dir(issue_type, issues_root)
         for status_dir in ["open", "backlog", "closed"]:
             d = base_dir / status_dir
@@ -1089,7 +1228,7 @@ def recalculate_parent(issues_root: Path, parent_id: str):
         return
 
     total = len(children)
-    closed = len([c for c in children if c.status == IssueStatus.CLOSED])
+    closed = len([c for c in children if c.status == "closed"])
     # Progress string: "3/5"
     progress_str = f"{closed}/{total}"
 
@@ -1129,8 +1268,8 @@ def recalculate_parent(issues_root: Path, parent_id: str):
 
     if current_status == "open" and current_stage == "draft":
         # Check if any child is active
-        active_children = [c for c in children if c.status == IssueStatus.OPEN and c.stage != IssueStage.DRAFT]
-        closed_children = [c for c in children if c.status == IssueStatus.CLOSED]
+        active_children = [c for c in children if c.status == "open" and c.stage != "draft"]
+        closed_children = [c for c in children if c.status == "closed"]
 
         if active_children or closed_children:
             data["stage"] = "doing"
@@ -1212,7 +1351,7 @@ def move_issue(
 
     # 4. Construct target path
     target_type_dir = get_issue_dir(issue.type, target_issues_root)
-    target_status_dir = target_type_dir / issue.status.value
+    target_status_dir = target_type_dir / issue.status
 
     # Preserve subdirectory structure if any
     try: