monoco-toolkit 0.1.1__py3-none-any.whl → 0.2.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. monoco/cli/__init__.py +0 -0
  2. monoco/cli/project.py +87 -0
  3. monoco/cli/workspace.py +46 -0
  4. monoco/core/agent/__init__.py +5 -0
  5. monoco/core/agent/action.py +144 -0
  6. monoco/core/agent/adapters.py +106 -0
  7. monoco/core/agent/protocol.py +31 -0
  8. monoco/core/agent/state.py +106 -0
  9. monoco/core/config.py +152 -17
  10. monoco/core/execution.py +62 -0
  11. monoco/core/feature.py +58 -0
  12. monoco/core/git.py +51 -2
  13. monoco/core/injection.py +196 -0
  14. monoco/core/integrations.py +234 -0
  15. monoco/core/lsp.py +61 -0
  16. monoco/core/output.py +13 -2
  17. monoco/core/registry.py +36 -0
  18. monoco/core/resources/en/AGENTS.md +8 -0
  19. monoco/core/resources/en/SKILL.md +66 -0
  20. monoco/core/resources/zh/AGENTS.md +8 -0
  21. monoco/core/resources/zh/SKILL.md +66 -0
  22. monoco/core/setup.py +88 -110
  23. monoco/core/skills.py +444 -0
  24. monoco/core/state.py +53 -0
  25. monoco/core/sync.py +224 -0
  26. monoco/core/telemetry.py +4 -1
  27. monoco/core/workspace.py +85 -20
  28. monoco/daemon/app.py +127 -58
  29. monoco/daemon/models.py +4 -0
  30. monoco/daemon/services.py +56 -155
  31. monoco/features/agent/commands.py +166 -0
  32. monoco/features/agent/doctor.py +30 -0
  33. monoco/features/config/commands.py +125 -44
  34. monoco/features/i18n/adapter.py +29 -0
  35. monoco/features/i18n/commands.py +89 -10
  36. monoco/features/i18n/core.py +113 -27
  37. monoco/features/i18n/resources/en/AGENTS.md +8 -0
  38. monoco/features/i18n/resources/en/SKILL.md +94 -0
  39. monoco/features/i18n/resources/zh/AGENTS.md +8 -0
  40. monoco/features/i18n/resources/zh/SKILL.md +94 -0
  41. monoco/features/issue/adapter.py +34 -0
  42. monoco/features/issue/commands.py +183 -65
  43. monoco/features/issue/core.py +172 -77
  44. monoco/features/issue/linter.py +215 -116
  45. monoco/features/issue/migration.py +134 -0
  46. monoco/features/issue/models.py +23 -19
  47. monoco/features/issue/monitor.py +94 -0
  48. monoco/features/issue/resources/en/AGENTS.md +15 -0
  49. monoco/features/issue/resources/en/SKILL.md +87 -0
  50. monoco/features/issue/resources/zh/AGENTS.md +15 -0
  51. monoco/features/issue/resources/zh/SKILL.md +114 -0
  52. monoco/features/issue/validator.py +269 -0
  53. monoco/features/pty/core.py +185 -0
  54. monoco/features/pty/router.py +138 -0
  55. monoco/features/pty/server.py +56 -0
  56. monoco/features/spike/adapter.py +30 -0
  57. monoco/features/spike/commands.py +45 -24
  58. monoco/features/spike/core.py +4 -21
  59. monoco/features/spike/resources/en/AGENTS.md +7 -0
  60. monoco/features/spike/resources/en/SKILL.md +74 -0
  61. monoco/features/spike/resources/zh/AGENTS.md +7 -0
  62. monoco/features/spike/resources/zh/SKILL.md +74 -0
  63. monoco/main.py +115 -2
  64. {monoco_toolkit-0.1.1.dist-info → monoco_toolkit-0.2.5.dist-info}/METADATA +2 -2
  65. monoco_toolkit-0.2.5.dist-info/RECORD +77 -0
  66. monoco_toolkit-0.1.1.dist-info/RECORD +0 -33
  67. {monoco_toolkit-0.1.1.dist-info → monoco_toolkit-0.2.5.dist-info}/WHEEL +0 -0
  68. {monoco_toolkit-0.1.1.dist-info → monoco_toolkit-0.2.5.dist-info}/entry_points.txt +0 -0
  69. {monoco_toolkit-0.1.1.dist-info → monoco_toolkit-0.2.5.dist-info}/licenses/LICENSE +0 -0
monoco/features/issue/linter.py
@@ -1,68 +1,27 @@
- from typing import List, Optional, Tuple, Set
+ from typing import List, Optional, Set, Tuple
  from pathlib import Path
  from rich.console import Console
  from rich.table import Table
  import typer
+ import re

  from . import core
- from .models import IssueStatus, IssueStage
+ from .validator import IssueValidator
+ from monoco.core.lsp import Diagnostic, DiagnosticSeverity

  console = Console()

-
- def validate_issue(path: Path, meta: core.IssueMetadata, all_issue_ids: Set[str] = set(), issues_root: Optional[Path] = None) -> List[str]:
- """
- Validate a single issue's integrity.
+ def check_integrity(issues_root: Path, recursive: bool = False) -> List[Diagnostic]:
  """
- errors = []
-
- # A. Directory/Status Consistency
- expected_status = meta.status.value
- path_parts = path.parts
- # We might be validating a temp file, so we skip path check if it's not in the tree?
- # Or strict check? For "Safe Edit", the file might be in a temp dir.
- # So we probably only care about content/metadata integrity.
-
- # But wait, if we overwrite the file, it MUST be valid.
- # Let's assume the validation is about the content itself (metadata logic).
-
- # B. Solution Compliance
- if meta.status == IssueStatus.CLOSED and not meta.solution:
- errors.append(f"[red]Solution Missing:[/red] {meta.id} is closed but has no [dim]solution[/dim] field.")
-
- # C. Link Integrity
- if meta.parent:
- if all_issue_ids and meta.parent not in all_issue_ids:
- # Check workspace (fallback)
- found = False
- if issues_root:
- if core.find_issue_path(issues_root, meta.parent):
- found = True
-
- if not found:
- errors.append(f"[red]Broken Link:[/red] {meta.id} refers to non-existent parent [bold]{meta.parent}[/bold].")
-
- # D. Lifecycle Guard (Backlog)
- if meta.status == IssueStatus.BACKLOG and meta.stage != IssueStage.FREEZED:
- errors.append(f"[red]Lifecycle Error:[/red] {meta.id} is backlog but stage is not [bold]freezed[/bold] (found: {meta.stage}).")
-
- return errors
-
- def check_integrity(issues_root: Path, recursive: bool = False) -> List[str]:
+ Verify the integrity of the Issues directory using LSP Validator.
  """
- Verify the integrity of the Issues directory.
- Returns a list of error messages.
+ diagnostics = []
+ validator = IssueValidator(issues_root)

- If recursive=True, performs workspace-level validation including:
- - Cross-project ID collision detection
- - Cross-project UID collision detection
- """
- errors = []
- all_issue_ids = set() # For parent reference validation (includes namespaced IDs)
- id_to_projects = {} # local_id -> [(project_name, meta, file)]
- all_uids = {} # uid -> (project, issue_id)
+ all_issue_ids = set()
  all_issues = []

+ # 1. Collection Phase (Build Index)
  # Helper to collect issues from a project
  def collect_project_issues(project_issues_root: Path, project_name: str = "local"):
  project_issues = []
@@ -81,92 +40,232 @@ def check_integrity(issues_root: Path, recursive: bool = False) -> List[str]:
  local_id = meta.id
  full_id = f"{project_name}::{local_id}" if project_name != "local" else local_id

- # Track ID occurrences per project
- if local_id not in id_to_projects:
- id_to_projects[local_id] = []
- id_to_projects[local_id].append((project_name, meta, f))
-
- # Add IDs for reference validation
- all_issue_ids.add(local_id) # Local ID
+ all_issue_ids.add(local_id)
  if project_name != "local":
- all_issue_ids.add(full_id) # Namespaced ID
-
- # Check UID collision (if UID exists)
- if meta.uid:
- if meta.uid in all_uids:
- existing_project, existing_id = all_uids[meta.uid]
- errors.append(
- f"[red]UID Collision:[/red] UID {meta.uid} is duplicated.\n"
- f" - {existing_project}::{existing_id}\n"
- f" - {project_name}::{local_id}"
- )
- else:
- all_uids[meta.uid] = (project_name, local_id)
+ all_issue_ids.add(full_id)

- project_issues.append((f, meta, project_name))
+ project_issues.append((f, meta))
  return project_issues

- # 1. Collect local issues
  all_issues.extend(collect_project_issues(issues_root, "local"))
-
- # 2. If recursive, collect workspace member issues
+
  if recursive:
  try:
  from monoco.core.config import get_config
  project_root = issues_root.parent
  conf = get_config(str(project_root))
-
  for member_name, rel_path in conf.project.members.items():
  member_root = (project_root / rel_path).resolve()
  member_issues_dir = member_root / "Issues"
-
  if member_issues_dir.exists():
- all_issues.extend(collect_project_issues(member_issues_dir, member_name))
- except Exception as e:
- # Fail gracefully if workspace config is missing
+ collect_project_issues(member_issues_dir, member_name)
+ except Exception:
  pass

- # 3. Check for ID collisions within same project
- for local_id, occurrences in id_to_projects.items():
- # Group by project
- projects_with_id = {}
- for project_name, meta, f in occurrences:
- if project_name not in projects_with_id:
- projects_with_id[project_name] = []
- projects_with_id[project_name].append((meta, f))
+ # 2. Validation Phase
+ for path, meta in all_issues:
+ content = path.read_text() # Re-read content for validation

- # Check for duplicates within same project
- for project_name, metas in projects_with_id.items():
- if len(metas) > 1:
- # Same ID appears multiple times in same project - this is an error
- error_msg = f"[red]ID Collision:[/red] {local_id} appears {len(metas)} times in project '{project_name}':\n"
- for idx, (meta, f) in enumerate(metas, 1):
- error_msg += f" {idx}. uid: {meta.uid or 'N/A'} | created: {meta.created_at} | stage: {meta.stage} | status: {meta.status.value}\n"
- error_msg += f" [yellow]→ Action:[/yellow] Remove duplicate or use 'monoco issue move --to <target> --renumber' to resolve."
- errors.append(error_msg)
-
- # 4. Validation
- for path, meta, project_name in all_issues:
- # A. Directory/Status Consistency (Only check this for files in the tree)
- expected_status = meta.status.value
- path_parts = path.parts
- if expected_status not in path_parts:
- errors.append(f"[yellow]Placement Error:[/yellow] {meta.id} has status [cyan]{expected_status}[/cyan] but is not under a [dim]{expected_status}/[/dim] directory.")
+ # A. Run Core Validator
+ file_diagnostics = validator.validate(meta, content, all_issue_ids)

- # Reuse common logic
- errors.extend(validate_issue(path, meta, all_issue_ids, issues_root))
-
- return errors
+ # Add context to diagnostics (Path)
+ for d in file_diagnostics:
+ d.source = f"{meta.id}" # Use ID as source context
+ d.data = {'path': path} # Attach path for potential fixers
+ diagnostics.append(d)
+
+ return diagnostics


- def run_lint(issues_root: Path, recursive: bool = False):
- errors = check_integrity(issues_root, recursive)
+ def run_lint(issues_root: Path, recursive: bool = False, fix: bool = False, format: str = "table", file_path: Optional[str] = None):
+ """
+ Run lint with optional auto-fix and format selection.
+
+ Args:
+ issues_root: Root directory of issues
+ recursive: Recursively scan workspace members
+ fix: Apply auto-fixes
+ format: Output format (table, json)
+ file_path: Optional path to a single file to validate (LSP mode)
+ """
+ # Single-file mode (for LSP integration)
+ if file_path:
+ file = Path(file_path).resolve()
+ if not file.exists():
+ console.print(f"[red]Error:[/red] File not found: {file_path}")
+ raise typer.Exit(code=1)
+
+ # Parse and validate single file
+ try:
+ meta = core.parse_issue(file)
+ if not meta:
+ console.print(f"[red]Error:[/red] Failed to parse issue metadata from {file_path}")
+ raise typer.Exit(code=1)
+
+ content = file.read_text()
+ validator = IssueValidator(issues_root)
+
+ # For single-file mode, we need to build a minimal index
+ # We'll scan the entire workspace to get all issue IDs for reference validation
+ all_issue_ids = set()
+ for subdir in ["Epics", "Features", "Chores", "Fixes"]:
+ d = issues_root / subdir
+ if d.exists():
+ for status in ["open", "closed", "backlog"]:
+ status_dir = d / status
+ if status_dir.exists():
+ for f in status_dir.rglob("*.md"):
+ try:
+ m = core.parse_issue(f)
+ if m:
+ all_issue_ids.add(m.id)
+ except Exception:
+ pass
+
+ diagnostics = validator.validate(meta, content, all_issue_ids)
+
+ # Add context
+ for d in diagnostics:
+ d.source = meta.id
+ d.data = {'path': file}
+
+ except Exception as e:
+ console.print(f"[red]Error:[/red] Validation failed: {e}")
+ raise typer.Exit(code=1)
+ else:
+ # Full workspace scan mode
+ diagnostics = check_integrity(issues_root, recursive)
+
+ # Filter only Warnings and Errors
+ issues = [d for d in diagnostics if d.severity <= DiagnosticSeverity.Warning]

- if not errors:
+ if fix:
+ fixed_count = 0
+ console.print("[dim]Attempting auto-fixes...[/dim]")
+
+ # We must track processed paths to avoid redundant writes if multiple errors exist
+ processed_paths = set()
+
+ for d in issues:
+ path = d.data.get('path')
+ if not path: continue
+
+ # Read fresh content iteration
+ pass
+
+ # Group diagnostics by file path
+ from collections import defaultdict
+ file_diags = defaultdict(list)
+ for d in issues:
+ if d.data.get('path'):
+ file_diags[d.data['path']].append(d)
+
+ for path, diags in file_diags.items():
+ try:
+ content = path.read_text()
+ new_content = content
+ has_changes = False
+
+ # Parse meta once for the file
+ try:
+ meta = core.parse_issue(path)
+ except Exception:
+ console.print(f"[yellow]Skipping fix for {path.name}: Cannot parse metadata[/yellow]")
+ continue
+
+ # Apply fixes for this file
+ for d in diags:
+ if "Structure Error" in d.message:
+ expected_header = f"## {meta.id}: {meta.title}"
+
+ # Check if strictly present
+ if expected_header in new_content:
+ continue
+
+ # Strategy: Look for existing heading with same ID to replace
+ # Matches: "## ID..." or "## ID ..."
+ # Regex: ^##\s+ID\b.*$
+ # We use meta.id which is safe.
+ heading_regex = re.compile(rf"^##\s+{re.escape(meta.id)}.*$", re.MULTILINE)
+
+ match_existing = heading_regex.search(new_content)
+
+ if match_existing:
+ # Replace existing incorrect heading
+ # We use sub to replace just the first occurrence
+ new_content = heading_regex.sub(expected_header, new_content, count=1)
+ has_changes = True
+ else:
+ # Insert after frontmatter
+ fm_match = re.search(r"^---(.*?)---", new_content, re.DOTALL | re.MULTILINE)
+ if fm_match:
+ end_pos = fm_match.end()
+ header_block = f"\n\n{expected_header}\n"
+ new_content = new_content[:end_pos] + header_block + new_content[end_pos:].lstrip()
+ has_changes = True
+
+ if "Review Requirement: Missing '## Review Comments' section" in d.message:
+ if "## Review Comments" not in new_content:
+ new_content = new_content.rstrip() + "\n\n## Review Comments\n\n- [ ] Self-Review\n"
+ has_changes = True
+
+ if has_changes:
+ path.write_text(new_content)
+ fixed_count += 1
+ console.print(f"[dim]Fixed: {path.name}[/dim]")
+ except Exception as e:
+ console.print(f"[red]Failed to fix {path.name}: {e}[/red]")
+
+ console.print(f"[green]Applied auto-fixes to {fixed_count} files.[/green]")
+
+ # Re-run validation to verify
+ if file_path:
+ # Re-validate single file
+ file = Path(file_path).resolve()
+ meta = core.parse_issue(file)
+ content = file.read_text()
+ validator = IssueValidator(issues_root)
+ diagnostics = validator.validate(meta, content, all_issue_ids)
+ for d in diagnostics:
+ d.source = meta.id
+ d.data = {'path': file}
+ else:
+ diagnostics = check_integrity(issues_root, recursive)
+ issues = [d for d in diagnostics if d.severity <= DiagnosticSeverity.Warning]
+
+ # Output formatting
+ if format == "json":
+ import json
+ from pydantic import RootModel
+ # Use RootModel to export a list of models
+ print(RootModel(issues).model_dump_json(indent=2))
+ if any(d.severity == DiagnosticSeverity.Error for d in issues):
+ raise typer.Exit(code=1)
+ return
+
+ if not issues:
  console.print("[green]✔[/green] Issue integrity check passed. No integrity errors found.")
  else:
- table = Table(title="Issue Integrity Issues", show_header=False, border_style="red")
- for err in errors:
- table.add_row(err)
+ table = Table(title="Issue Integrity Report", show_header=True, header_style="bold magenta", border_style="red")
+ table.add_column("Issue", style="cyan")
+ table.add_column("Severity", justify="center")
+ table.add_column("Line", justify="right", style="dim")
+ table.add_column("Message")
+
+ for d in issues:
+ sev_style = "red" if d.severity == DiagnosticSeverity.Error else "yellow"
+ sev_label = "ERROR" if d.severity == DiagnosticSeverity.Error else "WARN"
+ line_str = str(d.range.start.line + 1) if d.range else "-"
+ table.add_row(
+ d.source or "Unknown",
+ f"[{sev_style}]{sev_label}[/{sev_style}]",
+ line_str,
+ d.message
+ )
+
  console.print(table)
- raise typer.Exit(code=1)
+
+ if any(d.severity == DiagnosticSeverity.Error for d in issues):
+ raise typer.Exit(code=1)
+
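For context on the linter rework above: `check_integrity` now returns LSP-style `Diagnostic` objects instead of formatted strings, and `run_lint` gains `fix`, `format`, and `file_path` parameters. The snippet below is a minimal usage sketch based only on the signatures visible in this diff; it is not shipped with the package, and the `Issues` path is an assumed project layout.

```python
# Minimal sketch (illustrative only): driving the new diagnostics-based
# linter API from monoco/features/issue/linter.py as shown above.
from pathlib import Path

from monoco.core.lsp import DiagnosticSeverity
from monoco.features.issue.linter import check_integrity, run_lint

issues_root = Path("Issues")  # assumed location of the issue tree

# check_integrity() now returns LSP-style Diagnostic objects instead of strings.
diagnostics = check_integrity(issues_root, recursive=True)
errors = [d for d in diagnostics if d.severity == DiagnosticSeverity.Error]
print(f"{len(diagnostics)} diagnostic(s), {len(errors)} error(s)")

# run_lint() wraps the same scan with auto-fix and output formatting;
# it raises typer.Exit(code=1) if errors remain.
run_lint(issues_root, recursive=True, fix=True, format="table")
```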
monoco/features/issue/migration.py
@@ -0,0 +1,134 @@
+ import os
+ import re
+ import yaml
+ import hashlib
+ import secrets
+ from pathlib import Path
+ from typing import List, Dict, Any
+ from datetime import datetime
+ from .models import generate_uid
+
+ # Migration Mappings
+ DIR_MAP = {
+ "STORIES": "Features",
+ "Stories": "Features",
+ "TASKS": "Chores",
+ "Tasks": "Chores",
+ "BUGS": "Fixes",
+ "Bugs": "Fixes",
+ "EPICS": "Epics",
+ "Epics": "Epics",
+ "features": "Features",
+ "chores": "Chores",
+ "fixes": "Fixes",
+ "epics": "Epics"
+ }
+
+ TYPE_MAP = {
+ "story": "feature",
+ "task": "chore",
+ "bug": "fix"
+ }
+
+ ID_PREFIX_MAP = {
+ "STORY": "FEAT",
+ "TASK": "CHORE",
+ "BUG": "FIX"
+ }
+
+ def migrate_issues_directory(issues_dir: Path):
+ """
+ Core migration logic to upgrade an Issues directory to the latest Monoco standard.
+ """
+ if not issues_dir.exists():
+ return
+
+ # 1. Rename Directories
+ for old_name, new_name in DIR_MAP.items():
+ old_path = issues_dir / old_name
+ if old_path.exists():
+ new_path = issues_dir / new_name
+
+ # Case sensitivity check for some filesystems
+ same_inode = False
+ try:
+ if new_path.exists() and os.path.samefile(old_path, new_path):
+ same_inode = True
+ except OSError:
+ pass
+
+ if same_inode:
+ if old_path.name != new_path.name:
+ old_path.rename(new_path)
+ continue
+
+ if new_path.exists():
+ import shutil
+ for item in old_path.iterdir():
+ dest = new_path / item.name
+ if dest.exists() and item.is_dir():
+ for subitem in item.iterdir():
+ shutil.move(str(subitem), str(dest / subitem.name))
+ shutil.rmtree(item)
+ else:
+ shutil.move(str(item), str(dest))
+ shutil.rmtree(old_path)
+ else:
+ old_path.rename(new_path)
+
+ # 2. Rename Files and Update Content
+ for subdir_name in ["Features", "Chores", "Fixes", "Epics"]:
+ subdir = issues_dir / subdir_name
+ if not subdir.exists():
+ continue
+
+ for file_path in subdir.rglob("*.md"):
+ content = file_path.read_text(encoding="utf-8")
+ new_content = content
+
+ # Replace Type in Frontmatter
+ for old_type, new_type in TYPE_MAP.items():
+ new_content = re.sub(rf"^type:\s*{old_type}", f"type: {new_type}", new_content, flags=re.IGNORECASE | re.MULTILINE)
+
+ # Replace ID Prefixes
+ for old_prefix, new_prefix in ID_PREFIX_MAP.items():
+ new_content = new_content.replace(f"[[{old_prefix}-", f"[[{new_prefix}-")
+ new_content = re.sub(rf"^id: {old_prefix}-", f"id: {new_prefix}-", new_content, flags=re.MULTILINE)
+ new_content = re.sub(rf"^parent: {old_prefix}-", f"parent: {new_prefix}-", new_content, flags=re.MULTILINE)
+ new_content = new_content.replace(f"{old_prefix}-", f"{new_prefix}-")
+
+ # Structural Updates (UID, Stage)
+ match = re.search(r"^---(.*?)---", new_content, re.DOTALL | re.MULTILINE)
+ if match:
+ yaml_str = match.group(1)
+ try:
+ data = yaml.safe_load(yaml_str) or {}
+ changed = False
+
+ if 'uid' not in data:
+ data['uid'] = generate_uid()
+ changed = True
+
+ if 'stage' in data and data['stage'] == 'todo':
+ data['stage'] = 'draft'
+ changed = True
+
+ if changed:
+ new_yaml = yaml.dump(data, sort_keys=False, allow_unicode=True)
+ new_content = new_content.replace(match.group(1), "\n" + new_yaml)
+ except yaml.YAMLError:
+ pass
+
+ if new_content != content:
+ file_path.write_text(new_content, encoding="utf-8")
+
+ # Rename File
+ filename = file_path.name
+ new_filename = filename
+ for old_prefix, new_prefix in ID_PREFIX_MAP.items():
+ if filename.startswith(f"{old_prefix}-"):
+ new_filename = filename.replace(f"{old_prefix}-", f"{new_prefix}-", 1)
+ break
+
+ if new_filename != filename:
+ file_path.rename(file_path.parent / new_filename)
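The new `migration.py` module above upgrades a pre-0.2 `Issues` tree in place: legacy `STORIES`/`TASKS`/`BUGS` directories become `Features`/`Chores`/`Fixes`, `STORY-`/`TASK-`/`BUG-` ID prefixes become `FEAT-`/`CHORE-`/`FIX-`, missing `uid` fields are backfilled, and the `todo` stage is mapped to `draft`. A minimal invocation sketch, assuming an `Issues` directory at the repository root:

```python
# Minimal sketch (illustrative only): applying the 0.2.x issue migration.
from pathlib import Path

from monoco.features.issue.migration import migrate_issues_directory

issues_dir = Path(".") / "Issues"   # assumed location of the legacy issue tree

# Renames legacy directories, rewrites ID prefixes and frontmatter types,
# backfills 'uid', maps stage 'todo' -> 'draft', and renames files to match.
migrate_issues_directory(issues_dir)
```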
monoco/features/issue/models.py
@@ -1,5 +1,5 @@
  from enum import Enum
- from typing import List, Optional, Any
+ from typing import List, Optional, Any, Dict
  from pydantic import BaseModel, Field, model_validator
  from datetime import datetime
  import hashlib
@@ -61,7 +61,7 @@ class IssueStatus(str, Enum):
  BACKLOG = "backlog"

  class IssueStage(str, Enum):
- TODO = "todo"
+ DRAFT = "draft"
  DOING = "doing"
  REVIEW = "review"
  DONE = "done"
@@ -83,6 +83,17 @@ class IssueIsolation(BaseModel):
  path: Optional[str] = None # Worktree path (relative to repo root or absolute)
  created_at: datetime = Field(default_factory=current_time)

+ class IssueAction(BaseModel):
+ label: str
+ target_status: Optional[IssueStatus] = None
+ target_stage: Optional[IssueStage] = None
+ target_solution: Optional[IssueSolution] = None
+ icon: Optional[str] = None
+
+ # Generic execution extensions
+ command: Optional[str] = None
+ params: Dict[str, Any] = {}
+
  class IssueMetadata(BaseModel):
  model_config = {"extra": "allow"}

@@ -106,6 +117,10 @@ class IssueMetadata(BaseModel):
  dependencies: List[str] = []
  related: List[str] = []
  tags: List[str] = []
+ path: Optional[str] = None # Absolute path to the issue file
+
+ # Proxy UI Actions (Excluded from file persistence)
+ actions: List[IssueAction] = Field(default=[], exclude=True)


  @model_validator(mode='before')
@@ -122,30 +137,19 @@ class IssueMetadata(BaseModel):
  # Stage normalization
  if "stage" in v and isinstance(v["stage"], str):
  v["stage"] = v["stage"].lower()
+ if v["stage"] == "todo":
+ v["stage"] = "draft"
  return v

  @model_validator(mode='after')
  def validate_lifecycle(self) -> 'IssueMetadata':
  # Logic Definition:
- # status: backlog -> stage: null
+ # status: backlog -> stage: freezed
  # status: closed -> stage: done
- # status: open -> stage: todo | doing | review (default todo)
-
- if self.status == IssueStatus.BACKLOG:
- self.stage = IssueStage.FREEZED
-
- elif self.status == IssueStatus.CLOSED:
- # Enforce stage=done for closed issues
- if self.stage != IssueStage.DONE:
- self.stage = IssueStage.DONE
- # Auto-fill closed_at if missing
- if not self.closed_at:
- self.closed_at = current_time()
+ # status: open -> stage: draft | doing | review | done (default draft)

- elif self.status == IssueStatus.OPEN:
- # Ensure valid stage for open status
- if self.stage is None or self.stage == IssueStage.DONE:
- self.stage = IssueStage.TODO
+ # NOTE: We do NOT auto-correct state here anymore to allow Linter to detect inconsistencies.
+ # Auto-correction should be applied explicitly by 'create' or 'update' commands via core logic.

  return self
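Three model changes stand out in the hunks above: `IssueStage.TODO` is replaced by `IssueStage.DRAFT` (with legacy `"todo"` values normalized to `"draft"` in the `before` validator), a new `IssueAction` model describes proxy UI actions carried on `IssueMetadata.actions` but excluded from persistence, and `validate_lifecycle` no longer auto-corrects status/stage so the linter can surface inconsistencies. A small sketch using only fields visible in this diff; the label and icon values are made up:

```python
# Illustrative sketch of the reworked issue models (example values are made up).
from monoco.features.issue.models import IssueAction, IssueStage, IssueStatus

assert IssueStage.DRAFT.value == "draft"   # replaces the former TODO stage

action = IssueAction(
    label="Start work",                    # required field
    target_status=IssueStatus.OPEN,        # optional transition targets
    target_stage=IssueStage.DOING,
    icon="play",                           # hypothetical icon name
)
print(action.model_dump(exclude_none=True))
```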
monoco/features/issue/monitor.py
@@ -0,0 +1,94 @@
+ import re
+ import asyncio
+ import logging
+ from pathlib import Path
+ from typing import Callable, Awaitable, Any, Optional
+
+ from watchdog.observers import Observer
+ from watchdog.events import FileSystemEventHandler
+
+ logger = logging.getLogger("monoco.features.issue.monitor")
+
+ class IssueEventHandler(FileSystemEventHandler):
+ def __init__(self, loop, on_upsert: Callable[[dict], Awaitable[None]], on_delete: Callable[[dict], Awaitable[None]]):
+ self.loop = loop
+ self.on_upsert = on_upsert
+ self.on_delete = on_delete
+
+ def _process_upsert(self, path_str: str):
+ if not path_str.endswith(".md"):
+ return
+ asyncio.run_coroutine_threadsafe(self._handle_upsert(path_str), self.loop)
+
+ async def _handle_upsert(self, path_str: str):
+ try:
+ from monoco.features.issue.core import parse_issue
+ path = Path(path_str)
+ if not path.exists():
+ return
+ issue = parse_issue(path)
+ if issue:
+ await self.on_upsert(issue.model_dump(mode='json'))
+ except Exception as e:
+ logger.error(f"Error handling upsert for {path_str}: {e}")
+
+ def _process_delete(self, path_str: str):
+ if not path_str.endswith(".md"):
+ return
+ asyncio.run_coroutine_threadsafe(self._handle_delete(path_str), self.loop)
+
+ async def _handle_delete(self, path_str: str):
+ try:
+ filename = Path(path_str).name
+ match = re.match(r"([A-Z]+-\d{4})", filename)
+ if match:
+ issue_id = match.group(1)
+ await self.on_delete({"id": issue_id})
+ except Exception as e:
+ logger.error(f"Error handling delete for {path_str}: {e}")
+
+ def on_created(self, event):
+ if not event.is_directory:
+ self._process_upsert(event.src_path)
+
+ def on_modified(self, event):
+ if not event.is_directory:
+ self._process_upsert(event.src_path)
+
+ def on_deleted(self, event):
+ if not event.is_directory:
+ self._process_delete(event.src_path)
+
+ def on_moved(self, event):
+ if not event.is_directory:
+ self._process_delete(event.src_path)
+ self._process_upsert(event.dest_path)
+
+ class IssueMonitor:
+ """
+ Monitor the Issues directory for changes using Watchdog and trigger callbacks.
+ """
+ def __init__(self, issues_root: Path, on_upsert: Callable[[dict], Awaitable[None]], on_delete: Callable[[dict], Awaitable[None]]):
+ self.issues_root = issues_root
+ self.on_upsert = on_upsert
+ self.on_delete = on_delete
+ self.observer = Observer()
+ self.loop = None
+
+ async def start(self):
+ self.loop = asyncio.get_running_loop()
+ event_handler = IssueEventHandler(self.loop, self.on_upsert, self.on_delete)
+
+ if not self.issues_root.exists():
+ logger.warning(f"Issues root {self.issues_root} does not exist. creating...")
+ self.issues_root.mkdir(parents=True, exist_ok=True)
+
+ self.observer.schedule(event_handler, str(self.issues_root), recursive=True)
+ self.observer.start()
+ logger.info(f"Issue Monitor started (Watchdog). Watching {self.issues_root}")
+
+ def stop(self):
+ if self.observer.is_alive():
+ self.observer.stop()
+ self.observer.join()
+ logger.info(f"Issue Monitor stopped for {self.issues_root}")
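The new `monitor.py` bridges Watchdog's thread-based observer with asyncio: filesystem events on `*.md` files are forwarded to async callbacks via `run_coroutine_threadsafe`, with upserts delivering a full `model_dump(mode='json')` payload and deletions only the issue ID parsed from the filename. A minimal wiring sketch, with everything outside `IssueMonitor` assumed:

```python
# Illustrative sketch: running IssueMonitor inside an asyncio application.
import asyncio
from pathlib import Path

from monoco.features.issue.monitor import IssueMonitor

async def on_upsert(issue: dict) -> None:
    print("upsert:", issue.get("id"))

async def on_delete(payload: dict) -> None:
    print("delete:", payload["id"])

async def main() -> None:
    monitor = IssueMonitor(Path("Issues"), on_upsert, on_delete)
    await monitor.start()        # schedules the watchdog observer on the running loop
    try:
        await asyncio.sleep(60)  # placeholder; a real daemon would serve requests here
    finally:
        monitor.stop()

asyncio.run(main())
```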