monoco-toolkit 0.1.1__py3-none-any.whl → 0.2.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76)
  1. monoco/cli/__init__.py +0 -0
  2. monoco/cli/project.py +87 -0
  3. monoco/cli/workspace.py +46 -0
  4. monoco/core/agent/__init__.py +5 -0
  5. monoco/core/agent/action.py +144 -0
  6. monoco/core/agent/adapters.py +129 -0
  7. monoco/core/agent/protocol.py +31 -0
  8. monoco/core/agent/state.py +106 -0
  9. monoco/core/config.py +212 -17
  10. monoco/core/execution.py +62 -0
  11. monoco/core/feature.py +58 -0
  12. monoco/core/git.py +51 -2
  13. monoco/core/injection.py +196 -0
  14. monoco/core/integrations.py +242 -0
  15. monoco/core/lsp.py +68 -0
  16. monoco/core/output.py +21 -3
  17. monoco/core/registry.py +36 -0
  18. monoco/core/resources/en/AGENTS.md +8 -0
  19. monoco/core/resources/en/SKILL.md +66 -0
  20. monoco/core/resources/zh/AGENTS.md +8 -0
  21. monoco/core/resources/zh/SKILL.md +65 -0
  22. monoco/core/setup.py +96 -110
  23. monoco/core/skills.py +444 -0
  24. monoco/core/state.py +53 -0
  25. monoco/core/sync.py +224 -0
  26. monoco/core/telemetry.py +4 -1
  27. monoco/core/workspace.py +85 -20
  28. monoco/daemon/app.py +127 -58
  29. monoco/daemon/models.py +4 -0
  30. monoco/daemon/services.py +56 -155
  31. monoco/features/config/commands.py +125 -44
  32. monoco/features/i18n/adapter.py +29 -0
  33. monoco/features/i18n/commands.py +89 -10
  34. monoco/features/i18n/core.py +113 -27
  35. monoco/features/i18n/resources/en/AGENTS.md +8 -0
  36. monoco/features/i18n/resources/en/SKILL.md +94 -0
  37. monoco/features/i18n/resources/zh/AGENTS.md +8 -0
  38. monoco/features/i18n/resources/zh/SKILL.md +94 -0
  39. monoco/features/issue/adapter.py +34 -0
  40. monoco/features/issue/commands.py +343 -101
  41. monoco/features/issue/core.py +384 -150
  42. monoco/features/issue/domain/__init__.py +0 -0
  43. monoco/features/issue/domain/lifecycle.py +126 -0
  44. monoco/features/issue/domain/models.py +170 -0
  45. monoco/features/issue/domain/parser.py +223 -0
  46. monoco/features/issue/domain/workspace.py +104 -0
  47. monoco/features/issue/engine/__init__.py +22 -0
  48. monoco/features/issue/engine/config.py +172 -0
  49. monoco/features/issue/engine/machine.py +185 -0
  50. monoco/features/issue/engine/models.py +18 -0
  51. monoco/features/issue/linter.py +325 -120
  52. monoco/features/issue/lsp/__init__.py +3 -0
  53. monoco/features/issue/lsp/definition.py +72 -0
  54. monoco/features/issue/migration.py +134 -0
  55. monoco/features/issue/models.py +46 -24
  56. monoco/features/issue/monitor.py +94 -0
  57. monoco/features/issue/resources/en/AGENTS.md +20 -0
  58. monoco/features/issue/resources/en/SKILL.md +111 -0
  59. monoco/features/issue/resources/zh/AGENTS.md +20 -0
  60. monoco/features/issue/resources/zh/SKILL.md +138 -0
  61. monoco/features/issue/validator.py +455 -0
  62. monoco/features/spike/adapter.py +30 -0
  63. monoco/features/spike/commands.py +45 -24
  64. monoco/features/spike/core.py +6 -40
  65. monoco/features/spike/resources/en/AGENTS.md +7 -0
  66. monoco/features/spike/resources/en/SKILL.md +74 -0
  67. monoco/features/spike/resources/zh/AGENTS.md +7 -0
  68. monoco/features/spike/resources/zh/SKILL.md +74 -0
  69. monoco/main.py +91 -2
  70. monoco_toolkit-0.2.8.dist-info/METADATA +136 -0
  71. monoco_toolkit-0.2.8.dist-info/RECORD +83 -0
  72. monoco_toolkit-0.1.1.dist-info/METADATA +0 -93
  73. monoco_toolkit-0.1.1.dist-info/RECORD +0 -33
  74. {monoco_toolkit-0.1.1.dist-info → monoco_toolkit-0.2.8.dist-info}/WHEEL +0 -0
  75. {monoco_toolkit-0.1.1.dist-info → monoco_toolkit-0.2.8.dist-info}/entry_points.txt +0 -0
  76. {monoco_toolkit-0.1.1.dist-info → monoco_toolkit-0.2.8.dist-info}/licenses/LICENSE +0 -0
@@ -1,68 +1,54 @@
- from typing import List, Optional, Tuple, Set
+ from typing import List, Optional, Set, Tuple
  from pathlib import Path
  from rich.console import Console
  from rich.table import Table
  import typer
-
+ import re
+ from monoco.core import git
  from . import core
- from .models import IssueStatus, IssueStage
+ from .validator import IssueValidator
+ from monoco.core.lsp import Diagnostic, DiagnosticSeverity

  console = Console()

-
- def validate_issue(path: Path, meta: core.IssueMetadata, all_issue_ids: Set[str] = set(), issues_root: Optional[Path] = None) -> List[str]:
+ def check_environment_policy(project_root: Path):
  """
- Validate a single issue's integrity.
+ Guardrail: Prevent direct modifications on protected branches (main/master).
  """
- errors = []
-
- # A. Directory/Status Consistency
- expected_status = meta.status.value
- path_parts = path.parts
- # We might be validating a temp file, so we skip path check if it's not in the tree?
- # Or strict check? For "Safe Edit", the file might be in a temp dir.
- # So we probably only care about content/metadata integrity.
-
- # But wait, if we overwrite the file, it MUST be valid.
- # Let's assume the validation is about the content itself (metadata logic).
-
- # B. Solution Compliance
- if meta.status == IssueStatus.CLOSED and not meta.solution:
- errors.append(f"[red]Solution Missing:[/red] {meta.id} is closed but has no [dim]solution[/dim] field.")
-
- # C. Link Integrity
- if meta.parent:
- if all_issue_ids and meta.parent not in all_issue_ids:
- # Check workspace (fallback)
- found = False
- if issues_root:
- if core.find_issue_path(issues_root, meta.parent):
- found = True
-
- if not found:
- errors.append(f"[red]Broken Link:[/red] {meta.id} refers to non-existent parent [bold]{meta.parent}[/bold].")
+ # Only enforce if it is a git repo
+ try:
+ if not git.is_git_repo(project_root):
+ return

- # D. Lifecycle Guard (Backlog)
- if meta.status == IssueStatus.BACKLOG and meta.stage != IssueStage.FREEZED:
- errors.append(f"[red]Lifecycle Error:[/red] {meta.id} is backlog but stage is not [bold]freezed[/bold] (found: {meta.stage}).")
+ current_branch = git.get_current_branch(project_root)
+ # Standard protected branches
+ if current_branch in ["main", "master", "production"]:
+ # Check if dirty (uncommitted changes)
+ changed_files = git.get_git_status(project_root)
+ if changed_files:
+ console.print(f"\n[bold red]🛑 Environment Policy Violation[/bold red]")
+ console.print(f"You are modifying code directly on protected branch: [bold cyan]{current_branch}[/bold cyan]")
+ console.print(f"Found {len(changed_files)} uncommitted changes.")
+ console.print(f"[yellow]Action Required:[/yellow] Please stash your changes and switch to a feature branch.")
+ console.print(f" > git stash")
+ console.print(f" > monoco issue start <ID> --branch")
+ console.print(f" > git stash pop")
+ raise typer.Exit(code=1)
+ except Exception:
+ # Fail safe: Do not block linting if git check fails unexpectedly
+ pass

- return errors
-
- def check_integrity(issues_root: Path, recursive: bool = False) -> List[str]:
+ def check_integrity(issues_root: Path, recursive: bool = False) -> List[Diagnostic]:
  """
- Verify the integrity of the Issues directory.
- Returns a list of error messages.
-
- If recursive=True, performs workspace-level validation including:
- - Cross-project ID collision detection
- - Cross-project UID collision detection
+ Verify the integrity of the Issues directory using LSP Validator.
  """
- errors = []
- all_issue_ids = set() # For parent reference validation (includes namespaced IDs)
- id_to_projects = {} # local_id -> [(project_name, meta, file)]
- all_uids = {} # uid -> (project, issue_id)
+ diagnostics = []
+ validator = IssueValidator(issues_root)
+
+ all_issue_ids = set()
  all_issues = []

+ # 1. Collection Phase (Build Index)
  # Helper to collect issues from a project
  def collect_project_issues(project_issues_root: Path, project_name: str = "local"):
  project_issues = []
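The `check_environment_policy` guardrail above calls three helpers on `monoco.core.git` (`is_git_repo`, `get_current_branch`, `get_git_status`) whose implementations are not part of this hunk (the `monoco/core/git.py` changes appear elsewhere in the release). The following is only a minimal subprocess-based sketch of what the guardrail assumes those helpers do; the bodies are illustrative assumptions, not the package's actual code:

```python
# Illustrative sketch only: monoco/core/git.py is not shown in this diff,
# so these bodies are assumptions consistent with how the guardrail calls them.
import subprocess
from pathlib import Path
from typing import List


def is_git_repo(root: Path) -> bool:
    # Treat the directory as a repo if `git rev-parse` succeeds inside it.
    result = subprocess.run(
        ["git", "-C", str(root), "rev-parse", "--is-inside-work-tree"],
        capture_output=True, text=True,
    )
    return result.returncode == 0 and result.stdout.strip() == "true"


def get_current_branch(root: Path) -> str:
    # Name of the checked-out branch, e.g. "main".
    result = subprocess.run(
        ["git", "-C", str(root), "rev-parse", "--abbrev-ref", "HEAD"],
        capture_output=True, text=True, check=True,
    )
    return result.stdout.strip()


def get_git_status(root: Path) -> List[str]:
    # One entry per changed file, from `git status --porcelain`.
    result = subprocess.run(
        ["git", "-C", str(root), "status", "--porcelain"],
        capture_output=True, text=True, check=True,
    )
    return [line for line in result.stdout.splitlines() if line.strip()]
```

Under that reading, any non-empty porcelain output counts as uncommitted changes, which is the condition the guardrail treats as a dirty protected branch.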
@@ -79,94 +65,313 @@ def check_integrity(issues_root: Path, recursive: bool = False) -> List[str]:
  meta = core.parse_issue(f)
  if meta:
  local_id = meta.id
- full_id = f"{project_name}::{local_id}" if project_name != "local" else local_id
-
- # Track ID occurrences per project
- if local_id not in id_to_projects:
- id_to_projects[local_id] = []
- id_to_projects[local_id].append((project_name, meta, f))
+ full_id = f"{project_name}::{local_id}"

- # Add IDs for reference validation
- all_issue_ids.add(local_id) # Local ID
- if project_name != "local":
- all_issue_ids.add(full_id) # Namespaced ID
+ all_issue_ids.add(local_id)
+ all_issue_ids.add(full_id)

- # Check UID collision (if UID exists)
- if meta.uid:
- if meta.uid in all_uids:
- existing_project, existing_id = all_uids[meta.uid]
- errors.append(
- f"[red]UID Collision:[/red] UID {meta.uid} is duplicated.\n"
- f" - {existing_project}::{existing_id}\n"
- f" - {project_name}::{local_id}"
- )
- else:
- all_uids[meta.uid] = (project_name, local_id)
-
- project_issues.append((f, meta, project_name))
+ project_issues.append((f, meta))
  return project_issues

- # 1. Collect local issues
- all_issues.extend(collect_project_issues(issues_root, "local"))
+ from monoco.core.config import get_config
+ conf = get_config(str(issues_root.parent))
+
+ # Identify local project name
+ local_project_name = "local"
+ if conf and conf.project and conf.project.name:
+ local_project_name = conf.project.name.lower()

- # 2. If recursive, collect workspace member issues
+ # Find Topmost Workspace Root
+ workspace_root = issues_root.parent
+ for parent in [workspace_root] + list(workspace_root.parents):
+ if (parent / ".monoco" / "workspace.yaml").exists() or (parent / ".monoco" / "project.yaml").exists():
+ workspace_root = parent
+
+ # Collect from local issues_root
+ all_issues.extend(collect_project_issues(issues_root, local_project_name))
+
  if recursive:
  try:
- from monoco.core.config import get_config
- project_root = issues_root.parent
- conf = get_config(str(project_root))
+ # Re-read config from workspace root to get all members
+ ws_conf = get_config(str(workspace_root))

- for member_name, rel_path in conf.project.members.items():
- member_root = (project_root / rel_path).resolve()
+ # Index Root project if different from current
+ if workspace_root != issues_root.parent:
+ root_issues_dir = workspace_root / "Issues"
+ if root_issues_dir.exists():
+ all_issues.extend(collect_project_issues(root_issues_dir, ws_conf.project.name.lower()))
+
+ # Index all members
+ for member_name, rel_path in ws_conf.project.members.items():
+ member_root = (workspace_root / rel_path).resolve()
  member_issues_dir = member_root / "Issues"
-
- if member_issues_dir.exists():
- all_issues.extend(collect_project_issues(member_issues_dir, member_name))
- except Exception as e:
- # Fail gracefully if workspace config is missing
+ if member_issues_dir.exists() and member_issues_dir != issues_root:
+ all_issues.extend(collect_project_issues(member_issues_dir, member_name.lower()))
+ except Exception:
  pass

- # 3. Check for ID collisions within same project
- for local_id, occurrences in id_to_projects.items():
- # Group by project
- projects_with_id = {}
- for project_name, meta, f in occurrences:
- if project_name not in projects_with_id:
- projects_with_id[project_name] = []
- projects_with_id[project_name].append((meta, f))
+ # 2. Validation Phase
+ for path, meta in all_issues:
+ content = path.read_text() # Re-read content for validation

- # Check for duplicates within same project
- for project_name, metas in projects_with_id.items():
- if len(metas) > 1:
- # Same ID appears multiple times in same project - this is an error
- error_msg = f"[red]ID Collision:[/red] {local_id} appears {len(metas)} times in project '{project_name}':\n"
- for idx, (meta, f) in enumerate(metas, 1):
- error_msg += f" {idx}. uid: {meta.uid or 'N/A'} | created: {meta.created_at} | stage: {meta.stage} | status: {meta.status.value}\n"
- error_msg += f" [yellow]→ Action:[/yellow] Remove duplicate or use 'monoco issue move --to <target> --renumber' to resolve."
- errors.append(error_msg)
-
- # 4. Validation
- for path, meta, project_name in all_issues:
- # A. Directory/Status Consistency (Only check this for files in the tree)
- expected_status = meta.status.value
- path_parts = path.parts
- if expected_status not in path_parts:
- errors.append(f"[yellow]Placement Error:[/yellow] {meta.id} has status [cyan]{expected_status}[/cyan] but is not under a [dim]{expected_status}/[/dim] directory.")
+ # A. Run Core Validator
+ file_diagnostics = validator.validate(meta, content, all_issue_ids)

- # Reuse common logic
- errors.extend(validate_issue(path, meta, all_issue_ids, issues_root))
+ # Add context to diagnostics (Path)
+ for d in file_diagnostics:
+ d.source = f"{meta.id}" # Use ID as source context
+ d.data = {'path': path} # Attach path for potential fixers
+ diagnostics.append(d)
+
+ return diagnostics

- return errors

+ def run_lint(issues_root: Path, recursive: bool = False, fix: bool = False, format: str = "table", file_path: Optional[str] = None):
+ """
+ Run lint with optional auto-fix and format selection.
+
+ Args:
+ issues_root: Root directory of issues
+ recursive: Recursively scan workspace members
+ fix: Apply auto-fixes
+ format: Output format (table, json)
+ file_path: Optional path to a single file to validate (LSP mode)
+ """
+ # 0. Environment Policy Check (Guardrail)
+ # We assume issues_root.parent is the project root or close enough for git context
+ check_environment_policy(issues_root.parent)

- def run_lint(issues_root: Path, recursive: bool = False):
- errors = check_integrity(issues_root, recursive)
+ # Single-file mode (for LSP integration)
+ if file_path:
+ file = Path(file_path).resolve()
+ if not file.exists():
+ console.print(f"[red]Error:[/red] File not found: {file_path}")
+ raise typer.Exit(code=1)
+
+ # Parse and validate single file
+ try:
+ meta = core.parse_issue(file)
+ if not meta:
+ console.print(f"[red]Error:[/red] Failed to parse issue metadata from {file_path}")
+ raise typer.Exit(code=1)
+
+ content = file.read_text()
+ validator = IssueValidator(issues_root)
+
+ # For single-file mode, we need to build a minimal index
+ # We'll scan the entire workspace to get all issue IDs for reference validation
+ all_issue_ids = set()
+ for subdir in ["Epics", "Features", "Chores", "Fixes"]:
+ d = issues_root / subdir
+ if d.exists():
+ for status in ["open", "closed", "backlog"]:
+ status_dir = d / status
+ if status_dir.exists():
+ for f in status_dir.rglob("*.md"):
+ try:
+ m = core.parse_issue(f)
+ if m:
+ all_issue_ids.add(m.id)
+ except Exception:
+ pass
+
+ diagnostics = validator.validate(meta, content, all_issue_ids)
+
+ # Add context
+ for d in diagnostics:
+ d.source = meta.id
+ d.data = {'path': file}
+
+ except Exception as e:
+ console.print(f"[red]Error:[/red] Validation failed: {e}")
+ raise typer.Exit(code=1)
+ else:
+ # Full workspace scan mode
+ diagnostics = check_integrity(issues_root, recursive)
+
+ # Filter only Warnings and Errors
+ issues = [d for d in diagnostics if d.severity <= DiagnosticSeverity.Warning]

- if not errors:
+ if fix:
+ fixed_count = 0
+ console.print("[dim]Attempting auto-fixes...[/dim]")
+
+ # We must track processed paths to avoid redundant writes if multiple errors exist
+ processed_paths = set()
+
+ for d in issues:
+ path = d.data.get('path')
+ if not path: continue
+
+ # Read fresh content iteration
+ pass
+
+ # Group diagnostics by file path
+ from collections import defaultdict
+ file_diags = defaultdict(list)
+ for d in issues:
+ if d.data.get('path'):
+ file_diags[d.data['path']].append(d)
+
+ for path, diags in file_diags.items():
+ try:
+ content = path.read_text()
+ new_content = content
+ has_changes = False
+
+ # Parse meta once for the file
+ try:
+ meta = core.parse_issue(path)
+ except Exception:
+ console.print(f"[yellow]Skipping fix for {path.name}: Cannot parse metadata[/yellow]")
+ continue
+
+ # Apply fixes for this file
+ for d in diags:
+ if "Structure Error" in d.message:
+ expected_header = f"## {meta.id}: {meta.title}"
+
+ # Check if strictly present
+ if expected_header in new_content:
+ continue
+
+ # Strategy: Look for existing heading with same ID to replace
+ # Matches: "## ID..." or "## ID ..."
+ # Regex: ^##\s+ID\b.*$
+ # We use meta.id which is safe.
+ heading_regex = re.compile(rf"^##\s+{re.escape(meta.id)}.*$", re.MULTILINE)
+
+ match_existing = heading_regex.search(new_content)
+
+ if match_existing:
+ # Replace existing incorrect heading
+ # We use sub to replace just the first occurrence
+ new_content = heading_regex.sub(expected_header, new_content, count=1)
+ has_changes = True
+ else:
+ # Insert after frontmatter
+ fm_match = re.search(r"^---(.*?)---", new_content, re.DOTALL | re.MULTILINE)
+ if fm_match:
+ end_pos = fm_match.end()
+ header_block = f"\n\n{expected_header}\n"
+ new_content = new_content[:end_pos] + header_block + new_content[end_pos:].lstrip()
+ has_changes = True
+
+ if "Review Requirement: Missing '## Review Comments' section" in d.message:
+ if "## Review Comments" not in new_content:
+ new_content = new_content.rstrip() + "\n\n## Review Comments\n\n- [ ] Self-Review\n"
+ has_changes = True
+
+ if "Malformed ID" in d.message:
+ lines = new_content.splitlines()
+ if d.range and d.range.start.line < len(lines):
+ line_idx = d.range.start.line
+ line = lines[line_idx]
+ # Remove # from quoted strings or raw values
+ new_line = line.replace("'#", "'").replace('"#', '"')
+ if new_line != line:
+ lines[line_idx] = new_line
+ new_content = "\n".join(lines) + "\n"
+ has_changes = True
+
+ if "Tag Check: Missing required context tags" in d.message:
+ # Extract missing tags from message
+ # Message format: "Tag Check: Missing required context tags: #TAG1, #TAG2"
+ try:
+ parts = d.message.split(": ")
+ if len(parts) >= 3:
+ tags_str = parts[-1]
+ missing_tags = [t.strip() for t in tags_str.split(",")]
+
+ # We need to update content via core.update_issue logic effectively
+ # But we are in a loop potentially with other string edits.
+ # IMPORTANT: Mixed strategy (Regex vs Object Update) is risky.
+ # However, tags are in YAML frontmatter.
+ # Since we might have modified new_content already (string), using core.update_issue on file is dangerous (race condition with memory).
+ # Better to append to tags list in YAML via regex or yaml parser on new_content.
+
+ # Parsing Frontmatter from new_content
+ fm_match = re.search(r"^---(.*?)---", new_content, re.DOTALL | re.MULTILINE)
+ if fm_match:
+ import yaml
+ fm_text = fm_match.group(1)
+ data = yaml.safe_load(fm_text) or {}
+ current_tags = data.get('tags', [])
+ if not isinstance(current_tags, list): current_tags = []
+
+ # Add missing
+ updated_tags = sorted(list(set(current_tags) | set(missing_tags)))
+ data['tags'] = updated_tags
+
+ # Dump back
+ new_fm_text = yaml.dump(data, sort_keys=False, allow_unicode=True)
+
+ # Replace FM block
+ new_content = new_content.replace(fm_match.group(1), "\n" + new_fm_text)
+ has_changes = True
+ except Exception as ex:
+ console.print(f"[red]Failed to fix tags: {ex}[/red]")
+
+ if has_changes:
+ path.write_text(new_content)
+ fixed_count += 1
+ console.print(f"[dim]Fixed: {path.name}[/dim]")
+ except Exception as e:
+ console.print(f"[red]Failed to fix {path.name}: {e}[/red]")
+
+ console.print(f"[green]Applied auto-fixes to {fixed_count} files.[/green]")
+
+ # Re-run validation to verify
+ if file_path:
+ # Re-validate single file
+ file = Path(file_path).resolve()
+ meta = core.parse_issue(file)
+ content = file.read_text()
+ validator = IssueValidator(issues_root)
+ diagnostics = validator.validate(meta, content, all_issue_ids)
+ for d in diagnostics:
+ d.source = meta.id
+ d.data = {'path': file}
+ else:
+ diagnostics = check_integrity(issues_root, recursive)
+ issues = [d for d in diagnostics if d.severity <= DiagnosticSeverity.Warning]
+
+ # Output formatting
+ if format == "json":
+ import json
+ from pydantic import RootModel
+ # Use RootModel to export a list of models
+ print(RootModel(issues).model_dump_json(indent=2))
+ if any(d.severity == DiagnosticSeverity.Error for d in issues):
+ raise typer.Exit(code=1)
+ return
+
+ if not issues:
  console.print("[green]✔[/green] Issue integrity check passed. No integrity errors found.")
  else:
- table = Table(title="Issue Integrity Issues", show_header=False, border_style="red")
- for err in errors:
- table.add_row(err)
+ table = Table(title="Issue Integrity Report", show_header=True, header_style="bold magenta", border_style="red")
+ table.add_column("Issue", style="cyan")
+ table.add_column("Severity", justify="center")
+ table.add_column("Line", justify="right", style="dim")
+ table.add_column("Message")
+
+ for d in issues:
+ sev_style = "red" if d.severity == DiagnosticSeverity.Error else "yellow"
+ sev_label = "ERROR" if d.severity == DiagnosticSeverity.Error else "WARN"
+ line_str = str(d.range.start.line + 1) if d.range else "-"
+ table.add_row(
+ d.source or "Unknown",
+ f"[{sev_style}]{sev_label}[/{sev_style}]",
+ line_str,
+ d.message
+ )
+
  console.print(table)
- raise typer.Exit(code=1)
+
+ if any(d.severity == DiagnosticSeverity.Error for d in issues):
+ console.print("\n[yellow]Tip: Run 'monoco issue lint --fix' to attempt automatic repairs.[/yellow]")
+ raise typer.Exit(code=1)
+
+ if issues:
+ console.print("\n[yellow]Tip: Run 'monoco issue lint --fix' to attempt automatic repairs.[/yellow]")
+
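With this release, `check_integrity` returns structured `Diagnostic` objects instead of pre-rendered strings, and `run_lint` keeps only items where `d.severity <= DiagnosticSeverity.Warning`. That comparison relies on LSP-style numeric severities, where Error=1, Warning=2, Information=3, Hint=4, so a lower value means more severe. The actual `monoco/core/lsp.py` is not shown in this diff; the sketch below is only an assumed shape consistent with how the linter uses these types (pydantic-style models would also explain the `RootModel(issues)` JSON export in the new `run_lint`):

```python
# Assumed shapes for monoco/core/lsp.py (not part of this hunk); the real
# module ships +68 lines elsewhere in this release.
from enum import IntEnum
from typing import Optional

from pydantic import BaseModel


class DiagnosticSeverity(IntEnum):
    # LSP convention: lower value = more severe.
    Error = 1
    Warning = 2
    Information = 3
    Hint = 4


class Position(BaseModel):
    line: int
    character: int


class Range(BaseModel):
    start: Position
    end: Position


class Diagnostic(BaseModel):
    message: str
    severity: DiagnosticSeverity = DiagnosticSeverity.Error
    range: Optional[Range] = None
    source: Optional[str] = None
    data: Optional[dict] = None


# Because Error (1) <= Warning (2), the linter's filter keeps errors and warnings
# while dropping Information and Hint diagnostics.
assert DiagnosticSeverity.Error <= DiagnosticSeverity.Warning
```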
@@ -0,0 +1,3 @@
+ from .definition import DefinitionProvider
+
+ __all__ = ["DefinitionProvider"]
@@ -0,0 +1,72 @@
+ from pathlib import Path
+ from typing import Optional, List
+ from monoco.core.lsp import Location, Position, Range
+ from ..domain.parser import MarkdownParser
+ from ..domain.workspace import WorkspaceSymbolIndex, IssueLocation
+
+ class DefinitionProvider:
+ def __init__(self, workspace_root: Path):
+ self.workspace_root = workspace_root
+ self.index = WorkspaceSymbolIndex(workspace_root)
+ # Lazy indexing handled by the index class itself
+
+ def provide_definition(self, file_path: Path, position: Position) -> List[Location]:
+ """
+ Resolve definition at the given position in the file.
+ """
+ if not file_path.exists():
+ return []
+
+ content = file_path.read_text()
+
+ # 1. Parse the document to find spans
+ # We only need to find the span at the specific line
+ issue = MarkdownParser.parse(content, path=str(file_path))
+
+ target_span = None
+ for block in issue.body.blocks:
+ # Check if position is within block
+ # Note: block.line_start is inclusive, line_end is exclusive for content
+ if block.line_start <= position.line < block.line_end:
+ for span in block.spans:
+ if span.range.start.line == position.line:
+ # Check character range
+ if span.range.start.character <= position.character <= span.range.end.character:
+ target_span = span
+ break
+ if target_span:
+ break
+
+ if not target_span:
+ return []
+
+ # 2. Resolve based on span type
+ if target_span.type in ["wikilink", "issue_id"]:
+ issue_id = target_span.metadata.get("issue_id")
+ if issue_id:
+ # Resolve using Workspace Index
+ location = self.index.resolve(issue_id, context_project=self._get_context_project(file_path))
+ if location:
+ return [
+ Location(
+ uri=f"file://{location.file_path}",
+ range=Range(
+ start=Position(line=0, character=0),
+ end=Position(line=0, character=0)
+ )
+ )
+ ]
+
+ return []
+
+ def _get_context_project(self, file_path: Path) -> Optional[str]:
+ # Simple heuristic: look for parent directory name if it's a known project structure?
+ # Or rely on configuration.
+ # For now, let's assume the index handles context if passed, or we pass None.
+ # Actually resolving context project from file path is tricky without config loaded for that specific root.
+ # Let's try to deduce from path relative to workspace root.
+ try:
+ rel = file_path.relative_to(self.workspace_root)
+ return rel.parts[0] # First dir is likely project name in a workspace
+ except ValueError:
+ return "local"
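Together with the `__init__.py` re-export above, the provider needs only a workspace root and a cursor position, and returns either an empty list or a single `Location` pointing at the top of the target issue file. A hypothetical usage sketch follows; the workspace path, file name, and cursor coordinates are made-up examples, not values from this package:

```python
# Hypothetical usage of DefinitionProvider; paths and coordinates are examples only.
from pathlib import Path

from monoco.core.lsp import Position
from monoco.features.issue.lsp import DefinitionProvider

workspace = Path("/path/to/workspace")  # assumed workspace root
provider = DefinitionProvider(workspace)

# Cursor resting on a wikilink or issue ID, using 0-based LSP coordinates.
locations = provider.provide_definition(
    workspace / "Issues" / "Features" / "open" / "FEAT-0001.md",  # example file
    Position(line=12, character=8),
)

for loc in locations:
    # Each Location carries a file:// URI and a (currently top-of-file) range.
    print(loc.uri, loc.range.start.line, loc.range.start.character)
```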