monoco-toolkit 0.2.2__py3-none-any.whl → 0.2.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- monoco/cli/__init__.py +0 -0
- monoco/cli/project.py +79 -0
- monoco/cli/workspace.py +38 -0
- monoco/core/agent/state.py +32 -43
- monoco/core/config.py +46 -20
- monoco/core/integrations.py +53 -0
- monoco/core/lsp.py +61 -0
- monoco/core/setup.py +87 -124
- monoco/core/workspace.py +15 -9
- monoco/features/issue/commands.py +48 -1
- monoco/features/issue/core.py +78 -14
- monoco/features/issue/linter.py +215 -116
- monoco/features/issue/models.py +2 -15
- monoco/features/issue/resources/en/AGENTS.md +7 -1
- monoco/features/issue/resources/en/SKILL.md +39 -3
- monoco/features/issue/resources/zh/AGENTS.md +8 -2
- monoco/features/issue/resources/zh/SKILL.md +32 -3
- monoco/features/issue/validator.py +246 -0
- monoco/main.py +54 -4
- {monoco_toolkit-0.2.2.dist-info → monoco_toolkit-0.2.3.dist-info}/METADATA +1 -1
- {monoco_toolkit-0.2.2.dist-info → monoco_toolkit-0.2.3.dist-info}/RECORD +24 -19
- {monoco_toolkit-0.2.2.dist-info → monoco_toolkit-0.2.3.dist-info}/WHEEL +0 -0
- {monoco_toolkit-0.2.2.dist-info → monoco_toolkit-0.2.3.dist-info}/entry_points.txt +0 -0
- {monoco_toolkit-0.2.2.dist-info → monoco_toolkit-0.2.3.dist-info}/licenses/LICENSE +0 -0
monoco/core/setup.py
CHANGED
@@ -2,6 +2,7 @@ import os
 import subprocess
 import yaml
 from pathlib import Path
+from typing import Optional
 import typer
 from rich.console import Console
 from monoco.core.output import print_output
@@ -107,12 +108,18 @@ def ask_with_selection(message: str, default: str) -> str:
 def init_cli(
     ctx: typer.Context,
     global_only: bool = typer.Option(False, "--global", help="Only configure global user settings"),
-    project_only: bool = typer.Option(False, "--project", help="Only configure current project")
+    project_only: bool = typer.Option(False, "--project", help="Only configure current project"),
+    # Non-interactive arguments
+    name: Optional[str] = typer.Option(None, "--name", "-n", help="Project Name"),
+    key: Optional[str] = typer.Option(None, "--key", "-k", help="Project Key"),
+    author: Optional[str] = typer.Option(None, "--author", "-a", help="Author Name"),
+    telemetry: Optional[bool] = typer.Option(None, "--telemetry/--no-telemetry", help="Enable/Disable telemetry")
 ):
     """
     Initialize Monoco configuration (Global and/or Project).
     """
-
+    # Force non-interactive for now as requested
+    interactive = False
 
     home_dir = Path.home() / ".monoco"
     global_config_path = home_dir / "config.yaml"
@@ -126,9 +133,31 @@ def init_cli(
     home_dir.mkdir(parents=True, exist_ok=True)
 
     default_author = get_git_user() or os.getenv("USER", "developer")
-    author = ask_with_selection("Your Name (for issue tracking)", default_author)
 
-
+    if author is None:
+        if interactive:
+            author = ask_with_selection("Your Name (for issue tracking)", default_author)
+        else:
+            # Fallback or Error?
+            # For global author, we can use default if not provided, or error?
+            # User said "Directly error saying what field is missing"
+            # But author has a reasonable default. Let's try to use default if available, else error.
+            if not default_author:
+                console.print("[red]Error:[/red] Missing required field: --author")
+                raise typer.Exit(code=1)
+            author = default_author
+
+    if telemetry is None:
+        if interactive:
+            from rich.prompt import Confirm
+            telemetry = Confirm.ask("Enable anonymous telemetry to help improve Monoco?", default=True)
+        else:
+            # Default to True or False? Let's default to False for non-interactive safety or True?
+            # Usually explicit is better. Let's assume False if not specified in non-interactive.
+            # Or maybe we just skip it if not provided?
+            # Let's check user intent: "Report what field is missing".
+            # Telemetry is optional. Let's set it to False if missing.
+            telemetry = False
 
     user_config = {
         "core": {
@@ -136,7 +165,7 @@ def init_cli(
             # Editor is handled by env/config defaults, no need to prompt
         },
         "telemetry": {
-            "enabled":
+            "enabled": telemetry
         }
     }
 
@@ -151,30 +180,60 @@ def init_cli(
     # --- 2. Project Configuration ---
     cwd = Path.cwd()
     project_config_dir = cwd / ".monoco"
-
+    workspace_config_path = project_config_dir / "workspace.yaml"
+    project_config_path = project_config_dir / "project.yaml"
 
     # Check if we should init project
-    if project_config_path.exists():
-        if
-
-
+    if workspace_config_path.exists() or project_config_path.exists():
+        if interactive:
+            from rich.prompt import Confirm
+            if not Confirm.ask(f"Project/Workspace config already exists in [dim]{project_config_dir}[/dim]. Overwrite?"):
+                console.print("[yellow]Skipping project initialization.[/yellow]")
+                return
+        else:
+            console.print(f"[yellow]Project/Workspace config already exists in {project_config_dir}. Use manual edit or delete it to re-init.[/yellow]")
+            return
 
     console.rule("[bold blue]Project Setup[/bold blue]")
 
     default_name = cwd.name
-
+
+    if name is None:
+        if interactive:
+            name = ask_with_selection("Project Name", default_name)
+        else:
+            console.print("[red]Error:[/red] Missing required field: --name")
+            raise typer.Exit(code=1)
+
+    project_name = name
 
     default_key = generate_key(project_name)
-
+
+    if key is None:
+        if interactive:
+            key = ask_with_selection("Project Key (prefix for issues)", default_key)
+        else:
+            console.print("[red]Error:[/red] Missing required field: --key")
+            raise typer.Exit(code=1)
+
+    project_key = key
 
 
     project_config_dir.mkdir(exist_ok=True)
 
+    # 2a. Create project.yaml (Identity)
     project_config = {
         "project": {
             "name": project_name,
             "key": project_key
-        }
+        }
+    }
+
+    with open(project_config_path, "w") as f:
+        yaml.dump(project_config, f, default_flow_style=False)
+
+    # 2b. Create workspace.yaml (Environment)
+    workspace_config = {
         "paths": {
             "issues": "Issues",
             "spikes": ".references",
@@ -182,120 +241,24 @@ def init_cli(
         }
     }
 
-    with open(
-        yaml.dump(
-
-    # 2b. Generate Config Template
-    template_path = project_config_dir / "config_template.yaml"
-    template_content = """# Monoco Configuration Template
-# This file serves as a reference for all available configuration options.
-# Rename this file to config.yaml to use it.
+    with open(workspace_config_path, "w") as f:
+        yaml.dump(workspace_config, f, default_flow_style=False)
 
-
-
-
-
-# Logging verbosity (DEBUG, INFO, WARNING, ERROR)
-# log_level: "INFO"
-
-# Preferred text editor
-# editor: "vim"
-
-project:
-  # The display name of the project
-  name: "My Project"
-
-  # The prefix used for issue IDs (e.g. MON-001)
-  key: "MON"
-
-  # Managed external research repositories (name -> url)
-  # spike_repos:
-  #   react: "https://github.com/facebook/react"
-
-paths:
-  # Directory for tracking issues
-  issues: "Issues"
-
-  # Directory for specifications/documents
-  specs: "SPECS"
-
-  # Directory for research references (spikes)
-  spikes: ".references"
-
-i18n:
-  # Source language code
-  source_lang: "en"
-
-  # Target language codes for translation
-  target_langs:
-    - "zh"
-
-ui:
-  # Custom Domain Terminology Mapping
-  # Use this to rename core concepts in the UI without changing internal logic.
-  dictionary:
-    # Entities
-    epic: "Saga"
-    feature: "Story"
-    chore: "Task"
-    fix: "Bug"
+    # 2c. Generate Config Template (Optional - might need update)
+    # For now, let's skip template generation or update it later.
+    # Or generate a workspace_template.yaml
 
-
-
-
-    review: "QA"
-    done: "Released"
-"""
-    with open(template_path, "w") as f:
-        f.write(template_content)
+    console.print(f"[green]✓ Project initialized in {cwd}[/green]")
+    console.print(f"[dim] - Identity: .monoco/project.yaml[/dim]")
+    console.print(f"[dim] - Environment: .monoco/workspace.yaml[/dim]")
 
-    #
-
-    # 3. Scaffold Directories & Modules
-
-    from monoco.core.registry import FeatureRegistry
-    from monoco.features.issue.adapter import IssueFeature
-    from monoco.features.spike.adapter import SpikeFeature
-    from monoco.features.i18n.adapter import I18nFeature
-
-    registry = FeatureRegistry()
-    registry.register(IssueFeature())
-    registry.register(SpikeFeature())
-    registry.register(I18nFeature())
+    # Check for issue feature init (this logic was implicit in caller?)
+    # No, init_cli is the main logic.
 
-    # Initialize
-
-
-            feature.initialize(cwd, project_config)
-            console.print(f" [dim]Initialized feature: {feature.name}[/dim]")
-        except Exception as e:
-            console.print(f" [red]Failed to initialize {feature.name}: {e}[/red]")
-
-    # Trigger initial sync to set up Agent Environment
-    from monoco.core.sync import sync_command
-    # We call sync command logic directly or simulate it?
-    # Just invoke the collection logic via sync normally would be best,
-    # but sync_command is a click command wrapper.
-    # For now let's just initialize the physical structures.
-    # The 'skills.init' call in old code did more than just init structure,
-    # it wrote SKILL.md files.
-    # In V2, we rely on 'monoco sync' to do that injection.
-    # So we should prompt user to run sync or do it automatically.
+    # Initialize basic directories
+    (cwd / "Issues").mkdir(exist_ok=True)
+    (cwd / ".references").mkdir(exist_ok=True)
 
-
-    console.print("
-    try:
-        # We need to reuse logic from sync.py
-        # Simplest is to run the sync workflow here manually/programmatically
-        # But for now, let's keep it clean and just say:
-        pass
-    except Exception:
-        pass
-
-    console.print(f"[green]✓ Project config initialized at {project_config_path}[/green]")
-    console.print(f"[green]✓ Config template generated at {template_path}[/green]")
-
-
-
-    console.print(f"[green]Access configured! issues will be created as {project_key}-XXX[/green]")
+    console.print("\n[bold green]✓ Monoco Project Initialized![/bold green]")
+    console.print(f"Access configured! issues will be created as [bold]{project_key}-XXX[/bold]")
 
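
The rewritten `init_cli` drops the interactive wizard and the single config template: `--name` and `--key` become required inputs in the non-interactive path, `--author` falls back to the git user, `--telemetry` defaults to off, and configuration is split into an identity file (`.monoco/project.yaml`) and an environment file (`.monoco/workspace.yaml`). A minimal sketch of the resulting writes, using only the shapes visible in this diff (the packaged function also handles the `--global`/`--project` modes, author/telemetry resolution, and error reporting):

```python
# Sketch only - mirrors the dict shapes shown in the diff above, not the packaged code.
from pathlib import Path
import yaml

def write_init_files(cwd: Path, project_name: str, project_key: str) -> None:
    config_dir = cwd / ".monoco"
    config_dir.mkdir(exist_ok=True)

    # Identity: .monoco/project.yaml
    project_config = {"project": {"name": project_name, "key": project_key}}
    with open(config_dir / "project.yaml", "w") as f:
        yaml.dump(project_config, f, default_flow_style=False)

    # Environment: .monoco/workspace.yaml (only the paths visible in the diff)
    workspace_config = {"paths": {"issues": "Issues", "spikes": ".references"}}
    with open(config_dir / "workspace.yaml", "w") as f:
        yaml.dump(workspace_config, f, default_flow_style=False)

    # Scaffold the default directories, as the new init does
    (cwd / "Issues").mkdir(exist_ok=True)
    (cwd / ".references").mkdir(exist_ok=True)
```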
monoco/core/workspace.py
CHANGED
@@ -1,3 +1,4 @@
+import os
 from pathlib import Path
 from typing import List, Optional, Dict
 from pydantic import BaseModel, Field, ConfigDict
@@ -26,7 +27,7 @@ def is_project_root(path: Path) -> bool:
     """
     Check if a directory serves as a Monoco project root.
     Criteria:
-    - has .monoco/ directory (which should contain
+    - has .monoco/ directory (which should contain project.yaml)
     """
     if not path.is_dir():
         return False
@@ -66,15 +67,20 @@ def find_projects(workspace_root: Path) -> List[MonocoProject]:
     if root_project:
         projects.append(root_project)
 
-    # 2.
-    for
-
-
-
+    # 2. Recursive Scan
+    for root, dirs, files in os.walk(workspace_root):
+        # Skip hidden directories and node_modules
+        dirs[:] = [d for d in dirs if not d.startswith('.') and d != 'node_modules' and d != 'venv']
+
+        for d in dirs:
+            project_path = Path(root) / d
+            # Avoid re-adding root if it was somehow added (unlikely here)
+            if project_path == workspace_root: continue
 
-
-
-
+            if is_project_root(project_path):
+                p = load_project(project_path)
+                if p:
+                    projects.append(p)
 
     return projects
 
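
`find_projects` now discovers nested projects with a pruned `os.walk`. The in-place `dirs[:] = [...]` assignment is what makes the pruning work: mutating the list that `os.walk` yielded stops it from descending into hidden directories, `node_modules`, and `venv`. A standalone sketch of the same idiom (names here are illustrative, not the packaged API):

```python
import os
from pathlib import Path

SKIP = {"node_modules", "venv"}

def find_candidate_roots(workspace_root: Path) -> list[Path]:
    found = []
    for root, dirs, _files in os.walk(workspace_root):
        # Assigning to dirs[:] (not rebinding dirs) prunes os.walk's traversal in place.
        dirs[:] = [d for d in dirs if not d.startswith(".") and d not in SKIP]
        for d in dirs:
            candidate = Path(root) / d
            # Per is_project_root above, a project root is marked by a .monoco/ directory.
            if (candidate / ".monoco").is_dir():
                found.append(candidate)
    return found
```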
monoco/features/issue/commands.py
CHANGED
@@ -68,6 +68,42 @@ def create(
         console.print(f"[red]✘ Error:[/red] {str(e)}")
         raise typer.Exit(code=1)
 
+@app.command("update")
+def update(
+    issue_id: str = typer.Argument(..., help="Issue ID to update"),
+    title: Optional[str] = typer.Option(None, "--title", "-t", help="New title"),
+    status: Optional[IssueStatus] = typer.Option(None, "--status", help="New status"),
+    stage: Optional[IssueStage] = typer.Option(None, "--stage", help="New stage"),
+    parent: Optional[str] = typer.Option(None, "--parent", "-p", help="Parent Issue ID"),
+    sprint: Optional[str] = typer.Option(None, "--sprint", help="Sprint ID"),
+    dependencies: Optional[List[str]] = typer.Option(None, "--dependency", "-d", help="Issue dependency ID(s)"),
+    related: Optional[List[str]] = typer.Option(None, "--related", "-r", help="Related Issue ID(s)"),
+    tags: Optional[List[str]] = typer.Option(None, "--tag", help="Tags"),
+    root: Optional[str] = typer.Option(None, "--root", help="Override issues root directory"),
+):
+    """Update an existing issue."""
+    config = get_config()
+    issues_root = _resolve_issues_root(config, root)
+
+    try:
+        core.update_issue(
+            issues_root,
+            issue_id,
+            status=status,
+            stage=stage,
+            title=title,
+            parent=parent,
+            sprint=sprint,
+            dependencies=dependencies,
+            related=related,
+            tags=tags
+        )
+
+        console.print(f"[green]✔[/green] Updated [bold]{issue_id}[/bold].")
+    except Exception as e:
+        console.print(f"[red]✘ Error:[/red] {str(e)}")
+        raise typer.Exit(code=1)
+
 @app.command("open")
 def move_open(
     issue_id: str = typer.Argument(..., help="Issue ID to open"),
@@ -173,6 +209,14 @@ def move_close(
     config = get_config()
     issues_root = _resolve_issues_root(config, root)
     project_root = _resolve_project_root(config)
+
+    # Pre-flight check for interactive guidance (Requirement FEAT-0082 #6)
+    if solution is None:
+        valid_solutions = [e.value for e in IssueSolution]
+        console.print(f"[red]✘ Error:[/red] Closing an issue requires a solution.")
+        console.print(f"Please specify one of: [bold]{', '.join(valid_solutions)}[/bold]")
+        raise typer.Exit(code=1)
+
     try:
         core.update_issue(issues_root, issue_id, status=IssueStatus.CLOSED, solution=solution)
         console.print(f"[dim]✔[/dim] Issue [bold]{issue_id}[/bold] closed.")
@@ -515,13 +559,16 @@ def scope(
 @app.command("lint")
 def lint(
     recursive: bool = typer.Option(False, "--recursive", "-r", help="Recursively scan subdirectories"),
+    fix: bool = typer.Option(False, "--fix", help="Attempt to automatically fix issues (e.g. missing headings)"),
+    format: str = typer.Option("table", "--format", "-f", help="Output format (table, json)"),
+    file: Optional[str] = typer.Option(None, "--file", help="Validate a single file instead of scanning the entire workspace"),
     root: Optional[str] = typer.Option(None, "--root", help="Override issues root directory"),
 ):
     """Verify the integrity of the Issues directory (declarative check)."""
     from . import linter
     config = get_config()
     issues_root = _resolve_issues_root(config, root)
-    linter.run_lint(issues_root, recursive=recursive)
+    linter.run_lint(issues_root, recursive=recursive, fix=fix, format=format, file_path=file)
 
 def _resolve_issues_root(config, cli_root: Optional[str]) -> Path:
     """
monoco/features/issue/core.py
CHANGED
@@ -6,7 +6,9 @@ from typing import List, Dict, Optional, Tuple, Any, Set, Set
 from datetime import datetime
 from .models import IssueMetadata, IssueType, IssueStatus, IssueSolution, IssueStage, IssueDetail, IsolationType, IssueIsolation, IssueID, current_time, generate_uid
 from monoco.core import git
-from monoco.core.config import get_config
+from monoco.core.config import get_config, MonocoConfig
+from monoco.core.lsp import DiagnosticSeverity
+from .validator import IssueValidator
 
 PREFIX_MAP = {
     IssueType.EPIC: "EPIC",
@@ -17,6 +19,27 @@ PREFIX_MAP = {
 
 REVERSE_PREFIX_MAP = {v: k for k, v in PREFIX_MAP.items()}
 
+def enforce_lifecycle_policy(meta: IssueMetadata) -> None:
+    """
+    Apply business rules to ensure IssueMetadata consistency.
+    Should be called during Create or Update (but NOT during Read/Lint).
+    """
+    if meta.status == IssueStatus.BACKLOG:
+        meta.stage = IssueStage.FREEZED
+
+    elif meta.status == IssueStatus.CLOSED:
+        # Enforce stage=done for closed issues
+        if meta.stage != IssueStage.DONE:
+            meta.stage = IssueStage.DONE
+        # Auto-fill closed_at if missing
+        if not meta.closed_at:
+            meta.closed_at = current_time()
+
+    elif meta.status == IssueStatus.OPEN:
+        # Ensure valid stage for open status
+        if meta.stage is None:
+            meta.stage = IssueStage.DRAFT
+
 def _get_slug(title: str) -> str:
     slug = title.lower()
     # Replace non-word characters (including punctuation, spaces) with hyphens
@@ -145,8 +168,10 @@ def create_issue_file(
         tags=tags,
         opened_at=current_time() if status == IssueStatus.OPEN else None
     )
-
 
+    # Enforce lifecycle policies (defaults, auto-corrections)
+    enforce_lifecycle_policy(metadata)
+
     yaml_header = yaml.dump(metadata.model_dump(exclude_none=True, mode='json'), sort_keys=False, allow_unicode=True)
     slug = _get_slug(title)
     filename = f"{issue_id}-{slug}.md"
@@ -206,13 +231,17 @@ def find_issue_path(issues_root: Path, issue_id: str) -> Optional[Path]:
     parsed = IssueID(issue_id)
 
     if not parsed.is_local:
+        if not parsed.namespace:
+            return None
+
         # Resolve Workspace
-        #
-        # This is a weak assumption but fits current architecture.
+        # Traverse up from issues_root to find a config that defines the namespace
         project_root = issues_root.parent
-        conf = get_config(str(project_root))
 
+        # Try current root first
+        conf = MonocoConfig.load(str(project_root))
         member_rel_path = conf.project.members.get(parsed.namespace)
+
         if not member_rel_path:
             return None
 
@@ -248,7 +277,9 @@ def update_issue(
     status: Optional[IssueStatus] = None,
     stage: Optional[IssueStage] = None,
     solution: Optional[IssueSolution] = None,
+    title: Optional[str] = None,
     parent: Optional[str] = None,
+    sprint: Optional[str] = None,
     dependencies: Optional[List[str]] = None,
     related: Optional[List[str]] = None,
     tags: Optional[List[str]] = None
@@ -297,9 +328,7 @@ def update_issue(
             raise ValueError(f"Lifecycle Policy: Cannot submit Backlog issue directly. Run `monoco issue pull {issue_id}` first.")
 
     if target_status == IssueStatus.CLOSED:
-
-            raise ValueError(f"Closing an issue requires a solution. Please provide --solution or edit the file metadata.")
-
+        # Validator will check solution presence
         current_data_stage = data.get('stage')
 
         # Policy: IMPLEMENTED requires REVIEW stage
@@ -345,11 +374,17 @@ def update_issue(
     if solution:
         data['solution'] = solution.value
 
+    if title:
+        data['title'] = title
+
     if parent is not None:
         if parent == "":
             data.pop('parent', None)  # Remove parent field
         else:
             data['parent'] = parent
+
+    if sprint is not None:
+        data['sprint'] = sprint
 
     if dependencies is not None:
         data['dependencies'] = dependencies
@@ -374,9 +409,24 @@ def update_issue(
     # Touch updated_at
     data['updated_at'] = current_time()
 
-    # Re-hydrate through Model
+    # Re-hydrate through Model
     try:
         updated_meta = IssueMetadata(**data)
+
+        # Enforce lifecycle policies (defaults, auto-corrections)
+        # This ensures that when we update, we also fix invalid states (like Closed but not Done)
+        enforce_lifecycle_policy(updated_meta)
+
+        # Delegate to IssueValidator for static state validation
+        # We need to construct the full content to validate body-dependent rules (like checkboxes)
+        # Note: 'body' here is the OLD body. We assume update_issue doesn't change body.
+        # If body is invalid (unchecked boxes) and we move to DONE, this MUST fail.
+        validator = IssueValidator(issues_root)
+        diagnostics = validator.validate(updated_meta, body)
+        errors = [d for d in diagnostics if d.severity == DiagnosticSeverity.Error]
+        if errors:
+            raise ValueError(f"Validation Failed: {errors[0].message}")
+
     except Exception as e:
         raise ValueError(f"Failed to validate updated metadata: {e}")
 
@@ -434,6 +484,8 @@ def start_issue_isolation(issues_root: Path, issue_id: str, mode: IsolationType,
         raise FileNotFoundError(f"Issue {issue_id} not found.")
 
     issue = parse_issue(path)
+    if not issue:
+        raise ValueError(f"Could not parse metadata for issue {issue_id}")
 
     # Idempotency / Conflict Check
     if issue.isolation:
@@ -509,6 +561,9 @@ def prune_issue_resources(issues_root: Path, issue_id: str, force: bool, project
         raise FileNotFoundError(f"Issue {issue_id} not found.")
 
     issue = parse_issue(path)
+    if not issue:
+        raise ValueError(f"Could not parse metadata for issue {issue_id}")
+
     deleted_items = []
 
     if not issue.isolation:
@@ -800,7 +855,10 @@ def generate_delivery_report(issues_root: Path, issue_id: str, project_root: Pat
     commits = git.search_commits_by_message(project_root, f"Ref: {issue_id}")
 
     if not commits:
-
+        meta = parse_issue(path)
+        if not meta:
+            raise ValueError(f"Could not parse metadata for issue {issue_id}")
+        return meta
 
     # 2. Aggregate Data
     all_files = set()
@@ -856,7 +914,10 @@ def generate_delivery_report(issues_root: Path, issue_id: str, project_root: Pat
     # We can add it to 'extra' or extend the model later.
     # For now, just persisting the text is enough for FEAT-0002.
 
-
+    meta = parse_issue(path)
+    if not meta:
+        raise ValueError(f"Could not parse metadata for issue {issue_id}")
+    return meta
 
 def get_children(issues_root: Path, parent_id: str) -> List[IssueMetadata]:
     """Find all direct children of an issue."""
@@ -1174,7 +1235,7 @@ def move_issue(
     # 5. Update content if ID changed
     if new_id != old_id:
         # Update frontmatter
-        content = issue.raw_content
+        content = issue.raw_content or ""
         match = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
         if match:
             yaml_str = match.group(1)
@@ -1190,9 +1251,9 @@ def move_issue(
 
             new_content = f"---\n{new_yaml}---{body}"
         else:
-            new_content = issue.raw_content
+            new_content = issue.raw_content or ""
     else:
-        new_content = issue.raw_content
+        new_content = issue.raw_content or ""
 
     # 6. Write to target
     target_path.write_text(new_content)
@@ -1202,4 +1263,7 @@ def move_issue(
 
     # 8. Return updated metadata
     final_meta = parse_issue(target_path)
+    if not final_meta:
+        raise ValueError(f"Failed to parse moved issue at {target_path}")
+
     return final_meta, target_path