monoco-toolkit 0.2.0__py3-none-any.whl → 0.2.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,68 +1,27 @@
- from typing import List, Optional, Tuple, Set
+ from typing import List, Optional, Set, Tuple
  from pathlib import Path
  from rich.console import Console
  from rich.table import Table
  import typer
+ import re

  from . import core
- from .models import IssueStatus, IssueStage
+ from .validator import IssueValidator
+ from monoco.core.lsp import Diagnostic, DiagnosticSeverity

  console = Console()

-
- def validate_issue(path: Path, meta: core.IssueMetadata, all_issue_ids: Set[str] = set(), issues_root: Optional[Path] = None) -> List[str]:
- """
- Validate a single issue's integrity.
+ def check_integrity(issues_root: Path, recursive: bool = False) -> List[Diagnostic]:
  """
- errors = []
-
- # A. Directory/Status Consistency
- expected_status = meta.status.value
- path_parts = path.parts
- # We might be validating a temp file, so we skip path check if it's not in the tree?
- # Or strict check? For "Safe Edit", the file might be in a temp dir.
- # So we probably only care about content/metadata integrity.
-
- # But wait, if we overwrite the file, it MUST be valid.
- # Let's assume the validation is about the content itself (metadata logic).
-
- # B. Solution Compliance
- if meta.status == IssueStatus.CLOSED and not meta.solution:
- errors.append(f"[red]Solution Missing:[/red] {meta.id} is closed but has no [dim]solution[/dim] field.")
-
- # C. Link Integrity
- if meta.parent:
- if all_issue_ids and meta.parent not in all_issue_ids:
- # Check workspace (fallback)
- found = False
- if issues_root:
- if core.find_issue_path(issues_root, meta.parent):
- found = True
-
- if not found:
- errors.append(f"[red]Broken Link:[/red] {meta.id} refers to non-existent parent [bold]{meta.parent}[/bold].")
-
- # D. Lifecycle Guard (Backlog)
- if meta.status == IssueStatus.BACKLOG and meta.stage != IssueStage.FREEZED:
- errors.append(f"[red]Lifecycle Error:[/red] {meta.id} is backlog but stage is not [bold]freezed[/bold] (found: {meta.stage}).")
-
- return errors
-
- def check_integrity(issues_root: Path, recursive: bool = False) -> List[str]:
+ Verify the integrity of the Issues directory using LSP Validator.
  """
- Verify the integrity of the Issues directory.
- Returns a list of error messages.
+ diagnostics = []
+ validator = IssueValidator(issues_root)

- If recursive=True, performs workspace-level validation including:
- - Cross-project ID collision detection
- - Cross-project UID collision detection
- """
- errors = []
- all_issue_ids = set() # For parent reference validation (includes namespaced IDs)
- id_to_projects = {} # local_id -> [(project_name, meta, file)]
- all_uids = {} # uid -> (project, issue_id)
+ all_issue_ids = set()
  all_issues = []

+ # 1. Collection Phase (Build Index)
  # Helper to collect issues from a project
  def collect_project_issues(project_issues_root: Path, project_name: str = "local"):
  project_issues = []
@@ -81,92 +40,232 @@ def check_integrity(issues_root: Path, recursive: bool = False) -> List[str]:
  local_id = meta.id
  full_id = f"{project_name}::{local_id}" if project_name != "local" else local_id

- # Track ID occurrences per project
- if local_id not in id_to_projects:
- id_to_projects[local_id] = []
- id_to_projects[local_id].append((project_name, meta, f))
-
- # Add IDs for reference validation
- all_issue_ids.add(local_id) # Local ID
+ all_issue_ids.add(local_id)
  if project_name != "local":
- all_issue_ids.add(full_id) # Namespaced ID
-
- # Check UID collision (if UID exists)
- if meta.uid:
- if meta.uid in all_uids:
- existing_project, existing_id = all_uids[meta.uid]
- errors.append(
- f"[red]UID Collision:[/red] UID {meta.uid} is duplicated.\n"
- f" - {existing_project}::{existing_id}\n"
- f" - {project_name}::{local_id}"
- )
- else:
- all_uids[meta.uid] = (project_name, local_id)
+ all_issue_ids.add(full_id)

- project_issues.append((f, meta, project_name))
+ project_issues.append((f, meta))
  return project_issues

- # 1. Collect local issues
  all_issues.extend(collect_project_issues(issues_root, "local"))
-
- # 2. If recursive, collect workspace member issues
+
  if recursive:
  try:
  from monoco.core.config import get_config
  project_root = issues_root.parent
  conf = get_config(str(project_root))
-
  for member_name, rel_path in conf.project.members.items():
  member_root = (project_root / rel_path).resolve()
  member_issues_dir = member_root / "Issues"
-
  if member_issues_dir.exists():
- all_issues.extend(collect_project_issues(member_issues_dir, member_name))
- except Exception as e:
- # Fail gracefully if workspace config is missing
+ collect_project_issues(member_issues_dir, member_name)
+ except Exception:
  pass

- # 3. Check for ID collisions within same project
- for local_id, occurrences in id_to_projects.items():
- # Group by project
- projects_with_id = {}
- for project_name, meta, f in occurrences:
- if project_name not in projects_with_id:
- projects_with_id[project_name] = []
- projects_with_id[project_name].append((meta, f))
+ # 2. Validation Phase
+ for path, meta in all_issues:
+ content = path.read_text() # Re-read content for validation

- # Check for duplicates within same project
- for project_name, metas in projects_with_id.items():
- if len(metas) > 1:
- # Same ID appears multiple times in same project - this is an error
- error_msg = f"[red]ID Collision:[/red] {local_id} appears {len(metas)} times in project '{project_name}':\n"
- for idx, (meta, f) in enumerate(metas, 1):
- error_msg += f" {idx}. uid: {meta.uid or 'N/A'} | created: {meta.created_at} | stage: {meta.stage} | status: {meta.status.value}\n"
- error_msg += f" [yellow]→ Action:[/yellow] Remove duplicate or use 'monoco issue move --to <target> --renumber' to resolve."
- errors.append(error_msg)
-
- # 4. Validation
- for path, meta, project_name in all_issues:
- # A. Directory/Status Consistency (Only check this for files in the tree)
- expected_status = meta.status.value
- path_parts = path.parts
- if expected_status not in path_parts:
- errors.append(f"[yellow]Placement Error:[/yellow] {meta.id} has status [cyan]{expected_status}[/cyan] but is not under a [dim]{expected_status}/[/dim] directory.")
+ # A. Run Core Validator
+ file_diagnostics = validator.validate(meta, content, all_issue_ids)

- # Reuse common logic
- errors.extend(validate_issue(path, meta, all_issue_ids, issues_root))
-
- return errors
+ # Add context to diagnostics (Path)
+ for d in file_diagnostics:
+ d.source = f"{meta.id}" # Use ID as source context
+ d.data = {'path': path} # Attach path for potential fixers
+ diagnostics.append(d)
+
+ return diagnostics


- def run_lint(issues_root: Path, recursive: bool = False):
- errors = check_integrity(issues_root, recursive)
+ def run_lint(issues_root: Path, recursive: bool = False, fix: bool = False, format: str = "table", file_path: Optional[str] = None):
+ """
+ Run lint with optional auto-fix and format selection.
+
+ Args:
+ issues_root: Root directory of issues
+ recursive: Recursively scan workspace members
+ fix: Apply auto-fixes
+ format: Output format (table, json)
+ file_path: Optional path to a single file to validate (LSP mode)
+ """
+ # Single-file mode (for LSP integration)
+ if file_path:
+ file = Path(file_path).resolve()
+ if not file.exists():
+ console.print(f"[red]Error:[/red] File not found: {file_path}")
+ raise typer.Exit(code=1)
+
+ # Parse and validate single file
+ try:
+ meta = core.parse_issue(file)
+ if not meta:
+ console.print(f"[red]Error:[/red] Failed to parse issue metadata from {file_path}")
+ raise typer.Exit(code=1)
+
+ content = file.read_text()
+ validator = IssueValidator(issues_root)
+
+ # For single-file mode, we need to build a minimal index
+ # We'll scan the entire workspace to get all issue IDs for reference validation
+ all_issue_ids = set()
+ for subdir in ["Epics", "Features", "Chores", "Fixes"]:
+ d = issues_root / subdir
+ if d.exists():
+ for status in ["open", "closed", "backlog"]:
+ status_dir = d / status
+ if status_dir.exists():
+ for f in status_dir.rglob("*.md"):
+ try:
+ m = core.parse_issue(f)
+ if m:
+ all_issue_ids.add(m.id)
+ except Exception:
+ pass
+
+ diagnostics = validator.validate(meta, content, all_issue_ids)
+
+ # Add context
+ for d in diagnostics:
+ d.source = meta.id
+ d.data = {'path': file}
+
+ except Exception as e:
+ console.print(f"[red]Error:[/red] Validation failed: {e}")
+ raise typer.Exit(code=1)
+ else:
+ # Full workspace scan mode
+ diagnostics = check_integrity(issues_root, recursive)
+
+ # Filter only Warnings and Errors
+ issues = [d for d in diagnostics if d.severity <= DiagnosticSeverity.Warning]

- if not errors:
+ if fix:
+ fixed_count = 0
+ console.print("[dim]Attempting auto-fixes...[/dim]")
+
+ # We must track processed paths to avoid redundant writes if multiple errors exist
+ processed_paths = set()
+
+ for d in issues:
+ path = d.data.get('path')
+ if not path: continue
+
+ # Read fresh content iteration
+ pass
+
+ # Group diagnostics by file path
+ from collections import defaultdict
+ file_diags = defaultdict(list)
+ for d in issues:
+ if d.data.get('path'):
+ file_diags[d.data['path']].append(d)
+
+ for path, diags in file_diags.items():
+ try:
+ content = path.read_text()
+ new_content = content
+ has_changes = False
+
+ # Parse meta once for the file
+ try:
+ meta = core.parse_issue(path)
+ except Exception:
+ console.print(f"[yellow]Skipping fix for {path.name}: Cannot parse metadata[/yellow]")
+ continue
+
+ # Apply fixes for this file
+ for d in diags:
+ if "Structure Error" in d.message:
+ expected_header = f"## {meta.id}: {meta.title}"
+
+ # Check if strictly present
+ if expected_header in new_content:
+ continue
+
+ # Strategy: Look for existing heading with same ID to replace
+ # Matches: "## ID..." or "## ID ..."
+ # Regex: ^##\s+ID\b.*$
+ # We use meta.id which is safe.
+ heading_regex = re.compile(rf"^##\s+{re.escape(meta.id)}.*$", re.MULTILINE)
+
+ match_existing = heading_regex.search(new_content)
+
+ if match_existing:
+ # Replace existing incorrect heading
+ # We use sub to replace just the first occurrence
+ new_content = heading_regex.sub(expected_header, new_content, count=1)
+ has_changes = True
+ else:
+ # Insert after frontmatter
+ fm_match = re.search(r"^---(.*?)---", new_content, re.DOTALL | re.MULTILINE)
+ if fm_match:
+ end_pos = fm_match.end()
+ header_block = f"\n\n{expected_header}\n"
+ new_content = new_content[:end_pos] + header_block + new_content[end_pos:].lstrip()
+ has_changes = True
+
+ if "Review Requirement: Missing '## Review Comments' section" in d.message:
+ if "## Review Comments" not in new_content:
+ new_content = new_content.rstrip() + "\n\n## Review Comments\n\n- [ ] Self-Review\n"
+ has_changes = True
+
+ if has_changes:
+ path.write_text(new_content)
+ fixed_count += 1
+ console.print(f"[dim]Fixed: {path.name}[/dim]")
+ except Exception as e:
+ console.print(f"[red]Failed to fix {path.name}: {e}[/red]")
+
+ console.print(f"[green]Applied auto-fixes to {fixed_count} files.[/green]")
+
+ # Re-run validation to verify
+ if file_path:
+ # Re-validate single file
+ file = Path(file_path).resolve()
+ meta = core.parse_issue(file)
+ content = file.read_text()
+ validator = IssueValidator(issues_root)
+ diagnostics = validator.validate(meta, content, all_issue_ids)
+ for d in diagnostics:
+ d.source = meta.id
+ d.data = {'path': file}
+ else:
+ diagnostics = check_integrity(issues_root, recursive)
+ issues = [d for d in diagnostics if d.severity <= DiagnosticSeverity.Warning]
+
+ # Output formatting
+ if format == "json":
+ import json
+ from pydantic import RootModel
+ # Use RootModel to export a list of models
+ print(RootModel(issues).model_dump_json(indent=2))
+ if any(d.severity == DiagnosticSeverity.Error for d in issues):
+ raise typer.Exit(code=1)
+ return
+
+ if not issues:
  console.print("[green]✔[/green] Issue integrity check passed. No integrity errors found.")
  else:
- table = Table(title="Issue Integrity Issues", show_header=False, border_style="red")
- for err in errors:
- table.add_row(err)
+ table = Table(title="Issue Integrity Report", show_header=True, header_style="bold magenta", border_style="red")
+ table.add_column("Issue", style="cyan")
+ table.add_column("Severity", justify="center")
+ table.add_column("Line", justify="right", style="dim")
+ table.add_column("Message")
+
+ for d in issues:
+ sev_style = "red" if d.severity == DiagnosticSeverity.Error else "yellow"
+ sev_label = "ERROR" if d.severity == DiagnosticSeverity.Error else "WARN"
+ line_str = str(d.range.start.line + 1) if d.range else "-"
+ table.add_row(
+ d.source or "Unknown",
+ f"[{sev_style}]{sev_label}[/{sev_style}]",
+ line_str,
+ d.message
+ )
+
  console.print(table)
- raise typer.Exit(code=1)
+
+ if any(d.severity == DiagnosticSeverity.Error for d in issues):
+ raise typer.Exit(code=1)
+
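For orientation, the following is a minimal, hypothetical sketch of how the reworked lint entry points above might be driven from Python. It assumes the `Diagnostic` objects from `monoco.core.lsp` expose the `severity`, `message`, `range`, `source`, and `data` attributes used in the diff, and the import path of the lint module is a placeholder; this is an illustration, not code shipped in the package.

```python
from pathlib import Path

# Hypothetical import path for the lint module shown in the diff above.
from monoco.features.issue import lint

issues_root = Path("Issues")

# Workspace-wide check: 0.2.3 returns LSP-style Diagnostic objects instead of strings.
diagnostics = lint.check_integrity(issues_root, recursive=True)

for d in diagnostics:
    line = d.range.start.line + 1 if d.range else "-"
    print(f"{d.source or 'Unknown'}:{line} {d.message}")

# CLI-style invocation mirroring run_lint's new signature (may raise typer.Exit on errors).
lint.run_lint(issues_root, recursive=False, fix=True, format="table")
```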
@@ -147,22 +147,9 @@ class IssueMetadata(BaseModel):
  # status: backlog -> stage: freezed
  # status: closed -> stage: done
  # status: open -> stage: draft | doing | review | done (default draft)
-
- if self.status == IssueStatus.BACKLOG:
- self.stage = IssueStage.FREEZED
-
- elif self.status == IssueStatus.CLOSED:
- # Enforce stage=done for closed issues
- if self.stage != IssueStage.DONE:
- self.stage = IssueStage.DONE
- # Auto-fill closed_at if missing
- if not self.closed_at:
- self.closed_at = current_time()

- elif self.status == IssueStatus.OPEN:
- # Ensure valid stage for open status
- if self.stage is None:
- self.stage = IssueStage.DRAFT
+ # NOTE: We do NOT auto-correct state here anymore to allow Linter to detect inconsistencies.
+ # Auto-correction should be applied explicitly by 'create' or 'update' commands via core logic.

  return self

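Because the model validator no longer coerces `stage` to match `status`, mismatches are now surfaced by the linter instead of being silently repaired. Below is a minimal sketch of what such a status/stage compatibility check could look like; the allowed combinations are taken from the State Matrix documented later in this diff, but the helper itself is illustrative and is not the package's `IssueValidator`.

```python
from typing import Optional

# Allowed stage values per status, per the State Matrix in the documentation below.
ALLOWED_STAGES = {
    "open": {"draft", "doing", "review", "done"},
    "backlog": {"draft", "doing", "review"},
    "closed": {"done"},
}

def stage_mismatch(status: str, stage: str) -> Optional[str]:
    """Return an error message if the stage is not allowed for the status, else None."""
    allowed = ALLOWED_STAGES.get(status, set())
    if stage not in allowed:
        return f"stage '{stage}' is not valid for status '{status}' (allowed: {sorted(allowed)})"
    return None
```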
@@ -1,4 +1,6 @@
- ### Issue Management
+ # Issue Management (Agent Guidance)
+
+ ## Issue Management

  System for managing tasks using `monoco issue`.

@@ -7,3 +9,7 @@ System for managing tasks using `monoco issue`.
  - **Check**: `monoco issue lint` (Must run after manual edits)
  - **Lifecycle**: `monoco issue start|submit|delete <id>`
  - **Structure**: `Issues/{CapitalizedPluralType}/{lowercase_status}/` (e.g. `Issues/Features/open/`). Do not deviate.
+ - **Rules**:
+ 1. **Heading**: Must have `## {ID}: {Title}` (matches metadata).
+ 2. **Checkboxes**: Min 2 using `- [ ]`, `- [x]`, `- [-]`, `- [/]`.
+ 3. **Review**: `## Review Comments` section required for Review/Done stages.
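To make the three rules above concrete, here is a small, hedged sketch of the kind of checks `monoco issue lint` performs on a ticket body. It uses plain regular expressions and is not the package's actual `IssueValidator`; the rule wording is taken from the guidance above.

```python
import re
from typing import List

def quick_checks(body: str, issue_id: str, title: str, stage: str) -> List[str]:
    """Illustrative versions of the three rules above; not the real validator."""
    problems = []
    # Rule 1: a heading matching "## {ID}: {Title}" must be present.
    if f"## {issue_id}: {title}" not in body:
        problems.append("missing or mismatched '## {ID}: {Title}' heading")
    # Rule 2: at least two checkboxes in any of the allowed states.
    if len(re.findall(r"^- \[[ x/-]\]", body, re.MULTILINE)) < 2:
        problems.append("fewer than 2 checkboxes")
    # Rule 3: review/done stages require a '## Review Comments' section.
    if stage in ("review", "done") and "## Review Comments" not in body:
        problems.append("missing '## Review Comments' section")
    return problems
```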
@@ -25,12 +25,20 @@ Use this skill to create and manage **Issues** (Universal Atoms) in Monoco proje

  ## Guidelines

- ### Directory Structure
+ ### Directory Structure & Naming

  `Issues/{CapitalizedPluralType}/{lowercase_status}/`

- - `{TYPE}`: `Epics`, `Features`, `Chores`, `Fixes`
- - `{STATUS}`: `open`, `backlog`, `closed`
+ - **Types**: `Epics`, `Features`, `Chores`, `Fixes`
+ - **Statuses**: `open`, `backlog`, `closed`
+
+ ### Structural Integrity
+
+ Issues are validated via `monoco issue lint`. Key constraints:
+
+ 1. **Mandatory Heading**: `## {ID}: {Title}` must match front matter.
+ 2. **Min Checkboxes**: At least 2 checkboxes (AC/Tasks).
+ 3. **Review Protocol**: `## Review Comments` required for `review` or `done` stages.

  ### Path Transitions

@@ -49,3 +57,31 @@ Use `monoco issue`:
  5. **Modification**: `monoco issue start/submit/delete <id>`

  6. **Commit**: `monoco issue commit` (Atomic commit for issue files)
+ 7. **Validation**: `monoco issue lint` (Enforces compliance)
+
+ ## Validation Rules (FEAT-0082)
+
+ To ensure data integrity, all Issue tickets must follow these strict rules:
+
+ ### 1. Structural Consistency
+
+ - Must contain a Level 2 Heading matching exactly: `## {ID}: {Title}`.
+ - Example: `## FEAT-0082: Issue Ticket Validator`
+
+ ### 2. Content Completeness
+
+ - **Checkboxes**: Minimum of 2 checkboxes required (one for AC, one for Tasks).
+ - **Review Comments**: If `stage` is `review` or `done`, a `## Review Comments` section is mandatory and must not be empty.
+
+ ### 3. Checkbox Syntax & Hierarchy
+
+ - Use only `- [ ]`, `- [x]`, `- [-]`, or `- [/]`.
+ - **Inheritance**: If nested checkboxes exist, the parent state must reflect child states (e.g., if any child is `[/]`, parent must be `[/]`; if all children are `[x]`, parent must be `[x]`).
+
+ ### 4. State Matrix
+
+ The `status` (folder) and `stage` (front matter) must be compatible:
+
+ - **open**: Draft, Doing, Review, Done
+ - **backlog**: Draft, Doing, Review
+ - **closed**: Done
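The inheritance rule in section 3 is the least obvious of the four, so here is a hedged sketch of how a parent checkbox state could be derived from its children. The handling of cancelled (`[-]`) children is an assumption; the package's validator may resolve those cases differently.

```python
from typing import List

def aggregate_state(children: List[str]) -> str:
    """Derive a parent checkbox state from child states per the inheritance rule.

    States: ' ' = open, 'x' = done, '-' = cancelled, '/' = in progress. Sketch only.
    """
    if not children:
        return " "
    if any(c == "/" for c in children):
        return "/"  # any in-progress child forces the parent to in-progress
    if all(c in ("x", "-") for c in children):
        return "x"  # all children resolved -> parent done (assumption for '-' children)
    if any(c in ("x", "/") for c in children):
        return "/"  # partially done reads as in progress (assumption)
    return " "
```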
@@ -1,4 +1,6 @@
- ### Issue Management
+ # Issue Management (Agent Guidance)
+
+ ## Issue Management

  A system for managing tasks using `monoco issue`.

@@ -6,4 +8,8 @@
  - **Status**: `monoco issue open|close|backlog <id>`
  - **Check**: `monoco issue lint` (must run after manual edits)
  - **Lifecycle**: `monoco issue start|submit|delete <id>`
- - **Structure**: `Issues/{CapitalizedPluralType}/{lowercase_status}/` (e.g. `Issues/Features/open/`). Do not deviate from this structure.
+ - **Structure**: `Issues/{CapitalizedPluralType}/{lowercase_status}/` (`Issues/Features/open/`)
+ - **Mandatory Rules**:
+ 1. **Heading**: Must contain a `## {ID}: {Title}` heading (consistent with the front matter).
+ 2. **Content**: At least 2 checkboxes, using `- [ ]`, `- [x]`, `- [-]`, `- [/]`.
+ 3. **Review**: The `review`/`done` stages must include a `## Review Comments` section with non-empty content.
@@ -38,7 +38,7 @@ Monoco 不仅仅复刻 Jira,而是基于 **"思维模式 (Mindset)"** 重新
  - **Focus**: "How" (what must be done to keep the system running).
  - **Prefix**: `CHORE-`

- _(Replaces the Task concept)_
+ > Note: replaces the traditional Task concept.

  #### 🐞 FIX (Fix)

@@ -47,7 +47,7 @@ _(取代了 Task 概念)_
  - **Focus**: "Fix" (restore things to their original state).
  - **Prefix**: `FIX-`

- _(Replaces the Bug concept)_
+ > Note: replaces the traditional Bug concept.

  ---

@@ -82,4 +82,33 @@ _(取代了 Bug 概念)_

  5. **Modification**: `monoco issue start/submit/delete <id>`

- 6. **Commit**: `monoco issue commit` (Atomic commit for issue files)
+ 6. **Commit**: `monoco issue commit` (atomic commit of issue files)
+ 7. **Validation**: `monoco issue lint` (enforces compliance checks)
+
+ ## Compliance & Structural Validation (Validation Rules)
+
+ To ensure data rigor, every Issue ticket must follow these mandatory rules:
+
+ ### 1. Structural Consistency
+
+ - Must contain a Level 2 heading (`##`) whose content strictly matches the ID and Title in the front matter.
+ - Format: `## {ID}: {Title}`
+ - Example: `## FEAT-0082: Issue Ticket Validator`
+
+ ### 2. Content Completeness
+
+ - **Checkbox count**: Every ticket must contain at least 2 checkboxes (typically representing AC and Tasks).
+ - **Review record**: When `stage` is `review` or `done`, a `## Review Comments` heading must be present and must not be empty.
+
+ ### 3. Checkbox Syntax & Hierarchy (Checkbox Matrix)
+
+ - Only the following are allowed: `- [ ]`, `- [x]`, `- [-]`, `- [/]`.
+ - **Hierarchy inheritance**: If nested checkboxes exist, the parent state must correctly reflect the aggregate of its children (e.g. if any child is `[/]`, the parent must be `[/]`; if all children are checked, the parent must be `[x]`).
+
+ ### 4. State Matrix
+
+ `status` (physical directory) and `stage` (front matter field) must be compatible:
+
+ - **open**: Draft, Doing, Review, Done
+ - **backlog**: Draft, Doing, Review
+ - **closed**: Done