monoco-toolkit 0.1.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- monoco/core/__init__.py +0 -0
- monoco/core/config.py +113 -0
- monoco/core/git.py +184 -0
- monoco/core/output.py +97 -0
- monoco/core/setup.py +285 -0
- monoco/core/telemetry.py +89 -0
- monoco/core/workspace.py +40 -0
- monoco/daemon/__init__.py +0 -0
- monoco/daemon/app.py +378 -0
- monoco/daemon/commands.py +36 -0
- monoco/daemon/models.py +24 -0
- monoco/daemon/reproduce_stats.py +41 -0
- monoco/daemon/services.py +265 -0
- monoco/daemon/stats.py +124 -0
- monoco/features/__init__.py +0 -0
- monoco/features/config/commands.py +70 -0
- monoco/features/i18n/__init__.py +0 -0
- monoco/features/i18n/commands.py +121 -0
- monoco/features/i18n/core.py +178 -0
- monoco/features/issue/commands.py +710 -0
- monoco/features/issue/core.py +1183 -0
- monoco/features/issue/linter.py +172 -0
- monoco/features/issue/models.py +157 -0
- monoco/features/pty/core.py +185 -0
- monoco/features/pty/router.py +138 -0
- monoco/features/pty/server.py +56 -0
- monoco/features/skills/__init__.py +1 -0
- monoco/features/skills/core.py +96 -0
- monoco/features/spike/commands.py +110 -0
- monoco/features/spike/core.py +154 -0
- monoco/main.py +110 -0
- monoco_toolkit-0.1.5.dist-info/METADATA +93 -0
- monoco_toolkit-0.1.5.dist-info/RECORD +36 -0
- monoco_toolkit-0.1.5.dist-info/WHEEL +4 -0
- monoco_toolkit-0.1.5.dist-info/entry_points.txt +2 -0
- monoco_toolkit-0.1.5.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,265 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import logging
|
|
3
|
+
import subprocess
|
|
4
|
+
import os
|
|
5
|
+
import re
|
|
6
|
+
from typing import List, Optional, Dict, Any
|
|
7
|
+
from asyncio import Queue
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
|
|
10
|
+
from monoco.features.issue.core import parse_issue, IssueMetadata
|
|
11
|
+
import json
|
|
12
|
+
|
|
13
|
+
# Module-wide logger for the daemon services layer.
logger = logging.getLogger("monoco.daemon.services")
|
|
14
|
+
|
|
15
|
+
class Broadcaster:
    """Fan-out hub for Server-Sent Events.

    Keeps one asyncio Queue per connected client and pushes every
    broadcast message onto each of them.
    """

    def __init__(self):
        # One queue per connected SSE client.
        self.subscribers: List[Queue] = []

    async def subscribe(self) -> Queue:
        """Register a new client and return its private message queue."""
        client_queue = Queue()
        self.subscribers.append(client_queue)
        logger.info(f"New client subscribed. Total clients: {len(self.subscribers)}")
        return client_queue

    async def unsubscribe(self, queue: Queue):
        """Remove a client's queue; silently ignores unknown queues."""
        if queue in self.subscribers:
            self.subscribers.remove(queue)
        logger.info(f"Client unsubscribed. Total clients: {len(self.subscribers)}")

    async def broadcast(self, event_type: str, payload: dict):
        """Serialize *payload* as JSON and enqueue it for every subscriber."""
        if not self.subscribers:
            return

        message = {
            "event": event_type,
            "data": json.dumps(payload),
        }

        # Deliver to every connected client's queue.
        for client_queue in self.subscribers:
            await client_queue.put(message)

        logger.debug(f"Broadcasted {event_type} to {len(self.subscribers)} clients.")
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
class GitMonitor:
    """
    Polls the Git repository for HEAD changes and triggers updates.

    Runs ``git rev-parse HEAD`` every ``poll_interval`` seconds and
    broadcasts a ``HEAD_UPDATED`` event whenever the hash changes.
    """

    def __init__(self, broadcaster: Broadcaster, poll_interval: float = 2.0):
        self.broadcaster = broadcaster
        self.poll_interval = poll_interval
        self.last_head_hash: Optional[str] = None
        self.is_running = False

    async def get_head_hash(self) -> Optional[str]:
        """Return the current HEAD commit hash, or None on any failure."""
        try:
            # Run git rev-parse HEAD asynchronously.
            # NOTE(review): runs in the daemon's current working directory —
            # assumes the repo is the process CWD; confirm against callers.
            process = await asyncio.create_subprocess_exec(
                "git", "rev-parse", "HEAD",
                stdout=asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.PIPE
            )
            stdout, stderr = await process.communicate()
            if process.returncode == 0:
                return stdout.decode().strip()
            # Fix: surface git's own diagnostic instead of discarding stderr.
            logger.warning(
                "git rev-parse HEAD failed (rc=%s): %s",
                process.returncode,
                stderr.decode().strip(),
            )
            return None
        except Exception as e:
            logger.error(f"Git polling error: {e}")
            return None

    async def start(self):
        """Poll until stop() is called, broadcasting on HEAD changes."""
        self.is_running = True
        logger.info("Git Monitor started.")

        # Initial check
        self.last_head_hash = await self.get_head_hash()

        while self.is_running:
            await asyncio.sleep(self.poll_interval)
            current_hash = await self.get_head_hash()

            if current_hash and current_hash != self.last_head_hash:
                logger.info(f"Git HEAD changed: {self.last_head_hash} -> {current_hash}")
                self.last_head_hash = current_hash
                await self.broadcaster.broadcast("HEAD_UPDATED", {
                    "ref": "HEAD",
                    "hash": current_hash
                })

    def stop(self):
        """Signal the polling loop in start() to exit after its next sleep."""
        self.is_running = False
        logger.info("Git Monitor stopping...")
|
|
97
|
+
|
|
98
|
+
from watchdog.observers import Observer
|
|
99
|
+
from watchdog.events import FileSystemEventHandler
|
|
100
|
+
from monoco.core.config import MonocoConfig, get_config
|
|
101
|
+
|
|
102
|
+
class ProjectContext:
    """
    Holds the runtime state for a single project.

    Wraps a project directory, its loaded configuration, and the
    IssueMonitor that watches the project's issue files.
    """
    def __init__(self, path: Path, config: MonocoConfig, broadcaster: Broadcaster):
        self.path = path
        self.config = config
        self.id = path.name  # Use directory name as ID for now
        self.name = config.project.name
        # Issue files live under the configured issues sub-path.
        self.issues_root = path / config.paths.issues
        self.monitor = IssueMonitor(self.issues_root, broadcaster, project_id=self.id)

    async def start(self):
        # Begin watching this project's issues directory.
        await self.monitor.start()

    def stop(self):
        # Stop the underlying filesystem watcher.
        self.monitor.stop()
|
|
119
|
+
|
|
120
|
+
class ProjectManager:
    """
    Discovers and manages multiple Monoco projects within a workspace.
    """

    def __init__(self, workspace_root: Path, broadcaster: Broadcaster):
        self.workspace_root = workspace_root
        self.broadcaster = broadcaster
        # Registered projects keyed by project id (directory name).
        self.projects: Dict[str, ProjectContext] = {}

    def scan(self):
        """
        Scans workspace for potential Monoco projects.
        A directory is a project if it has monoco.yaml or .monoco/config.yaml or an Issues directory.
        """
        logger.info(f"Scanning workspace: {self.workspace_root}")
        from monoco.core.workspace import find_projects

        for candidate in find_projects(self.workspace_root):
            self._register_project(candidate)

    def _register_project(self, path: Path):
        """Load a project's config and add it to the registry."""
        try:
            config = get_config(str(path))
            # Fall back to the directory name when the config still carries
            # the stock default project name.
            if config.project.name == "Monoco Project":
                config.project.name = path.name

            ctx = ProjectContext(path, config, self.broadcaster)
            self.projects[ctx.id] = ctx
            logger.info(f"Registered project: {ctx.id} ({ctx.path})")
        except Exception as e:
            logger.error(f"Failed to register project at {path}: {e}")

    async def start_all(self):
        """Discover projects, then start each one's issue monitor."""
        self.scan()
        for ctx in self.projects.values():
            await ctx.start()

    def stop_all(self):
        """Stop every registered project's issue monitor."""
        for ctx in self.projects.values():
            ctx.stop()

    def get_project(self, project_id: str) -> Optional[ProjectContext]:
        return self.projects.get(project_id)

    def list_projects(self) -> List[Dict[str, Any]]:
        """Return a JSON-friendly summary of every registered project."""
        summaries: List[Dict[str, Any]] = []
        for ctx in self.projects.values():
            summaries.append({
                "id": ctx.id,
                "name": ctx.name,
                "path": str(ctx.path),
                "issues_path": str(ctx.issues_root),
            })
        return summaries
|
|
176
|
+
|
|
177
|
+
class IssueEventHandler(FileSystemEventHandler):
    """
    Watchdog handler that translates filesystem events on issue files
    into broadcast messages dispatched onto the daemon's event loop.
    """

    def __init__(self, loop, broadcaster: Broadcaster, project_id: str):
        self.loop = loop
        self.broadcaster = broadcaster
        self.project_id = project_id

    def _process_upsert(self, path_str: str):
        # Watchdog callbacks run on a worker thread, so hop onto the
        # asyncio loop to do the actual broadcasting.
        if path_str.endswith(".md"):
            asyncio.run_coroutine_threadsafe(self._handle_upsert(path_str), self.loop)

    async def _handle_upsert(self, path_str: str):
        try:
            issue_path = Path(path_str)
            if not issue_path.exists():
                return
            issue = parse_issue(issue_path)
            if issue:
                await self.broadcaster.broadcast("issue_upserted", {
                    "issue": issue.model_dump(mode='json'),
                    "project_id": self.project_id
                })
        except Exception as e:
            logger.error(f"Error handling upsert for {path_str}: {e}")

    def _process_delete(self, path_str: str):
        if path_str.endswith(".md"):
            asyncio.run_coroutine_threadsafe(self._handle_delete(path_str), self.loop)

    async def _handle_delete(self, path_str: str):
        try:
            filename = Path(path_str).name
            # Issue ids look like "ABC-1234" at the start of the filename.
            match = re.match(r"([A-Z]+-\d{4})", filename)
            if match:
                await self.broadcaster.broadcast("issue_deleted", {
                    "id": match.group(1),
                    "project_id": self.project_id
                })
        except Exception as e:
            logger.error(f"Error handling delete for {path_str}: {e}")

    def on_created(self, event):
        if not event.is_directory:
            self._process_upsert(event.src_path)

    def on_modified(self, event):
        if not event.is_directory:
            self._process_upsert(event.src_path)

    def on_deleted(self, event):
        if not event.is_directory:
            self._process_delete(event.src_path)

    def on_moved(self, event):
        # A move is a delete at the old path plus an upsert at the new one.
        if not event.is_directory:
            self._process_delete(event.src_path)
            self._process_upsert(event.dest_path)
|
|
236
|
+
|
|
237
|
+
class IssueMonitor:
    """
    Monitor the Issues directory for changes using Watchdog and broadcast update events.
    """

    def __init__(self, issues_root: Path, broadcaster: Broadcaster, project_id: str):
        self.issues_root = issues_root
        self.broadcaster = broadcaster
        self.project_id = project_id
        self.observer = Observer()
        # Captured lazily in start(); the loop is not running at init time.
        self.loop = None

    async def start(self):
        """Start the watchdog observer thread watching issues_root."""
        self.loop = asyncio.get_running_loop()
        handler = IssueEventHandler(self.loop, self.broadcaster, self.project_id)

        # Ensure directory exists
        if not self.issues_root.exists():
            logger.warning(f"Issues root {self.issues_root} does not exist. creating...")
            self.issues_root.mkdir(parents=True, exist_ok=True)

        self.observer.schedule(handler, str(self.issues_root), recursive=True)
        self.observer.start()
        logger.info(f"Issue Monitor started (Watchdog). Watching {self.issues_root}")

    def stop(self):
        """Stop the observer thread and wait for it to terminate."""
        if self.observer.is_alive():
            self.observer.stop()
            self.observer.join()
        logger.info("Issue Monitor stopped.")
|
monoco/daemon/stats.py
ADDED
|
@@ -0,0 +1,124 @@
|
|
|
1
|
+
from typing import List, Dict, Set, Optional
|
|
2
|
+
from datetime import datetime, timedelta
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from enum import Enum
|
|
5
|
+
from pydantic import BaseModel
|
|
6
|
+
from monoco.features.issue.core import list_issues
|
|
7
|
+
from monoco.features.issue.models import IssueStatus, IssueMetadata
|
|
8
|
+
|
|
9
|
+
class ActivityType(str, Enum):
    """Kind of event shown in the dashboard's recent-activity feed."""

    CREATED = "created"
    UPDATED = "updated"
    CLOSED = "closed"
|
|
13
|
+
|
|
14
|
+
class ActivityItem(BaseModel):
    """A single entry in the dashboard's recent-activity feed."""

    id: str  # unique feed-entry id, e.g. "act_create_<issue_id>"
    type: ActivityType
    issue_id: str
    issue_title: str
    timestamp: datetime  # when the underlying issue event happened
    description: Optional[str] = None  # short human-readable summary
|
|
21
|
+
|
|
22
|
+
class DashboardStats(BaseModel):
    """Aggregate issue metrics rendered on the daemon dashboard."""

    total_backlog: int  # issues currently in BACKLOG status
    completed_this_week: int  # issues closed within the last 7 days
    blocked_issues_count: int  # open issues with an unresolved dependency
    velocity_trend: int # Delta compared to last week
    recent_activities: List[ActivityItem] = []
|
|
28
|
+
|
|
29
|
+
def calculate_dashboard_stats(issues_root: Path) -> DashboardStats:
    """
    Build the dashboard metrics and recent-activity feed for one project.

    Args:
        issues_root: Directory whose issue files are enumerated via list_issues().

    Returns:
        DashboardStats with the backlog size, weekly completion counts,
        the number of blocked issues, a week-over-week velocity delta and
        the 20 most recent activity items.
    """
    raw_issues = list_issues(issues_root)

    # 1. Pre-process for fast lookup and deduplication (the last issue seen
    # with a given id wins).
    issue_map: Dict[str, IssueMetadata] = {i.id: i for i in raw_issues}
    issues = list(issue_map.values())

    backlog_count = 0
    completed_this_week = 0
    completed_last_week = 0
    blocked_count = 0

    # NOTE(review): datetime.now() is naive — assumes issue timestamps are
    # also naive/local; confirm against the issue parser.
    now = datetime.now()
    one_week_ago = now - timedelta(days=7)
    two_weeks_ago = now - timedelta(days=14)
    activity_window = now - timedelta(days=3)  # Show activities from last 3 days

    activities: List[ActivityItem] = []

    for issue in issues:
        # --- Stats Calculation ---
        # Total Backlog
        if issue.status == IssueStatus.BACKLOG:
            backlog_count += 1

        # Completed This Week & Last Week
        if issue.status == IssueStatus.CLOSED and issue.closed_at:
            closed_at = issue.closed_at
            if closed_at >= one_week_ago:
                completed_this_week += 1
            elif closed_at >= two_weeks_ago:
                # elif already guarantees closed_at < one_week_ago, so the
                # original "and closed_at < one_week_ago" check was redundant.
                completed_last_week += 1

        # Blocked Issues: an OPEN issue with any dependency that is either
        # unknown or not yet CLOSED.
        if issue.status == IssueStatus.OPEN:
            is_blocked = any(
                dep_id not in issue_map or issue_map[dep_id].status != IssueStatus.CLOSED
                for dep_id in issue.dependencies
            )
            if is_blocked:
                blocked_count += 1

        # --- Activity Feed Generation ---
        # 1. Created Event
        if issue.created_at >= activity_window:
            activities.append(ActivityItem(
                id=f"act_create_{issue.id}",
                type=ActivityType.CREATED,
                issue_id=issue.id,
                issue_title=issue.title,
                timestamp=issue.created_at,
                description="Issue created"
            ))

        # 2. Closed Event
        if issue.status == IssueStatus.CLOSED and issue.closed_at and issue.closed_at >= activity_window:
            activities.append(ActivityItem(
                id=f"act_close_{issue.id}",
                type=ActivityType.CLOSED,
                issue_id=issue.id,
                issue_title=issue.title,
                timestamp=issue.closed_at,
                description="Issue completed"
            ))

        # 3. Updated Event (Heuristic: updated recently and not just created/closed)
        # We skip 'updated' if it's too close to created_at or closed_at to avoid noise
        if issue.updated_at >= activity_window:
            is_creation = abs((issue.updated_at - issue.created_at).total_seconds()) < 60
            is_closing = issue.closed_at and abs((issue.updated_at - issue.closed_at).total_seconds()) < 60

            if not is_creation and not is_closing:
                activities.append(ActivityItem(
                    id=f"act_update_{issue.id}_{issue.updated_at.timestamp()}",
                    type=ActivityType.UPDATED,
                    issue_id=issue.id,
                    issue_title=issue.title,
                    timestamp=issue.updated_at,
                    description="Issue updated"
                ))

    # Sort activities by timestamp desc and take top 20
    activities.sort(key=lambda x: x.timestamp, reverse=True)
    recent_activities = activities[:20]

    velocity_trend = completed_this_week - completed_last_week

    return DashboardStats(
        total_backlog=backlog_count,
        completed_this_week=completed_this_week,
        blocked_issues_count=blocked_count,
        velocity_trend=velocity_trend,
        recent_activities=recent_activities
    )
|
|
File without changes
|
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
import typer
|
|
2
|
+
import yaml
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from typing import Optional
|
|
5
|
+
from monoco.core.config import get_config, MonocoConfig
|
|
6
|
+
from monoco.core.output import print_output
|
|
7
|
+
from rich.console import Console
|
|
8
|
+
|
|
9
|
+
# Typer sub-application exposed as the `monoco config` command group.
app = typer.Typer(help="Manage Monoco configuration")
console = Console()
|
|
11
|
+
|
|
12
|
+
@app.command()
def show():
    """Show current configuration."""
    current = get_config()
    print_output(current, title="Current Configuration")
|
|
17
|
+
|
|
18
|
+
@app.command(name="set")
def set_val(
    key: str = typer.Argument(..., help="Config key (e.g. telemetry.enabled)"),
    value: str = typer.Argument(..., help="Value to set"),
    scope: str = typer.Option("global", "--scope", "-s", help="Configuration scope: global or project")
):
    """Set a configuration value."""
    # This is a simplified implementation of config setting
    # In a real system, we'd want to validate the key against the schema

    if scope == "global":
        config_path = Path.home() / ".monoco" / "config.yaml"
    else:
        # Check project root
        cwd = Path.cwd()
        config_path = cwd / ".monoco" / "config.yaml"
        if not (cwd / ".monoco").exists():
            config_path = cwd / "monoco.yaml"

    config_data = {}
    if config_path.exists():
        with open(config_path, "r") as f:
            config_data = yaml.safe_load(f) or {}

    # Simple nested key support (e.g. telemetry.enabled)
    parts = key.split(".")
    target = config_data
    for part in parts[:-1]:
        # Fix: also replace non-dict intermediate nodes. Previously, setting
        # "a.b" after "a" held a scalar raised TypeError on `part not in target`.
        if not isinstance(target.get(part), dict):
            target[part] = {}
        target = target[part]

    # Type conversion: booleans first, then integers, else keep the raw string.
    if value.lower() in ("true", "yes", "on"):
        val = True
    elif value.lower() in ("false", "no", "off"):
        val = False
    else:
        try:
            val = int(value)
        except ValueError:
            val = value

    target[parts[-1]] = val

    config_path.parent.mkdir(parents=True, exist_ok=True)
    with open(config_path, "w") as f:
        yaml.dump(config_data, f, default_flow_style=False)

    console.print(f"[green]✓ Set {key} = {val} in {scope} config.[/green]")
|
|
68
|
+
|
|
69
|
+
# Allow running this module directly for ad-hoc testing of the CLI.
if __name__ == "__main__":
    app()
|
|
File without changes
|
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
import typer
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
from rich.console import Console
|
|
4
|
+
from rich.table import Table
|
|
5
|
+
from rich.panel import Panel
|
|
6
|
+
|
|
7
|
+
from monoco.core.config import get_config
|
|
8
|
+
from . import core
|
|
9
|
+
|
|
10
|
+
# Typer sub-application exposed as the `monoco i18n` command group.
app = typer.Typer(help="Management tools for Documentation Internationalization (i18n).")
console = Console()
|
|
12
|
+
|
|
13
|
+
@app.command("scan")
def scan(
    root: str = typer.Option(None, "--root", help="Target root directory to scan. Defaults to the project root."),
    limit: int = typer.Option(10, "--limit", help="Maximum number of missing files to display. Use 0 for unlimited."),
):
    """
    Scan the project for internationalization (i18n) status.

    Walks every Markdown file under the target directory and checks whether
    the translations expected by Monoco's i18n conventions exist:
    - Root files: suffixed pattern (e.g., README_ZH.md)
    - Sub-directories: subdir pattern (e.g., docs/guide/zh/xxx.md)

    Prints a report of files missing translations in the target languages
    and exits with code 1 when any translation is missing.
    """
    config = get_config()
    target_root = Path(root).resolve() if root else Path(config.paths.root)
    target_langs = config.i18n.target_langs

    console.print(f"Scanning i18n coverage in [bold cyan]{target_root}[/bold cyan]...")
    console.print(f"Target Languages: [bold yellow]{', '.join(target_langs)}[/bold yellow] (Source: {config.i18n.source_lang})")

    # Only source documents count; translation files themselves are skipped.
    all_files = core.discover_markdown_files(target_root)
    source_files = [doc for doc in all_files if not core.is_translation_file(doc, target_langs)]

    # Missing results keyed by source file: { file_path: [missing_langs] }
    missing_map = {}
    total_checks = len(source_files) * len(target_langs)
    found_count = 0

    for doc in source_files:
        missing_langs = core.check_translation_exists(doc, target_root, target_langs)
        found_count += len(target_langs) - len(missing_langs)
        if missing_langs:
            missing_map[doc] = missing_langs

    # Reporting
    coverage = (found_count / total_checks * 100) if total_checks > 0 else 100

    # Sort by file path for deterministic output.
    sorted_missing = sorted(missing_map.items(), key=lambda item: str(item[0]))

    # Apply the display limit (0 means unlimited).
    total_missing_files = len(sorted_missing)
    display_limit = limit if limit > 0 else total_missing_files
    displayed_missing = sorted_missing[:display_limit]

    # Build table title, including truncation info when relevant.
    if total_missing_files == 0:
        table_title = "i18n Availability Report"
    elif display_limit < total_missing_files:
        table_title = f"i18n Availability Report (Showing {display_limit} / {total_missing_files} missing files)"
    else:
        table_title = f"i18n Availability Report ({total_missing_files} missing files)"

    table = Table(title=table_title, box=None)
    table.add_column("Source File", style="cyan", no_wrap=True, overflow="fold")
    table.add_column("Missing Languages", style="red")
    table.add_column("Expected Paths", style="dim", no_wrap=True, overflow="fold")

    for doc, langs in displayed_missing:
        rel_path = doc.relative_to(target_root)
        expected_paths = [
            str(core.get_target_translation_path(doc, target_root, lang).relative_to(target_root))
            for lang in langs
        ]
        table.add_row(
            str(rel_path),
            ", ".join(langs),
            "\n".join(expected_paths)
        )

    console.print(table)

    # Show hint if output was truncated
    if display_limit < total_missing_files:
        console.print(f"\n[dim]💡 Tip: Use [bold]--limit 0[/bold] to show all {total_missing_files} missing files.[/dim]\n")

    # Split missing files into partially vs. completely untranslated.
    partial_missing = sum(1 for _, langs in sorted_missing if len(langs) < len(target_langs))
    complete_missing = total_missing_files - partial_missing

    if coverage == 100:
        status_color = "green"
    elif coverage < 50:
        status_color = "red"
    else:
        status_color = "yellow"

    summary_lines = [
        f"Total Source Files: {len(source_files)}",
        f"Target Languages: {len(target_langs)}",
        f"Total Checks: {total_checks}",
        f"Found Translations: {found_count}",
        f"Missing Files: {total_missing_files}",
    ]

    if total_missing_files > 0:
        summary_lines.append(f" - Partial Missing: {partial_missing}")
        summary_lines.append(f" - Complete Missing: {complete_missing}")

    summary_lines.append(f"Coverage: [{status_color}]{coverage:.1f}%[/{status_color}]")

    console.print(Panel("\n".join(summary_lines), title="I18N STATUS", expand=False))

    if missing_map:
        raise typer.Exit(code=1)
|