monoco-toolkit 0.1.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,89 @@
1
+ import os
2
+ import uuid
3
+ import json
4
+
5
+ import time
6
+ from pathlib import Path
7
+ from typing import Optional, Dict, Any
8
+ from monoco.core.config import get_config
9
+
10
# PostHog ingestion endpoint and project API key. PostHog project keys are
# write-only and designed to ship in client code, but NOTE(review): confirm
# this key is the intended public one and not a secret.
POSTHOG_API_KEY = "phc_MndV8H8v0W3P7Yv1P7Z8X7X7X7X7X7X7X7X7"
POSTHOG_HOST = "https://app.posthog.com"
12
+
13
class Telemetry:
    """Best-effort, opt-in usage analytics sent to PostHog.

    Every failure path (missing httpx, network error, unreadable state file)
    is deliberately swallowed: telemetry must never break the CLI.
    """

    def __init__(self):
        self.config = get_config()
        # Stable anonymous identifier persisted in ~/.monoco/state.json.
        self._device_id = self._get_or_create_device_id()

    @staticmethod
    def _state_path() -> Path:
        # Per-user state file, shared across all projects on this machine.
        return Path.home() / ".monoco" / "state.json"

    def _read_state(self) -> Dict[str, Any]:
        """Load the state file, returning an empty dict on any error."""
        try:
            with open(self._state_path(), "r") as f:
                return json.load(f)
        except Exception:
            return {}

    def _get_or_create_device_id(self) -> str:
        """Return the persisted device id, minting and saving one if absent.

        The original implementation read the state file twice (once to look
        up the id, once before writing); we read it a single time and merge.
        """
        state = self._read_state()
        if "device_id" in state:
            return state["device_id"]

        device_id = str(uuid.uuid4())
        state["device_id"] = device_id
        try:
            path = self._state_path()
            path.parent.mkdir(parents=True, exist_ok=True)
            with open(path, "w") as f:
                json.dump(state, f)
        except Exception:
            pass  # persistence is best-effort; a fresh id next run is acceptable
        return device_id

    def capture(self, event_name: str, properties: Optional[Dict[str, Any]] = None):
        """Send one namespaced event; no-op unless telemetry is explicitly enabled.

        Args:
            event_name: bare event name; sent to PostHog as "cli:<event_name>".
            properties: extra event properties merged over the defaults.
        """
        # Tri-state flag: None means the user was never asked. To stay
        # transparent we skip capture rather than opting the user in.
        if self.config.telemetry.enabled is None:
            return

        if self.config.telemetry.enabled is False:
            return

        # Namespace events so CLI traffic is distinguishable server-side.
        namespaced_event = f"cli:{event_name}"
        props = {
            "distinct_id": self._device_id,
            "project_key": self.config.project.key,
            "project_name": self.config.project.name,
            "os": os.name,
            "cli_version": "0.1.0",
        }
        if properties:
            props.update(properties)

        # Timezone-aware UTC timestamp. The previous naive local-time
        # strftime produced ambiguous timestamps across machines.
        from datetime import datetime, timezone

        data = {
            "api_key": POSTHOG_API_KEY,
            "event": namespaced_event,
            "properties": props,
            "timestamp": datetime.now(timezone.utc).isoformat(),
        }

        # Fire-and-forget with a short timeout; httpx is an optional extra.
        try:
            import httpx
            httpx.post(f"{POSTHOG_HOST}/capture/", json=data, timeout=1.0)
        except ImportError:
            pass  # Telemetry is optional
        except Exception:
            pass  # never surface telemetry failures to the user
82
+
83
# Lazily-created shared Telemetry instance backing the module-level helper.
_instance = None

def capture_event(event: str, properties: Optional[Dict[str, Any]] = None):
    """Convenience wrapper: capture *event* through a shared Telemetry singleton."""
    global _instance
    telemetry = _instance
    if telemetry is None:
        telemetry = Telemetry()
        _instance = telemetry
    telemetry.capture(event, properties)
@@ -0,0 +1,40 @@
1
+ from pathlib import Path
2
+ from typing import List, Optional
3
+
4
def is_project_root(path: Path) -> bool:
    """Return True if *path* is a directory that looks like a Monoco project root.

    A directory qualifies when it contains a monoco.yaml file, a
    .monoco/config.yaml file, or an Issues/ directory.
    """
    if not path.is_dir():
        return False

    markers = (
        path / "monoco.yaml",
        path / ".monoco" / "config.yaml",
        path / "Issues",
    )
    return any(marker.exists() for marker in markers)
18
+
19
def find_projects(workspace_root: Path) -> List[Path]:
    """Scan *workspace_root* and its immediate children for project roots.

    The workspace root itself is checked first, then every non-hidden
    first-level subdirectory. Deeper nesting is deliberately not scanned
    (avoids crawling node_modules, .git, and similar large trees).
    """

    def _looks_like_root(candidate: Path) -> bool:
        # Same markers as is_project_root: monoco.yaml, .monoco/config.yaml, Issues/.
        return candidate.is_dir() and (
            (candidate / "monoco.yaml").exists()
            or (candidate / ".monoco" / "config.yaml").exists()
            or (candidate / "Issues").exists()
        )

    found: List[Path] = []

    if _looks_like_root(workspace_root):
        found.append(workspace_root)

    # Only one level deep; hidden directories are skipped. A child can never
    # equal the workspace root, so no dedup is needed here.
    for child in workspace_root.iterdir():
        if child.name.startswith('.') or not child.is_dir():
            continue
        if _looks_like_root(child):
            found.append(child)

    return found
File without changes
monoco/daemon/app.py ADDED
@@ -0,0 +1,378 @@
1
+ from contextlib import asynccontextmanager
2
+ from fastapi import FastAPI, Request, HTTPException, Query
3
+ from fastapi.middleware.cors import CORSMiddleware
4
+ from sse_starlette.sse import EventSourceResponse
5
+ import asyncio
6
+ import logging
7
+ import os
8
+ from typing import Optional, List, Dict
9
+ from monoco.daemon.services import Broadcaster, GitMonitor, ProjectManager
10
+ from fastapi import FastAPI, Request, HTTPException, Query
11
+
12
+ # Configure logging
13
+ logging.basicConfig(level=logging.INFO)
14
+ logger = logging.getLogger("monoco.daemon")
15
+ from pathlib import Path
16
+ from monoco.core.config import get_config
17
+ from monoco.features.issue.core import list_issues
18
+
19
# Human-readable summary surfaced on the FastAPI /docs and /redoc pages.
description = """
Monoco Daemon Process
- Repository Awareness
- State Management
- SSE Broadcast
"""

# Service Instances
# Module-level singletons shared by every request handler. project_manager
# is created during application startup (see lifespan) once the workspace
# root is known, hence the None placeholder here.
broadcaster = Broadcaster()
git_monitor = GitMonitor(broadcaster)
project_manager: ProjectManager | None = None
30
+
31
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan hook: start services before serving, tear down after.

    Code before `yield` runs at startup; code after it runs at shutdown.
    """
    # Startup
    logger.info("Starting Monoco Daemon services...")

    global project_manager
    # Use MONOCO_SERVER_ROOT if set, otherwise CWD
    env_root = os.getenv("MONOCO_SERVER_ROOT")
    workspace_root = Path(env_root) if env_root else Path.cwd()
    logger.info(f"Workspace Root: {workspace_root}")
    project_manager = ProjectManager(workspace_root, broadcaster)

    await project_manager.start_all()
    # The git monitor runs as a background task for the app's lifetime.
    monitor_task = asyncio.create_task(git_monitor.start())

    yield
    # Shutdown: signal the monitor to stop, stop projects, then await the
    # background task so it can finish cleanly before the process exits.
    logger.info("Shutting down Monoco Daemon services...")
    git_monitor.stop()
    if project_manager:
        project_manager.stop_all()
    await monitor_task
53
+
54
# ASGI application instance; the lifespan hook above wires up the
# ProjectManager and GitMonitor around the server's lifetime.
app = FastAPI(
    title="Monoco Daemon",
    description=description,
    version="0.1.0",
    docs_url="/docs",
    redoc_url="/redoc",
    lifespan=lifespan,
)
62
+
63
def get_project_or_404(project_id: Optional[str] = None):
    """Resolve a project by id, defaulting to the first discovered project.

    Raises HTTPException 503 before the daemon has initialized and 404 when
    no project (or no matching project) exists.
    """
    if not project_manager:
        raise HTTPException(status_code=503, detail="Daemon not fully initialized")

    if not project_id:
        # No explicit id: fall back to the first project the scanner found.
        candidates = list(project_manager.projects.values())
        if not candidates:
            raise HTTPException(status_code=404, detail="No projects found")
        return candidates[0]

    found = project_manager.get_project(project_id)
    if not found:
        raise HTTPException(status_code=404, detail=f"Project {project_id} not found")
    return found
80
+
81
# CORS Configuration
# Kanban may run on different ports (e.g. localhost:3000, tauri://localhost)
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # NOTE(review): wide open; restrict to localhost origins in production
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
90
+
91
@app.get("/health")
async def health_check():
    """Instant liveness probe for process monitors; static payload, no I/O."""
    return {"status": "ok", "component": "monoco-daemon"}
97
+
98
@app.get("/api/v1/projects")
async def list_projects():
    """List all discovered projects (empty until the daemon has initialized)."""
    return project_manager.list_projects() if project_manager else []
106
+
107
@app.get("/api/v1/info")
async def get_project_info(project_id: Optional[str] = None):
    """Metadata about one project (first discovered when project_id omitted)."""
    target = get_project_or_404(project_id)
    head = await git_monitor.get_head_hash()
    info = {
        "name": target.name,
        "id": target.id,
        "version": "0.1.0",
        "mode": "daemon",
        "head": head,
    }
    return info
121
+
122
@app.get("/api/v1/config/dictionary")
async def get_ui_dictionary(project_id: Optional[str] = None) -> Dict[str, str]:
    """UI terminology dictionary, re-read from config on every request."""
    target = get_project_or_404(project_id)
    # Reload so edits to the project config are picked up without a restart.
    fresh_config = get_config(str(target.path))
    return fresh_config.ui.dictionary
131
+
132
@app.get("/api/v1/events")
async def sse_endpoint(request: Request):
    """
    Server-Sent Events endpoint for real-time updates.

    Subscribes this client to the shared Broadcaster queue and streams every
    published message until the client disconnects; the subscription is
    always released in the generator's `finally` block.
    """
    queue = await broadcaster.subscribe()

    async def event_generator():
        try:
            # Quick ping to confirm connection
            yield {
                "event": "connect",
                "data": "connected"
            }

            while True:
                if await request.is_disconnected():
                    break

                # Wait for new messages
                # NOTE(review): queue.get() blocks until the next broadcast,
                # so a disconnect is only noticed once another message
                # arrives — confirm whether a timeout/heartbeat is needed.
                message = await queue.get()
                yield message

        except asyncio.CancelledError:
            logger.debug("SSE connection cancelled")
        finally:
            await broadcaster.unsubscribe(queue)

    return EventSourceResponse(event_generator())
161
+
162
@app.get("/api/v1/issues")
async def get_issues(project_id: Optional[str] = None):
    """Return every issue found under the project's issues root."""
    target = get_project_or_404(project_id)
    return list_issues(target.issues_root)
170
+
171
+ from monoco.features.issue.core import list_issues, create_issue_file, update_issue, delete_issue_file, find_issue_path, parse_issue, get_board_data, parse_issue_detail, update_issue_content
172
+ from monoco.features.issue.models import IssueType, IssueStatus, IssueSolution, IssueStage, IssueMetadata, IssueDetail
173
+ from monoco.daemon.models import CreateIssueRequest, UpdateIssueRequest, UpdateIssueContentRequest
174
+ from monoco.daemon.stats import calculate_dashboard_stats, DashboardStats
175
+ from fastapi import FastAPI, Request, HTTPException
176
+ from typing import Optional, List, Dict
177
+
178
+ # ... existing code ...
179
+
180
@app.get("/api/v1/board")
async def get_board_endpoint(project_id: Optional[str] = None):
    """Kanban view: open issues grouped by stage."""
    target = get_project_or_404(project_id)
    return get_board_data(target.issues_root)
188
+
189
@app.get("/api/v1/stats/dashboard", response_model=DashboardStats)
async def get_dashboard_stats_endpoint(project_id: Optional[str] = None):
    """Aggregated dashboard statistics for one project."""
    target = get_project_or_404(project_id)
    return calculate_dashboard_stats(target.issues_root)
196
+
197
+
198
@app.post("/api/v1/issues", response_model=IssueMetadata)
async def create_issue_endpoint(payload: CreateIssueRequest):
    """Create a new issue file; any creation failure maps to HTTP 400."""
    target = get_project_or_404(payload.project_id)

    try:
        created, _ = create_issue_file(
            target.issues_root,
            payload.type,
            payload.title,
            parent=payload.parent,
            status=payload.status,
            stage=payload.stage,
            dependencies=payload.dependencies,
            related=payload.related,
            subdir=payload.subdir,
        )
    except Exception as e:
        raise HTTPException(status_code=400, detail=str(e))
    return created
220
+
221
@app.get("/api/v1/issues/{issue_id}", response_model=IssueDetail)
async def get_issue_endpoint(issue_id: str, project_id: Optional[str] = None):
    """Fetch a single issue's full detail by its ID (404 if absent)."""
    target = get_project_or_404(project_id)

    location = find_issue_path(target.issues_root, issue_id)
    if not location:
        raise HTTPException(status_code=404, detail=f"Issue {issue_id} not found")

    detail = parse_issue_detail(location)
    if not detail:
        # File exists but could not be parsed: server-side data problem.
        raise HTTPException(status_code=500, detail=f"Failed to parse issue {issue_id}")

    return detail
237
+
238
@app.patch("/api/v1/issues/{issue_id}", response_model=IssueMetadata)
async def update_issue_endpoint(issue_id: str, payload: UpdateIssueRequest):
    """Patch an issue's logical state (status / stage / solution).

    Maps FileNotFoundError to 404, ValueError (invalid transition) to 400,
    and anything else to 500.
    """
    target = get_project_or_404(payload.project_id)

    try:
        return update_issue(
            target.issues_root,
            issue_id,
            status=payload.status,
            stage=payload.stage,
            solution=payload.solution,
        )
    except FileNotFoundError:
        raise HTTPException(status_code=404, detail=f"Issue {issue_id} not found")
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
260
+
261
@app.put("/api/v1/issues/{issue_id}/content", response_model=IssueMetadata)
async def update_issue_content_endpoint(issue_id: str, payload: UpdateIssueContentRequest):
    """Replace an issue's raw content (PUT: full representation replacement).

    Content integrity is validated by update_issue_content before saving;
    validation failures surface as HTTP 400.
    """
    target = get_project_or_404(payload.project_id)

    try:
        return update_issue_content(target.issues_root, issue_id, payload.content)
    except FileNotFoundError:
        raise HTTPException(status_code=404, detail=f"Issue {issue_id} not found")
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
282
+
283
@app.delete("/api/v1/issues/{issue_id}")
async def delete_issue_endpoint(issue_id: str, project_id: Optional[str] = None):
    """Physically delete an issue file (404 if it does not exist)."""
    target = get_project_or_404(project_id)

    try:
        delete_issue_file(target.issues_root, issue_id)
    except FileNotFoundError:
        raise HTTPException(status_code=404, detail=f"Issue {issue_id} not found")
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
    return {"status": "deleted", "id": issue_id}
297
+
298
@app.post("/api/v1/monitor/refresh")
async def refresh_monitor():
    """
    Manually trigger a Git HEAD check.
    Useful for 'monoco issue commit' to instantly notify Kanbans.
    """
    # Reading the hash is a sanity check that the daemon sees the repo; no
    # forced broadcast happens here — the monitor loop picks up real changes
    # on its own within ~2s.
    head = await git_monitor.get_head_hash()
    return {"status": "refreshed", "head": head}
313
+
314
+ # --- Workspace State Management ---
315
+ import json
316
+ from pydantic import BaseModel
317
+
318
class WorkspaceState(BaseModel):
    """Persisted workspace-level UI state stored in .monoco/state.json."""
    # ID of the project the user last had active (e.g. in the Kanban UI).
    last_active_project_id: Optional[str] = None
320
+
321
@app.get("/api/v1/workspace/state", response_model=WorkspaceState)
async def get_workspace_state():
    """Read the persisted workspace state; any read error yields defaults.

    Returning an empty WorkspaceState instead of failing lets the frontend
    fall back gracefully when the state file is missing or corrupt.
    """
    if not project_manager:
        raise HTTPException(status_code=503, detail="Daemon not initialized")

    state_file = project_manager.workspace_root / ".monoco" / "state.json"
    if not state_file.exists():
        return WorkspaceState()

    try:
        raw = state_file.read_text(encoding='utf-8')
        if not raw.strip():
            return WorkspaceState()
        return WorkspaceState(**json.loads(raw))
    except Exception as e:
        logger.error(f"Failed to read state file: {e}")
        return WorkspaceState()
344
+
345
@app.post("/api/v1/workspace/state", response_model=WorkspaceState)
async def update_workspace_state(state: WorkspaceState):
    """
    Update the workspace state.

    Merges the incoming fields into the existing state file so keys written
    by other components (or newer daemon versions) are preserved. Raises 503
    before initialization and 500 when persisting fails.
    """
    if not project_manager:
        raise HTTPException(status_code=503, detail="Daemon not initialized")

    state_file = project_manager.workspace_root / ".monoco" / "state.json"

    # exist_ok=True makes the previous existence pre-check unnecessary.
    state_file.parent.mkdir(parents=True, exist_ok=True)

    try:
        # We merge with existing state to avoid data loss if we extend model later
        current_data = {}
        if state_file.exists():
            try:
                content = state_file.read_text(encoding='utf-8')
                if content.strip():
                    current_data = json.loads(content)
            except Exception:
                # Read errors are ignored on write (best-effort merge). The
                # previous bare `except:` also swallowed SystemExit and
                # KeyboardInterrupt, which must propagate.
                pass

        # Update with new values; exclude_unset keeps omitted fields intact.
        current_data.update(state.model_dump(exclude_unset=True))

        state_file.write_text(json.dumps(current_data, indent=2), encoding='utf-8')
        return WorkspaceState(**current_data)
    except Exception as e:
        logger.error(f"Failed to write state file: {e}")
        raise HTTPException(status_code=500, detail=f"Failed to persist state: {str(e)}")
@@ -0,0 +1,36 @@
1
+ import typer
2
+ import uvicorn
3
+ import os
4
+ from typing import Optional
5
+ from monoco.core.output import print_output
6
+ from monoco.core.config import get_config
7
+
8
+ from pathlib import Path
9
+
10
def serve(
    host: str = typer.Option("127.0.0.1", "--host", "-h", help="Bind host"),
    port: int = typer.Option(8642, "--port", "-p", help="Bind port"),
    reload: bool = typer.Option(False, "--reload", "-r", help="Enable auto-reload for dev"),
    root: Optional[str] = typer.Option(None, "--root", help="Workspace root directory"),
):
    """
    Start the Monoco Daemon server.
    """
    # NOTE(review): `settings` is loaded but never used below — presumably the
    # call is kept for its config-loading side effects; confirm or remove.
    settings = get_config()

    if root:
        # The daemon reads MONOCO_SERVER_ROOT at startup, so the workspace
        # root is handed to the server process via the environment.
        os.environ["MONOCO_SERVER_ROOT"] = str(Path(root).resolve())
        print_output(f"Workspace Root: {os.environ['MONOCO_SERVER_ROOT']}", title="Monoco Serve")

    print_output(f"Starting Monoco Daemon on http://{host}:{port}", title="Monoco Serve")

    # We pass the import string to uvicorn to enable reload if needed
    app_str = "monoco.daemon.app:app"

    uvicorn.run(
        app_str,
        host=host,
        port=port,
        reload=reload,
        log_level="info"
    )
@@ -0,0 +1,24 @@
1
+ from pydantic import BaseModel
2
+ from typing import Optional, List
3
+ from monoco.features.issue.models import IssueType, IssueStatus, IssueSolution, IssueStage
4
+
5
class CreateIssueRequest(BaseModel):
    """Request body for POST /api/v1/issues."""
    type: IssueType
    title: str
    parent: Optional[str] = None  # parent issue ID, when creating a sub-issue
    status: IssueStatus = IssueStatus.OPEN
    stage: Optional[IssueStage] = None
    dependencies: List[str] = []  # IDs of issues this one depends on
    related: List[str] = []  # IDs of loosely related issues
    subdir: Optional[str] = None  # optional subdirectory under the issues root
    project_id: Optional[str] = None  # Added for multi-project support
15
+
16
class UpdateIssueRequest(BaseModel):
    """Request body for PATCH /api/v1/issues/{id}; all fields optional (partial update)."""
    status: Optional[IssueStatus] = None
    stage: Optional[IssueStage] = None
    solution: Optional[IssueSolution] = None
    project_id: Optional[str] = None  # multi-project support; defaults to first project
21
+
22
class UpdateIssueContentRequest(BaseModel):
    """Request body for PUT /api/v1/issues/{id}/content: full raw-content replacement."""
    content: str
    project_id: Optional[str] = None  # multi-project support; defaults to first project
@@ -0,0 +1,41 @@
1
+
2
+ import sys
3
+ from pathlib import Path
4
+ from datetime import datetime
5
+
6
+ # Add Toolkit to sys.path
7
+ sys.path.append("/Users/indenscale/Documents/Projects/Monoco/Toolkit")
8
+
9
+ from monoco.daemon.stats import calculate_dashboard_stats
10
+
11
def run(issues_root: Path = Path("/Users/indenscale/Documents/Projects/Monoco/Toolkit/Issues")):
    """Debug harness: print dashboard activities and flag duplicate entries.

    Args:
        issues_root: Issues directory to scan. Defaults to the author's local
            checkout; pass a different path to inspect another project.
    """
    print(f"Scanning {issues_root}...")

    stats = calculate_dashboard_stats(issues_root)

    print(f"Found {len(stats.recent_activities)} activities.")

    # Detect duplicate activity IDs while printing the feed.
    seen_ids = set()
    for act in stats.recent_activities:
        print(f"[{act.type}] {act.id} - {act.issue_title} ({act.timestamp})")
        if act.id in seen_ids:
            print(f"!!! DUPLICATE ID FOUND: {act.id}")
        seen_ids.add(act.id)

    # Group activities by issue to surface issues with multiple entries
    # (e.g. several UPDATED events for the same issue).
    by_issue = {}
    for act in stats.recent_activities:
        by_issue.setdefault(act.issue_id, []).append(act)

    for issue_id, acts in by_issue.items():
        if len(acts) > 1:
            print(f"Issue {issue_id} has multiple activities: {[a.type for a in acts]}")


if __name__ == "__main__":
    run()