monoco-toolkit 0.1.0__py3-none-any.whl → 0.2.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. monoco/cli/__init__.py +0 -0
  2. monoco/cli/project.py +87 -0
  3. monoco/cli/workspace.py +46 -0
  4. monoco/core/agent/__init__.py +5 -0
  5. monoco/core/agent/action.py +144 -0
  6. monoco/core/agent/adapters.py +106 -0
  7. monoco/core/agent/protocol.py +31 -0
  8. monoco/core/agent/state.py +106 -0
  9. monoco/core/config.py +152 -17
  10. monoco/core/execution.py +62 -0
  11. monoco/core/feature.py +58 -0
  12. monoco/core/git.py +51 -2
  13. monoco/core/injection.py +196 -0
  14. monoco/core/integrations.py +234 -0
  15. monoco/core/lsp.py +61 -0
  16. monoco/core/output.py +13 -2
  17. monoco/core/registry.py +36 -0
  18. monoco/core/resources/en/AGENTS.md +8 -0
  19. monoco/core/resources/en/SKILL.md +66 -0
  20. monoco/core/resources/zh/AGENTS.md +8 -0
  21. monoco/core/resources/zh/SKILL.md +66 -0
  22. monoco/core/setup.py +88 -110
  23. monoco/core/skills.py +444 -0
  24. monoco/core/state.py +53 -0
  25. monoco/core/sync.py +224 -0
  26. monoco/core/telemetry.py +4 -1
  27. monoco/core/workspace.py +85 -20
  28. monoco/daemon/app.py +127 -58
  29. monoco/daemon/models.py +4 -0
  30. monoco/daemon/services.py +56 -155
  31. monoco/features/agent/commands.py +166 -0
  32. monoco/features/agent/doctor.py +30 -0
  33. monoco/features/config/commands.py +125 -44
  34. monoco/features/i18n/adapter.py +29 -0
  35. monoco/features/i18n/commands.py +89 -10
  36. monoco/features/i18n/core.py +113 -27
  37. monoco/features/i18n/resources/en/AGENTS.md +8 -0
  38. monoco/features/i18n/resources/en/SKILL.md +94 -0
  39. monoco/features/i18n/resources/zh/AGENTS.md +8 -0
  40. monoco/features/i18n/resources/zh/SKILL.md +94 -0
  41. monoco/features/issue/adapter.py +34 -0
  42. monoco/features/issue/commands.py +183 -65
  43. monoco/features/issue/core.py +172 -77
  44. monoco/features/issue/linter.py +215 -116
  45. monoco/features/issue/migration.py +134 -0
  46. monoco/features/issue/models.py +23 -19
  47. monoco/features/issue/monitor.py +94 -0
  48. monoco/features/issue/resources/en/AGENTS.md +15 -0
  49. monoco/features/issue/resources/en/SKILL.md +87 -0
  50. monoco/features/issue/resources/zh/AGENTS.md +15 -0
  51. monoco/features/issue/resources/zh/SKILL.md +114 -0
  52. monoco/features/issue/validator.py +269 -0
  53. monoco/features/pty/core.py +185 -0
  54. monoco/features/pty/router.py +138 -0
  55. monoco/features/pty/server.py +56 -0
  56. monoco/features/spike/adapter.py +30 -0
  57. monoco/features/spike/commands.py +45 -24
  58. monoco/features/spike/core.py +4 -21
  59. monoco/features/spike/resources/en/AGENTS.md +7 -0
  60. monoco/features/spike/resources/en/SKILL.md +74 -0
  61. monoco/features/spike/resources/zh/AGENTS.md +7 -0
  62. monoco/features/spike/resources/zh/SKILL.md +74 -0
  63. monoco/main.py +115 -2
  64. {monoco_toolkit-0.1.0.dist-info → monoco_toolkit-0.2.5.dist-info}/METADATA +10 -3
  65. monoco_toolkit-0.2.5.dist-info/RECORD +77 -0
  66. monoco_toolkit-0.1.0.dist-info/RECORD +0 -33
  67. {monoco_toolkit-0.1.0.dist-info → monoco_toolkit-0.2.5.dist-info}/WHEEL +0 -0
  68. {monoco_toolkit-0.1.0.dist-info → monoco_toolkit-0.2.5.dist-info}/entry_points.txt +0 -0
  69. {monoco_toolkit-0.1.0.dist-info → monoco_toolkit-0.2.5.dist-info}/licenses/LICENSE +0 -0
monoco/core/sync.py ADDED
@@ -0,0 +1,224 @@
+ import typer
+ from pathlib import Path
+ from typing import Optional, List
+ from monoco.core.registry import FeatureRegistry
+ from monoco.core.injection import PromptInjector
+ from monoco.core.config import get_config
+ from monoco.core.skills import SkillManager
+ from monoco.core.integrations import get_active_integrations
+ from rich.console import Console
+
+ console = Console()
+
+ def _get_targets(root: Path, config, cli_target: Optional[Path]) -> List[Path]:
+     """Helper to determine target files."""
+     targets = []
+
+     # 1. CLI Target
+     if cli_target:
+         targets.append(cli_target)
+         return targets
+
+     # 2. Config Targets
+     if config.agent.targets:
+         for t in config.agent.targets:
+             targets.append(root / t)
+         return targets
+
+     # 3. Registry Defaults (Dynamic Detection)
+     integrations = get_active_integrations(
+         root,
+         config_overrides=config.agent.integrations,
+         auto_detect=True
+     )
+
+     if integrations:
+         for integration in integrations.values():
+             targets.append(root / integration.system_prompt_file)
+     else:
+         # Fallback to standard Monoco header if nothing is detected
+         # but we usually want at least one target for a generic sync.
+         defaults = ["GEMINI.md", "CLAUDE.md"]
+         targets.extend([root / fname for fname in defaults])
+
+     return list(set(targets))  # Unique paths
+
+ def sync_command(
+     ctx: typer.Context,
+     target: Optional[Path] = typer.Option(None, "--target", "-t", help="Specific file to update (default: auto-detect from config or standard files)"),
+     check: bool = typer.Option(False, "--check", help="Dry run check mode")
+ ):
+     """
+     Synchronize Agent Environment (System Prompts & Skills).
+     Aggregates prompts from all active features and injects them into the agent configuration files.
+     """
+     root = Path.cwd()  # TODO: Use workspace root detection properly if needed
+
+     # 0. Load Config
+     config = get_config(str(root))
+
+     # 1. Register Features
+     registry = FeatureRegistry()
+     registry.load_defaults()
+
+     # 2. Collect Data
+     collected_prompts = {}
+
+     # Filter features based on config if specified
+     all_features = registry.get_features()
+     active_features = []
+
+     if config.agent.includes:
+         for f in all_features:
+             if f.name in config.agent.includes:
+                 active_features.append(f)
+     else:
+         active_features = all_features
+
+     with console.status("[bold green]Collecting feature integration data...") as status:
+         for feature in active_features:
+             status.update(f"Scanning Feature: {feature.name}")
+             try:
+                 data = feature.integrate(root, config.model_dump())
+                 if data:
+                     if data.system_prompts:
+                         collected_prompts.update(data.system_prompts)
+             except Exception as e:
+                 console.print(f"[red]Error integrating feature {feature.name}: {e}[/red]")
+
+     console.print(f"[blue]Collected {len(collected_prompts)} prompts from {len(active_features)} features.[/blue]")
+
+     # 3. Distribute Skills
+     console.print(f"[bold blue]Distributing skills to agent frameworks...[/bold blue]")
+
+     # Determine language from config
+     skill_lang = config.i18n.source_lang if config.i18n.source_lang else 'en'
+     console.print(f"[dim] Using language: {skill_lang}[/dim]")
+
+     # Initialize SkillManager with active features
+     skill_manager = SkillManager(root, active_features)
+
+     # Get active integrations
+     integrations = get_active_integrations(
+         root,
+         config_overrides=config.agent.integrations,
+         auto_detect=True
+     )
+
+     if integrations:
+         for framework_key, integration in integrations.items():
+             skill_target_dir = root / integration.skill_root_dir
+             console.print(f"[dim] Distributing to {integration.name} ({skill_target_dir})...[/dim]")
+
+             try:
+                 # Distribute only the configured language version
+                 results = skill_manager.distribute(skill_target_dir, lang=skill_lang, force=False)
+                 success_count = sum(1 for v in results.values() if v)
+                 console.print(f"[green] ✓ Distributed {success_count}/{len(results)} skills to {integration.name}[/green]")
+             except Exception as e:
+                 console.print(f"[red] Failed to distribute skills to {integration.name}: {e}[/red]")
+     else:
+         console.print(f"[yellow]No agent frameworks detected. Skipping skill distribution.[/yellow]")
+
+     # 4. Determine Targets
+     targets = _get_targets(root, config, target)
+
+     # Ensure targets exist for sync
+     final_targets = []
+     for t in targets:
+         if not t.exists():
+             # If explicit target, fail? Or create?
+             # If default, create.
+             if target:
+                 # CLI target
+                 console.print(f"[yellow]Creating {t.name}...[/yellow]")
+                 try:
+                     t.touch()
+                     final_targets.append(t)
+                 except Exception as e:
+                     console.print(f"[red]Failed to create {t}: {e}[/red]")
+             else:
+                 # Default/Config target -> only create if it's one of the defaults we manage?
+                 # For now, let's just create it to be safe, assuming user wants it.
+                 console.print(f"[yellow]Creating {t.name}...[/yellow]")
+                 try:
+                     t.touch()
+                     final_targets.append(t)
+                 except Exception as e:
+                     console.print(f"[red]Failed to create {t}: {e}[/red]")
+         else:
+             final_targets.append(t)
+
+     # 5. Inject System Prompts
+     for t in final_targets:
+         injector = PromptInjector(t)
+
+         if check:
+             console.print(f"[dim][Dry Run] Would check/update {t.name}[/dim]")
+         else:
+             try:
+                 changed = injector.inject(collected_prompts)
+                 if changed:
+                     console.print(f"[green]✓ Updated {t.name}[/green]")
+                 else:
+                     console.print(f"[dim]= {t.name} is up to date[/dim]")
+             except Exception as e:
+                 console.print(f"[red]Failed to update {t.name}: {e}[/red]")
+
+ def uninstall_command(
+     ctx: typer.Context,
+     target: Optional[Path] = typer.Option(None, "--target", "-t", help="Specific file to clean (default: auto-detect from config or standard files)")
+ ):
+     """
+     Remove Monoco Managed Block from Agent Environment files and clean up distributed skills.
+     """
+     root = Path.cwd()
+     config = get_config(str(root))
+
+     # 1. Clean up System Prompts
+     targets = _get_targets(root, config, target)
+
+     for t in targets:
+         if not t.exists():
+             if target:
+                 console.print(f"[yellow]Target {t} does not exist.[/yellow]")
+             continue
+
+         injector = PromptInjector(t)
+         try:
+             changed = injector.remove()
+             if changed:
+                 console.print(f"[green]✓ Removed Monoco Managed Block from {t.name}[/green]")
+             else:
+                 console.print(f"[dim]= No Monoco Block found in {t.name}[/dim]")
+         except Exception as e:
+             console.print(f"[red]Failed to uninstall from {t.name}: {e}[/red]")
+
+     # 2. Clean up Skills
+     console.print(f"[bold blue]Cleaning up distributed skills...[/bold blue]")
+
+     # Load features to get skill list
+     registry = FeatureRegistry()
+     registry.load_defaults()
+     active_features = registry.get_features()
+
+     skill_manager = SkillManager(root, active_features)
+
+     # Get active integrations
+     integrations = get_active_integrations(
+         root,
+         config_overrides=config.agent.integrations,
+         auto_detect=True
+     )
+
+     if integrations:
+         for framework_key, integration in integrations.items():
+             skill_target_dir = root / integration.skill_root_dir
+             console.print(f"[dim] Cleaning {integration.name} ({skill_target_dir})...[/dim]")
+
+             try:
+                 skill_manager.cleanup(skill_target_dir)
+             except Exception as e:
+                 console.print(f"[red] Failed to clean skills from {integration.name}: {e}[/red]")
+     else:
+         console.print(f"[yellow]No agent frameworks detected. Skipping skill cleanup.[/yellow]")
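How these commands get exposed on the command line is not shown in this file. As an illustration only, a sketch of how sync_command and uninstall_command could be mounted on a Typer application; the app object and command names below are assumptions for this example, not code from the package (the actual CLI wiring lives in monoco/main.py, listed among the changed files above).

```python
# Illustrative wiring only: the app object and command names are assumptions.
import typer

from monoco.core.sync import sync_command, uninstall_command

app = typer.Typer(help="Monoco agent environment management (example)")

# Typer builds the CLI options (--target/-t, --check) from the function
# signatures; the typer.Context parameter is injected automatically.
app.command("sync")(sync_command)
app.command("uninstall")(uninstall_command)

if __name__ == "__main__":
    app()
```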
monoco/core/telemetry.py CHANGED
@@ -1,7 +1,7 @@
  import os
  import uuid
  import json
- import httpx
+
  import time
  from pathlib import Path
  from typing import Optional, Dict, Any
@@ -73,7 +73,10 @@ class Telemetry:

          # Send asynchronously? For now, we'll do a simple non-blocking-ish call
          try:
+             import httpx
              httpx.post(f"{POSTHOG_HOST}/capture/", json=data, timeout=1.0)
+         except ImportError:
+             pass  # Telemetry is optional
          except Exception:
              pass

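The change above defers the httpx import to the send path, so the package no longer hard-depends on it. A minimal sketch of the same optional-dependency pattern in isolation; the helper name send_event is hypothetical, not a function from the package.

```python
# Sketch of the deferred-import pattern adopted above; send_event is hypothetical.
def send_event(url: str, data: dict) -> None:
    try:
        import httpx  # imported at call time so the dependency stays optional
        httpx.post(url, json=data, timeout=1.0)
    except ImportError:
        pass  # telemetry backend not installed; skip silently
    except Exception:
        pass  # never let telemetry failures reach the user
```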
monoco/core/workspace.py CHANGED
@@ -1,40 +1,105 @@
+ import os
  from pathlib import Path
- from typing import List, Optional
+ from typing import List, Optional, Dict
+ from pydantic import BaseModel, Field, ConfigDict
+
+ from monoco.core.config import get_config, MonocoConfig
+
+ class MonocoProject(BaseModel):
+     """
+     Representation of a single Monoco project.
+     """
+     id: str  # Unique ID within the workspace (usually the directory name)
+     name: str
+     path: Path
+     config: MonocoConfig
+
+     @property
+     def issues_root(self) -> Path:
+         issues_path = Path(self.config.paths.issues)
+         if issues_path.is_absolute():
+             return issues_path
+         return (self.path / issues_path).resolve()
+
+     model_config = ConfigDict(arbitrary_types_allowed=True)

  def is_project_root(path: Path) -> bool:
      """
      Check if a directory serves as a Monoco project root.
      Criteria:
-     - has monoco.yaml
-     - OR has .monoco/config.yaml
-     - OR has Issues/ directory
+     - has .monoco/ directory (which should contain project.yaml)
      """
      if not path.is_dir():
          return False

-     return (path / "monoco.yaml").exists() or \
-            (path / ".monoco" / "config.yaml").exists() or \
-            (path / "Issues").exists()
+     return (path / ".monoco").is_dir()
+
+ def load_project(path: Path) -> Optional[MonocoProject]:
+     """Load a project from a path if it is a valid project root."""
+     if not is_project_root(path):
+         return None
+
+     try:
+         config = get_config(str(path))
+         # If name is default, use directory name
+         name = config.project.name
+         if name == "Monoco Project":
+             name = path.name
+
+         return MonocoProject(
+             id=path.name,
+             name=name,
+             path=path,
+             config=config
+         )
+     except Exception:
+         return None

- def find_projects(workspace_root: Path) -> List[Path]:
+ def find_projects(workspace_root: Path) -> List[MonocoProject]:
      """
      Scan for projects in a workspace.
-     Returns list of paths that are project roots.
+     Returns list of MonocoProject instances.
      """
      projects = []

      # 1. Check workspace root itself
-     if is_project_root(workspace_root):
-         projects.append(workspace_root)
+     root_project = load_project(workspace_root)
+     if root_project:
+         projects.append(root_project)

-     # 2. Check first-level subdirectories
-     # Prevent scanning giant node_modules or .git
-     for item in workspace_root.iterdir():
-         if item.is_dir() and not item.name.startswith('.'):
-             if is_project_root(item):
-                 # If workspace root is also a project, we might deduce duplicates
-                 # But here we just append items.
-                 # If workspace_root == item (impossible for iterdir child), no risk.
-                 projects.append(item)
+     # 2. Recursive Scan
+     for root, dirs, files in os.walk(workspace_root):
+         # Skip hidden directories and node_modules
+         dirs[:] = [d for d in dirs if not d.startswith('.') and d != 'node_modules' and d != 'venv']
+
+         for d in dirs:
+             project_path = Path(root) / d
+             # Avoid re-adding root if it was somehow added (unlikely here)
+             if project_path == workspace_root: continue
+
+             if is_project_root(project_path):
+                 p = load_project(project_path)
+                 if p:
+                     projects.append(p)

      return projects
+
+ class Workspace(BaseModel):
+     """
+     Standardized Workspace primitive.
+     """
+     root: Path
+     projects: List[MonocoProject] = []
+
+     @classmethod
+     def discover(cls, root: Path) -> "Workspace":
+         projects = find_projects(root)
+         return cls(root=root, projects=projects)
+
+     def get_project(self, project_id: str) -> Optional[MonocoProject]:
+         for p in self.projects:
+             if p.id == project_id:
+                 return p
+         return None
+
+     model_config = ConfigDict(arbitrary_types_allowed=True)
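A short usage sketch for the Workspace and MonocoProject primitives introduced above; the workspace path and the project id "backend" are placeholders for illustration.

```python
# Usage sketch for the new Workspace / MonocoProject primitives;
# the path and project id below are placeholders.
from pathlib import Path

from monoco.core.workspace import Workspace

ws = Workspace.discover(Path("/path/to/workspace"))
for project in ws.projects:
    print(project.id, project.name, project.issues_root)

# Look up a single project by its directory-derived id.
backend = ws.get_project("backend")
if backend is not None:
    print(backend.config.project.name)
```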
monoco/daemon/app.py CHANGED
@@ -6,8 +6,9 @@ import asyncio
  import logging
  import os
  from typing import Optional, List, Dict
- from monoco.daemon.services import Broadcaster, GitMonitor, ProjectManager
- from fastapi import FastAPI, Request, HTTPException, Query
+ from monoco.daemon.services import Broadcaster, ProjectManager
+ from monoco.core.git import GitMonitor
+ from monoco.core.config import get_config, ConfigMonitor, ConfigScope, get_config_path

  # Configure logging
  logging.basicConfig(level=logging.INFO)
@@ -15,6 +16,7 @@ logger = logging.getLogger("monoco.daemon")
  from pathlib import Path
  from monoco.core.config import get_config
  from monoco.features.issue.core import list_issues
+ from monoco.core.execution import scan_execution_profiles, get_profile_detail, ExecutionProfile

  description = """
  Monoco Daemon Process
@@ -25,7 +27,8 @@ Monoco Daemon Process

  # Service Instances
  broadcaster = Broadcaster()
- git_monitor = GitMonitor(broadcaster)
+ git_monitor: GitMonitor | None = None
+ config_monitors: List[ConfigMonitor] = []
  project_manager: ProjectManager | None = None

  @asynccontextmanager
@@ -33,23 +36,52 @@ async def lifespan(app: FastAPI):
      # Startup
      logger.info("Starting Monoco Daemon services...")

-     global project_manager
+     global project_manager, git_monitor, config_monitors
      # Use MONOCO_SERVER_ROOT if set, otherwise CWD
      env_root = os.getenv("MONOCO_SERVER_ROOT")
      workspace_root = Path(env_root) if env_root else Path.cwd()
      logger.info(f"Workspace Root: {workspace_root}")
      project_manager = ProjectManager(workspace_root, broadcaster)

+     async def on_git_change(new_hash: str):
+         await broadcaster.broadcast("HEAD_UPDATED", {
+             "ref": "HEAD",
+             "hash": new_hash
+         })
+
+     async def on_config_change(path: str):
+         logger.info(f"Config file changed: {path}, broadcasting update...")
+         await broadcaster.broadcast("CONFIG_UPDATED", {
+             "scope": "workspace",
+             "path": path
+         })
+
+     git_monitor = GitMonitor(workspace_root, on_git_change)
+
+     project_config_path = get_config_path(ConfigScope.PROJECT, str(workspace_root))
+     workspace_config_path = get_config_path(ConfigScope.WORKSPACE, str(workspace_root))
+
+     config_monitors = [
+         ConfigMonitor(project_config_path, lambda: on_config_change(str(project_config_path))),
+         ConfigMonitor(workspace_config_path, lambda: on_config_change(str(workspace_config_path)))
+     ]
+
      await project_manager.start_all()
-     monitor_task = asyncio.create_task(git_monitor.start())
+     git_task = asyncio.create_task(git_monitor.start())
+     config_tasks = [asyncio.create_task(m.start()) for m in config_monitors]

      yield
      # Shutdown
      logger.info("Shutting down Monoco Daemon services...")
-     git_monitor.stop()
+     if git_monitor:
+         git_monitor.stop()
+     for m in config_monitors:
+         m.stop()
      if project_manager:
          project_manager.stop_all()
-     await monitor_task
+
+     await git_task
+     await asyncio.gather(*config_tasks)

  app = FastAPI(
      title="Monoco Daemon",
@@ -160,10 +192,33 @@
      return EventSourceResponse(event_generator())

  @app.get("/api/v1/issues")
- async def get_issues(project_id: Optional[str] = None):
-     """
-     List all issues in the project.
+ async def get_issues(
+     project_id: Optional[str] = None,
+     path: Optional[str] = Query(None, description="Absolute file path for reverse lookup")
+ ):
      """
+     List all issues in the project, or get a single issue by file path.
+
+     Query Parameters:
+     - project_id: Optional project filter
+     - path: Optional absolute file path for reverse lookup (returns single issue)
+
+     If 'path' is provided, returns a single IssueMetadata object.
+     Otherwise, returns a list of all issues.
+     """
+     # Reverse lookup by path
+     if path:
+         p = Path(path)
+         if not p.exists():
+             raise HTTPException(status_code=404, detail=f"File {path} not found")
+
+         issue = parse_issue(p)
+         if not issue:
+             raise HTTPException(status_code=400, detail=f"File {path} is not a valid Monoco issue")
+
+         return issue
+
+     # Standard list operation
      project = get_project_or_404(project_id)
      issues = list_issues(project.issues_root)
      return issues
@@ -221,11 +276,22 @@ async def create_issue_endpoint(payload: CreateIssueRequest):
  @app.get("/api/v1/issues/{issue_id}", response_model=IssueDetail)
  async def get_issue_endpoint(issue_id: str, project_id: Optional[str] = None):
      """
-     Get issue details by ID.
+     Get issue details by ID. Supports cross-project search if project_id is omitted.
      """
-     project = get_project_or_404(project_id)
-
-     path = find_issue_path(project.issues_root, issue_id)
+     path = None
+     if project_id:
+         project = get_project_or_404(project_id)
+         path = find_issue_path(project.issues_root, issue_id)
+     else:
+         # Global Search across all projects in the workspace
+         if not project_manager:
+             raise HTTPException(status_code=503, detail="Daemon not fully initialized")
+
+         for p_ctx in project_manager.projects.values():
+             path = find_issue_path(p_ctx.issues_root, issue_id)
+             if path:
+                 break
+
      if not path:
          raise HTTPException(status_code=404, detail=f"Issue {issue_id} not found")

@@ -235,21 +301,36 @@ async def get_issue_endpoint(issue_id: str, project_id: Optional[str] = None):

      return issue

+
  @app.patch("/api/v1/issues/{issue_id}", response_model=IssueMetadata)
  async def update_issue_endpoint(issue_id: str, payload: UpdateIssueRequest):
      """
-     Update an issue logic state (Status, Stage, Solution).
+     Update an issue's metadata (Status, Stage, Solution, Parent, Dependencies, etc.).
      """
      project = get_project_or_404(payload.project_id)

      try:
+         # Pre-lookup to get the current path for move detection
+         old_path_obj = find_issue_path(project.issues_root, issue_id)
+         old_path = str(old_path_obj.absolute()) if old_path_obj else None
+
          issue = update_issue(
              project.issues_root,
              issue_id,
              status=payload.status,
              stage=payload.stage,
-             solution=payload.solution
+             solution=payload.solution,
+             parent=payload.parent,
+             dependencies=payload.dependencies,
+             related=payload.related,
+             tags=payload.tags
          )
+
+         # Post-update: check if path changed
+         if old_path and issue.path != old_path:
+             # Trigger a specialized move event to help editors redirect
+             await project.notify_move(old_path, issue.path, issue.model_dump(mode='json'))
+
          return issue
      except FileNotFoundError:
          raise HTTPException(status_code=404, detail=f"Issue {issue_id} not found")
@@ -311,12 +392,34 @@ async def refresh_monitor():
      # Or just returning the hash confirms the daemon sees it.
      return {"status": "refreshed", "head": current_hash}

- # --- Workspace State Management ---
- import json
- from pydantic import BaseModel
+ # --- Execution Profiles ---
+
+ @app.get("/api/v1/execution/profiles", response_model=List[ExecutionProfile])
+ async def get_execution_profiles(project_id: Optional[str] = None):
+     """
+     List all execution profiles available for the project/workspace.
+     """
+     project = None
+     if project_id:
+         project = get_project_or_404(project_id)
+     elif project_manager and project_manager.projects:
+         # Fallback to first project if none specified
+         project = list(project_manager.projects.values())[0]
+
+     return scan_execution_profiles(project.path if project else None)

- class WorkspaceState(BaseModel):
-     last_active_project_id: Optional[str] = None
+ @app.get("/api/v1/execution/profiles/detail", response_model=ExecutionProfile)
+ async def get_execution_profile_detail(path: str):
+     """
+     Get full content of an execution profile.
+     """
+     profile = get_profile_detail(path)
+     if not profile:
+         raise HTTPException(status_code=404, detail="Profile not found")
+     return profile
+
+ # --- Workspace State Management ---
+ from monoco.core.state import WorkspaceState

  @app.get("/api/v1/workspace/state", response_model=WorkspaceState)
  async def get_workspace_state():
@@ -326,21 +429,7 @@ async def get_workspace_state():
      if not project_manager:
          raise HTTPException(status_code=503, detail="Daemon not initialized")

-     state_file = project_manager.workspace_root / ".monoco" / "state.json"
-     if not state_file.exists():
-         # Default empty state
-         return WorkspaceState()
-
-     try:
-         content = state_file.read_text(encoding='utf-8')
-         if not content.strip():
-             return WorkspaceState()
-         data = json.loads(content)
-         return WorkspaceState(**data)
-     except Exception as e:
-         logger.error(f"Failed to read state file: {e}")
-         # Return empty state instead of crashing, so frontend can fallback
-         return WorkspaceState()
+     return WorkspaceState.load(project_manager.workspace_root)

  @app.post("/api/v1/workspace/state", response_model=WorkspaceState)
  async def update_workspace_state(state: WorkspaceState):
@@ -350,29 +439,9 @@ async def update_workspace_state(state: WorkspaceState):
      if not project_manager:
          raise HTTPException(status_code=503, detail="Daemon not initialized")

-     state_file = project_manager.workspace_root / ".monoco" / "state.json"
-
-     # Ensure directory exists
-     if not state_file.parent.exists():
-         state_file.parent.mkdir(parents=True, exist_ok=True)
-
      try:
-         # We merge with existing state to avoid data loss if we extend model later
-         current_data = {}
-         if state_file.exists():
-             try:
-                 content = state_file.read_text(encoding='utf-8')
-                 if content.strip():
-                     current_data = json.loads(content)
-             except:
-                 pass  # ignore read errors on write
-
-         # Update with new values
-         new_data = state.model_dump(exclude_unset=True)
-         current_data.update(new_data)
-
-         state_file.write_text(json.dumps(current_data, indent=2), encoding='utf-8')
-         return WorkspaceState(**current_data)
+         state.save(project_manager.workspace_root)
+         return state
      except Exception as e:
          logger.error(f"Failed to write state file: {e}")
          raise HTTPException(status_code=500, detail=f"Failed to persist state: {str(e)}")
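To illustrate the daemon API changes above, a hedged client sketch using httpx; the base URL, file path, and issue ids are placeholders, and the daemon's actual listen address is not specified in this diff.

```python
# Hypothetical client calls against the endpoints touched above; the base
# URL, file path, and issue ids are placeholders.
import httpx

BASE = "http://127.0.0.1:8000"  # assumed daemon address

# Reverse lookup: resolve an issue from an absolute file path.
issue = httpx.get(f"{BASE}/api/v1/issues", params={"path": "/abs/path/to/issue.md"}).json()

# Fetch by id with project_id omitted to trigger the workspace-wide search.
detail = httpx.get(f"{BASE}/api/v1/issues/ISSUE-001").json()

# Patch the metadata fields newly accepted by UpdateIssueRequest.
updated = httpx.patch(
    f"{BASE}/api/v1/issues/ISSUE-001",
    json={"tags": ["backend"], "dependencies": ["ISSUE-002"]},
).json()

# List execution profiles exposed by the new endpoint.
profiles = httpx.get(f"{BASE}/api/v1/execution/profiles").json()
```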
monoco/daemon/models.py CHANGED
@@ -17,6 +17,10 @@ class UpdateIssueRequest(BaseModel):
      status: Optional[IssueStatus] = None
      stage: Optional[IssueStage] = None
      solution: Optional[IssueSolution] = None
+     parent: Optional[str] = None
+     dependencies: Optional[List[str]] = None
+     related: Optional[List[str]] = None
+     tags: Optional[List[str]] = None
      project_id: Optional[str] = None

  class UpdateIssueContentRequest(BaseModel):