monoco-toolkit 0.1.1__py3-none-any.whl → 0.2.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76)
  1. monoco/cli/__init__.py +0 -0
  2. monoco/cli/project.py +87 -0
  3. monoco/cli/workspace.py +46 -0
  4. monoco/core/agent/__init__.py +5 -0
  5. monoco/core/agent/action.py +144 -0
  6. monoco/core/agent/adapters.py +129 -0
  7. monoco/core/agent/protocol.py +31 -0
  8. monoco/core/agent/state.py +106 -0
  9. monoco/core/config.py +212 -17
  10. monoco/core/execution.py +62 -0
  11. monoco/core/feature.py +58 -0
  12. monoco/core/git.py +51 -2
  13. monoco/core/injection.py +196 -0
  14. monoco/core/integrations.py +242 -0
  15. monoco/core/lsp.py +68 -0
  16. monoco/core/output.py +21 -3
  17. monoco/core/registry.py +36 -0
  18. monoco/core/resources/en/AGENTS.md +8 -0
  19. monoco/core/resources/en/SKILL.md +66 -0
  20. monoco/core/resources/zh/AGENTS.md +8 -0
  21. monoco/core/resources/zh/SKILL.md +65 -0
  22. monoco/core/setup.py +96 -110
  23. monoco/core/skills.py +444 -0
  24. monoco/core/state.py +53 -0
  25. monoco/core/sync.py +224 -0
  26. monoco/core/telemetry.py +4 -1
  27. monoco/core/workspace.py +85 -20
  28. monoco/daemon/app.py +127 -58
  29. monoco/daemon/models.py +4 -0
  30. monoco/daemon/services.py +56 -155
  31. monoco/features/config/commands.py +125 -44
  32. monoco/features/i18n/adapter.py +29 -0
  33. monoco/features/i18n/commands.py +89 -10
  34. monoco/features/i18n/core.py +113 -27
  35. monoco/features/i18n/resources/en/AGENTS.md +8 -0
  36. monoco/features/i18n/resources/en/SKILL.md +94 -0
  37. monoco/features/i18n/resources/zh/AGENTS.md +8 -0
  38. monoco/features/i18n/resources/zh/SKILL.md +94 -0
  39. monoco/features/issue/adapter.py +34 -0
  40. monoco/features/issue/commands.py +343 -101
  41. monoco/features/issue/core.py +384 -150
  42. monoco/features/issue/domain/__init__.py +0 -0
  43. monoco/features/issue/domain/lifecycle.py +126 -0
  44. monoco/features/issue/domain/models.py +170 -0
  45. monoco/features/issue/domain/parser.py +223 -0
  46. monoco/features/issue/domain/workspace.py +104 -0
  47. monoco/features/issue/engine/__init__.py +22 -0
  48. monoco/features/issue/engine/config.py +172 -0
  49. monoco/features/issue/engine/machine.py +185 -0
  50. monoco/features/issue/engine/models.py +18 -0
  51. monoco/features/issue/linter.py +325 -120
  52. monoco/features/issue/lsp/__init__.py +3 -0
  53. monoco/features/issue/lsp/definition.py +72 -0
  54. monoco/features/issue/migration.py +134 -0
  55. monoco/features/issue/models.py +46 -24
  56. monoco/features/issue/monitor.py +94 -0
  57. monoco/features/issue/resources/en/AGENTS.md +20 -0
  58. monoco/features/issue/resources/en/SKILL.md +111 -0
  59. monoco/features/issue/resources/zh/AGENTS.md +20 -0
  60. monoco/features/issue/resources/zh/SKILL.md +138 -0
  61. monoco/features/issue/validator.py +455 -0
  62. monoco/features/spike/adapter.py +30 -0
  63. monoco/features/spike/commands.py +45 -24
  64. monoco/features/spike/core.py +6 -40
  65. monoco/features/spike/resources/en/AGENTS.md +7 -0
  66. monoco/features/spike/resources/en/SKILL.md +74 -0
  67. monoco/features/spike/resources/zh/AGENTS.md +7 -0
  68. monoco/features/spike/resources/zh/SKILL.md +74 -0
  69. monoco/main.py +91 -2
  70. monoco_toolkit-0.2.8.dist-info/METADATA +136 -0
  71. monoco_toolkit-0.2.8.dist-info/RECORD +83 -0
  72. monoco_toolkit-0.1.1.dist-info/METADATA +0 -93
  73. monoco_toolkit-0.1.1.dist-info/RECORD +0 -33
  74. {monoco_toolkit-0.1.1.dist-info → monoco_toolkit-0.2.8.dist-info}/WHEEL +0 -0
  75. {monoco_toolkit-0.1.1.dist-info → monoco_toolkit-0.2.8.dist-info}/entry_points.txt +0 -0
  76. {monoco_toolkit-0.1.1.dist-info → monoco_toolkit-0.2.8.dist-info}/licenses/LICENSE +0 -0
monoco/core/config.py CHANGED
@@ -1,9 +1,19 @@
1
1
  import os
2
2
  import yaml
3
3
  from pathlib import Path
4
- from typing import Optional, Dict, Any
4
+ from typing import Optional, Dict, Any, Callable, Awaitable, List
5
+ from enum import Enum
5
6
  from pydantic import BaseModel, Field
6
7
 
8
+ import logging
9
+ import asyncio
10
+ from watchdog.observers import Observer
11
+ from watchdog.events import FileSystemEventHandler
12
+
13
+
14
+
15
+ logger = logging.getLogger("monoco.core.config")
16
+
7
17
  class PathsConfig(BaseModel):
8
18
  """Configuration for directory paths."""
9
19
  root: str = Field(default=".", description="Project root directory")
@@ -13,7 +23,6 @@ class PathsConfig(BaseModel):
13
23
 
14
24
  class CoreConfig(BaseModel):
15
25
  """Core system configuration."""
16
- editor: str = Field(default_factory=lambda: os.getenv("EDITOR", "vim"), description="Preferred text editor")
17
26
  log_level: str = Field(default="INFO", description="Logging verbosity")
18
27
  author: Optional[str] = Field(default=None, description="Default author for new artifacts")
19
28
 
@@ -37,6 +46,69 @@ class TelemetryConfig(BaseModel):
37
46
  """Configuration for Telemetry."""
38
47
  enabled: Optional[bool] = Field(default=None, description="Whether telemetry is enabled")
39
48
 
49
+
50
+
51
class IssueTypeConfig(BaseModel):
    """One issue type declaration and where its artifacts live."""

    # Stable machine identifier; IssueSchemaConfig.merge keys on it.
    name: str
    # Human-readable display label.
    label: str
    # Identifier prefix for issues of this type — presumably used when
    # generating issue IDs; TODO confirm against the issue engine.
    prefix: str
    # Folder holding issues of this type (relative root not shown here —
    # NOTE(review): confirm whether it is resolved against the issues dir).
    folder: str
    description: Optional[str] = None
57
+
58
class TransitionConfig(BaseModel):
    """A single edge in the issue workflow state machine."""

    # Unique transition identifier; workflow merging keys on it.
    name: str
    label: str
    icon: Optional[str] = None
    # Source guards. NOTE(review): None appears to mean "any source
    # status/stage" — confirm against the state-machine engine.
    from_status: Optional[str] = None
    from_stage: Optional[str] = None
    # Target status is mandatory; target stage is optional.
    to_status: str
    to_stage: Optional[str] = None
    # Solution value that must be present for this transition — TODO confirm.
    required_solution: Optional[str] = None
    description: str = ""
    # Optional command template associated with the transition.
    command_template: Optional[str] = None
69
+
70
class IssueSchemaConfig(BaseModel):
    """Declarative schema of the issue system; overridable per project."""

    types: List[IssueTypeConfig] = Field(default_factory=list)
    statuses: List[str] = Field(default_factory=list)
    stages: List[str] = Field(default_factory=list)
    solutions: List[str] = Field(default_factory=list)
    workflows: List[TransitionConfig] = Field(default_factory=list)

    def merge(self, other: "IssueSchemaConfig") -> "IssueSchemaConfig":
        """Merge *other* into self in place and return self.

        Named collections (types, workflows) merge by ``name`` with the
        override entry winning; plain string lists are replaced wholesale
        whenever the override provides a non-empty list.
        """
        if not other:
            return self

        # Types: merge by name, later (override) entries win.
        if other.types:
            by_name = {cfg.name: cfg for cfg in self.types}
            by_name.update((cfg.name, cfg) for cfg in other.types)
            self.types = list(by_name.values())

        # Simple lists: full replacement when the override is non-empty.
        if other.statuses:
            self.statuses = other.statuses
        if other.stages:
            self.stages = other.stages
        if other.solutions:
            self.solutions = other.solutions

        # Workflows (transitions): merge by name, override wins.
        if other.workflows:
            by_name = {wf.name: wf for wf in self.workflows}
            by_name.update((wf.name, wf) for wf in other.workflows)
            self.workflows = list(by_name.values())

        return self
108
+
109
class StateMachineConfig(BaseModel):
    """Fully-resolved transition table consumed by the issue state machine."""

    transitions: List[TransitionConfig]
111
+
40
112
  class MonocoConfig(BaseModel):
41
113
  """
42
114
  Main Configuration Schema.
@@ -49,6 +121,8 @@ class MonocoConfig(BaseModel):
49
121
  ui: UIConfig = Field(default_factory=UIConfig)
50
122
  telemetry: TelemetryConfig = Field(default_factory=TelemetryConfig)
51
123
 
124
+ issue: IssueSchemaConfig = Field(default_factory=IssueSchemaConfig)
125
+
52
126
  @staticmethod
53
127
  def _deep_merge(base: Dict[str, Any], update: Dict[str, Any]) -> Dict[str, Any]:
54
128
  """Recursive dict merge."""
@@ -60,9 +134,13 @@ class MonocoConfig(BaseModel):
60
134
  return base
61
135
 
62
136
  @classmethod
63
- def load(cls, project_root: Optional[str] = None) -> "MonocoConfig":
137
+ def load(cls, project_root: Optional[str] = None, require_project: bool = False) -> "MonocoConfig":
64
138
  """
65
139
  Load configuration from multiple sources.
140
+
141
+ Args:
142
+ project_root: Explicit root path. If None, uses CWD.
143
+ require_project: If True, raises error if .monoco directory is missing.
66
144
  """
67
145
  # 1. Start with empty dict (will use defaults via Pydantic)
68
146
  config_data = {}
@@ -72,8 +150,13 @@ class MonocoConfig(BaseModel):
72
150
 
73
151
  # Determine project path
74
152
  cwd = Path(project_root) if project_root else Path.cwd()
75
- proj_path_hidden = cwd / ".monoco" / "config.yaml"
76
- proj_path_root = cwd / "monoco.yaml"
153
+ # FIX-0009: strict separation of workspace and project config
154
+ workspace_config_path = cwd / ".monoco" / "workspace.yaml"
155
+ project_config_path = cwd / ".monoco" / "project.yaml"
156
+
157
+ # Strict Workspace Check
158
+ if require_project and not (cwd / ".monoco").exists():
159
+ raise FileNotFoundError(f"Monoco workspace not found in {cwd}. (No .monoco directory)")
77
160
 
78
161
  # 3. Load User Config
79
162
  if home_path.exists():
@@ -86,17 +169,35 @@ class MonocoConfig(BaseModel):
86
169
  # We don't want to crash on config load fail, implementing simple warning equivalent
87
170
  pass
88
171
 
89
- # 4. Load Project Config (prefer .monoco/config.yaml, fallback to monoco.yaml)
90
- target_proj_conf = proj_path_hidden if proj_path_hidden.exists() else (
91
- proj_path_root if proj_path_root.exists() else None
92
- )
172
+ # 4. Load Project/Workspace Config
173
+
174
+ # 4a. Load workspace.yaml (Global Environment)
175
+ if workspace_config_path.exists():
176
+ try:
177
+ with open(workspace_config_path, "r") as f:
178
+ ws_config = yaml.safe_load(f)
179
+ if ws_config:
180
+ # workspace.yaml contains core, paths, i18n, ui, telemetry, agent
181
+ cls._deep_merge(config_data, ws_config)
182
+ except Exception:
183
+ pass
93
184
 
94
- if target_proj_conf:
185
+ # 4b. Load project.yaml (Identity)
186
+ if project_config_path.exists():
95
187
  try:
96
- with open(target_proj_conf, "r") as f:
97
- proj_config = yaml.safe_load(f)
98
- if proj_config:
99
- cls._deep_merge(config_data, proj_config)
188
+ with open(project_config_path, "r") as f:
189
+ pj_config = yaml.safe_load(f)
190
+ if pj_config:
191
+ # project.yaml contains 'project' fields directly? or under 'project' key?
192
+ # Design decision: project.yaml should be clean, e.g. "name: foo".
193
+ # But to simplify merging, let's check if it has a 'project' key or is flat.
194
+ if "project" in pj_config:
195
+ cls._deep_merge(config_data, pj_config)
196
+ else:
197
+ # Assume flat structure mapping to 'project' section
198
+ if "project" not in config_data:
199
+ config_data["project"] = {}
200
+ cls._deep_merge(config_data["project"], pj_config)
100
201
  except Exception:
101
202
  pass
102
203
 
@@ -106,8 +207,102 @@ class MonocoConfig(BaseModel):
106
207
  # Global singleton
107
208
  _settings = None
108
209
 
109
def get_config(project_root: Optional[str] = None, require_project: bool = False) -> MonocoConfig:
    """Return the process-wide configuration singleton.

    A (re)load happens when nothing is cached yet, when an explicit root is
    supplied, or when ``require_project`` is set — the latter forces strict
    validation even if an earlier loose load already populated the cache.
    """
    global _settings
    must_reload = (
        _settings is None
        or project_root is not None
        or require_project
    )
    if must_reload:
        _settings = MonocoConfig.load(project_root, require_project=require_project)
    return _settings
217
+
218
class ConfigScope(str, Enum):
    """Layer a configuration file belongs to."""

    GLOBAL = "global"        # ~/.monoco/config.yaml
    PROJECT = "project"      # <root>/.monoco/project.yaml
    WORKSPACE = "workspace"  # <root>/.monoco/workspace.yaml
222
+
223
def get_config_path(scope: ConfigScope, project_root: Optional[str] = None) -> Path:
    """Get the path to the configuration file for a given scope.

    GLOBAL lives under the user's home; WORKSPACE and PROJECT live inside
    the ``.monoco`` folder of *project_root* (defaulting to the CWD).
    """
    if scope == ConfigScope.GLOBAL:
        return Path.home() / ".monoco" / "config.yaml"

    base = Path(project_root) if project_root else Path.cwd()
    filename = "workspace.yaml" if scope == ConfigScope.WORKSPACE else "project.yaml"
    return base / ".monoco" / filename
234
+
235
def find_monoco_root(start_path: Optional[Path] = None) -> Path:
    """
    Find the Monoco Workspace root.

    Per FIX-0009 there is no recursive upward search: only the starting
    directory itself is considered, except that starting *inside* a
    ``.monoco`` folder resolves to that folder's parent.
    """
    current = (start_path or Path.cwd()).resolve()

    # Sitting inside the .monoco folder itself -> the workspace is its parent.
    if current.name == ".monoco":
        return current.parent

    # NOTE: both remaining branches return `current`; the exists() probe is
    # retained only to make the "marker found here" intent explicit.
    if (current / ".monoco").exists():
        return current

    return current
252
+
253
def load_raw_config(scope: ConfigScope, project_root: Optional[str] = None) -> Dict[str, Any]:
    """Load raw configuration dictionary from a specific scope.

    A missing file and a parse failure both degrade to an empty dict;
    parse failures are additionally logged as warnings.
    """
    path = get_config_path(scope, project_root)
    if not path.exists():
        return {}

    try:
        with path.open("r") as f:
            return yaml.safe_load(f) or {}
    except Exception as e:
        logger.warning(f"Failed to load config from {path}: {e}")
        return {}
264
+
265
def save_raw_config(scope: ConfigScope, data: Dict[str, Any], project_root: Optional[str] = None) -> None:
    """Persist *data* as YAML into the config file for *scope*.

    The parent directory is created on demand; I/O and YAML errors propagate.
    """
    path = get_config_path(scope, project_root)
    path.parent.mkdir(parents=True, exist_ok=True)
    with path.open("w") as handle:
        yaml.dump(data, handle, default_flow_style=False)
271
+
272
class ConfigEventHandler(FileSystemEventHandler):
    """Forwards modification events for one file into an asyncio loop.

    Watchdog invokes callbacks on its own worker thread, so the coroutine is
    handed to the loop via run_coroutine_threadsafe rather than awaited here.
    """

    def __init__(self, loop, on_change: Callable[[], Awaitable[None]], config_path: Path):
        self.loop = loop                # event loop that owns on_change
        self.on_change = on_change
        self.config_path = config_path  # the single file being tracked

    def on_modified(self, event):
        # The observer watches a whole directory; ignore sibling files.
        # NOTE(review): editors that save atomically (write temp + rename)
        # surface as create/move events, which this handler does not catch —
        # confirm whether that matters for the target config files.
        if str(self.config_path) == event.src_path:
            asyncio.run_coroutine_threadsafe(self.on_change(), self.loop)
281
+
282
class ConfigMonitor:
    """
    Monitors a configuration file for changes.
    """

    def __init__(self, config_path: Path, on_change: Callable[[], Awaitable[None]]):
        self.config_path = config_path
        self.on_change = on_change
        self.observer = Observer()

    async def start(self):
        """Begin watching; must be called from within a running event loop."""
        loop = asyncio.get_running_loop()
        handler = ConfigEventHandler(loop, self.on_change, self.config_path)

        # The file itself may not exist yet; guarantee at least its parent
        # directory so the observer has something to attach to.
        if not self.config_path.exists():
            self.config_path.parent.mkdir(parents=True, exist_ok=True)

        # Watch the parent directory; the handler filters down to our file.
        self.observer.schedule(handler, str(self.config_path.parent), recursive=False)
        self.observer.start()
        logger.info(f"Config Monitor started for {self.config_path}")

    def stop(self):
        """Shut down the observer thread (blocks until it has joined)."""
        if self.observer.is_alive():
            self.observer.stop()
            self.observer.join()
        logger.info(f"Config Monitor stopped for {self.config_path}")
@@ -0,0 +1,62 @@
1
+ import os
2
+ from pathlib import Path
3
+ from typing import List, Dict, Optional
4
+ from pydantic import BaseModel
5
+
6
class ExecutionProfile(BaseModel):
    """A discovered execution profile: an SOP.md file plus its scope."""

    # Name of the directory containing the SOP.md.
    name: str
    source: str  # "Global" or "Project"
    # Absolute path to the SOP.md file.
    path: str
    # Full markdown text; only populated by get_profile_detail.
    content: Optional[str] = None
11
+
12
def scan_execution_profiles(project_root: Optional[Path] = None) -> List[ExecutionProfile]:
    """
    Scan for execution profiles (SOPs) in global and project scopes.

    Global profiles (under ~/.monoco/execution) are listed first, then
    project-local ones when *project_root* is given.
    """
    search_roots = [(Path.home() / ".monoco" / "execution", "Global")]
    if project_root:
        search_roots.append((project_root / ".monoco" / "execution", "Project"))

    found: List[ExecutionProfile] = []
    for base, scope_label in search_roots:
        if base.exists():
            found.extend(_scan_dir(base, scope_label))
    return found
30
+
31
def _scan_dir(base_path: Path, source: str) -> List[ExecutionProfile]:
    """Collect one profile per child directory of *base_path* that holds an SOP.md."""
    if not base_path.is_dir():
        return []

    return [
        ExecutionProfile(
            name=entry.name,
            source=source,
            path=str((entry / "SOP.md").absolute()),
        )
        for entry in base_path.iterdir()
        if entry.is_dir() and (entry / "SOP.md").exists()
    ]
46
+
47
def get_profile_detail(profile_path: str) -> Optional[ExecutionProfile]:
    """Load a single profile including its SOP content.

    Args:
        profile_path: Path to an SOP.md file.

    Returns:
        The populated ExecutionProfile, or None when the file does not exist.
    """
    path = Path(profile_path)
    if not path.exists():
        return None

    # Heuristic: anything located under ~/.monoco is a Global profile.
    # Ancestor membership is used instead of string prefixing so that
    # sibling directories such as "~/.monoco-backup" are not misclassified
    # (str.startswith("~/.monoco") would match them).
    home_scope = Path.home() / ".monoco"
    source = "Global" if home_scope in path.parents else "Project"

    return ExecutionProfile(
        name=path.parent.name,
        source=source,
        path=str(path.absolute()),
        content=path.read_text(encoding='utf-8')
    )
monoco/core/feature.py ADDED
@@ -0,0 +1,58 @@
1
+ from abc import ABC, abstractmethod
2
+ from dataclasses import dataclass, field
3
+ from pathlib import Path
4
+ from typing import Dict, List, Optional
5
+
6
@dataclass
class IntegrationData:
    """
    Data collection returned by a feature for integration into the Agent environment.
    """

    # System prompts to be injected into agent configuration (e.g., .cursorrules).
    # Keyed by section title (e.g., "Issue Management"); values are Markdown.
    system_prompts: Dict[str, str] = field(default_factory=dict)

    # Paths to skill directories or files to be copied/symlinked.
    # DEPRECATED: Skill distribution is cancelled. Only prompts are synced.
    skills: List[Path] = field(default_factory=list)
18
+
19
class MonocoFeature(ABC):
    """
    Abstract base class for all Monoco features.
    Features must implement this protocol to participate in init and sync lifecycles.
    """

    # Abstract read-only property: subclasses expose their identifier here.
    @property
    @abstractmethod
    def name(self) -> str:
        """Unique identifier for the feature (e.g., 'issue', 'spike')."""
        pass

    @abstractmethod
    def initialize(self, root: Path, config: Dict) -> None:
        """
        Lifecycle hook: Physical Structure Initialization.
        Called during `monoco init`.
        Responsible for creating necessary directories, files, and config templates.

        Args:
            root: The root directory of the project.
            config: The full project configuration dictionary.
        """
        pass

    @abstractmethod
    def integrate(self, root: Path, config: Dict) -> IntegrationData:
        """
        Lifecycle hook: Agent Environment Integration.
        Called during `monoco sync`.
        Responsible for returning data (prompts, skills) needed for the Agent Setup.

        Args:
            root: The root directory of the project.
            config: The full project configuration dictionary.

        Returns:
            IntegrationData object containing prompts and skills.
        """
        pass
monoco/core/git.py CHANGED
@@ -1,7 +1,10 @@
1
+ from typing import List, Tuple, Optional, Dict, Callable, Awaitable
2
+ import asyncio
3
+ import logging
1
4
  import subprocess
2
- import shutil
3
5
  from pathlib import Path
4
- from typing import List, Tuple, Optional, Dict
6
+
7
+ logger = logging.getLogger("monoco.core.git")
5
8
 
6
9
  def _run_git(args: List[str], cwd: Path) -> Tuple[int, str, str]:
7
10
  """Run a raw git command."""
@@ -182,3 +185,49 @@ def worktree_remove(path: Path, worktree_path: Path, force: bool = False):
182
185
  code, _, stderr = _run_git(cmd, path)
183
186
  if code != 0:
184
187
  raise RuntimeError(f"Failed to remove worktree: {stderr}")
188
+
189
class GitMonitor:
    """
    Polls the Git repository for HEAD changes and triggers updates.
    """

    def __init__(self, path: Path, on_head_change: Callable[[str], Awaitable[None]], poll_interval: float = 2.0):
        self.path = path
        self.on_head_change = on_head_change
        self.poll_interval = poll_interval
        self.last_head_hash: Optional[str] = None
        self.is_running = False

    async def get_head_hash(self) -> Optional[str]:
        """Return the current HEAD commit hash, or None on any failure."""
        try:
            proc = await asyncio.create_subprocess_exec(
                "git", "rev-parse", "HEAD",
                cwd=self.path,
                stdout=asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.PIPE
            )
            out, _ = await proc.communicate()
        except Exception as e:
            logger.error(f"Git polling error: {e}")
            return None

        if proc.returncode != 0:
            return None
        return out.decode().strip()

    async def start(self):
        """Poll until stop() is called, firing the callback on each HEAD change."""
        self.is_running = True
        logger.info(f"Git Monitor started for {self.path}.")

        # Seed the baseline so the first poll does not fire a spurious change.
        self.last_head_hash = await self.get_head_hash()

        while self.is_running:
            await asyncio.sleep(self.poll_interval)
            head = await self.get_head_hash()

            if head and head != self.last_head_hash:
                logger.info(f"Git HEAD changed: {self.last_head_hash} -> {head}")
                self.last_head_hash = head
                await self.on_head_change(head)

    def stop(self):
        """Signal the polling loop to exit after its current sleep."""
        self.is_running = False
        logger.info(f"Git Monitor stopping for {self.path}...")
@@ -0,0 +1,196 @@
1
+ import re
2
+ from pathlib import Path
3
+ from typing import Dict, List, Optional
4
+
5
class PromptInjector:
    """
    Engine for injecting managed content into Markdown-like files (e.g., .cursorrules, GEMINI.md).
    Maintains a 'Managed Block' defined by a specific header.
    """

    MANAGED_HEADER = "## Monoco Toolkit"

    def __init__(self, target_file: Path):
        self.target_file = target_file

    def _header_prefix(self) -> str:
        """The '#' run of MANAGED_HEADER; determines where the block ends."""
        m = re.match(r"^(#+)\s", self.MANAGED_HEADER)
        return m.group(1) if m else "#"

    def _find_block_bounds(self, lines: List[str]):
        """Locate the managed block inside *lines*.

        Returns (start, end) where ``start`` is the index of the managed
        header (-1 if absent) and ``end`` is the exclusive index of the next
        header at the same or a shallower level (or len(lines)).
        """
        start = next(
            (i for i, ln in enumerate(lines) if ln.strip() == self.MANAGED_HEADER),
            -1,
        )
        if start == -1:
            return -1, -1

        prefix = self._header_prefix()
        end = len(lines)
        for i in range(start + 1, len(lines)):
            m = re.match(r"^(#+)\s", lines[i])
            # Same-or-shallower heading terminates the managed block;
            # deeper headings are its children.
            if m and len(m.group(1)) <= len(prefix):
                end = i
                break
        return start, end

    def inject(self, prompts: Dict[str, str]) -> bool:
        """
        Injects the provided prompts into the target file.

        Args:
            prompts: A dictionary where key is the section title and value is the content.

        Returns:
            True if changes were written, False otherwise.
        """
        existing = ""
        if self.target_file.exists():
            existing = self.target_file.read_text(encoding="utf-8")

        merged = self._merge_content(existing, prompts)
        if merged == existing:
            return False

        self.target_file.write_text(merged, encoding="utf-8")
        return True

    def _merge_content(self, original: str, prompts: Dict[str, str]) -> str:
        """
        Merges the generated prompts into the original content within the managed block.
        """
        # Build the replacement managed block line by line.
        block_lines = [self.MANAGED_HEADER, ""]
        block_lines.append("> **Auto-Generated**: This section is managed by Monoco. Do not edit manually.\n")

        for title, body in prompts.items():
            block_lines.append(f"### {title}")
            block_lines.append("")  # blank line after header

            # Strip a leading hash-header duplicating the title, e.g.
            # "### Issue Management" or "# Issue Management" (case-insensitive).
            text = body.strip()
            dup = re.match(r"^(#+\s*)" + re.escape(title) + r"\s*\n", text, re.IGNORECASE)
            if dup:
                text = text[dup.end():].strip()

            block_lines.append(text)
            block_lines.append("")  # blank line after section

        managed = "\n".join(block_lines).strip() + "\n"

        lines = original.splitlines()
        start, end = self._find_block_bounds(lines)

        if start == -1:
            # No existing block: append, preserving a clean separation.
            if original and not original.endswith("\n"):
                return original + "\n\n" + managed
            if original:
                return original + "\n" + managed
            return managed

        head = "\n".join(lines[:start])
        tail = "\n".join(lines[end:])

        result = head + "\n\n" if head else ""
        result += managed
        if tail:
            # Separate only when the tail has real content; otherwise keep
            # whatever trailing blank lines were there.
            result += ("\n" + tail) if tail.strip() else tail

        return result.strip() + "\n"

    def remove(self) -> bool:
        """
        Removes the managed block from the target file.

        Returns:
            True if changes were written (block removed), False otherwise.
        """
        if not self.target_file.exists():
            return False

        content = self.target_file.read_text(encoding="utf-8")
        lines = content.splitlines()

        start, end = self._find_block_bounds(lines)
        if start == -1:
            return False

        # Also swallow blank lines immediately above the block so removal
        # does not leave a gap.
        while start > 0 and not lines[start - 1].strip():
            start -= 1

        remaining = lines[:start] + lines[end:]
        new_content = "\n".join(remaining).strip() + "\n" if remaining else ""

        if new_content == content:
            return False

        self.target_file.write_text(new_content, encoding="utf-8")
        return True