monoco-toolkit 0.1.1__py3-none-any.whl → 0.2.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. monoco/cli/__init__.py +0 -0
  2. monoco/cli/project.py +87 -0
  3. monoco/cli/workspace.py +46 -0
  4. monoco/core/agent/__init__.py +5 -0
  5. monoco/core/agent/action.py +144 -0
  6. monoco/core/agent/adapters.py +106 -0
  7. monoco/core/agent/protocol.py +31 -0
  8. monoco/core/agent/state.py +106 -0
  9. monoco/core/config.py +152 -17
  10. monoco/core/execution.py +62 -0
  11. monoco/core/feature.py +58 -0
  12. monoco/core/git.py +51 -2
  13. monoco/core/injection.py +196 -0
  14. monoco/core/integrations.py +234 -0
  15. monoco/core/lsp.py +61 -0
  16. monoco/core/output.py +13 -2
  17. monoco/core/registry.py +36 -0
  18. monoco/core/resources/en/AGENTS.md +8 -0
  19. monoco/core/resources/en/SKILL.md +66 -0
  20. monoco/core/resources/zh/AGENTS.md +8 -0
  21. monoco/core/resources/zh/SKILL.md +66 -0
  22. monoco/core/setup.py +88 -110
  23. monoco/core/skills.py +444 -0
  24. monoco/core/state.py +53 -0
  25. monoco/core/sync.py +224 -0
  26. monoco/core/telemetry.py +4 -1
  27. monoco/core/workspace.py +85 -20
  28. monoco/daemon/app.py +127 -58
  29. monoco/daemon/models.py +4 -0
  30. monoco/daemon/services.py +56 -155
  31. monoco/features/agent/commands.py +166 -0
  32. monoco/features/agent/doctor.py +30 -0
  33. monoco/features/config/commands.py +125 -44
  34. monoco/features/i18n/adapter.py +29 -0
  35. monoco/features/i18n/commands.py +89 -10
  36. monoco/features/i18n/core.py +113 -27
  37. monoco/features/i18n/resources/en/AGENTS.md +8 -0
  38. monoco/features/i18n/resources/en/SKILL.md +94 -0
  39. monoco/features/i18n/resources/zh/AGENTS.md +8 -0
  40. monoco/features/i18n/resources/zh/SKILL.md +94 -0
  41. monoco/features/issue/adapter.py +34 -0
  42. monoco/features/issue/commands.py +183 -65
  43. monoco/features/issue/core.py +172 -77
  44. monoco/features/issue/linter.py +215 -116
  45. monoco/features/issue/migration.py +134 -0
  46. monoco/features/issue/models.py +23 -19
  47. monoco/features/issue/monitor.py +94 -0
  48. monoco/features/issue/resources/en/AGENTS.md +15 -0
  49. monoco/features/issue/resources/en/SKILL.md +87 -0
  50. monoco/features/issue/resources/zh/AGENTS.md +15 -0
  51. monoco/features/issue/resources/zh/SKILL.md +114 -0
  52. monoco/features/issue/validator.py +269 -0
  53. monoco/features/pty/core.py +185 -0
  54. monoco/features/pty/router.py +138 -0
  55. monoco/features/pty/server.py +56 -0
  56. monoco/features/spike/adapter.py +30 -0
  57. monoco/features/spike/commands.py +45 -24
  58. monoco/features/spike/core.py +4 -21
  59. monoco/features/spike/resources/en/AGENTS.md +7 -0
  60. monoco/features/spike/resources/en/SKILL.md +74 -0
  61. monoco/features/spike/resources/zh/AGENTS.md +7 -0
  62. monoco/features/spike/resources/zh/SKILL.md +74 -0
  63. monoco/main.py +115 -2
  64. {monoco_toolkit-0.1.1.dist-info → monoco_toolkit-0.2.5.dist-info}/METADATA +2 -2
  65. monoco_toolkit-0.2.5.dist-info/RECORD +77 -0
  66. monoco_toolkit-0.1.1.dist-info/RECORD +0 -33
  67. {monoco_toolkit-0.1.1.dist-info → monoco_toolkit-0.2.5.dist-info}/WHEEL +0 -0
  68. {monoco_toolkit-0.1.1.dist-info → monoco_toolkit-0.2.5.dist-info}/entry_points.txt +0 -0
  69. {monoco_toolkit-0.1.1.dist-info → monoco_toolkit-0.2.5.dist-info}/licenses/LICENSE +0 -0
monoco/core/config.py CHANGED
@@ -1,9 +1,19 @@
1
1
  import os
2
2
  import yaml
3
3
  from pathlib import Path
4
- from typing import Optional, Dict, Any
4
+ from typing import Optional, Dict, Any, Callable, Awaitable
5
+ from enum import Enum
5
6
  from pydantic import BaseModel, Field
6
7
 
8
+ import logging
9
+ import asyncio
10
+ from watchdog.observers import Observer
11
+ from watchdog.events import FileSystemEventHandler
12
+
13
+ from monoco.core.integrations import AgentIntegration
14
+
15
+ logger = logging.getLogger("monoco.core.config")
16
+
7
17
  class PathsConfig(BaseModel):
8
18
  """Configuration for directory paths."""
9
19
  root: str = Field(default=".", description="Project root directory")
@@ -13,7 +23,6 @@ class PathsConfig(BaseModel):
13
23
 
14
24
  class CoreConfig(BaseModel):
15
25
  """Core system configuration."""
16
- editor: str = Field(default_factory=lambda: os.getenv("EDITOR", "vim"), description="Preferred text editor")
17
26
  log_level: str = Field(default="INFO", description="Logging verbosity")
18
27
  author: Optional[str] = Field(default=None, description="Default author for new artifacts")
19
28
 
@@ -37,6 +46,16 @@ class TelemetryConfig(BaseModel):
37
46
  """Configuration for Telemetry."""
38
47
  enabled: Optional[bool] = Field(default=None, description="Whether telemetry is enabled")
39
48
 
49
+ class AgentConfig(BaseModel):
50
+ """Configuration for Agent Environment Integration."""
51
+ targets: Optional[list[str]] = Field(default=None, description="Specific target files to inject into (e.g. .cursorrules)")
52
+ framework: Optional[str] = Field(default=None, description="Manually specified agent framework (cursor, windsurf, etc.)")
53
+ includes: Optional[list[str]] = Field(default=None, description="List of specific features to include in injection")
54
+ integrations: Optional[Dict[str, "AgentIntegration"]] = Field(
55
+ default=None,
56
+ description="Custom agent framework integrations (overrides defaults from monoco.core.integrations)"
57
+ )
58
+
40
59
  class MonocoConfig(BaseModel):
41
60
  """
42
61
  Main Configuration Schema.
@@ -48,6 +67,7 @@ class MonocoConfig(BaseModel):
48
67
  i18n: I18nConfig = Field(default_factory=I18nConfig)
49
68
  ui: UIConfig = Field(default_factory=UIConfig)
50
69
  telemetry: TelemetryConfig = Field(default_factory=TelemetryConfig)
70
+ agent: AgentConfig = Field(default_factory=AgentConfig)
51
71
 
52
72
  @staticmethod
53
73
  def _deep_merge(base: Dict[str, Any], update: Dict[str, Any]) -> Dict[str, Any]:
@@ -60,9 +80,13 @@ class MonocoConfig(BaseModel):
60
80
  return base
61
81
 
62
82
  @classmethod
63
- def load(cls, project_root: Optional[str] = None) -> "MonocoConfig":
83
+ def load(cls, project_root: Optional[str] = None, require_project: bool = False) -> "MonocoConfig":
64
84
  """
65
85
  Load configuration from multiple sources.
86
+
87
+ Args:
88
+ project_root: Explicit root path. If None, uses CWD.
89
+ require_project: If True, raises error if .monoco directory is missing.
66
90
  """
67
91
  # 1. Start with empty dict (will use defaults via Pydantic)
68
92
  config_data = {}
@@ -72,8 +96,13 @@ class MonocoConfig(BaseModel):
72
96
 
73
97
  # Determine project path
74
98
  cwd = Path(project_root) if project_root else Path.cwd()
75
- proj_path_hidden = cwd / ".monoco" / "config.yaml"
76
- proj_path_root = cwd / "monoco.yaml"
99
+ # FIX-0009: strict separation of workspace and project config
100
+ workspace_config_path = cwd / ".monoco" / "workspace.yaml"
101
+ project_config_path = cwd / ".monoco" / "project.yaml"
102
+
103
+ # Strict Workspace Check
104
+ if require_project and not (cwd / ".monoco").exists():
105
+ raise FileNotFoundError(f"Monoco workspace not found in {cwd}. (No .monoco directory)")
77
106
 
78
107
  # 3. Load User Config
79
108
  if home_path.exists():
@@ -86,17 +115,35 @@ class MonocoConfig(BaseModel):
86
115
  # We don't want to crash on config load fail, implementing simple warning equivalent
87
116
  pass
88
117
 
89
- # 4. Load Project Config (prefer .monoco/config.yaml, fallback to monoco.yaml)
90
- target_proj_conf = proj_path_hidden if proj_path_hidden.exists() else (
91
- proj_path_root if proj_path_root.exists() else None
92
- )
118
+ # 4. Load Project/Workspace Config
119
+
120
+ # 4a. Load workspace.yaml (Global Environment)
121
+ if workspace_config_path.exists():
122
+ try:
123
+ with open(workspace_config_path, "r") as f:
124
+ ws_config = yaml.safe_load(f)
125
+ if ws_config:
126
+ # workspace.yaml contains core, paths, i18n, ui, telemetry, agent
127
+ cls._deep_merge(config_data, ws_config)
128
+ except Exception:
129
+ pass
93
130
 
94
- if target_proj_conf:
131
+ # 4b. Load project.yaml (Identity)
132
+ if project_config_path.exists():
95
133
  try:
96
- with open(target_proj_conf, "r") as f:
97
- proj_config = yaml.safe_load(f)
98
- if proj_config:
99
- cls._deep_merge(config_data, proj_config)
134
+ with open(project_config_path, "r") as f:
135
+ pj_config = yaml.safe_load(f)
136
+ if pj_config:
137
+ # project.yaml contains 'project' fields directly? or under 'project' key?
138
+ # Design decision: project.yaml should be clean, e.g. "name: foo".
139
+ # But to simplify merging, let's check if it has a 'project' key or is flat.
140
+ if "project" in pj_config:
141
+ cls._deep_merge(config_data, pj_config)
142
+ else:
143
+ # Assume flat structure mapping to 'project' section
144
+ if "project" not in config_data:
145
+ config_data["project"] = {}
146
+ cls._deep_merge(config_data["project"], pj_config)
100
147
  except Exception:
101
148
  pass
102
149
 
@@ -106,8 +153,96 @@ class MonocoConfig(BaseModel):
106
153
  # Global singleton
107
154
  _settings = None
108
155
 
109
- def get_config(project_root: Optional[str] = None) -> MonocoConfig:
156
+ def get_config(project_root: Optional[str] = None, require_project: bool = False) -> MonocoConfig:
110
157
  global _settings
111
- if _settings is None or project_root is not None:
112
- _settings = MonocoConfig.load(project_root)
158
+ # If explicit root provided, always reload.
159
+ # If require_project is True, we must reload to ensure validation happens (in case a previous loose load occurred).
160
+ if _settings is None or project_root is not None or require_project:
161
+ _settings = MonocoConfig.load(project_root, require_project=require_project)
113
162
  return _settings
163
+
164
+ class ConfigScope(str, Enum):
165
+ GLOBAL = "global"
166
+ PROJECT = "project"
167
+ WORKSPACE = "workspace"
168
+
169
+ def get_config_path(scope: ConfigScope, project_root: Optional[str] = None) -> Path:
170
+ """Get the path to the configuration file for a given scope."""
171
+ if scope == ConfigScope.GLOBAL:
172
+ return Path.home() / ".monoco" / "config.yaml"
173
+ elif scope == ConfigScope.WORKSPACE:
174
+ cwd = Path(project_root) if project_root else Path.cwd()
175
+ return cwd / ".monoco" / "workspace.yaml"
176
+ else:
177
+ # ConfigScope.PROJECT
178
+ cwd = Path(project_root) if project_root else Path.cwd()
179
+ return cwd / ".monoco" / "project.yaml"
180
+
181
+ def find_monoco_root(start_path: Optional[Path] = None) -> Path:
182
+ """Recursively find the .monoco directory upwards."""
183
+ current = (start_path or Path.cwd()).resolve()
184
+ # Check if we are inside a .monoco folder (unlikely but possible)
185
+ if current.name == ".monoco":
186
+ return current.parent
187
+
188
+ for parent in [current] + list(current.parents):
189
+ if (parent / ".monoco").exists():
190
+ return parent
191
+ return current
192
+
193
+ def load_raw_config(scope: ConfigScope, project_root: Optional[str] = None) -> Dict[str, Any]:
194
+ """Load raw configuration dictionary from a specific scope."""
195
+ path = get_config_path(scope, project_root)
196
+ if not path.exists():
197
+ return {}
198
+ try:
199
+ with open(path, "r") as f:
200
+ return yaml.safe_load(f) or {}
201
+ except Exception as e:
202
+ logger.warning(f"Failed to load config from {path}: {e}")
203
+ return {}
204
+
205
+ def save_raw_config(scope: ConfigScope, data: Dict[str, Any], project_root: Optional[str] = None) -> None:
206
+ """Save raw configuration dictionary to a specific scope."""
207
+ path = get_config_path(scope, project_root)
208
+ path.parent.mkdir(parents=True, exist_ok=True)
209
+ with open(path, "w") as f:
210
+ yaml.dump(data, f, default_flow_style=False)
211
+
212
+ class ConfigEventHandler(FileSystemEventHandler):
213
+ def __init__(self, loop, on_change: Callable[[], Awaitable[None]], config_path: Path):
214
+ self.loop = loop
215
+ self.on_change = on_change
216
+ self.config_path = config_path
217
+
218
+ def on_modified(self, event):
219
+ if event.src_path == str(self.config_path):
220
+ asyncio.run_coroutine_threadsafe(self.on_change(), self.loop)
221
+
222
+ class ConfigMonitor:
223
+ """
224
+ Monitors a configuration file for changes.
225
+ """
226
+ def __init__(self, config_path: Path, on_change: Callable[[], Awaitable[None]]):
227
+ self.config_path = config_path
228
+ self.on_change = on_change
229
+ self.observer = Observer()
230
+
231
+ async def start(self):
232
+ loop = asyncio.get_running_loop()
233
+ event_handler = ConfigEventHandler(loop, self.on_change, self.config_path)
234
+
235
+ if not self.config_path.exists():
236
+ # Ensure parent exists at least
237
+ self.config_path.parent.mkdir(parents=True, exist_ok=True)
238
+
239
+ # We watch the parent directory for the specific file
240
+ self.observer.schedule(event_handler, str(self.config_path.parent), recursive=False)
241
+ self.observer.start()
242
+ logger.info(f"Config Monitor started for {self.config_path}")
243
+
244
+ def stop(self):
245
+ if self.observer.is_alive():
246
+ self.observer.stop()
247
+ self.observer.join()
248
+ logger.info(f"Config Monitor stopped for {self.config_path}")
@@ -0,0 +1,62 @@
1
+ import os
2
+ from pathlib import Path
3
+ from typing import List, Dict, Optional
4
+ from pydantic import BaseModel
5
+
6
+ class ExecutionProfile(BaseModel):
7
+ name: str
8
+ source: str # "Global" or "Project"
9
+ path: str
10
+ content: Optional[str] = None
11
+
12
+ def scan_execution_profiles(project_root: Optional[Path] = None) -> List[ExecutionProfile]:
13
+ """
14
+ Scan for execution profiles (SOPs) in global and project scopes.
15
+ """
16
+ profiles = []
17
+
18
+ # 1. Global Scope
19
+ global_path = Path.home() / ".monoco" / "execution"
20
+ if global_path.exists():
21
+ profiles.extend(_scan_dir(global_path, "Global"))
22
+
23
+ # 2. Project Scope
24
+ if project_root:
25
+ project_path = project_root / ".monoco" / "execution"
26
+ if project_path.exists():
27
+ profiles.extend(_scan_dir(project_path, "Project"))
28
+
29
+ return profiles
30
+
31
+ def _scan_dir(base_path: Path, source: str) -> List[ExecutionProfile]:
32
+ profiles = []
33
+ if not base_path.is_dir():
34
+ return profiles
35
+
36
+ for item in base_path.iterdir():
37
+ if item.is_dir():
38
+ sop_path = item / "SOP.md"
39
+ if sop_path.exists():
40
+ profiles.append(ExecutionProfile(
41
+ name=item.name,
42
+ source=source,
43
+ path=str(sop_path.absolute())
44
+ ))
45
+ return profiles
46
+
47
+ def get_profile_detail(profile_path: str) -> Optional[ExecutionProfile]:
48
+ path = Path(profile_path)
49
+ if not path.exists():
50
+ return None
51
+
52
+ # Determine source (rough heuristic)
53
+ source = "Project"
54
+ if str(path).startswith(str(Path.home() / ".monoco")):
55
+ source = "Global"
56
+
57
+ return ExecutionProfile(
58
+ name=path.parent.name,
59
+ source=source,
60
+ path=str(path.absolute()),
61
+ content=path.read_text(encoding='utf-8')
62
+ )
monoco/core/feature.py ADDED
@@ -0,0 +1,58 @@
1
+ from abc import ABC, abstractmethod
2
+ from dataclasses import dataclass, field
3
+ from pathlib import Path
4
+ from typing import Dict, List, Optional
5
+
6
+ @dataclass
7
+ class IntegrationData:
8
+ """
9
+ Data collection returned by a feature for integration into the Agent environment.
10
+ """
11
+ # System Prompts to be injected into agent configuration (e.g., .cursorrules)
12
+ # Key: Section Title (e.g., "Issue Management"), Value: Markdown Content
13
+ system_prompts: Dict[str, str] = field(default_factory=dict)
14
+
15
+ # Paths to skill directories or files to be copied/symlinked
16
+ # DEPRECATED: Skill distribution is cancelled. Only prompts are synced.
17
+ skills: List[Path] = field(default_factory=list)
18
+
19
+ class MonocoFeature(ABC):
20
+ """
21
+ Abstract base class for all Monoco features.
22
+ Features must implement this protocol to participate in init and sync lifecycles.
23
+ """
24
+
25
+ @property
26
+ @abstractmethod
27
+ def name(self) -> str:
28
+ """Unique identifier for the feature (e.g., 'issue', 'spike')."""
29
+ pass
30
+
31
+ @abstractmethod
32
+ def initialize(self, root: Path, config: Dict) -> None:
33
+ """
34
+ Lifecycle hook: Physical Structure Initialization.
35
+ Called during `monoco init`.
36
+ Responsible for creating necessary directories, files, and config templates.
37
+
38
+ Args:
39
+ root: The root directory of the project.
40
+ config: The full project configuration dictionary.
41
+ """
42
+ pass
43
+
44
+ @abstractmethod
45
+ def integrate(self, root: Path, config: Dict) -> IntegrationData:
46
+ """
47
+ Lifecycle hook: Agent Environment Integration.
48
+ Called during `monoco sync`.
49
+ Responsible for returning data (prompts, skills) needed for the Agent Setup.
50
+
51
+ Args:
52
+ root: The root directory of the project.
53
+ config: The full project configuration dictionary.
54
+
55
+ Returns:
56
+ IntegrationData object containing prompts and skills.
57
+ """
58
+ pass
monoco/core/git.py CHANGED
@@ -1,7 +1,10 @@
1
+ from typing import List, Tuple, Optional, Dict, Callable, Awaitable
2
+ import asyncio
3
+ import logging
1
4
  import subprocess
2
- import shutil
3
5
  from pathlib import Path
4
- from typing import List, Tuple, Optional, Dict
6
+
7
+ logger = logging.getLogger("monoco.core.git")
5
8
 
6
9
  def _run_git(args: List[str], cwd: Path) -> Tuple[int, str, str]:
7
10
  """Run a raw git command."""
@@ -182,3 +185,49 @@ def worktree_remove(path: Path, worktree_path: Path, force: bool = False):
182
185
  code, _, stderr = _run_git(cmd, path)
183
186
  if code != 0:
184
187
  raise RuntimeError(f"Failed to remove worktree: {stderr}")
188
+
189
+ class GitMonitor:
190
+ """
191
+ Polls the Git repository for HEAD changes and triggers updates.
192
+ """
193
+ def __init__(self, path: Path, on_head_change: Callable[[str], Awaitable[None]], poll_interval: float = 2.0):
194
+ self.path = path
195
+ self.on_head_change = on_head_change
196
+ self.poll_interval = poll_interval
197
+ self.last_head_hash: Optional[str] = None
198
+ self.is_running = False
199
+
200
+ async def get_head_hash(self) -> Optional[str]:
201
+ try:
202
+ process = await asyncio.create_subprocess_exec(
203
+ "git", "rev-parse", "HEAD",
204
+ cwd=self.path,
205
+ stdout=asyncio.subprocess.PIPE,
206
+ stderr=asyncio.subprocess.PIPE
207
+ )
208
+ stdout, _ = await process.communicate()
209
+ if process.returncode == 0:
210
+ return stdout.decode().strip()
211
+ return None
212
+ except Exception as e:
213
+ logger.error(f"Git polling error: {e}")
214
+ return None
215
+
216
+ async def start(self):
217
+ self.is_running = True
218
+ logger.info(f"Git Monitor started for {self.path}.")
219
+
220
+ self.last_head_hash = await self.get_head_hash()
221
+
222
+ while self.is_running:
223
+ await asyncio.sleep(self.poll_interval)
224
+ current_hash = await self.get_head_hash()
225
+
226
+ if current_hash and current_hash != self.last_head_hash:
227
+ logger.info(f"Git HEAD changed: {self.last_head_hash} -> {current_hash}")
228
+ self.last_head_hash = current_hash
229
+ await self.on_head_change(current_hash)
230
+
231
+ def stop(self):
232
+ self.is_running = False
233
+ logger.info(f"Git Monitor stopping for {self.path}...")
@@ -0,0 +1,196 @@
1
+ import re
2
+ from pathlib import Path
3
+ from typing import Dict, List, Optional
4
+
5
+ class PromptInjector:
6
+ """
7
+ Engine for injecting managed content into Markdown-like files (e.g., .cursorrules, GEMINI.md).
8
+ Maintains a 'Managed Block' defined by a specific header.
9
+ """
10
+
11
+ MANAGED_HEADER = "## Monoco Toolkit"
12
+
13
+ def __init__(self, target_file: Path):
14
+ self.target_file = target_file
15
+
16
+ def inject(self, prompts: Dict[str, str]) -> bool:
17
+ """
18
+ Injects the provided prompts into the target file.
19
+
20
+ Args:
21
+ prompts: A dictionary where key is the section title and value is the content.
22
+
23
+ Returns:
24
+ True if changes were written, False otherwise.
25
+ """
26
+ current_content = ""
27
+ if self.target_file.exists():
28
+ current_content = self.target_file.read_text(encoding="utf-8")
29
+
30
+ new_content = self._merge_content(current_content, prompts)
31
+
32
+ if new_content != current_content:
33
+ self.target_file.write_text(new_content, encoding="utf-8")
34
+ return True
35
+ return False
36
+
37
+ def _merge_content(self, original: str, prompts: Dict[str, str]) -> str:
38
+ """
39
+ Merges the generated prompts into the original content within the managed block.
40
+ """
41
+ # 1. Generate the new managed block content
42
+ managed_block = [self.MANAGED_HEADER, ""]
43
+ managed_block.append("> **Auto-Generated**: This section is managed by Monoco. Do not edit manually.\n")
44
+
45
+ for title, content in prompts.items():
46
+ managed_block.append(f"### {title}")
47
+ managed_block.append("") # Blank line after header
48
+
49
+ # Sanitize content: remove leading header if it matches the title
50
+ clean_content = content.strip()
51
+ # Regex to match optional leading hash header matching the title (case insensitive)
52
+ # e.g. "### Issue Management" or "# Issue Management"
53
+ pattern = r"^(#+\s*)" + re.escape(title) + r"\s*\n"
54
+ match = re.match(pattern, clean_content, re.IGNORECASE)
55
+
56
+ if match:
57
+ clean_content = clean_content[match.end():].strip()
58
+
59
+ managed_block.append(clean_content)
60
+ managed_block.append("") # Blank line after section
61
+
62
+ managed_block_str = "\n".join(managed_block).strip() + "\n"
63
+
64
+ # 2. Find and replace/append in the original content
65
+ lines = original.splitlines()
66
+ start_idx = -1
67
+ end_idx = -1
68
+
69
+ # Find start
70
+ for i, line in enumerate(lines):
71
+ if line.strip() == self.MANAGED_HEADER:
72
+ start_idx = i
73
+ break
74
+
75
+ if start_idx == -1:
76
+ # Block not found, append to end
77
+ if original and not original.endswith("\n"):
78
+ return original + "\n\n" + managed_block_str
79
+ elif original:
80
+ return original + "\n" + managed_block_str
81
+ else:
82
+ return managed_block_str
83
+
84
+ # Find end: Look for next header of level 1 (assuming Managed Header is H1)
85
+ # Or EOF
86
+ # Note: If MANAGED_HEADER is "# ...", we look for next "# ..."
87
+ # But allow "## ..." as children.
88
+
89
+ header_level_match = re.match(r"^(#+)\s", self.MANAGED_HEADER)
90
+ header_level_prefix = header_level_match.group(1) if header_level_match else "#"
91
+
92
+ for i in range(start_idx + 1, len(lines)):
93
+ line = lines[i]
94
+ # Check if this line is a header of the same level or higher (fewer #s)
95
+ # e.g. if Managed is "###", then "#" and "##" are higher/parents, "###" is sibling.
96
+ # We treat siblings as end of block too.
97
+ if line.startswith("#"):
98
+ # Match regex to get level
99
+ match = re.match(r"^(#+)\s", line)
100
+ if match:
101
+ level = match.group(1)
102
+ if len(level) <= len(header_level_prefix):
103
+ end_idx = i
104
+ break
105
+
106
+ if end_idx == -1:
107
+ end_idx = len(lines)
108
+
109
+ # 3. Construct result
110
+ pre_block = "\n".join(lines[:start_idx])
111
+ post_block = "\n".join(lines[end_idx:])
112
+
113
+ result = pre_block
114
+ if result:
115
+ result += "\n\n"
116
+
117
+ result += managed_block_str
118
+
119
+ if post_block:
120
+ # Ensure separation if post block exists and isn't just empty lines
121
+ if post_block.strip():
122
+ result += "\n" + post_block
123
+ else:
124
+ result += post_block # Keep trailing newlines if any, or normalize?
125
+
126
+ return result.strip() + "\n"
127
+
128
+ def remove(self) -> bool:
129
+ """
130
+ Removes the managed block from the target file.
131
+
132
+ Returns:
133
+ True if changes were written (block removed), False otherwise.
134
+ """
135
+ if not self.target_file.exists():
136
+ return False
137
+
138
+ current_content = self.target_file.read_text(encoding="utf-8")
139
+ lines = current_content.splitlines()
140
+
141
+ start_idx = -1
142
+ end_idx = -1
143
+
144
+ # Find start
145
+ for i, line in enumerate(lines):
146
+ if line.strip() == self.MANAGED_HEADER:
147
+ start_idx = i
148
+ break
149
+
150
+ if start_idx == -1:
151
+ return False
152
+
153
+ # Find end: exact logic as in _merge_content
154
+ header_level_match = re.match(r"^(#+)\s", self.MANAGED_HEADER)
155
+ header_level_prefix = header_level_match.group(1) if header_level_match else "#"
156
+
157
+ for i in range(start_idx + 1, len(lines)):
158
+ line = lines[i]
159
+ if line.startswith("#"):
160
+ match = re.match(r"^(#+)\s", line)
161
+ if match:
162
+ level = match.group(1)
163
+ if len(level) <= len(header_level_prefix):
164
+ end_idx = i
165
+ break
166
+
167
+ if end_idx == -1:
168
+ end_idx = len(lines)
169
+
170
+ # Reconstruct content without the block
171
+ # We also need to be careful about surrounding newlines to avoid leaving gaps
172
+
173
+ # Check lines before start_idx
174
+ while start_idx > 0 and not lines[start_idx-1].strip():
175
+ start_idx -= 1
176
+
177
+ # Check lines after end_idx (optional, but good for cleanup)
178
+ # Usually end_idx points to the next header or EOF.
179
+ # If it points to next header, we keep it.
180
+
181
+ pre_block = lines[:start_idx]
182
+ post_block = lines[end_idx:]
183
+
184
+ # If we removed everything, the file might become empty or just newlines
185
+
186
+ new_lines = pre_block + post_block
187
+ if not new_lines:
188
+ new_content = ""
189
+ else:
190
+ new_content = "\n".join(new_lines).strip() + "\n"
191
+
192
+ if new_content != current_content:
193
+ self.target_file.write_text(new_content, encoding="utf-8")
194
+ return True
195
+
196
+ return False