monoco-toolkit 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,172 @@
1
+ from typing import List, Optional, Tuple, Set
2
+ from pathlib import Path
3
+ from rich.console import Console
4
+ from rich.table import Table
5
+ import typer
6
+
7
+ from . import core
8
+ from .models import IssueStatus, IssueStage
9
+
10
+ console = Console()
11
+
12
+
13
def validate_issue(path: Path, meta: core.IssueMetadata, all_issue_ids: Optional[Set[str]] = None, issues_root: Optional[Path] = None) -> List[str]:
    """
    Validate a single issue's metadata integrity.

    Directory placement is intentionally NOT checked here: the file may live
    in a temporary location (e.g. during a "Safe Edit"), so only rules that
    depend on the metadata content itself are applied.

    Args:
        path: Location of the issue file (kept for API stability).
        meta: Parsed issue metadata.
        all_issue_ids: Known issue IDs (local and namespaced) used to verify
            parent references; None/empty skips the parent-existence check.
        issues_root: Fallback issue tree searched when the parent ID is not
            found in all_issue_ids.

    Returns:
        A list of rich-formatted error messages (empty when valid).
    """
    errors: List[str] = []
    # Avoid the original mutable default argument (`= set()`).
    known_ids = all_issue_ids or set()

    # A. Solution Compliance: a closed issue must record how it was resolved.
    if meta.status == IssueStatus.CLOSED and not meta.solution:
        errors.append(f"[red]Solution Missing:[/red] {meta.id} is closed but has no [dim]solution[/dim] field.")

    # B. Link Integrity: the parent must exist among known IDs or on disk.
    if meta.parent:
        if known_ids and meta.parent not in known_ids:
            # Fall back to scanning the workspace tree before reporting.
            found = False
            if issues_root and core.find_issue_path(issues_root, meta.parent):
                found = True

            if not found:
                errors.append(f"[red]Broken Link:[/red] {meta.id} refers to non-existent parent [bold]{meta.parent}[/bold].")

    # C. Lifecycle Guard: backlog issues must be freezed.
    if meta.status == IssueStatus.BACKLOG and meta.stage != IssueStage.FREEZED:
        errors.append(f"[red]Lifecycle Error:[/red] {meta.id} is backlog but stage is not [bold]freezed[/bold] (found: {meta.stage}).")

    return errors
50
+
51
def check_integrity(issues_root: Path, recursive: bool = False) -> List[str]:
    """
    Verify the integrity of the Issues directory.
    Returns a list of error messages.

    If recursive=True, performs workspace-level validation including:
    - Cross-project ID collision detection
    - Cross-project UID collision detection
    """
    errors: List[str] = []
    all_issue_ids: Set[str] = set()  # For parent reference validation (includes namespaced IDs)
    id_to_projects = {}  # local_id -> [(project_name, meta, file)]
    all_uids = {}  # uid -> (project, issue_id)
    all_issues = []

    def collect_project_issues(project_issues_root: Path, project_name: str = "local"):
        """Parse every issue file under one project, recording IDs and UIDs."""
        project_issues = []
        for subdir in ["Epics", "Features", "Chores", "Fixes"]:
            d = project_issues_root / subdir
            if not d.exists():
                continue

            files = []
            for status in ["open", "closed", "backlog"]:
                status_dir = d / status
                if status_dir.exists():
                    files.extend(status_dir.rglob("*.md"))

            for f in files:
                meta = core.parse_issue(f)
                if not meta:
                    continue

                local_id = meta.id
                full_id = f"{project_name}::{local_id}" if project_name != "local" else local_id

                # Track ID occurrences per project (duplicate detection).
                id_to_projects.setdefault(local_id, []).append((project_name, meta, f))

                # Register IDs for parent-reference validation.
                all_issue_ids.add(local_id)  # Local ID
                if project_name != "local":
                    all_issue_ids.add(full_id)  # Namespaced ID

                # UID must be globally unique across the workspace.
                if meta.uid:
                    if meta.uid in all_uids:
                        existing_project, existing_id = all_uids[meta.uid]
                        errors.append(
                            f"[red]UID Collision:[/red] UID {meta.uid} is duplicated.\n"
                            f" - {existing_project}::{existing_id}\n"
                            f" - {project_name}::{local_id}"
                        )
                    else:
                        all_uids[meta.uid] = (project_name, local_id)

                project_issues.append((f, meta, project_name))
        return project_issues

    # 1. Collect local issues
    all_issues.extend(collect_project_issues(issues_root, "local"))

    # 2. If recursive, collect workspace member issues
    if recursive:
        try:
            from monoco.core.config import get_config
            project_root = issues_root.parent
            conf = get_config(str(project_root))

            for member_name, rel_path in conf.project.members.items():
                member_root = (project_root / rel_path).resolve()
                member_issues_dir = member_root / "Issues"

                if member_issues_dir.exists():
                    all_issues.extend(collect_project_issues(member_issues_dir, member_name))
        except Exception:
            # Fail gracefully if workspace config is missing or unreadable.
            pass

    # 3. Check for ID collisions within the same project
    for local_id, occurrences in id_to_projects.items():
        # Group occurrences by project name.
        projects_with_id = {}
        for project_name, meta, f in occurrences:
            projects_with_id.setdefault(project_name, []).append((meta, f))

        # The same ID appearing more than once in one project is an error.
        for project_name, metas in projects_with_id.items():
            if len(metas) > 1:
                error_msg = f"[red]ID Collision:[/red] {local_id} appears {len(metas)} times in project '{project_name}':\n"
                for idx, (meta, f) in enumerate(metas, 1):
                    error_msg += f" {idx}. uid: {meta.uid or 'N/A'} | created: {meta.created_at} | stage: {meta.stage} | status: {meta.status.value}\n"
                error_msg += f" [yellow]→ Action:[/yellow] Remove duplicate or use 'monoco issue move --to <target> --renumber' to resolve."
                errors.append(error_msg)

    # 4. Per-issue validation
    for path, meta, project_name in all_issues:
        # A. Directory/Status consistency (only meaningful for in-tree files).
        expected_status = meta.status.value
        if expected_status not in path.parts:
            errors.append(f"[yellow]Placement Error:[/yellow] {meta.id} has status [cyan]{expected_status}[/cyan] but is not under a [dim]{expected_status}/[/dim] directory.")

        # Reuse the shared metadata checks.
        errors.extend(validate_issue(path, meta, all_issue_ids, issues_root))

    return errors
160
+
161
+
162
def run_lint(issues_root: Path, recursive: bool = False):
    """Run the integrity check and render the result; exit code 1 on failure."""
    errors = check_integrity(issues_root, recursive)

    if errors:
        report = Table(title="Issue Integrity Issues", show_header=False, border_style="red")
        for message in errors:
            report.add_row(message)
        console.print(report)
        raise typer.Exit(code=1)

    console.print("[green]✔[/green] Issue integrity check passed. No integrity errors found.")
@@ -0,0 +1,154 @@
1
+ from enum import Enum
2
+ from typing import List, Optional, Any
3
+ from pydantic import BaseModel, Field, model_validator
4
+ from datetime import datetime
5
+ import hashlib
6
+ import secrets
7
+
8
+
9
class IssueID:
    """
    Helper for parsing Issue IDs that might be namespaced (e.g. 'toolkit::FEAT-0001').
    """

    def __init__(self, raw: str):
        self.raw = raw
        # Split on the first '::' only; no separator means a purely local ID.
        prefix, sep, remainder = raw.partition("::")
        if sep:
            self.namespace = prefix
            self.local_id = remainder
        else:
            self.namespace = None
            self.local_id = raw

    def __str__(self):
        if self.namespace is None:
            return self.local_id
        return f"{self.namespace}::{self.local_id}"

    def __repr__(self):
        return f"IssueID({self.raw})"

    @property
    def is_local(self) -> bool:
        """True when the ID carries no project namespace."""
        return self.namespace is None

    def matches(self, other_id: str) -> bool:
        """Check if this ID matches another ID string."""
        if str(self) == other_id:
            return True
        return self.is_local and self.local_id == other_id
36
+
37
def current_time() -> datetime:
    """Return the current local time truncated to whole seconds."""
    now = datetime.now()
    return now.replace(microsecond=0)
39
+
40
def generate_uid() -> str:
    """
    Generate a globally unique 6-character short hash for issue identity.
    Uses timestamp + random bytes to ensure uniqueness across projects.
    """
    # Current timestamp plus 8 random bytes seeds the digest.
    seed = str(datetime.now().timestamp()).encode() + secrets.token_bytes(8)
    return hashlib.sha256(seed).hexdigest()[:6]
50
+
51
+
52
class IssueType(str, Enum):
    """Kind of work item; maps to the Epics/Features/Chores/Fixes directories."""
    EPIC = "epic"
    FEATURE = "feature"
    CHORE = "chore"
    FIX = "fix"
57
+
58
class IssueStatus(str, Enum):
    """Top-level lifecycle state; also the directory an issue file lives under."""
    OPEN = "open"
    CLOSED = "closed"
    BACKLOG = "backlog"
62
+
63
class IssueStage(str, Enum):
    """Fine-grained progress within a status (see IssueMetadata.validate_lifecycle)."""
    TODO = "todo"
    DOING = "doing"
    REVIEW = "review"
    DONE = "done"
    FREEZED = "freezed"
69
+
70
class IssueSolution(str, Enum):
    """How a closed issue was resolved; required once status is 'closed'."""
    IMPLEMENTED = "implemented"
    CANCELLED = "cancelled"
    WONTFIX = "wontfix"
    DUPLICATE = "duplicate"
75
+
76
class IsolationType(str, Enum):
    """Git isolation strategy used while working on an issue."""
    BRANCH = "branch"
    WORKTREE = "worktree"
79
+
80
class IssueIsolation(BaseModel):
    """Describes the git isolation (branch or worktree) backing an issue."""
    type: IsolationType
    ref: str  # Git branch name
    path: Optional[str] = None  # Worktree path (relative to repo root or absolute)
    created_at: datetime = Field(default_factory=current_time)
85
+
86
class IssueMetadata(BaseModel):
    """Frontmatter metadata of a single issue file.

    Validators normalize enum-like strings to lowercase and keep
    status/stage/closed_at mutually consistent.
    """
    # Unknown frontmatter keys are preserved so files round-trip cleanly.
    model_config = {"extra": "allow"}

    id: str
    uid: Optional[str] = None  # Global unique identifier for cross-project identity
    type: IssueType
    status: IssueStatus = IssueStatus.OPEN
    stage: Optional[IssueStage] = None
    title: str

    # Time Anchors
    created_at: datetime = Field(default_factory=current_time)
    opened_at: Optional[datetime] = None
    updated_at: datetime = Field(default_factory=current_time)
    closed_at: Optional[datetime] = None

    parent: Optional[str] = None
    sprint: Optional[str] = None
    solution: Optional[IssueSolution] = None
    isolation: Optional[IssueIsolation] = None
    dependencies: List[str] = []
    related: List[str] = []
    tags: List[str] = []


    @model_validator(mode='before')
    @classmethod
    def normalize_fields(cls, v: Any) -> Any:
        """Lowercase enum-backed string fields so parsing is case-insensitive."""
        if isinstance(v, dict):
            # Normalize type and status to lowercase for compatibility
            if "type" in v and isinstance(v["type"], str):
                v["type"] = v["type"].lower()
            if "status" in v and isinstance(v["status"], str):
                v["status"] = v["status"].lower()
            if "solution" in v and isinstance(v["solution"], str):
                v["solution"] = v["solution"].lower()
            # Stage normalization
            if "stage" in v and isinstance(v["stage"], str):
                v["stage"] = v["stage"].lower()
        return v

    @model_validator(mode='after')
    def validate_lifecycle(self) -> 'IssueMetadata':
        """Coerce stage (and closed_at) to stay consistent with status."""
        # Logic Definition:
        # status: backlog -> stage: freezed
        # status: closed -> stage: done
        # status: open -> stage: todo | doing | review (default todo)

        if self.status == IssueStatus.BACKLOG:
            self.stage = IssueStage.FREEZED

        elif self.status == IssueStatus.CLOSED:
            # Enforce stage=done for closed issues
            if self.stage != IssueStage.DONE:
                self.stage = IssueStage.DONE
            # Auto-fill closed_at if missing
            if not self.closed_at:
                self.closed_at = current_time()

        elif self.status == IssueStatus.OPEN:
            # Ensure valid stage for open status
            if self.stage is None or self.stage == IssueStage.DONE:
                self.stage = IssueStage.TODO

        return self
151
+
152
class IssueDetail(IssueMetadata):
    """Issue metadata plus the markdown body and raw file content."""
    body: str = ""
    raw_content: Optional[str] = None  # Full file content including frontmatter for editing
@@ -0,0 +1 @@
1
+ from .core import init
@@ -0,0 +1,96 @@
1
+ import os
2
+ import re
3
+ from pathlib import Path
4
+ from typing import Dict, List, Any
5
+ from rich.console import Console
6
+
7
+ console = Console()
8
+
9
def init(root: Path, resources: List[Dict[str, Any]]):
    """
    Initialize the Skills module.

    Args:
        root: Project root directory.
        resources: List of resource dicts from modules.
            Expected format:
            {
                "skills": { "name": "content" },
                "prompts": { "name": "content" }
            }
    """
    skills_root = root / "Toolkit" / "skills"
    skills_root.mkdir(parents=True, exist_ok=True)

    # 1. Write skills. Always overwrite so re-running repairs stale content.
    for res in resources:
        for name, content in res.get("skills", {}).items():
            skill_dir = skills_root / name
            skill_dir.mkdir(exist_ok=True)
            skill_file = skill_dir / "SKILL.md"
            skill_file.write_text(content, encoding="utf-8")
            console.print(f"[dim] - Scaffolding skill:[/dim] {name}")

    # 2. Update agent docs
    update_agent_docs(root, resources)
40
+
41
def update_agent_docs(root: Path, resources: List[Dict[str, Any]]):
    """
    Inject prompts into AGENTS.md, GEMINI.md, CLAUDE.md.
    """
    # Aggregate every prompt from every resource dict, preserving order.
    prompt_parts = []
    for res in resources:
        if "prompts" in res:
            for name in res["prompts"]:
                prompt_parts.append(res["prompts"][name])
    aggregated_prompt = "\n\n".join(prompt_parts)

    injection_content = f"""
## Monoco Toolkit

The following tools and skills are available in this environment.

{aggregated_prompt}
"""

    for filename in ["AGENTS.md", "GEMINI.md", "CLAUDE.md"]:
        _inject_section(root / filename, injection_content)
65
+
66
def _inject_section(file_path: Path, content: str):
    """
    Insert or refresh the '## Monoco Toolkit' section of an agent doc.

    Creates the file when missing, replaces the existing section when present,
    otherwise appends the section to the end of the file.
    """
    if not file_path.exists():
        # Fresh repo: create the doc with a generic heading plus our section.
        file_path.write_text(f"# Project Guidelines\n{content}", encoding="utf-8")
        console.print(f"[green]✔[/green] Created {file_path.name}")
        return

    original_content = file_path.read_text(encoding="utf-8")

    # Regex to find existing section
    # Matches ## Monoco Toolkit ... until next ## or End of String
    pattern = r"(## Monoco Toolkit.*?)(\n## |\Z)"

    if re.search(pattern, original_content, re.DOTALL):
        # Use a callable replacement: a plain replacement string would have
        # backslashes/group references inside `content` interpreted by
        # re.sub, corrupting output or raising re.error.
        replacement = f"{content.strip()}\n\n"
        new_content = re.sub(
            pattern,
            lambda m: replacement + m.group(2),
            original_content,
            flags=re.DOTALL,
        )
        if new_content != original_content:
            file_path.write_text(new_content, encoding="utf-8")
            console.print(f"[green]✔[/green] Updated {file_path.name}")
        else:
            console.print(f"[dim] - {file_path.name} is up to date.[/dim]")
    else:
        # No existing section: append, ensuring a separating newline.
        with open(file_path, "a", encoding="utf-8") as f:
            if not original_content.endswith("\n"):
                f.write("\n")
            f.write(content)
        console.print(f"[green]✔[/green] Appended to {file_path.name}")
@@ -0,0 +1,110 @@
1
+ import typer
2
+ from pathlib import Path
3
+ from rich.console import Console
4
+
5
+ from monoco.core.config import get_config
6
+ from . import core
7
+
8
+ app = typer.Typer(help="Spike & Repo Management.")
9
+ console = Console()
10
+
11
@app.command("init")
def init():
    """Initialize the Spike environment (gitignore setup)."""
    config = get_config()
    root_dir = Path(config.paths.root)
    spikes_dir_name = config.paths.spikes

    # Register the ignore entry first, then make sure the directory exists.
    core.ensure_gitignore(root_dir, spikes_dir_name)
    (root_dir / spikes_dir_name).mkdir(exist_ok=True)

    console.print(f"[green]✔[/green] Initialized Spike environment. Added '{spikes_dir_name}/' to .gitignore.")
24
+
25
@app.command("add")
def add_repo(
    url: str = typer.Argument(..., help="Git Repository URL"),
):
    """Add a new research repository.

    The repo name is inferred from the last path segment of the URL:
      https://github.com/foo/bar.git -> bar
      git@github.com:foo/bar.git     -> bar
    """
    config = get_config()
    root_dir = Path(config.paths.root)

    # rstrip guards against trailing slashes: ".../bar/" would otherwise
    # produce an empty repository name.
    name = url.rstrip("/").split("/")[-1]
    if name.endswith(".git"):
        name = name[:-4]

    core.update_config_repos(root_dir, name, url)
    console.print(f"[green]✔[/green] Added repo [bold]{name}[/bold] ({url}) to configuration.")
    console.print("Run [bold]monoco spike sync[/bold] to download content.")
43
+
44
@app.command("remove")
def remove_repo(
    name: str = typer.Argument(..., help="Repository Name"),
    force: bool = typer.Option(False, "--force", "-f", help="Force delete physical directory without asking"),
):
    """Remove a repository from configuration."""
    config = get_config()
    root_dir = Path(config.paths.root)
    spikes_dir = root_dir / config.paths.spikes

    if name not in config.project.spike_repos:
        console.print(f"[yellow]![/yellow] Repo [bold]{name}[/bold] not found in configuration.")
        return

    # Drop the configuration entry first; the directory is handled below.
    core.update_config_repos(root_dir, name, "", remove=True)
    console.print(f"[green]✔[/green] Removed [bold]{name}[/bold] from configuration.")

    target_path = spikes_dir / name
    if not target_path.exists():
        return

    should_delete = force or typer.confirm(
        f"Do you want to delete the directory {target_path}?", default=False
    )
    if should_delete:
        core.remove_repo_dir(spikes_dir, name)
        console.print(f"[gray]✔[/gray] Deleted directory {target_path}.")
    else:
        console.print(f"[gray]ℹ[/gray] Directory {target_path} kept.")
69
+
70
@app.command("sync")
def sync_repos():
    """Sync (Clone/Pull) all configured repositories."""
    # Separate CLI invocations load the config fresh from disk, so a
    # preceding `monoco spike add` is always visible here.
    config = get_config()

    root_dir = Path(config.paths.root)
    spikes_dir = root_dir / config.paths.spikes
    spikes_dir.mkdir(exist_ok=True)

    repos = config.project.spike_repos
    if not repos:
        console.print("[yellow]No repositories configured.[/yellow] Use 'monoco spike add <url>' first.")
        return

    console.print(f"Syncing {len(repos)} repositories...")
    for name, url in repos.items():
        core.sync_repo(root_dir, spikes_dir, name, url)

    console.print("[green]✔[/green] Sync complete.")
96
+
97
+ # Alias for list (showing configured repos) could be useful but not strictly asked for.
98
+ # Let's add a simple list command to see what we have.
99
@app.command("list")
def list_repos():
    """List configured repositories."""
    repos = get_config().project.spike_repos

    if not repos:
        console.print("[yellow]No repositories configured.[/yellow]")
        return

    for name, url in repos.items():
        console.print(f"- [bold]{name}[/bold]: {url}")
@@ -0,0 +1,154 @@
1
+ import os
2
+ import shutil
3
+ import subprocess
4
+ import yaml
5
+ from pathlib import Path
6
+ from typing import Dict, Optional, List, Any
7
+ from rich.console import Console
8
+
9
+ from monoco.core.config import get_config
10
+
11
+ console = Console()
12
+
13
+ def run_git_command(cmd: List[str], cwd: Path) -> bool:
14
+ """Run a git command in the specified directory."""
15
+ try:
16
+ subprocess.run(
17
+ cmd,
18
+ cwd=cwd,
19
+ check=True,
20
+ stdout=subprocess.PIPE,
21
+ stderr=subprocess.PIPE,
22
+ text=True
23
+ )
24
+ return True
25
+ except subprocess.CalledProcessError as e:
26
+ console.print(f"[red]Git Error:[/red] {' '.join(cmd)}\n{e.stderr}")
27
+ return False
28
+ except FileNotFoundError:
29
+ console.print("[red]Error:[/red] git command not found.")
30
+ return False
31
+
32
def get_config_file_path(root: Path) -> Path:
    """Determine the config file to update.

    Preference order: .monoco/config.yaml, then monoco.yaml; when neither
    exists, default to the hidden location (creating its parent directory).
    """
    candidates = [root / ".monoco" / "config.yaml", root / "monoco.yaml"]
    for candidate in candidates:
        if candidate.exists():
            return candidate

    # Neither file exists: new configs go to .monoco/config.yaml.
    default = candidates[0]
    default.parent.mkdir(exist_ok=True)
    return default
47
+
48
def update_config_repos(root: Path, repo_name: str, repo_url: str, remove: bool = False):
    """Add or remove an entry in the project's spike_repos config mapping."""
    config_path = get_config_file_path(root)

    data = {}
    if config_path.exists():
        try:
            with open(config_path, "r") as f:
                data = yaml.safe_load(f) or {}
        except Exception:
            # Unparseable config: start from an empty mapping rather than crash.
            data = {}

    # Ensure the nested structure exists before touching it.
    repos = data.setdefault("project", {}).setdefault("spike_repos", {})

    if remove:
        repos.pop(repo_name, None)
    else:
        repos[repo_name] = repo_url

    with open(config_path, "w") as f:
        yaml.dump(data, f, sort_keys=False, default_flow_style=False)
74
+
75
def ensure_gitignore(root: Path, target_dir_name: str):
    """Ensure the target directory is listed in .gitignore.

    Compares whole lines rather than substrings: the original substring
    check was fooled by unrelated mentions of the name elsewhere in the
    file (e.g. inside a comment), silently skipping the entry.
    """
    gitignore = root / ".gitignore"
    entry = f"{target_dir_name}/"

    if not gitignore.exists():
        gitignore.write_text(f"{entry}\n")
        return

    content = gitignore.read_text()
    # Treat the directory as already ignored only if some line matches it
    # exactly (with or without the trailing slash).
    lines = {line.strip() for line in content.splitlines()}
    if entry in lines or target_dir_name in lines:
        return

    # Avoid a missing separator if the file doesn't end with a newline.
    prefix = "\n" if content and not content.endswith("\n") else ""
    with open(gitignore, "a") as f:
        f.write(f"{prefix}{entry}\n")
88
+
89
def sync_repo(root: Path, spikes_dir: Path, name: str, url: str):
    """Clone the repo if absent, otherwise pull the latest changes."""
    target_path = spikes_dir / name
    is_git_repo = target_path.exists() and (target_path / ".git").exists()

    if is_git_repo:
        console.print(f"Updating [bold]{name}[/bold]...")
        run_git_command(["git", "pull"], cwd=target_path)
        return

    # Refuse to clone into a non-empty directory that isn't a git repo.
    if target_path.exists() and any(target_path.iterdir()):
        console.print(f"[yellow]Skipping {name}:[/yellow] Directory exists and is not empty, but not a git repo.")
        return

    console.print(f"Cloning [bold]{name}[/bold]...")
    target_path.mkdir(parents=True, exist_ok=True)
    run_git_command(["git", "clone", url, "."], cwd=target_path)
106
+
107
def remove_repo_dir(spikes_dir: Path, name: str):
    """Physically delete the repo directory if it exists (no-op otherwise)."""
    repo_path = spikes_dir / name
    if not repo_path.exists():
        return
    shutil.rmtree(repo_path)
113
+
114
# Skill document scaffolded into Toolkit/skills/git-repo-spike/SKILL.md.
SKILL_CONTENT = """---
name: git-repo-spike
description: Manage external Git repositories as References in `.reference/`.
---

# Git Repo Spike (Reference Management)

This skill normalizes how we introduce external code repositories.

## Core Principles
1. **Read-Only**: Code in `.reference/` is for reference only.
2. **Isolation**: All external repos sit within `.reference/`.
3. **VCS Hygiene**: `.reference/` is gitignored. We track the intent to clone, not the files.

## Workflow
1. **Add**: `monoco spike add <url>`
2. **Sync**: `monoco spike sync` (Clones/Pulls all repos)
3. **Remove**: `monoco spike remove <name>`
"""

# Prompt fragment injected into agent docs (AGENTS.md / GEMINI.md / CLAUDE.md).
PROMPT_CONTENT = """### Spike (Research)
Manage external reference repositories.
- **Add Repo**: `monoco spike add <url>` (Available in `.reference/<name>` for reading)
- **Sync**: `monoco spike sync` (Run to download content)
- **Constraint**: Never edit files in `.reference/`. Treat them as read-only external knowledge."""
139
+
140
def init(root: Path, spikes_dir_name: str):
    """Initialize Spike environment (gitignore entry + spikes directory)."""
    ensure_gitignore(root, spikes_dir_name)
    spikes_path = root / spikes_dir_name
    spikes_path.mkdir(exist_ok=True)
144
+
145
def get_resources() -> Dict[str, Any]:
    """Expose this module's skills and prompts to the toolkit scaffolder."""
    skills = {"git-repo-spike": SKILL_CONTENT}
    prompts = {"spike": PROMPT_CONTENT}
    return {"skills": skills, "prompts": prompts}