myai-opencode 0.6.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +244 -0
- package/dist/agents/designer.d.ts +2 -0
- package/dist/agents/explorer.d.ts +2 -0
- package/dist/agents/fixer.d.ts +2 -0
- package/dist/agents/index.d.ts +22 -0
- package/dist/agents/librarian.d.ts +2 -0
- package/dist/agents/oracle.d.ts +2 -0
- package/dist/agents/orchestrator.d.ts +7 -0
- package/dist/background/background-manager.d.ts +131 -0
- package/dist/background/index.d.ts +2 -0
- package/dist/background/tmux-session-manager.d.ts +58 -0
- package/dist/cli/config-io.d.ts +21 -0
- package/dist/cli/config-manager.d.ts +4 -0
- package/dist/cli/custom-skills.d.ts +29 -0
- package/dist/cli/index.d.ts +2 -0
- package/dist/cli/index.js +14342 -0
- package/dist/cli/install.d.ts +2 -0
- package/dist/cli/paths.d.ts +7 -0
- package/dist/cli/providers.d.ts +79 -0
- package/dist/cli/skills.d.ts +35 -0
- package/dist/cli/system.d.ts +4 -0
- package/dist/cli/types.d.ts +34 -0
- package/dist/config/agent-mcps.d.ts +15 -0
- package/dist/config/constants.d.ts +12 -0
- package/dist/config/index.d.ts +4 -0
- package/dist/config/loader.d.ts +26 -0
- package/dist/config/schema.d.ts +81 -0
- package/dist/config/utils.d.ts +10 -0
- package/dist/hooks/auto-update-checker/cache.d.ts +6 -0
- package/dist/hooks/auto-update-checker/checker.d.ts +28 -0
- package/dist/hooks/auto-update-checker/constants.d.ts +11 -0
- package/dist/hooks/auto-update-checker/index.d.ts +17 -0
- package/dist/hooks/auto-update-checker/types.d.ts +23 -0
- package/dist/hooks/index.d.ts +4 -0
- package/dist/hooks/phase-reminder/index.d.ts +25 -0
- package/dist/hooks/post-read-nudge/index.d.ts +18 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.js +33076 -0
- package/dist/mcp/context7.d.ts +6 -0
- package/dist/mcp/grep-app.d.ts +6 -0
- package/dist/mcp/index.d.ts +6 -0
- package/dist/mcp/types.d.ts +12 -0
- package/dist/mcp/websearch.d.ts +6 -0
- package/dist/tools/ast-grep/cli.d.ts +15 -0
- package/dist/tools/ast-grep/constants.d.ts +25 -0
- package/dist/tools/ast-grep/downloader.d.ts +5 -0
- package/dist/tools/ast-grep/index.d.ts +10 -0
- package/dist/tools/ast-grep/tools.d.ts +3 -0
- package/dist/tools/ast-grep/types.d.ts +30 -0
- package/dist/tools/ast-grep/utils.d.ts +4 -0
- package/dist/tools/background.d.ts +13 -0
- package/dist/tools/grep/cli.d.ts +3 -0
- package/dist/tools/grep/constants.d.ts +18 -0
- package/dist/tools/grep/downloader.d.ts +3 -0
- package/dist/tools/grep/index.d.ts +5 -0
- package/dist/tools/grep/tools.d.ts +2 -0
- package/dist/tools/grep/types.d.ts +35 -0
- package/dist/tools/grep/utils.d.ts +2 -0
- package/dist/tools/index.d.ts +4 -0
- package/dist/tools/lsp/client.d.ts +42 -0
- package/dist/tools/lsp/config.d.ts +4 -0
- package/dist/tools/lsp/constants.d.ts +8 -0
- package/dist/tools/lsp/index.d.ts +3 -0
- package/dist/tools/lsp/tools.d.ts +5 -0
- package/dist/tools/lsp/types.d.ts +28 -0
- package/dist/tools/lsp/utils.d.ts +21 -0
- package/dist/utils/agent-variant.d.ts +47 -0
- package/dist/utils/index.d.ts +5 -0
- package/dist/utils/logger.d.ts +1 -0
- package/dist/utils/polling.d.ts +21 -0
- package/dist/utils/tmux.d.ts +32 -0
- package/dist/utils/zip-extractor.d.ts +1 -0
- package/package.json +68 -0
- package/src/skills/cartography/README.md +57 -0
- package/src/skills/cartography/SKILL.md +137 -0
- package/src/skills/cartography/scripts/cartographer.py +456 -0
- package/src/skills/cartography/scripts/test_cartographer.py +87 -0
|
@@ -0,0 +1,137 @@
|
|
|
1
|
+
---
|
|
2
|
+
name: cartography
|
|
3
|
+
description: Repository understanding and hierarchical codemap generation
|
|
4
|
+
---
|
|
5
|
+
|
|
6
|
+
# Cartography Skill
|
|
7
|
+
|
|
8
|
+
You help users understand and map repositories by creating hierarchical codemaps.
|
|
9
|
+
|
|
10
|
+
## When to Use
|
|
11
|
+
|
|
12
|
+
- User asks to understand/map a repository
|
|
13
|
+
- User wants codebase documentation
|
|
14
|
+
- Starting work on an unfamiliar codebase
|
|
15
|
+
|
|
16
|
+
## Workflow
|
|
17
|
+
|
|
18
|
+
### Step 1: Check for Existing State
|
|
19
|
+
|
|
20
|
+
**First, check if `.slim/cartography.json` exists in the repo root.**
|
|
21
|
+
|
|
22
|
+
If it **exists**: Skip to Step 3 (Detect Changes) - no need to re-initialize.
|
|
23
|
+
|
|
24
|
+
If it **doesn't exist**: Continue to Step 2 (Initialize).
|
|
25
|
+
|
|
26
|
+
### Step 2: Initialize (Only if no state exists)
|
|
27
|
+
|
|
28
|
+
1. **Analyze the repository structure** - List files, understand directories
|
|
29
|
+
2. **Infer patterns** for **core code/config files ONLY** to include:
|
|
30
|
+
- **Include**: `src/**/*.ts`, `package.json`, etc.
|
|
31
|
+
- **Exclude (MANDATORY)**: Do NOT include tests, documentation, or translations.
|
|
32
|
+
- Tests: `**/*.test.ts`, `**/*.spec.ts`, `tests/**`, `__tests__/**`
|
|
33
|
+
- Docs: `docs/**`, `*.md` (except root `README.md` if needed), `LICENSE`
|
|
34
|
+
- Build/Deps: `node_modules/**`, `dist/**`, `build/**`, `*.min.js`
|
|
35
|
+
- Respect `.gitignore` automatically
|
|
36
|
+
3. **Run cartographer.py init**:
|
|
37
|
+
|
|
38
|
+
```bash
|
|
39
|
+
python3 ~/.config/opencode/skills/cartography/scripts/cartographer.py init \
|
|
40
|
+
--root ./ \
|
|
41
|
+
--include "src/**/*.ts" \
|
|
42
|
+
--exclude "**/*.test.ts" --exclude "dist/**" --exclude "node_modules/**"
|
|
43
|
+
```
|
|
44
|
+
|
|
45
|
+
This creates:
|
|
46
|
+
- `.slim/cartography.json` - File and folder hashes for change detection
|
|
47
|
+
- Empty `codemap.md` files in all relevant subdirectories
|
|
48
|
+
|
|
49
|
+
4. **Delegate to Explorer agents** - Spawn one explorer per folder to read code and fill in its specific `codemap.md` file.
|
|
50
|
+
|
|
51
|
+
### Step 3: Detect Changes (If state already exists)
|
|
52
|
+
|
|
53
|
+
1. **Run cartographer.py changes** to see what changed:
|
|
54
|
+
|
|
55
|
+
```bash
|
|
56
|
+
python3 ~/.config/opencode/skills/cartography/scripts/cartographer.py changes \
|
|
57
|
+
--root ./
|
|
58
|
+
```
|
|
59
|
+
|
|
60
|
+
2. **Review the output** - It shows:
|
|
61
|
+
- Added files
|
|
62
|
+
- Removed files
|
|
63
|
+
- Modified files
|
|
64
|
+
- Affected folders
|
|
65
|
+
|
|
66
|
+
3. **Only update affected codemaps** - Spawn one explorer per affected folder to update its `codemap.md`.
|
|
67
|
+
4. **Run update** to save new state:
|
|
68
|
+
|
|
69
|
+
```bash
|
|
70
|
+
python3 ~/.config/opencode/skills/cartography/scripts/cartographer.py update \
|
|
71
|
+
--root ./
|
|
72
|
+
```
|
|
73
|
+
|
|
74
|
+
### Step 4: Finalize Repository Atlas (Root Codemap)
|
|
75
|
+
|
|
76
|
+
Once all specific directories are mapped, the Orchestrator must create or update the root `codemap.md`. This file serves as the **Master Entry Point** for any agent or human entering the repository.
|
|
77
|
+
|
|
78
|
+
1. **Map Root Assets**: Document the root-level files (e.g., `package.json`, `index.ts`, `plugin.json`) and the project's overall purpose.
|
|
79
|
+
2. **Aggregate Sub-Maps**: Create a "Repository Directory Map" section. For every folder that has a `codemap.md`, extract its **Responsibility** summary and include it in a table or list in the root map.
|
|
80
|
+
3. **Cross-Reference**: Ensure that the root map contains the absolute or relative paths to the sub-maps so agents can jump directly to the relevant details.
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
## Codemap Content
|
|
84
|
+
|
|
85
|
+
Explorers are granted write permissions for `codemap.md` files during this workflow. Use precise technical terminology to document the implementation:
|
|
86
|
+
|
|
87
|
+
- **Responsibility** - Define the specific role of this directory using standard software engineering terms (e.g., "Service Layer", "Data Access Object", "Middleware").
|
|
88
|
+
- **Design Patterns** - Identify and name specific patterns used (e.g., "Observer", "Singleton", "Factory", "Strategy"). Detail the abstractions and interfaces.
|
|
89
|
+
- **Data & Control Flow** - Explicitly trace how data enters and leaves the module. Mention specific function call sequences and state transitions.
|
|
90
|
+
- **Integration Points** - List dependencies and consumer modules. Use technical names for hooks, events, or API endpoints.
|
|
91
|
+
|
|
92
|
+
Example codemap:
|
|
93
|
+
|
|
94
|
+
```markdown
|
|
95
|
+
# src/agents/
|
|
96
|
+
|
|
97
|
+
## Responsibility
|
|
98
|
+
Defines agent personalities and manages their configuration lifecycle.
|
|
99
|
+
|
|
100
|
+
## Design
|
|
101
|
+
Each agent is a prompt + permission set. Config system uses:
|
|
102
|
+
- Default prompts (orchestrator.ts, explorer.ts, etc.)
|
|
103
|
+
- User overrides from ~/.config/opencode/oh-my-opencode-slim.json
|
|
104
|
+
- Permission wildcards for skill/MCP access control
|
|
105
|
+
|
|
106
|
+
## Flow
|
|
107
|
+
1. Plugin loads → calls getAgentConfigs()
|
|
108
|
+
2. Reads user config preset
|
|
109
|
+
3. Merges defaults with overrides
|
|
110
|
+
4. Applies permission rules (wildcard expansion)
|
|
111
|
+
5. Returns agent configs to OpenCode
|
|
112
|
+
|
|
113
|
+
## Integration
|
|
114
|
+
- Consumed by: Main plugin (src/index.ts)
|
|
115
|
+
- Depends on: Config loader, skills registry
|
|
116
|
+
```
|
|
117
|
+
|
|
118
|
+
Example **Root Codemap (Atlas)**:
|
|
119
|
+
|
|
120
|
+
```markdown
|
|
121
|
+
# Repository Atlas: oh-my-opencode-slim
|
|
122
|
+
|
|
123
|
+
## Project Responsibility
|
|
124
|
+
A high-performance, low-latency agent orchestration plugin for OpenCode, focusing on specialized sub-agent delegation and background task management.
|
|
125
|
+
|
|
126
|
+
## System Entry Points
|
|
127
|
+
- `src/index.ts`: Plugin initialization and OpenCode integration.
|
|
128
|
+
- `package.json`: Dependency manifest and build scripts.
|
|
129
|
+
- `oh-my-opencode-slim.json`: User configuration schema.
|
|
130
|
+
|
|
131
|
+
## Directory Map (Aggregated)
|
|
132
|
+
| Directory | Responsibility Summary | Detailed Map |
|
|
133
|
+
|-----------|------------------------|--------------|
|
|
134
|
+
| `src/agents/` | Defines agent personalities (Orchestrator, Explorer) and manages model routing. | [View Map](src/agents/codemap.md) |
|
|
135
|
+
| `src/features/` | Core logic for tmux integration, background task spawning, and session state. | [View Map](src/features/codemap.md) |
|
|
136
|
+
| `src/config/` | Implements the configuration loading pipeline and environment variable injection. | [View Map](src/config/codemap.md) |
|
|
137
|
+
```
|
|
@@ -0,0 +1,456 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Cartographer - Repository mapping and change detection tool.
|
|
4
|
+
|
|
5
|
+
Commands:
|
|
6
|
+
init Initialize mapping (create hashes + empty codemaps)
|
|
7
|
+
changes Show what changed (read-only, like git status)
|
|
8
|
+
update Update hashes (like git commit)
|
|
9
|
+
|
|
10
|
+
Usage:
|
|
11
|
+
cartographer.py init --root /path/to/repo --include "src/**/*.ts" --exclude "node_modules/**"
|
|
12
|
+
cartographer.py changes --root /path/to/repo
|
|
13
|
+
cartographer.py update --root /path/to/repo
|
|
14
|
+
"""
|
|
15
|
+
|
|
16
|
+
import argparse
|
|
17
|
+
import hashlib
|
|
18
|
+
import json
|
|
19
|
+
import os
|
|
20
|
+
import re
|
|
21
|
+
import sys
|
|
22
|
+
from datetime import datetime, timezone
|
|
23
|
+
from pathlib import Path, PurePath
|
|
24
|
+
from typing import Dict, List, Optional, Set, Tuple
|
|
25
|
+
|
|
26
|
+
# Tool metadata and on-disk layout constants.
VERSION = "1.0.0"
# Directory (relative to the repository root) that holds cartography state.
STATE_DIR = ".slim"
# JSON file storing file/folder hashes plus the saved scan configuration.
STATE_FILE = "cartography.json"
# Name of the per-folder map file created in each mapped directory.
CODEMAP_FILE = "codemap.md"
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def load_gitignore(root: Path) -> List[str]:
    """Read the repository-root .gitignore and return its usable patterns.

    Blank lines and comment lines (starting with '#') are dropped; every
    other line is returned stripped of surrounding whitespace. Returns an
    empty list when no .gitignore exists.
    """
    ignore_file = root / ".gitignore"
    if not ignore_file.exists():
        return []
    with open(ignore_file, "r", encoding="utf-8") as handle:
        stripped = (raw.strip() for raw in handle)
        return [entry for entry in stripped if entry and not entry.startswith("#")]
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class PatternMatcher:
    """Match relative paths against a set of glob-style patterns.

    All patterns are translated to regular expressions once at construction
    and combined into a single alternation, so each matches() call costs one
    regex search regardless of pattern count.

    Supported syntax: ``**/`` (any directory depth), ``**`` (anything),
    ``*`` (anything but ``/``), ``?`` (one character), a trailing ``/``
    (the directory and everything under it), and a leading ``/`` (anchor at
    the root; otherwise the pattern may match at any depth).
    """

    def __init__(self, patterns: List[str]):
        if not patterns:
            self.regex = None
            return

        alternatives = []
        for glob in patterns:
            # Escape literals first, then re-introduce glob operators.
            # Replacement order matters: '**/' before '**' before '*'.
            fragment = re.escape(glob)
            fragment = fragment.replace(r'\*\*/', '(?:.*/)?')  # recursive glob
            fragment = fragment.replace(r'\*\*', '.*')
            fragment = fragment.replace(r'\*', '[^/]*')  # single-level glob
            fragment = fragment.replace(r'\?', '.')

            # Trailing slash: match the directory and all of its contents.
            if glob.endswith('/'):
                fragment += '.*'

            # Leading slash anchors at the root; otherwise allow any depth.
            if glob.startswith('/'):
                fragment = '^' + fragment[1:]
            else:
                fragment = '(?:^|.*/)' + fragment

            alternatives.append(f'(?:{fragment}$)')

        # One combined regex keeps matching fast for large pattern sets.
        self.regex = re.compile('|'.join(alternatives))

    def matches(self, path: str) -> bool:
        """Return True when *path* matches at least one pattern."""
        return bool(self.regex.search(path)) if self.regex else False
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def select_files(
    root: Path,
    include_patterns: List[str],
    exclude_patterns: List[str],
    exceptions: List[str],
    gitignore_patterns: List[str],
) -> List[Path]:
    """Walk *root* and return the sorted list of files matching the config.

    A file is selected when it matches an include pattern (or is named in
    *exceptions*), is not matched by the .gitignore patterns, and is not
    excluded — unless it is listed explicitly in *exceptions*, which
    overrides exclusion. Hidden (dot-prefixed) directories are never
    descended into.
    """
    # Compile each pattern list once, up front.
    include = PatternMatcher(include_patterns)
    exclude = PatternMatcher(exclude_patterns)
    ignored = PatternMatcher(gitignore_patterns)
    forced = set(exceptions)

    base = str(root)
    chosen: List[Path] = []

    for current_dir, subdirs, names in os.walk(base):
        # Prune hidden directories in place so os.walk never enters them.
        subdirs[:] = [d for d in subdirs if not d.startswith(".")]

        folder = os.path.relpath(current_dir, base)
        if folder == ".":
            folder = ""

        for name in names:
            # Normalize to a forward-slash relative path for matching.
            rel = os.path.join(folder, name).replace("\\", "/")
            if rel.startswith("./"):
                rel = rel[2:]

            if ignored.matches(rel):
                continue
            # Exclusion wins unless the path is an explicit exception.
            if exclude.matches(rel) and rel not in forced:
                continue
            if include.matches(rel) or rel in forced:
                chosen.append(root / rel)

    return sorted(chosen)
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
def compute_file_hash(filepath: Path) -> str:
    """Return the MD5 hex digest of *filepath*'s contents.

    The file is streamed in 8 KiB chunks so large files are never loaded
    into memory at once. An unreadable file yields the empty string rather
    than raising, so one bad file cannot abort a whole scan.
    """
    digest = hashlib.md5()
    try:
        with open(filepath, "rb") as stream:
            while True:
                chunk = stream.read(8192)
                if not chunk:
                    break
                digest.update(chunk)
    except (IOError, OSError):
        return ""
    return digest.hexdigest()
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
def compute_folder_hash(folder: str, file_hashes: Dict[str, str]) -> str:
    """Derive a deterministic hash for *folder* from its member files.

    A file belongs to *folder* when its path lies anywhere beneath it (any
    depth); the special folder "." collects the top-level files (paths with
    no '/'). The result changes whenever a member path appears/disappears or
    a member's content hash changes; a folder with no members hashes to "".
    """
    pairs = [
        (path, digest)
        for path, digest in file_hashes.items()
        if path.startswith(folder + "/") or (folder == "." and "/" not in path)
    ]
    if not pairs:
        return ""

    # Sort so the digest is independent of dict iteration order.
    pairs.sort()
    md5 = hashlib.md5()
    for path, digest in pairs:
        md5.update(f"{path}:{digest}\n".encode())
    return md5.hexdigest()
|
|
160
|
+
|
|
161
|
+
|
|
162
|
+
def get_folders_with_files(files: List[Path], root: Path) -> Set[str]:
    """Return every folder (relative, '/'-joined) that holds a selected file.

    Each file contributes all of its ancestor directories, so intermediate
    folders are included even when they hold no files directly. The root
    folder "." is always present.
    """
    folders: Set[str] = {"."}
    for file_path in files:
        # Ancestor directory names, outermost first (filename excluded).
        ancestors = file_path.relative_to(root).parts[:-1]
        prefix = ""
        for part in ancestors:
            prefix = f"{prefix}/{part}" if prefix else part
            folders.add(prefix)
    return folders
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
def load_state(root: Path) -> Optional[dict]:
    """Load the saved cartography state, or None when it is unusable.

    A missing state file, invalid JSON, and a read failure are all reported
    identically as None, so callers need only one "no usable state" branch.
    """
    state_path = root / STATE_DIR / STATE_FILE
    if not state_path.exists():
        return None
    try:
        with open(state_path, "r", encoding="utf-8") as handle:
            return json.load(handle)
    except (json.JSONDecodeError, IOError):
        return None
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
def save_state(root: Path, state: dict) -> None:
    """Persist *state* as pretty-printed JSON in the state directory.

    The state directory (and any missing parents) is created on demand;
    an existing state file is overwritten.
    """
    target_dir = root / STATE_DIR
    target_dir.mkdir(parents=True, exist_ok=True)
    with open(target_dir / STATE_FILE, "w", encoding="utf-8") as handle:
        json.dump(state, handle, indent=2)
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
def create_empty_codemap(folder_path: Path, folder_name: str) -> None:
    """Create a skeleton codemap.md in *folder_path* if none exists.

    An existing codemap is never overwritten, so explorer-written content
    survives repeated init runs.
    """
    target = folder_path / CODEMAP_FILE
    if target.exists():
        return

    template = f"""# {folder_name}/

<!-- Explorer: Fill in this section with architectural understanding -->

## Responsibility

<!-- What is this folder's job in the system? -->

## Design

<!-- Key patterns, abstractions, architectural decisions -->

## Flow

<!-- How does data/control flow through this module? -->

## Integration

<!-- How does it connect to other parts of the system? -->
"""
    with open(target, "w", encoding="utf-8") as handle:
        handle.write(template)
|
|
223
|
+
|
|
224
|
+
|
|
225
|
+
def cmd_init(args: argparse.Namespace) -> int:
    """Initialize mapping: create hashes and empty codemaps.

    Scans ``args.root`` with the given include/exclude/exception patterns,
    writes the state file (hashes plus the scan configuration, so later
    'changes'/'update' runs reuse the same settings), and drops a skeleton
    codemap.md into every folder containing a selected file.

    Returns 0 on success, 1 when the root is not a directory.
    """
    root = Path(args.root).resolve()

    if not root.is_dir():
        print(f"Error: {root} is not a directory", file=sys.stderr)
        return 1

    # Load patterns; with no --include flags everything is considered.
    gitignore = load_gitignore(root)
    include_patterns = args.include or ["**/*"]
    exclude_patterns = args.exclude or []
    exceptions = args.exception or []

    print(f"Scanning {root}...")
    print(f"Include patterns: {include_patterns}")
    print(f"Exclude patterns: {exclude_patterns}")
    print(f"Exceptions: {exceptions}")

    # Select files
    selected_files = select_files(
        root, include_patterns, exclude_patterns, exceptions, gitignore
    )

    print(f"Selected {len(selected_files)} files")

    # Compute file hashes (keyed by path relative to root)
    file_hashes: Dict[str, str] = {}
    for f in selected_files:
        rel_path = str(f.relative_to(root))
        file_hashes[rel_path] = compute_file_hash(f)

    # Get folders and compute folder hashes
    folders = get_folders_with_files(selected_files, root)
    folder_hashes: Dict[str, str] = {}
    for folder in folders:
        folder_hashes[folder] = compute_folder_hash(folder, file_hashes)

    # Create state; patterns are persisted so later commands rescan
    # with the exact same configuration.
    state = {
        "metadata": {
            "version": VERSION,
            # ISO-8601 UTC timestamp with 'Z' suffix instead of '+00:00'
            "last_run": datetime.now(timezone.utc).isoformat().replace("+00:00", "Z"),
            "root": str(root),
            "include_patterns": include_patterns,
            "exclude_patterns": exclude_patterns,
            "exceptions": exceptions,
        },
        "file_hashes": file_hashes,
        "folder_hashes": folder_hashes,
    }

    # Save state
    save_state(root, state)
    print(f"Created {STATE_DIR}/{STATE_FILE}")

    # Create empty codemaps; the root codemap is titled with the repo
    # directory's own name rather than ".".
    for folder in folders:
        if folder == ".":
            folder_path = root
            folder_name = root.name
        else:
            folder_path = root / folder
            folder_name = folder

        create_empty_codemap(folder_path, folder_name)

    print(f"Created {len(folders)} empty codemap.md files")

    return 0
|
|
295
|
+
|
|
296
|
+
|
|
297
|
+
def cmd_changes(args: argparse.Namespace) -> int:
    """Show what changed since last update (read-only, like ``git status``).

    Rescans the repository using the patterns saved in the state file,
    diffs the fresh file hashes against the saved ones, and prints the
    added/removed/modified files plus every folder affected. The state
    file itself is not modified.

    Returns 0 (including when nothing changed), or 1 when no state exists.
    """
    root = Path(args.root).resolve()

    state = load_state(root)
    if not state:
        print("No cartography state found. Run 'init' first.", file=sys.stderr)
        return 1

    # Get patterns from saved state so the rescan matches the init config.
    metadata = state.get("metadata", {})
    include_patterns = metadata.get("include_patterns", ["**/*"])
    exclude_patterns = metadata.get("exclude_patterns", [])
    exceptions = metadata.get("exceptions", [])

    # .gitignore is re-read each run, so newly ignored files drop out.
    gitignore = load_gitignore(root)

    # Select current files
    current_files = select_files(
        root, include_patterns, exclude_patterns, exceptions, gitignore
    )

    # Compute current hashes
    current_hashes: Dict[str, str] = {}
    for f in current_files:
        rel_path = str(f.relative_to(root))
        current_hashes[rel_path] = compute_file_hash(f)

    saved_hashes = state.get("file_hashes", {})

    # Find changes via set arithmetic on the path keys.
    added = set(current_hashes.keys()) - set(saved_hashes.keys())
    removed = set(saved_hashes.keys()) - set(current_hashes.keys())
    modified = {
        path
        for path in current_hashes.keys() & saved_hashes.keys()
        if current_hashes[path] != saved_hashes[path]
    }

    if not added and not removed and not modified:
        print("No changes detected.")
        return 0

    if added:
        print(f"\n{len(added)} added:")
        for path in sorted(added):
            print(f"  + {path}")

    if removed:
        print(f"\n{len(removed)} removed:")
        for path in sorted(removed):
            print(f"  - {path}")

    if modified:
        print(f"\n{len(modified)} modified:")
        for path in sorted(modified):
            print(f"  ~ {path}")

    # Show affected folders: every ancestor directory of a changed file,
    # plus the root, so callers know which codemaps need refreshing.
    affected_folders = set()
    for path in added | removed | modified:
        parts = Path(path).parts[:-1]
        for i in range(len(parts)):
            affected_folders.add("/".join(parts[: i + 1]))
    affected_folders.add(".")

    print(f"\n{len(affected_folders)} folders affected:")
    for folder in sorted(affected_folders):
        print(f"  {folder}/")

    return 0
|
|
368
|
+
|
|
369
|
+
|
|
370
|
+
def cmd_update(args: argparse.Namespace) -> int:
    """Update hashes and save state (like ``git commit`` for the map).

    Rescans the repository using the patterns saved in the state file,
    recomputes all file and folder hashes, and writes them back together
    with a fresh ``last_run`` timestamp. The saved patterns themselves are
    left untouched.

    Returns 0 on success, or 1 when no state exists yet.
    """
    root = Path(args.root).resolve()

    state = load_state(root)
    if not state:
        print("No cartography state found. Run 'init' first.", file=sys.stderr)
        return 1

    # Get patterns from saved state so the rescan matches the init config.
    metadata = state.get("metadata", {})
    include_patterns = metadata.get("include_patterns", ["**/*"])
    exclude_patterns = metadata.get("exclude_patterns", [])
    exceptions = metadata.get("exceptions", [])

    gitignore = load_gitignore(root)

    # Select current files
    selected_files = select_files(
        root, include_patterns, exclude_patterns, exceptions, gitignore
    )

    # Compute new hashes
    file_hashes: Dict[str, str] = {}
    for f in selected_files:
        rel_path = str(f.relative_to(root))
        file_hashes[rel_path] = compute_file_hash(f)

    # Compute folder hashes
    folders = get_folders_with_files(selected_files, root)
    folder_hashes: Dict[str, str] = {}
    for folder in folders:
        folder_hashes[folder] = compute_folder_hash(folder, file_hashes)

    # Update state in place, preserving the saved scan configuration.
    state["metadata"]["last_run"] = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
    state["file_hashes"] = file_hashes
    state["folder_hashes"] = folder_hashes

    save_state(root, state)
    print(f"Updated {STATE_DIR}/{STATE_FILE} with {len(file_hashes)} files")

    return 0
|
|
413
|
+
|
|
414
|
+
|
|
415
|
+
def main() -> int:
    """Parse the command line and dispatch to the chosen subcommand.

    Subcommands: ``init`` (create state + empty codemaps), ``changes``
    (read-only diff report), and ``update`` (recompute and save hashes).
    Returns the subcommand's exit status; with no subcommand, prints the
    help text and returns 1.
    """
    parser = argparse.ArgumentParser(
        description="Cartographer - Repository mapping and change detection"
    )
    subparsers = parser.add_subparsers(dest="command", help="Available commands")

    # Init command: --include/--exclude/--exception are repeatable flags.
    init_parser = subparsers.add_parser("init", help="Initialize mapping")
    init_parser.add_argument("--root", required=True, help="Repository root path")
    init_parser.add_argument(
        "--include", action="append", help="Glob patterns for files to include"
    )
    init_parser.add_argument(
        "--exclude", action="append", help="Glob patterns for files to exclude"
    )
    init_parser.add_argument(
        "--exception", action="append", help="Explicit file paths to include despite exclusions"
    )

    # Changes command
    changes_parser = subparsers.add_parser("changes", help="Show what changed")
    changes_parser.add_argument("--root", required=True, help="Repository root path")

    # Update command
    update_parser = subparsers.add_parser("update", help="Update hashes")
    update_parser.add_argument("--root", required=True, help="Repository root path")

    args = parser.parse_args()

    if args.command == "init":
        return cmd_init(args)
    elif args.command == "changes":
        return cmd_changes(args)
    elif args.command == "update":
        return cmd_update(args)
    else:
        # No subcommand given: show usage and signal failure.
        parser.print_help()
        return 1
|
|
453
|
+
|
|
454
|
+
|
|
455
|
+
if __name__ == "__main__":
    # Propagate the subcommand's status code to the shell.
    sys.exit(main())
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
import unittest
|
|
2
|
+
import os
|
|
3
|
+
import shutil
|
|
4
|
+
import json
|
|
5
|
+
import tempfile
|
|
6
|
+
import hashlib
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from cartographer import PatternMatcher, compute_file_hash, compute_folder_hash, select_files
|
|
9
|
+
|
|
10
|
+
class TestCartographer(unittest.TestCase):
    """Unit tests for the pure helpers in cartographer.py.

    Covers glob-to-regex matching, content hashing, folder hashing, and
    end-to-end file selection in a temporary directory tree.
    """

    def test_pattern_matcher(self):
        """Directory, single-level, and recursive glob patterns match as documented."""
        patterns = ["node_modules/", "dist/", "*.log", "src/**/*.ts"]
        matcher = PatternMatcher(patterns)

        # Directory patterns: unanchored, so they match at any depth.
        self.assertTrue(matcher.matches("node_modules/foo.js"))
        self.assertTrue(matcher.matches("vendor/node_modules/bar.js"))
        self.assertTrue(matcher.matches("dist/main.js"))
        self.assertTrue(matcher.matches("src/dist/output.js"))

        # Glob patterns
        self.assertTrue(matcher.matches("error.log"))
        self.assertTrue(matcher.matches("logs/access.log"))

        # Recursive glob patterns ('**/' spans zero or more directories).
        self.assertTrue(matcher.matches("src/index.ts"))
        self.assertTrue(matcher.matches("src/utils/helper.ts"))

        # Non-matches
        self.assertFalse(matcher.matches("README.md"))
        self.assertFalse(matcher.matches("tests/test.py"))

    def test_compute_file_hash(self):
        """Hashing a file yields the MD5 of its exact bytes."""
        # Use binary mode to avoid any newline translation issues
        with tempfile.NamedTemporaryFile(mode='wb', delete=False) as f:
            f.write(b"test content")
            f_path = f.name

        try:
            h1 = compute_file_hash(Path(f_path))
            # md5 of b"test content" is 9473fdd0d880a43c21b7778d34872157
            expected = hashlib.md5(b"test content").hexdigest()
            self.assertEqual(h1, expected)
            self.assertEqual(h1, "9473fdd0d880a43c21b7778d34872157")
        finally:
            # delete=False requires explicit cleanup.
            if os.path.exists(f_path):
                os.unlink(f_path)

    def test_compute_folder_hash(self):
        """Folder hashes are deterministic and sensitive to member changes."""
        file_hashes = {
            "src/a.ts": "hash-a",
            "src/b.ts": "hash-b",
            "tests/test.ts": "hash-test"
        }

        # Same inputs must always produce the same digest.
        h1 = compute_folder_hash("src", file_hashes)
        h2 = compute_folder_hash("src", file_hashes)
        self.assertEqual(h1, h2)

        # Changing one member's content hash must change the folder hash.
        file_hashes_alt = {
            "src/a.ts": "hash-a-modified",
            "src/b.ts": "hash-b"
        }
        h3 = compute_folder_hash("src", file_hashes_alt)
        self.assertNotEqual(h1, h3)

    def test_select_files(self):
        """Include/exclude patterns select the expected files from a real tree."""
        with tempfile.TemporaryDirectory() as tmpdir:
            root = Path(tmpdir)
            (root / "src").mkdir()
            (root / "node_modules").mkdir()
            (root / "src" / "index.ts").write_text("code")
            (root / "src" / "index.test.ts").write_text("test")
            (root / "node_modules" / "foo.js").write_text("dep")
            (root / "package.json").write_text("{}")

            includes = ["src/**/*.ts", "package.json"]
            excludes = ["**/*.test.ts", "node_modules/"]
            exceptions = []

            selected = select_files(root, includes, excludes, exceptions, [])

            # Test file and vendored dependency are excluded; the rest remain.
            rel_selected = sorted([os.path.relpath(f, root) for f in selected])
            self.assertEqual(rel_selected, ["package.json", "src/index.ts"])
|
|
85
|
+
|
|
86
|
+
if __name__ == "__main__":
    # Allow running this test module directly without a test runner.
    unittest.main()
|