oh-my-opencode-lite 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +365 -0
- package/dist/agents/deep.d.ts +2 -0
- package/dist/agents/designer.d.ts +2 -0
- package/dist/agents/explorer.d.ts +2 -0
- package/dist/agents/index.d.ts +8 -0
- package/dist/agents/librarian.d.ts +2 -0
- package/dist/agents/oracle.d.ts +2 -0
- package/dist/agents/orchestrator.d.ts +15 -0
- package/dist/agents/prompt-utils.d.ts +10 -0
- package/dist/agents/quick.d.ts +2 -0
- package/dist/background/background-manager.d.ts +196 -0
- package/dist/background/index.d.ts +2 -0
- package/dist/background/tmux-session-manager.d.ts +63 -0
- package/dist/cli/config-io.d.ts +22 -0
- package/dist/cli/config-manager.d.ts +4 -0
- package/dist/cli/custom-skills.d.ts +48 -0
- package/dist/cli/index.d.ts +2 -0
- package/dist/cli/index.js +1178 -0
- package/dist/cli/install.d.ts +3 -0
- package/dist/cli/model-key-normalization.d.ts +1 -0
- package/dist/cli/paths.d.ts +21 -0
- package/dist/cli/providers.d.ts +120 -0
- package/dist/cli/skill-manifest.d.ts +32 -0
- package/dist/cli/skills.d.ts +26 -0
- package/dist/cli/system.d.ts +6 -0
- package/dist/cli/types.d.ts +38 -0
- package/dist/config/constants.d.ts +19 -0
- package/dist/config/index.d.ts +5 -0
- package/dist/config/loader.d.ts +33 -0
- package/dist/config/schema.d.ts +313 -0
- package/dist/config/utils.d.ts +10 -0
- package/dist/delegation/delegation-manager.d.ts +25 -0
- package/dist/delegation/index.d.ts +4 -0
- package/dist/delegation/paths.d.ts +15 -0
- package/dist/delegation/project-id.d.ts +1 -0
- package/dist/delegation/types.d.ts +39 -0
- package/dist/hooks/auto-update-checker/cache.d.ts +6 -0
- package/dist/hooks/auto-update-checker/checker.d.ts +28 -0
- package/dist/hooks/auto-update-checker/constants.d.ts +11 -0
- package/dist/hooks/auto-update-checker/index.d.ts +17 -0
- package/dist/hooks/auto-update-checker/types.d.ts +23 -0
- package/dist/hooks/chat-headers.d.ts +16 -0
- package/dist/hooks/clarification-gate/index.d.ts +30 -0
- package/dist/hooks/delegate-task-retry/guidance.d.ts +2 -0
- package/dist/hooks/delegate-task-retry/hook.d.ts +8 -0
- package/dist/hooks/delegate-task-retry/index.d.ts +4 -0
- package/dist/hooks/delegate-task-retry/patterns.d.ts +11 -0
- package/dist/hooks/foreground-fallback/index.d.ts +72 -0
- package/dist/hooks/index.d.ts +11 -0
- package/dist/hooks/json-error-recovery/hook.d.ts +18 -0
- package/dist/hooks/json-error-recovery/index.d.ts +1 -0
- package/dist/hooks/phase-reminder/index.d.ts +26 -0
- package/dist/hooks/post-read-nudge/index.d.ts +18 -0
- package/dist/hooks/skill-sync.d.ts +10 -0
- package/dist/hooks/thoth-mem/index.d.ts +46 -0
- package/dist/hooks/thoth-mem/protocol.d.ts +6 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.js +36210 -0
- package/dist/mcp/context7.d.ts +6 -0
- package/dist/mcp/grep-app.d.ts +6 -0
- package/dist/mcp/index.d.ts +7 -0
- package/dist/mcp/thoth.d.ts +3 -0
- package/dist/mcp/types.d.ts +12 -0
- package/dist/mcp/websearch.d.ts +6 -0
- package/dist/thoth/client.d.ts +14 -0
- package/dist/thoth/index.d.ts +2 -0
- package/dist/tools/ast-grep/cli.d.ts +15 -0
- package/dist/tools/ast-grep/constants.d.ts +25 -0
- package/dist/tools/ast-grep/downloader.d.ts +5 -0
- package/dist/tools/ast-grep/index.d.ts +10 -0
- package/dist/tools/ast-grep/tools.d.ts +3 -0
- package/dist/tools/ast-grep/types.d.ts +30 -0
- package/dist/tools/ast-grep/utils.d.ts +4 -0
- package/dist/tools/background.d.ts +13 -0
- package/dist/tools/index.d.ts +3 -0
- package/dist/tools/lsp/client.d.ts +42 -0
- package/dist/tools/lsp/config-store.d.ts +29 -0
- package/dist/tools/lsp/config.d.ts +4 -0
- package/dist/tools/lsp/constants.d.ts +24 -0
- package/dist/tools/lsp/index.d.ts +4 -0
- package/dist/tools/lsp/tools.d.ts +5 -0
- package/dist/tools/lsp/types.d.ts +35 -0
- package/dist/tools/lsp/utils.d.ts +34 -0
- package/dist/utils/agent-variant.d.ts +47 -0
- package/dist/utils/env.d.ts +1 -0
- package/dist/utils/index.d.ts +7 -0
- package/dist/utils/internal-initiator.d.ts +6 -0
- package/dist/utils/logger.d.ts +1 -0
- package/dist/utils/polling.d.ts +21 -0
- package/dist/utils/tmux.d.ts +32 -0
- package/dist/utils/zip-extractor.d.ts +1 -0
- package/oh-my-opencode-lite.schema.json +556 -0
- package/package.json +74 -0
- package/src/skills/_shared/openspec-convention.md +92 -0
- package/src/skills/_shared/persistence-contract.md +78 -0
- package/src/skills/_shared/thoth-mem-convention.md +80 -0
- package/src/skills/brainstorming/SKILL.md +120 -0
- package/src/skills/cartography/README.md +57 -0
- package/src/skills/cartography/SKILL.md +160 -0
- package/src/skills/cartography/scripts/cartographer.py +460 -0
- package/src/skills/cartography/scripts/test_cartographer.py +87 -0
- package/src/skills/executing-plans/SKILL.md +211 -0
- package/src/skills/plan-reviewer/SKILL.md +100 -0
- package/src/skills/sdd-apply/SKILL.md +101 -0
- package/src/skills/sdd-archive/SKILL.md +94 -0
- package/src/skills/sdd-design/SKILL.md +104 -0
- package/src/skills/sdd-propose/SKILL.md +99 -0
- package/src/skills/sdd-spec/SKILL.md +105 -0
- package/src/skills/sdd-tasks/SKILL.md +116 -0
- package/src/skills/sdd-verify/SKILL.md +102 -0
|
@@ -0,0 +1,460 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Cartographer - Repository mapping and change detection tool.
|
|
4
|
+
|
|
5
|
+
Commands:
|
|
6
|
+
init Initialize mapping (create hashes + empty codemaps)
|
|
7
|
+
changes Show what changed (read-only, like git status)
|
|
8
|
+
update Update hashes (like git commit)
|
|
9
|
+
|
|
10
|
+
Usage:
|
|
11
|
+
cartographer.py init --root /path/to/repo --include "src/**/*.ts" --exclude "node_modules/**"
|
|
12
|
+
cartographer.py changes --root /path/to/repo
|
|
13
|
+
cartographer.py update --root /path/to/repo
|
|
14
|
+
"""
|
|
15
|
+
|
|
16
|
+
import argparse
|
|
17
|
+
import hashlib
|
|
18
|
+
import json
|
|
19
|
+
import os
|
|
20
|
+
import re
|
|
21
|
+
import sys
|
|
22
|
+
from datetime import datetime, timezone
|
|
23
|
+
from pathlib import Path, PurePath
|
|
24
|
+
from typing import Dict, List, Optional, Set, Tuple
|
|
25
|
+
|
|
26
|
+
# Version stamp recorded in the state file's metadata.
VERSION = "1.0.0"
# Directory under the repository root that holds cartography state.
STATE_DIR = ".lite"
# JSON state file (file/folder hashes + run metadata) inside STATE_DIR.
STATE_FILE = "cartography.json"
# Per-folder architecture-notes file seeded by the `init` command.
CODEMAP_FILE = "codemap.md"
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def load_gitignore(root: Path) -> List[str]:
    """Load usable .gitignore patterns from the repository root.

    Blank lines and comment lines are skipped.  Negation patterns
    (lines starting with "!") are skipped as well: PatternMatcher has
    no concept of negation, and passing such a line through verbatim
    previously produced a regex that could spuriously ignore a file
    literally named e.g. "!keep.log" instead of un-ignoring anything.

    Args:
        root: Repository root directory.

    Returns:
        Pattern lines in file order; an empty list when no .gitignore
        exists.
    """
    gitignore_path = root / ".gitignore"
    patterns: List[str] = []
    if gitignore_path.exists():
        with open(gitignore_path, "r", encoding="utf-8") as f:
            for raw in f:
                line = raw.strip()
                # Skip blanks, comments, and unsupported negations.
                if not line or line.startswith("#") or line.startswith("!"):
                    continue
                patterns.append(line)
    return patterns
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class PatternMatcher:
    """Match relative paths against a set of glob patterns.

    Every pattern is translated into a regular-expression fragment once,
    at construction time, and all fragments are joined into a single
    alternation so each path is tested with one regex search.
    """

    def __init__(self, patterns: List[str]):
        """Compile *patterns*; an empty list matches nothing."""
        if patterns:
            alternation = "|".join(
                f"(?:{self._translate(p)}$)" for p in patterns
            )
            self.regex = re.compile(alternation)
        else:
            self.regex = None

    @staticmethod
    def _translate(pattern: str) -> str:
        """Convert one glob pattern into a regex fragment (no anchors)."""
        # Escape everything first, then rewrite the escaped glob tokens.
        fragment = re.escape(pattern)
        fragment = fragment.replace(r"\*\*/", "(?:.*/)?")  # "**/" spans directories
        fragment = fragment.replace(r"\*\*", ".*")
        fragment = fragment.replace(r"\*", "[^/]*")  # "*" stops at "/"
        fragment = fragment.replace(r"\?", ".")

        # A trailing "/" means "the directory and everything under it".
        if pattern.endswith("/"):
            fragment += ".*"

        # A leading "/" anchors at the root; otherwise match at any depth.
        if pattern.startswith("/"):
            return "^" + fragment[1:]
        return "(?:^|.*/)" + fragment

    def matches(self, path: str) -> bool:
        """Return True when *path* matches at least one pattern."""
        return bool(self.regex.search(path)) if self.regex else False
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def select_files(
    root: Path,
    include_patterns: List[str],
    exclude_patterns: List[str],
    exceptions: List[str],
    gitignore_patterns: List[str],
) -> List[Path]:
    """Walk *root* and return the files chosen by the pattern sets.

    A file is selected when it matches an include pattern (or is listed
    as an exception), is not ignored by .gitignore, and is not excluded
    (unless rescued by an exception).  Hidden directories are never
    descended into.  Returns sorted absolute paths.
    """
    include = PatternMatcher(include_patterns)
    exclude = PatternMatcher(exclude_patterns)
    ignored = PatternMatcher(gitignore_patterns)
    rescued = set(exceptions)

    chosen: List[Path] = []
    top = str(root)

    for dirpath, subdirs, names in os.walk(top):
        # Prune hidden directories in place so os.walk never enters them.
        subdirs[:] = [d for d in subdirs if not d.startswith(".")]

        prefix = os.path.relpath(dirpath, top)
        if prefix == ".":
            prefix = ""

        for name in names:
            rel = os.path.join(prefix, name).replace("\\", "/")
            if rel.startswith("./"):
                rel = rel[2:]

            # .gitignore wins unconditionally (exceptions cannot rescue).
            if ignored.matches(rel):
                continue

            # Explicit exclusions apply unless the path is an exception.
            if exclude.matches(rel) and rel not in rescued:
                continue

            if include.matches(rel) or rel in rescued:
                chosen.append(root / rel)

    return sorted(chosen)
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
def compute_file_hash(filepath: Path) -> str:
    """Return the hex MD5 digest of *filepath*'s bytes.

    The file is streamed in 8 KiB chunks so large files never have to
    fit in memory.  Unreadable files yield "" instead of raising, so a
    scan can proceed past permission or I/O errors.
    """
    digest = hashlib.md5()
    try:
        with open(filepath, "rb") as handle:
            while True:
                chunk = handle.read(8192)
                if not chunk:
                    break
                digest.update(chunk)
        return digest.hexdigest()
    except (IOError, OSError):
        return ""
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
def compute_folder_hash(folder: str, file_hashes: Dict[str, str]) -> str:
    """Derive a deterministic digest for *folder* from its files' hashes.

    The digest is the MD5 of the sorted "path:hash" lines of every file
    under the folder (for "." only top-level files count), so it changes
    exactly when the folder's file set or any member's hash changes.
    Returns "" for a folder that contains no tracked files.
    """
    prefix = folder + "/"
    members = sorted(
        entry
        for entry in file_hashes.items()
        if entry[0].startswith(prefix) or (folder == "." and "/" not in entry[0])
    )

    if not members:
        return ""

    digest = hashlib.md5()
    for rel_path, content_hash in members:
        digest.update(f"{rel_path}:{content_hash}\n".encode())
    return digest.hexdigest()
|
|
160
|
+
|
|
161
|
+
|
|
162
|
+
def get_folders_with_files(files: List[Path], root: Path) -> Set[str]:
    """Collect every folder (as a "/"-joined relative path) holding files.

    Each selected file contributes all of its ancestor directories, and
    "." (the repository root itself) is always included.
    """
    folders: Set[str] = {"."}
    for file_path in files:
        ancestors = file_path.relative_to(root).parts[:-1]  # drop filename
        folders.update(
            "/".join(ancestors[:depth]) for depth in range(1, len(ancestors) + 1)
        )
    return folders
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
def load_state(root: Path) -> Optional[dict]:
    """Read the saved cartography state.

    Returns None when the state file is missing, unreadable, or not
    valid JSON, so callers can treat all three as "not initialized".
    """
    state_path = root / STATE_DIR / STATE_FILE
    if not state_path.exists():
        return None
    try:
        with open(state_path, "r", encoding="utf-8") as handle:
            return json.load(handle)
    except (json.JSONDecodeError, IOError):
        return None
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
def save_state(root: Path, state: dict) -> None:
    """Persist *state* as pretty-printed JSON, creating the state dir."""
    target_dir = root / STATE_DIR
    target_dir.mkdir(parents=True, exist_ok=True)

    with open(target_dir / STATE_FILE, "w", encoding="utf-8") as handle:
        json.dump(state, handle, indent=2)
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
def create_empty_codemap(folder_path: Path, folder_name: str) -> None:
    """Seed *folder_path* with a skeleton codemap.md unless one exists.

    The skeleton carries the section headers an explorer later fills in;
    an existing codemap is never overwritten.
    """
    target = folder_path / CODEMAP_FILE
    if target.exists():
        return

    template = f"""# {folder_name}/

<!-- Explorer: Fill in this section with architectural understanding -->

## Responsibility

<!-- What is this folder's job in the system? -->

## Design

<!-- Key patterns, abstractions, architectural decisions -->

## Flow

<!-- How does data/control flow through this module? -->

## Integration

<!-- How does it connect to other parts of the system? -->
"""
    with open(target, "w", encoding="utf-8") as handle:
        handle.write(template)
|
|
223
|
+
|
|
224
|
+
|
|
225
|
+
def cmd_init(args: argparse.Namespace) -> int:
    """Initialize mapping: hash every selected file and seed codemaps.

    Builds the state file from scratch (metadata + file/folder hashes)
    and creates an empty codemap.md in every folder that holds selected
    files.  Returns 0 on success, 1 when --root is not a directory.
    """
    root = Path(args.root).resolve()

    if not root.is_dir():
        print(f"Error: {root} is not a directory", file=sys.stderr)
        return 1

    # Resolve the pattern sets (CLI flags may be absent).
    gitignore = load_gitignore(root)
    include_patterns = args.include or ["**/*"]
    exclude_patterns = args.exclude or []
    exceptions = args.exception or []

    print(f"Scanning {root}...")
    print(f"Include patterns: {include_patterns}")
    print(f"Exclude patterns: {exclude_patterns}")
    print(f"Exceptions: {exceptions}")

    selected_files = select_files(
        root, include_patterns, exclude_patterns, exceptions, gitignore
    )
    print(f"Selected {len(selected_files)} files")

    # Content hash per selected file, keyed by path relative to root.
    file_hashes: Dict[str, str] = {
        str(f.relative_to(root)): compute_file_hash(f) for f in selected_files
    }

    # Aggregate hash per folder that contains selected files.
    folders = get_folders_with_files(selected_files, root)
    folder_hashes: Dict[str, str] = {
        folder: compute_folder_hash(folder, file_hashes) for folder in folders
    }

    state = {
        "metadata": {
            "version": VERSION,
            "last_run": datetime.now(timezone.utc).isoformat().replace("+00:00", "Z"),
            "root": str(root),
            "include_patterns": include_patterns,
            "exclude_patterns": exclude_patterns,
            "exceptions": exceptions,
        },
        "file_hashes": file_hashes,
        "folder_hashes": folder_hashes,
    }

    save_state(root, state)
    print(f"Created {STATE_DIR}/{STATE_FILE}")

    # Seed every folder (root uses its own directory name as the title).
    for folder in folders:
        if folder == ".":
            create_empty_codemap(root, root.name)
        else:
            create_empty_codemap(root / folder, folder)

    print(f"Created {len(folders)} empty codemap.md files")

    return 0
|
|
295
|
+
|
|
296
|
+
|
|
297
|
+
def cmd_changes(args: argparse.Namespace) -> int:
    """Report files added/removed/modified since the last init/update.

    Read-only: the saved state is never modified (like `git status`).
    Returns 1 when no state exists, 0 otherwise.
    """
    root = Path(args.root).resolve()

    state = load_state(root)
    if not state:
        print("No cartography state found. Run 'init' first.", file=sys.stderr)
        return 1

    # Reuse the patterns recorded at init time so runs stay comparable.
    metadata = state.get("metadata", {})
    current_files = select_files(
        root,
        metadata.get("include_patterns", ["**/*"]),
        metadata.get("exclude_patterns", []),
        metadata.get("exceptions", []),
        load_gitignore(root),
    )

    current_hashes: Dict[str, str] = {
        str(f.relative_to(root)): compute_file_hash(f) for f in current_files
    }
    saved_hashes = state.get("file_hashes", {})

    # Diff the two snapshots.
    added = set(current_hashes) - set(saved_hashes)
    removed = set(saved_hashes) - set(current_hashes)
    modified = {
        p
        for p in current_hashes.keys() & saved_hashes.keys()
        if current_hashes[p] != saved_hashes[p]
    }

    if not (added or removed or modified):
        print("No changes detected.")
        return 0

    for label, marker, paths in (
        ("added", "+", added),
        ("removed", "-", removed),
        ("modified", "~", modified),
    ):
        if paths:
            print(f"\n{len(paths)} {label}:")
            for path in sorted(paths):
                print(f"  {marker} {path}")

    # Every ancestor of a changed file is an affected folder.
    affected_folders = {"."}
    for path in added | removed | modified:
        parts = Path(path).parts[:-1]
        for depth in range(1, len(parts) + 1):
            affected_folders.add("/".join(parts[:depth]))

    print(f"\n{len(affected_folders)} folders affected:")
    for folder in sorted(affected_folders):
        print(f"  {folder}/")

    return 0
|
|
368
|
+
|
|
369
|
+
|
|
370
|
+
def cmd_update(args: argparse.Namespace) -> int:
    """Re-hash everything and persist the refreshed state (like `git commit`).

    Uses the pattern sets recorded at init time.  Returns 1 when no
    state exists, 0 otherwise.
    """
    root = Path(args.root).resolve()

    state = load_state(root)
    if not state:
        print("No cartography state found. Run 'init' first.", file=sys.stderr)
        return 1

    # Re-select files with the patterns recorded at init time.
    metadata = state.get("metadata", {})
    selected_files = select_files(
        root,
        metadata.get("include_patterns", ["**/*"]),
        metadata.get("exclude_patterns", []),
        metadata.get("exceptions", []),
        load_gitignore(root),
    )

    file_hashes: Dict[str, str] = {
        str(f.relative_to(root)): compute_file_hash(f) for f in selected_files
    }
    folder_hashes: Dict[str, str] = {
        folder: compute_folder_hash(folder, file_hashes)
        for folder in get_folders_with_files(selected_files, root)
    }

    # Refresh the snapshot in place, keeping the rest of the metadata.
    state["metadata"]["last_run"] = (
        datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
    )
    state["file_hashes"] = file_hashes
    state["folder_hashes"] = folder_hashes

    save_state(root, state)
    print(f"Updated {STATE_DIR}/{STATE_FILE} with {len(file_hashes)} files")

    return 0
|
|
415
|
+
|
|
416
|
+
|
|
417
|
+
def main() -> int:
    """Parse the command line and dispatch to the chosen subcommand."""
    parser = argparse.ArgumentParser(
        description="Cartographer - Repository mapping and change detection"
    )
    subparsers = parser.add_subparsers(dest="command", help="Available commands")

    init_parser = subparsers.add_parser("init", help="Initialize mapping")
    init_parser.add_argument("--root", required=True, help="Repository root path")
    init_parser.add_argument(
        "--include", action="append", help="Glob patterns for files to include"
    )
    init_parser.add_argument(
        "--exclude", action="append", help="Glob patterns for files to exclude"
    )
    init_parser.add_argument(
        "--exception",
        action="append",
        help="Explicit file paths to include despite exclusions",
    )

    changes_parser = subparsers.add_parser("changes", help="Show what changed")
    changes_parser.add_argument("--root", required=True, help="Repository root path")

    update_parser = subparsers.add_parser("update", help="Update hashes")
    update_parser.add_argument("--root", required=True, help="Repository root path")

    args = parser.parse_args()

    # Dispatch table instead of an if/elif chain.
    handlers = {"init": cmd_init, "changes": cmd_changes, "update": cmd_update}
    handler = handlers.get(args.command)
    if handler is None:
        parser.print_help()
        return 1
    return handler(args)
|
|
457
|
+
|
|
458
|
+
|
|
459
|
+
if __name__ == "__main__":
|
|
460
|
+
sys.exit(main())
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
import unittest
|
|
2
|
+
import os
|
|
3
|
+
import shutil
|
|
4
|
+
import json
|
|
5
|
+
import tempfile
|
|
6
|
+
import hashlib
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from cartographer import PatternMatcher, compute_file_hash, compute_folder_hash, select_files
|
|
9
|
+
|
|
10
|
+
class TestCartographer(unittest.TestCase):
    """Unit tests for the pattern-matching, hashing, and selection helpers."""

    def test_pattern_matcher(self):
        matcher = PatternMatcher(["node_modules/", "dist/", "*.log", "src/**/*.ts"])

        # Directory patterns, plain globs, and recursive globs all match.
        for expected_hit in (
            "node_modules/foo.js",
            "vendor/node_modules/bar.js",
            "dist/main.js",
            "src/dist/output.js",
            "error.log",
            "logs/access.log",
            "src/index.ts",
            "src/utils/helper.ts",
        ):
            self.assertTrue(matcher.matches(expected_hit))

        # Paths outside every pattern must not match.
        for expected_miss in ("README.md", "tests/test.py"):
            self.assertFalse(matcher.matches(expected_miss))

    def test_compute_file_hash(self):
        # Binary mode avoids newline translation skewing the digest.
        with tempfile.NamedTemporaryFile(mode="wb", delete=False) as handle:
            handle.write(b"test content")
            tmp_name = handle.name

        try:
            digest = compute_file_hash(Path(tmp_name))
            self.assertEqual(digest, hashlib.md5(b"test content").hexdigest())
            self.assertEqual(digest, "9473fdd0d880a43c21b7778d34872157")
        finally:
            if os.path.exists(tmp_name):
                os.unlink(tmp_name)

    def test_compute_folder_hash(self):
        hashes = {
            "src/a.ts": "hash-a",
            "src/b.ts": "hash-b",
            "tests/test.ts": "hash-test",
        }

        # Deterministic for identical input.
        self.assertEqual(
            compute_folder_hash("src", hashes),
            compute_folder_hash("src", hashes),
        )

        # Sensitive to a changed member hash.
        altered = {"src/a.ts": "hash-a-modified", "src/b.ts": "hash-b"}
        self.assertNotEqual(
            compute_folder_hash("src", hashes),
            compute_folder_hash("src", altered),
        )

    def test_select_files(self):
        with tempfile.TemporaryDirectory() as tmpdir:
            root = Path(tmpdir)
            (root / "src").mkdir()
            (root / "node_modules").mkdir()
            (root / "src" / "index.ts").write_text("code")
            (root / "src" / "index.test.ts").write_text("test")
            (root / "node_modules" / "foo.js").write_text("dep")
            (root / "package.json").write_text("{}")

            picked = select_files(
                root,
                ["src/**/*.ts", "package.json"],
                ["**/*.test.ts", "node_modules/"],
                [],
                [],
            )

            self.assertEqual(
                sorted(os.path.relpath(f, root) for f in picked),
                ["package.json", "src/index.ts"],
            )
|
|
85
|
+
|
|
86
|
+
if __name__ == "__main__":
|
|
87
|
+
unittest.main()
|