codebeacon 0.1.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- codebeacon/__init__.py +1 -0
- codebeacon/__main__.py +3 -0
- codebeacon/cache.py +136 -0
- codebeacon/cli.py +391 -0
- codebeacon/common/__init__.py +0 -0
- codebeacon/common/filters.py +170 -0
- codebeacon/common/symbols.py +121 -0
- codebeacon/common/types.py +98 -0
- codebeacon/config.py +144 -0
- codebeacon/contextmap/__init__.py +0 -0
- codebeacon/contextmap/generator.py +602 -0
- codebeacon/discover/__init__.py +0 -0
- codebeacon/discover/detector.py +388 -0
- codebeacon/discover/scanner.py +192 -0
- codebeacon/export/__init__.py +0 -0
- codebeacon/export/mcp.py +515 -0
- codebeacon/export/obsidian.py +812 -0
- codebeacon/extract/__init__.py +22 -0
- codebeacon/extract/base.py +372 -0
- codebeacon/extract/components.py +357 -0
- codebeacon/extract/dependencies.py +140 -0
- codebeacon/extract/entities.py +575 -0
- codebeacon/extract/queries/README.md +116 -0
- codebeacon/extract/queries/actix.scm +115 -0
- codebeacon/extract/queries/angular.scm +155 -0
- codebeacon/extract/queries/aspnet.scm +159 -0
- codebeacon/extract/queries/django.scm +122 -0
- codebeacon/extract/queries/express.scm +124 -0
- codebeacon/extract/queries/fastapi.scm +152 -0
- codebeacon/extract/queries/flask.scm +120 -0
- codebeacon/extract/queries/gin.scm +142 -0
- codebeacon/extract/queries/ktor.scm +144 -0
- codebeacon/extract/queries/laravel.scm +172 -0
- codebeacon/extract/queries/nestjs.scm +183 -0
- codebeacon/extract/queries/rails.scm +114 -0
- codebeacon/extract/queries/react.scm +111 -0
- codebeacon/extract/queries/spring_boot.scm +204 -0
- codebeacon/extract/queries/svelte.scm +73 -0
- codebeacon/extract/queries/vapor.scm +130 -0
- codebeacon/extract/queries/vue.scm +123 -0
- codebeacon/extract/routes.py +910 -0
- codebeacon/extract/semantic.py +280 -0
- codebeacon/extract/services.py +597 -0
- codebeacon/graph/__init__.py +1 -0
- codebeacon/graph/analyze.py +281 -0
- codebeacon/graph/build.py +320 -0
- codebeacon/graph/cluster.py +160 -0
- codebeacon/graph/enrich.py +206 -0
- codebeacon/skill/SKILL.md +127 -0
- codebeacon/wave.py +292 -0
- codebeacon/wiki/__init__.py +0 -0
- codebeacon/wiki/generator.py +376 -0
- codebeacon/wiki/index.py +95 -0
- codebeacon/wiki/templates.py +467 -0
- codebeacon-0.1.2.dist-info/METADATA +319 -0
- codebeacon-0.1.2.dist-info/RECORD +59 -0
- codebeacon-0.1.2.dist-info/WHEEL +4 -0
- codebeacon-0.1.2.dist-info/entry_points.txt +2 -0
- codebeacon-0.1.2.dist-info/licenses/LICENSE +21 -0
codebeacon/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
__version__ = "0.1.2"
|
codebeacon/__main__.py
ADDED
codebeacon/cache.py
ADDED
|
@@ -0,0 +1,136 @@
|
|
|
1
|
+
"""SHA-256 based incremental cache for codebeacon.
|
|
2
|
+
|
|
3
|
+
Stores file_path → {hash, result, ts} mapping in .codebeacon/cache/cache.json.
|
|
4
|
+
On re-scan, files whose hash hasn't changed reuse cached extraction results,
|
|
5
|
+
skipping tree-sitter re-parsing.
|
|
6
|
+
|
|
7
|
+
Usage:
|
|
8
|
+
cache = Cache(output_dir)
|
|
9
|
+
cache.load()
|
|
10
|
+
|
|
11
|
+
for file in files:
|
|
12
|
+
cached = cache.get(file) # None if stale/missing
|
|
13
|
+
if cached is not None:
|
|
14
|
+
use(cached)
|
|
15
|
+
else:
|
|
16
|
+
result = extract(file)
|
|
17
|
+
cache.put(file, result)
|
|
18
|
+
|
|
19
|
+
cache.save()
|
|
20
|
+
"""
|
|
21
|
+
|
|
22
|
+
from __future__ import annotations
|
|
23
|
+
|
|
24
|
+
import hashlib
|
|
25
|
+
import json
|
|
26
|
+
import time
|
|
27
|
+
from pathlib import Path
|
|
28
|
+
from typing import Any, Optional
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class Cache:
    """SHA-256 based incremental file extraction cache.

    Maps file_path → {"hash": <sha256 hexdigest>, "result": <dict>, "ts": <epoch>}
    and persists the mapping as JSON at <output_dir>/cache/cache.json.
    """

    def __init__(self, output_dir: str) -> None:
        self._cache_dir = Path(output_dir) / "cache"
        self._cache_file = self._cache_dir / "cache.json"
        self._data: dict[str, dict] = {}
        self._dirty = False
        # Memoize hashes within a single run to avoid double-reading files
        self._hash_memo: dict[str, str] = {}

    def load(self) -> None:
        """Load cache from disk. Safe to call even if the cache file doesn't exist."""
        try:
            if self._cache_file.exists():
                loaded = json.loads(self._cache_file.read_text(encoding="utf-8"))
                # Guard against a corrupted/foreign file whose top level isn't
                # an object — downstream methods assume a dict.
                self._data = loaded if isinstance(loaded, dict) else {}
        except (json.JSONDecodeError, OSError):
            self._data = {}

    def save(self) -> None:
        """Persist cache to disk. No-op if nothing has changed."""
        if not self._dirty:
            return
        self._cache_dir.mkdir(parents=True, exist_ok=True)
        self._cache_file.write_text(
            json.dumps(self._data, ensure_ascii=False, indent=2),
            encoding="utf-8",
        )
        self._dirty = False

    def file_hash(self, file_path: str) -> str:
        """Compute (and memoize) the SHA-256 hex digest of a file's contents.

        Returns "" when the file can't be read; the empty digest is memoized
        too, so an unreadable file is only attempted once per run.
        """
        if file_path in self._hash_memo:
            return self._hash_memo[file_path]

        h = hashlib.sha256()
        try:
            with open(file_path, "rb") as f:
                # Stream in 64 KiB chunks to keep memory flat on large files
                for chunk in iter(lambda: f.read(65536), b""):
                    h.update(chunk)
            digest = h.hexdigest()
        except OSError:
            digest = ""

        self._hash_memo[file_path] = digest
        return digest

    def is_fresh(self, file_path: str) -> bool:
        """Return True if the cached hash matches the current file hash."""
        entry = self._data.get(file_path)
        if not entry:
            return False
        return entry.get("hash") == self.file_hash(file_path)

    def get(self, file_path: str) -> Optional[dict]:
        """Return the cached extraction result dict, or None if stale/missing."""
        entry = self._data.get(file_path)
        if not entry:
            return None
        if entry.get("hash") != self.file_hash(file_path):
            return None
        return entry.get("result")

    def put(self, file_path: str, result: Any, file_hash: Optional[str] = None) -> None:
        """Store an extraction result for a file.

        Args:
            file_path: absolute path to the source file
            result: extraction result dict (must be JSON-serializable);
                dataclass instances are converted via asdict, anything else
                is wrapped as {"_raw": str(result)}
            file_hash: pre-computed SHA-256 digest (computed if not provided)
        """
        # `is not None` rather than truthiness: an explicitly passed ""
        # (digest of an unreadable file) must not trigger a pointless re-hash.
        h = file_hash if file_hash is not None else self.file_hash(file_path)
        if not isinstance(result, dict):
            try:
                from dataclasses import asdict
                result = asdict(result)
            except TypeError:
                # Not a dataclass instance — fall back to a string wrapper.
                # (ImportError can't happen: dataclasses is stdlib.)
                result = {"_raw": str(result)}

        self._data[file_path] = {
            "hash": h,
            "result": result,
            "ts": time.time(),
        }
        self._hash_memo[file_path] = h
        self._dirty = True

    def invalidate(self, file_path: str) -> None:
        """Remove a specific file's cache entry (and its memoized hash)."""
        if file_path in self._data:
            del self._data[file_path]
            self._dirty = True
        self._hash_memo.pop(file_path, None)

    def clear(self) -> None:
        """Remove all cache entries."""
        self._data = {}
        self._hash_memo = {}
        self._dirty = True

    def stats(self) -> dict:
        """Return basic cache statistics."""
        return {
            "entries": len(self._data),
            "cache_file": str(self._cache_file),
        }
|
codebeacon/cli.py
ADDED
|
@@ -0,0 +1,391 @@
|
|
|
1
|
+
"""codebeacon CLI entry point."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import argparse
|
|
6
|
+
import sys
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
|
|
9
|
+
from codebeacon import __version__
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def _cmd_scan(args: argparse.Namespace) -> int:
    """Discover projects under the given path(s) and run the extraction pipeline.

    Delegates to sync mode when a single path already contains a
    codebeacon.yaml. Returns a process exit code (0 on success).
    """
    # Dropped unused imports: load_config and collect_files were imported
    # here but never used in this function.
    from codebeacon.config import find_config, generate_config
    from codebeacon.discover.detector import discover_projects, extract_convention_routes

    paths = [str(Path(p).resolve()) for p in args.paths]

    # If single path and codebeacon.yaml exists there → sync mode
    if len(paths) == 1:
        config_path = find_config(paths[0])
        if config_path:
            print(f"Found {config_path} — switching to sync mode")
            args.config = str(config_path)
            return _cmd_sync(args)

    try:
        projects = discover_projects(paths)
    except (FileNotFoundError, ValueError) as e:
        print(f"Error: {e}", file=sys.stderr)
        return 1

    if not projects:
        print("No projects found.", file=sys.stderr)
        return 1

    multi = len(projects) > 1 or (len(projects) == 1 and projects[0].is_multi)

    if multi:
        print(f"Scanning {len(projects)} project(s)...")
    else:
        print(f"Scanning {projects[0].path} ...")

    # Column widths for the aligned project table (projects is non-empty here)
    max_name = max(len(p.name) for p in projects)
    max_fw = max(len(p.framework) for p in projects)

    for p in projects:
        sig = f"({Path(p.signature_file).name})" if p.signature_file else "(code files)"
        print(f" {p.name:<{max_name}} {p.framework:<{max_fw}} {sig}")

    # Show convention routes for file-system frameworks
    for p in projects:
        routes = extract_convention_routes(p)
        if routes:
            print(f" → {p.name}: {len(routes)} file-system routes detected")

    # Determine output dir: beside a single scanned path, else under cwd
    if len(args.paths) == 1:
        output_base = Path(paths[0])
    else:
        output_base = Path.cwd()

    output_dir = str(output_base / ".codebeacon")
    print(f" Output: {output_dir}")

    # Auto-generate codebeacon.yaml on multi-project first scan
    if multi and len(args.paths) == 1:
        yaml_path = output_base / "codebeacon.yaml"
        if not yaml_path.exists():
            generate_config(projects, output_dir, yaml_path)
            print(f" Generated {yaml_path} — next time run: codebeacon sync")

    if args.list_only:
        return 0

    return _run_pipeline(projects, output_dir, args)
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
def _run_pipeline(projects, output_dir: str, args) -> int:
    """Run the full extraction pipeline for a list of projects.

    Stages: per-project extraction (incremental via the SHA-256 cache when
    --update is set) → graph build → edge enrichment → community detection →
    analysis → output generation (wiki, Obsidian vault, context map,
    beacon.json, REPORT.md). Returns a process exit code (0 on success).
    """
    from codebeacon.discover.scanner import collect_files
    from codebeacon.cache import Cache
    from codebeacon.wave import auto_wave
    from codebeacon.graph.build import build_graph
    from codebeacon.graph.enrich import enrich_http_api, enrich_shared_db
    from codebeacon.graph.cluster import cluster, apply_communities, score_all
    from codebeacon.graph.analyze import analyze, report_to_markdown
    import json
    from pathlib import Path

    output_path = Path(output_dir)
    output_path.mkdir(parents=True, exist_ok=True)

    # The cache is only consulted on --update runs; a fresh scan re-extracts all.
    cache = Cache(output_dir)
    if getattr(args, "update", False):
        cache.load()
    else:
        cache = None  # fresh scan, no cache

    wave_results = []
    for project in projects:
        print(f"\n Extracting {project.name} ({project.framework}) ...")
        files = collect_files(project.path)
        print(f" {len(files)} source files found")

        # _name binds project.name at definition time (late-binding closure fix)
        def progress(done, total, _name=project.name):
            pct = int(done / total * 100) if total else 100
            print(f" [{pct:3d}%] {done}/{total} files processed", end="\r")

        wave = auto_wave(
            project=project,
            files=files,
            chunk_size=300,
            max_parallel=5,
            cache=cache,
            progress_callback=progress,
            semantic=getattr(args, "semantic", False),
        )
        print()  # newline after the \r-style progress line

        stats = (
            f" Routes: {len(wave.routes)}, Services: {len(wave.services)}, "
            f"Entities: {len(wave.entities)}, Components: {len(wave.components)}"
        )
        if wave.skipped_count:
            stats += f" (cache hits: {wave.skipped_count})"
        print(stats)
        wave_results.append(wave)

    if cache is not None:
        cache.save()

    print("\n Building knowledge graph ...")
    G = build_graph(wave_results)
    print(f" Nodes: {G.number_of_nodes()}, Edges: {G.number_of_edges()}")

    # Enrichment: cross-project HTTP-call and shared-DB-entity edges
    api_edges = enrich_http_api(G)
    db_edges = enrich_shared_db(G)
    if api_edges or db_edges:
        print(f" Enriched: +{api_edges} calls_api, +{db_edges} shares_db_entity edges")

    # Community detection
    print(" Detecting communities ...")
    communities = cluster(G)
    apply_communities(G, communities)
    cohesion = score_all(G, communities)
    n_communities = len(set(communities.values())) if communities else 0
    print(f" {n_communities} communities detected")

    # Analysis
    report = analyze(G, communities, cohesion)

    # BUGFIX: the --wiki-only flag was inverted — `if not wiki_only:` guarded
    # the *wiki* generation, so passing --wiki-only skipped the wiki while
    # still generating every other artifact. The wiki now always runs, and
    # --wiki-only skips the Obsidian vault and context-map exports instead.
    wiki_only = getattr(args, "wiki_only", False)
    print(" Generating wiki ...")
    from codebeacon.wiki.generator import generate_wiki
    generate_wiki(G, communities, output_dir)
    print(f" Wiki written to {output_dir}/wiki/")

    if not wiki_only:
        # Obsidian vault generation
        obsidian_dir = getattr(args, "obsidian_dir", None)
        print(" Generating Obsidian vault ...")
        from codebeacon.export.obsidian import generate_obsidian_vault
        n_notes = generate_obsidian_vault(G, communities, output_dir, obsidian_dir=obsidian_dir)
        print(f" {n_notes} notes written to {obsidian_dir or output_dir + '/obsidian'}/")

        # Context Map generation (CLAUDE.md / .cursorrules / AGENTS.md)
        print(" Generating context map ...")
        from codebeacon.contextmap.generator import generate_context_map
        written = generate_context_map(
            G=G,
            output_dir=output_dir,
            projects=projects,
            obsidian_dir=obsidian_dir,
        )
        for path in written:
            print(f" {path}")

    # Save graph as node-link JSON (beacon.json)
    import networkx.readwrite.json_graph as nxjson
    beacon_path = output_path / "beacon.json"
    beacon_path.write_text(
        json.dumps(nxjson.node_link_data(G), ensure_ascii=False, indent=2),
        encoding="utf-8",
    )

    # Human-readable analysis report
    report_path = output_path / "REPORT.md"
    report_path.write_text(report_to_markdown(report), encoding="utf-8")

    print(f"\n Output: {output_dir}")
    print(" beacon.json, REPORT.md, wiki/, obsidian/, CLAUDE.md written")
    print(f" Done. {report.node_count} nodes, {report.edge_count} edges, {n_communities} communities.")
    return 0
|
|
198
|
+
|
|
199
|
+
|
|
200
|
+
def _cmd_sync(args: argparse.Namespace) -> int:
    """Run extraction for every project listed in codebeacon.yaml.

    Resolves the config from --config or the current directory, detects each
    project's framework once, then hands off to _run_pipeline. Returns a
    process exit code (0 on success).
    """
    from codebeacon.config import load_config, find_config
    from codebeacon.discover.detector import detect_framework
    from codebeacon.common.types import ProjectInfo

    config_path = getattr(args, "config", None)
    if not config_path:
        config_path = find_config(Path.cwd())
        if not config_path:
            print("Error: No codebeacon.yaml found in current directory.", file=sys.stderr)
            print("Run 'codebeacon scan <path>' or 'codebeacon init' to create one.", file=sys.stderr)
            return 1

    try:
        config = load_config(config_path)
    except (FileNotFoundError, ValueError) as e:
        print(f"Error loading config: {e}", file=sys.stderr)
        return 1

    print(f"Using {config.config_file}")
    print(f"Processing {len(config.projects)} project(s)...")

    # Detect each project's framework ONCE, printing the table and building
    # ProjectInfo records in a single pass. (Previously detect_framework ran
    # twice per project across two loops, with a redundant in-loop import.)
    projects_info = []
    for p in config.projects:
        fw, lang, sig = detect_framework(p.path)
        # Explicit type in the config overrides auto-detection
        effective_fw = p.type if p.type != "auto" else fw
        print(f" {p.name:<20} {effective_fw:<15} {p.path}")
        projects_info.append(ProjectInfo(
            name=p.name,
            path=p.path,
            framework=effective_fw,
            language=lang,
            signature_file=sig or "",
        ))

    output_dir = config.output.dir
    print(f" Output: {output_dir}")

    return _run_pipeline(projects_info, output_dir, args)
|
|
244
|
+
|
|
245
|
+
|
|
246
|
+
def _cmd_init(args: argparse.Namespace) -> int:
    """Create a codebeacon.yaml in the target directory (default: cwd).

    Discovers projects under the target and writes a starter config; a
    pre-existing config is left untouched. Returns a process exit code.
    """
    target = Path(args.path) if args.path else Path.cwd()
    yaml_path = target / "codebeacon.yaml"

    # Never clobber an existing config — report and succeed.
    if yaml_path.exists():
        print(f"Config already exists: {yaml_path}")
        return 0

    from codebeacon.discover.detector import discover_projects
    from codebeacon.config import generate_config

    try:
        detected = discover_projects([str(target)])
    except (FileNotFoundError, ValueError) as exc:
        print(f"Error: {exc}", file=sys.stderr)
        return 1

    generate_config(detected, ".codebeacon", yaml_path)
    print(f"Created {yaml_path}")
    return 0
|
|
266
|
+
|
|
267
|
+
|
|
268
|
+
def _cmd_query(args: argparse.Namespace) -> int:
    """Stub for the graph search command (not implemented until Task 8)."""
    term = args.term
    print(f"[query] Not yet implemented (Task 8). Query: {term}")
    return 0
|
|
271
|
+
|
|
272
|
+
|
|
273
|
+
def _cmd_path(args: argparse.Namespace) -> int:
    """Stub for the shortest-path command (not implemented until Task 8)."""
    src, dst = args.source, args.target
    print(f"[path] Not yet implemented (Task 8). From: {src}, To: {dst}")
    return 0
|
|
276
|
+
|
|
277
|
+
|
|
278
|
+
def _cmd_serve(args: argparse.Namespace) -> int:
    """Start the MCP stdio server over a .codebeacon output directory.

    A relative --dir is resolved against the current working directory.
    """
    from codebeacon.export.mcp import serve

    target = Path(getattr(args, "dir", ".codebeacon"))
    if not target.is_absolute():
        target = Path.cwd() / target
    serve(target)
    return 0
|
|
287
|
+
|
|
288
|
+
|
|
289
|
+
def _cmd_install(args: argparse.Namespace) -> int:
    """Install the Claude Code skill.

    Copies the packaged SKILL.md into ~/.claude/skills/codebeacon/ and appends
    a trigger block to ~/.claude/CLAUDE.md. Idempotent: the trigger is skipped
    when already present. Returns a process exit code (0 on success).
    """
    # Dropped redundant local `import sys` / `from pathlib import Path`:
    # both names are already imported at module level and were shadowed here.
    import shutil

    # SKILL.md is shipped inside the package at codebeacon/skill/SKILL.md
    skill_src = Path(__file__).parent / "skill" / "SKILL.md"
    if not skill_src.exists():
        print(f"Error: SKILL.md not found at {skill_src}", file=sys.stderr)
        return 1

    claude_dir = Path.home() / ".claude"
    skills_dir = claude_dir / "skills" / "codebeacon"
    skill_dest = skills_dir / "SKILL.md"
    claude_md = claude_dir / "CLAUDE.md"

    skills_dir.mkdir(parents=True, exist_ok=True)
    shutil.copy2(skill_src, skill_dest)
    print(f" Copied SKILL.md → {skill_dest}")

    trigger_block = (
        "\n# codebeacon\n"
        "- **codebeacon** (`~/.claude/skills/codebeacon/SKILL.md`) - scan source code → knowledge graph + wiki. Trigger: `/codebeacon`\n"
        "When the user types `/codebeacon`, invoke the Skill tool with `skill: \"codebeacon\"` before doing anything else.\n"
    )
    existing = claude_md.read_text(encoding="utf-8") if claude_md.exists() else ""
    if "# codebeacon" in existing:
        print(f" Trigger already present in {claude_md} — skipping.")
    else:
        # Separate the trigger from prior content by a blank line when needed
        separator = "\n" if existing and not existing.endswith("\n\n") else ""
        claude_md.write_text(existing + separator + trigger_block, encoding="utf-8")
        print(f" Added codebeacon trigger to {claude_md}")

    print("\ncodebeacon skill installed.")
    print("Start a new Claude Code session and type /codebeacon to use it.")
    return 0
|
|
325
|
+
|
|
326
|
+
|
|
327
|
+
def build_parser() -> argparse.ArgumentParser:
    """Assemble the codebeacon argument parser with all subcommands.

    Each subcommand binds its handler via set_defaults(func=...), which
    main() invokes with the parsed namespace.
    """
    root = argparse.ArgumentParser(
        prog="codebeacon",
        description="Source code AST analysis for AI context generation",
    )
    root.add_argument("--version", action="version", version=f"codebeacon {__version__}")

    commands = root.add_subparsers(dest="command", metavar="<command>")
    commands.required = True

    # scan — ad-hoc discovery + extraction over one or more paths
    cmd = commands.add_parser("scan", help="Scan one or more project directories")
    cmd.add_argument("paths", nargs="+", metavar="PATH", help="Project or workspace path(s)")
    cmd.add_argument("--semantic", action="store_true", help="Enable LLM semantic extraction")
    cmd.add_argument("--update", action="store_true", help="Only reprocess changed files")
    cmd.add_argument("--watch", action="store_true", help="Watch for file changes")
    cmd.add_argument("--wiki-only", action="store_true", help="Only generate wiki")
    cmd.add_argument("--obsidian-dir", metavar="PATH", help="Custom Obsidian vault path")
    cmd.add_argument("--list-only", action="store_true", help="Only list detected projects, don't extract")
    cmd.set_defaults(func=_cmd_scan)

    # sync — config-driven extraction
    cmd = commands.add_parser("sync", help="Run extraction based on codebeacon.yaml")
    cmd.add_argument("--config", metavar="FILE", help="Path to codebeacon.yaml")
    cmd.add_argument("--semantic", action="store_true")
    cmd.add_argument("--update", action="store_true")
    cmd.set_defaults(func=_cmd_sync)

    # init — bootstrap a config file
    cmd = commands.add_parser("init", help="Interactively create codebeacon.yaml")
    cmd.add_argument("path", nargs="?", default="", help="Target directory (default: cwd)")
    cmd.set_defaults(func=_cmd_init)

    # query — graph search (stub)
    cmd = commands.add_parser("query", help="Search nodes and edges in the graph")
    cmd.add_argument("term", help="Search term")
    cmd.set_defaults(func=_cmd_query)

    # path — shortest path between nodes (stub)
    cmd = commands.add_parser("path", help="Find shortest path between two nodes")
    cmd.add_argument("source", help="Source node name")
    cmd.add_argument("target", help="Target node name")
    cmd.set_defaults(func=_cmd_path)

    # serve — MCP stdio server
    cmd = commands.add_parser("serve", help="Start MCP server (stdio)")
    cmd.add_argument(
        "--dir",
        metavar="DIR",
        default=".codebeacon",
        help="Path to .codebeacon output directory (default: .codebeacon)",
    )
    cmd.set_defaults(func=_cmd_serve)

    # install — Claude Code skill setup
    cmd = commands.add_parser("install", help="Install Claude Code skill")
    cmd.set_defaults(func=_cmd_install)

    return root
|
|
386
|
+
|
|
387
|
+
|
|
388
|
+
def main() -> None:
    """CLI entry point: parse arguments and exit with the command's status."""
    cli = build_parser()
    namespace = cli.parse_args()
    # Each subcommand registered its handler via set_defaults(func=...);
    # its int return value becomes the process exit code.
    sys.exit(namespace.func(namespace))
|
|
File without changes
|