deploysquad-recon-core 0.1.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- deploysquad_recon_core-0.1.1/PKG-INFO +11 -0
- deploysquad_recon_core-0.1.1/pyproject.toml +33 -0
- deploysquad_recon_core-0.1.1/setup.cfg +4 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/__init__.py +203 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/__main__.py +50 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/cli.py +107 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/cli_tools.py +190 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/context.py +319 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/embeddings.py +283 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/errors.py +33 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/index.py +90 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/links.py +95 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/mcp_server.py +219 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/models/__init__.py +35 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/models/base.py +36 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/models/constraint.py +16 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/models/decision.py +25 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/models/epic.py +18 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/models/feature.py +31 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/models/goal.py +16 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/models/module.py +27 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/models/persona.py +16 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/models/project.py +15 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/models/user_story.py +28 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/models/version.py +17 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/schemas.py +18 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/skill/openclaw/recon/SKILL.md +139 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/skill/openclaw/recon-add-feature/SKILL.md +93 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/skill/recon.add-feature.md +92 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/skill/recon.md +430 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/vault/__init__.py +10 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/vault/paths.py +104 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/vault/reader.py +81 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core/vault/writer.py +74 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core.egg-info/PKG-INFO +11 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core.egg-info/SOURCES.txt +46 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core.egg-info/dependency_links.txt +1 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core.egg-info/entry_points.txt +3 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core.egg-info/requires.txt +7 -0
- deploysquad_recon_core-0.1.1/src/deploysquad_recon_core.egg-info/top_level.txt +1 -0
- deploysquad_recon_core-0.1.1/tests/test_api.py +249 -0
- deploysquad_recon_core-0.1.1/tests/test_cli.py +148 -0
- deploysquad_recon_core-0.1.1/tests/test_cli_tools.py +79 -0
- deploysquad_recon_core-0.1.1/tests/test_context.py +92 -0
- deploysquad_recon_core-0.1.1/tests/test_embeddings.py +359 -0
- deploysquad_recon_core-0.1.1/tests/test_index.py +71 -0
- deploysquad_recon_core-0.1.1/tests/test_links.py +106 -0
- deploysquad_recon_core-0.1.1/tests/test_mcp_server.py +233 -0
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: deploysquad-recon-core
|
|
3
|
+
Version: 0.1.1
|
|
4
|
+
Summary: Engine for reading, writing, and validating Obsidian vault project graphs
|
|
5
|
+
Requires-Python: >=3.11
|
|
6
|
+
Requires-Dist: pydantic>=2.0
|
|
7
|
+
Requires-Dist: python-frontmatter>=1.0
|
|
8
|
+
Requires-Dist: mcp[cli]>=1.0
|
|
9
|
+
Requires-Dist: google-generativeai<2,>=0.8
|
|
10
|
+
Provides-Extra: dev
|
|
11
|
+
Requires-Dist: pytest>=8.0; extra == "dev"
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["setuptools>=68.0"]
|
|
3
|
+
build-backend = "setuptools.build_meta"
|
|
4
|
+
|
|
5
|
+
[project]
|
|
6
|
+
name = "deploysquad-recon-core"
|
|
7
|
+
version = "0.1.1"
|
|
8
|
+
description = "Engine for reading, writing, and validating Obsidian vault project graphs"
|
|
9
|
+
requires-python = ">=3.11"
|
|
10
|
+
dependencies = [
|
|
11
|
+
"pydantic>=2.0",
|
|
12
|
+
"python-frontmatter>=1.0",
|
|
13
|
+
"mcp[cli]>=1.0",
|
|
14
|
+
"google-generativeai>=0.8,<2",
|
|
15
|
+
]
|
|
16
|
+
|
|
17
|
+
[project.optional-dependencies]
|
|
18
|
+
dev = [
|
|
19
|
+
"pytest>=8.0",
|
|
20
|
+
]
|
|
21
|
+
|
|
22
|
+
[project.scripts]
|
|
23
|
+
recon-server = "deploysquad_recon_core.mcp_server:main"
|
|
24
|
+
deploysquad-recon-core = "deploysquad_recon_core.__main__:main"
|
|
25
|
+
|
|
26
|
+
[tool.setuptools.packages.find]
|
|
27
|
+
where = ["src"]
|
|
28
|
+
|
|
29
|
+
[tool.setuptools.package-data]
|
|
30
|
+
deploysquad_recon_core = ["skill/*.md", "skill/openclaw/recon/SKILL.md", "skill/openclaw/recon-add-feature/SKILL.md"]
|
|
31
|
+
|
|
32
|
+
[tool.pytest.ini_options]
|
|
33
|
+
testpaths = ["tests"]
|
|
@@ -0,0 +1,203 @@
|
|
|
1
|
+
"""recon-core: Engine for reading, writing, and validating Obsidian vault project graphs.
|
|
2
|
+
|
|
3
|
+
Public API:
|
|
4
|
+
create_node(node_type, data, project_dir, body_sections=None) -> Path
|
|
5
|
+
get_node(file_path) -> dict
|
|
6
|
+
list_nodes(project_dir, node_type=None, status=None) -> list[dict]
|
|
7
|
+
update_node(file_path, updates, body_sections=None) -> Path
|
|
8
|
+
resolve_links(project_dir) -> dict
|
|
9
|
+
build_index(project_dir) -> dict
|
|
10
|
+
generate_context(feature_name, project_dir) -> str
|
|
11
|
+
embed_nodes(project_dir, api_key=None) -> dict
|
|
12
|
+
find_similar(node_path, project_dir, top_k=5, threshold=0.75) -> list[tuple[Path, float]]
|
|
13
|
+
"""
|
|
14
|
+
from __future__ import annotations
|
|
15
|
+
|
|
16
|
+
from pathlib import Path
|
|
17
|
+
|
|
18
|
+
from .errors import ValidationError, NodeNotFoundError, BrokenLinkError, DuplicateNodeError
|
|
19
|
+
from .models import NODE_TYPE_MAP, get_model_for_type
|
|
20
|
+
from .vault.reader import read_node
|
|
21
|
+
from .vault.writer import write_node
|
|
22
|
+
from .vault.paths import node_filepath, parse_filename, type_to_subfolder, find_project_name, project_tag
|
|
23
|
+
from .links import resolve_all_links
|
|
24
|
+
from .index import build_index as _build_index, write_index, read_index
|
|
25
|
+
from .context import generate_context as _generate_context
|
|
26
|
+
from .embeddings import embed_nodes, find_similar
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def create_node(
    node_type: str,
    data: dict,
    project_dir: str | Path,
    body_sections: dict[str, str] | None = None,
) -> Path:
    """Validate *data* against the schema for *node_type* and write a new vault file.

    Args:
        node_type: One of the supported node types (e.g. "feature", "user-story").
        data: Frontmatter fields ("type" and "schema_version" are auto-filled).
        project_dir: Root directory of the project vault.
        body_sections: Optional {heading: content} mapping for the markdown body.

    Returns:
        Path of the file that was written.

    Raises:
        ValidationError: The merged data fails schema validation.
        DuplicateNodeError: A node with the same name already exists.
        KeyError: *node_type* is not a known type.
    """
    root = Path(project_dir)
    model_class = get_model_for_type(node_type)

    # Inject "type" and "schema_version" first so an explicit value in *data*
    # still wins the merge.
    payload = {"type": node_type, "schema_version": 1, **data}

    # The project node derives its tag from its own name; every other node
    # looks the project name up from the vault on disk.
    project_name = (
        data.get("name", "") if node_type == "project" else find_project_name(root)
    )
    if project_name:
        tag_value = project_tag(project_name)
        current_tags = payload.get("tags", [])
        if tag_value not in current_tags:
            payload["tags"] = current_tags + [tag_value]

    try:
        model = model_class(**payload)
    except Exception as e:
        raise ValidationError(f"Validation failed for {node_type}: {e}") from e

    return write_node(model, root, body_sections=body_sections)
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def get_node(file_path: str | Path) -> dict:
    """Read and validate a single node file.

    Returns:
        Dict with keys: frontmatter, body, body_sections, model, file_path.
    """
    return read_node(Path(file_path))
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
def list_nodes(
    project_dir: str | Path,
    node_type: str | None = None,
    status: str | None = None,
) -> list[dict]:
    """List nodes under *project_dir*, optionally filtered by type and/or status.

    Returns:
        List of dicts, each with keys: type, name, status, file_path.
    """
    root = Path(project_dir)
    found: list[dict] = []

    for path in sorted(root.rglob("*.md")):
        # Skip anything inside the generated .graph directory.
        if ".graph" in path.parts:
            continue
        parsed = parse_filename(path.name)
        if parsed is None:
            continue
        file_type, node_name = parsed

        # Filter on type before reading the file to avoid needless I/O.
        if node_type and file_type != node_type:
            continue

        record = read_node(path)
        current_status = record["model"].status
        # Normalise enum statuses to their plain string value.
        if hasattr(current_status, "value"):
            current_status = current_status.value

        if status and current_status != status:
            continue

        found.append(
            {
                "type": file_type,
                "name": node_name,
                "status": current_status,
                "file_path": path,
            }
        )

    return found
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
def update_node(
    file_path: str | Path,
    updates: dict,
    body_sections: dict[str, str] | None = None,
) -> Path:
    """Update an existing node's frontmatter fields.

    Reads the current node, merges *updates* over its frontmatter,
    re-validates against the node's schema, and rewrites the file.

    Args:
        file_path: Path to the existing node file.
        updates: Dict of fields to update (merged with existing frontmatter).
        body_sections: If provided, replaces the body sections entirely;
            otherwise the current body sections are kept.

    Returns:
        Path to the updated file.

    Raises:
        ValidationError: The merged frontmatter fails schema validation.
    """
    file_path = Path(file_path)
    current = read_node(file_path)

    # Merge updates into existing frontmatter (updates win).
    merged = {**current["frontmatter"], **updates}

    # Derive project_dir from file_path exactly once, based on node type.
    # Project nodes live at project_dir/"Project - Name.md" (subfolder is empty),
    # other nodes live at project_dir/subfolder/"Type - Name.md".
    subfolder = type_to_subfolder(merged["type"])
    project_dir = file_path.parent.parent if subfolder else file_path.parent

    # Ensure the project tag is preserved/added.
    proj_name = find_project_name(project_dir)
    if not proj_name and merged["type"] == "project":
        proj_name = merged.get("name", "")
    if proj_name:
        tag = project_tag(proj_name)
        existing_tags = merged.get("tags", [])
        if tag not in existing_tags:
            merged["tags"] = existing_tags + [tag]

    model_cls = get_model_for_type(merged["type"])
    try:
        model = model_cls(**merged)
    except Exception as e:
        raise ValidationError(f"Validation failed: {e}") from e

    # Keep the existing body sections unless the caller replaces them.
    if body_sections is None:
        body_sections = current["body_sections"]

    return write_node(model, project_dir, body_sections=body_sections, overwrite=True)
|
|
182
|
+
|
|
183
|
+
|
|
184
|
+
def resolve_links(project_dir: str | Path) -> dict:
    """Check every wikilink in the vault.

    Returns:
        {"valid": [LinkResult, ...], "broken": [LinkResult, ...]}
    """
    return resolve_all_links(Path(project_dir))
|
|
191
|
+
|
|
192
|
+
|
|
193
|
+
def build_index(project_dir: str | Path) -> dict:
    """Build the graph index from the vault files.

    Returns the index dict. It is NOT persisted to disk -- call write_index()
    for that.
    """
    return _build_index(Path(project_dir))
|
|
199
|
+
|
|
200
|
+
|
|
201
|
+
def generate_context(feature_name: str, project_dir: str | Path) -> str:
    """Render a CONTEXT.md document for the named Feature node."""
    return _generate_context(feature_name, Path(project_dir))
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
"""Entry point for `uvx deploysquad-recon-core [subcommand]`.
|
|
2
|
+
|
|
3
|
+
No args → start MCP server (Claude Code)
|
|
4
|
+
install → interactive installer
|
|
5
|
+
* → CLI subcommand for OpenClaw shell integration
|
|
6
|
+
"""
|
|
7
|
+
import sys
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def main() -> None:
    """Dispatch on sys.argv: no args starts the MCP server, otherwise a subcommand."""
    if len(sys.argv) < 2:
        # Bare invocation — hand control to the MCP server (Claude Code).
        from .mcp_server import main as mcp_main

        mcp_main()
        return

    command = sys.argv[1]

    if command == "install":
        from .cli import install

        install()
        return

    # Every remaining subcommand maps to a cmd_<name> handler in cli_tools.
    from .cli_tools import build_parser

    subcommands = (
        "list_nodes",
        "get_node",
        "create_node",
        "update_node",
        "resolve_links",
        "build_index",
        "generate_context",
        "embed_nodes",
        "find_similar",
    )
    handlers = {name: f"cmd_{name}" for name in subcommands}

    if command not in handlers:
        print(f"Unknown command: {command}", file=sys.stderr)
        print(f"Available: install, {', '.join(handlers)}", file=sys.stderr)
        sys.exit(1)

    from . import cli_tools

    args = build_parser().parse_args()
    getattr(cli_tools, handlers[command])(args)


if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
"""Install command for recon — writes MCP config and copies skill files."""
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
import importlib.resources as resources
|
|
5
|
+
import json
|
|
6
|
+
import shutil
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def _claude_settings_path() -> Path:
|
|
11
|
+
return Path.home() / ".claude" / "settings.json"
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def _commands_dir() -> Path:
|
|
15
|
+
return Path.home() / ".claude" / "commands"
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def _read_settings(path: Path) -> dict:
|
|
19
|
+
if path.exists():
|
|
20
|
+
try:
|
|
21
|
+
return json.loads(path.read_text())
|
|
22
|
+
except json.JSONDecodeError as e:
|
|
23
|
+
raise SystemExit(
|
|
24
|
+
f"Error: {path} contains invalid JSON and could not be read.\n"
|
|
25
|
+
f"Please fix or remove it before running install.\nDetail: {e}"
|
|
26
|
+
)
|
|
27
|
+
return {}
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def _write_settings(path: Path, data: dict) -> None:
|
|
31
|
+
path.parent.mkdir(parents=True, exist_ok=True)
|
|
32
|
+
path.write_text(json.dumps(data, indent=2))
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def _openclaw_skills_dir() -> Path | None:
|
|
36
|
+
"""Return OpenClaw skills directory if OpenClaw is installed, else None."""
|
|
37
|
+
openclaw_dir = Path.home() / ".openclaw" / "workspace" / "skills"
|
|
38
|
+
return openclaw_dir if openclaw_dir.parent.parent.exists() else None
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def _install_openclaw_skills() -> bool:
    """Copy the packaged OpenClaw skill dirs to ~/.openclaw/workspace/skills/.

    Returns:
        True when the skills were installed, False when OpenClaw is not present.
    """
    target_root = _openclaw_skills_dir()
    if target_root is None:
        return False

    target_root.mkdir(parents=True, exist_ok=True)
    import deploysquad_recon_core

    packaged = resources.files(deploysquad_recon_core) / "skill" / "openclaw"
    for skill_name in ("recon", "recon-add-feature"):
        dest = target_root / skill_name
        dest.mkdir(exist_ok=True)
        bundled = packaged / skill_name / "SKILL.md"
        # as_file() materialises the resource on disk even for zipped installs.
        with resources.as_file(bundled) as src:
            shutil.copy2(src, dest / "SKILL.md")
        print(f"✓ OpenClaw: {skill_name} skill copied to {dest}")
    return True
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def install() -> None:
    """Interactive installer: prompt for the vault, write MCP config, copy skills."""
    # Ask for the vault location, re-prompting until an existing path is given.
    default = str(Path.home() / "obsidian-vault")
    while True:
        answer = input(f"Where is your Obsidian vault? [{default}] ").strip()
        vault_path = answer or default
        if Path(vault_path).exists():
            break
        print(f" ✗ Path not found: {vault_path}. Please enter a valid directory.")

    # Register the MCP server in Claude Code's settings.
    settings_path = _claude_settings_path()
    settings = _read_settings(settings_path)
    settings.setdefault("mcpServers", {})
    settings["mcpServers"]["recon"] = {
        "command": "uvx",
        "args": ["deploysquad-recon-core"],
        "env": {"VAULT_PATH": vault_path},
    }
    _write_settings(settings_path, settings)
    print(f"✓ MCP server config written to {settings_path}")

    # Copy the packaged slash-command skill files into place.
    commands_dir = _commands_dir()
    commands_dir.mkdir(parents=True, exist_ok=True)

    import deploysquad_recon_core

    packaged_skills = resources.files(deploysquad_recon_core) / "skill"
    for skill_file in ("recon.md", "recon.add-feature.md"):
        dest = commands_dir / skill_file
        with resources.as_file(packaged_skills / skill_file) as src:
            shutil.copy2(src, dest)
        print(f"✓ Copied {skill_file} to {dest}")

    # OpenClaw integration is best-effort — only when OpenClaw is installed.
    if _install_openclaw_skills():
        print()
        print("OpenClaw detected — skills also installed.")
        print("In OpenClaw, say: 'map out my project with recon'")
    else:
        print()
        print("(OpenClaw not detected — skipping. Install from openclaw.ai if needed.)")

    print()
    print("Done! Restart Claude Code and run /recon to get started.")
|
|
@@ -0,0 +1,190 @@
|
|
|
1
|
+
"""CLI subcommands for OpenClaw shell integration.
|
|
2
|
+
|
|
3
|
+
Each function is one subcommand. All output JSON to stdout.
|
|
4
|
+
Exit code 0 = success, 1 = error (error JSON on stdout).
|
|
5
|
+
"""
|
|
6
|
+
from __future__ import annotations
|
|
7
|
+
|
|
8
|
+
import json
|
|
9
|
+
import sys
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def _ok(data: dict | list) -> None:
|
|
14
|
+
print(json.dumps(data, default=str))
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def _err(message: str) -> None:
|
|
18
|
+
print(json.dumps({"error": message}))
|
|
19
|
+
sys.exit(1)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def cmd_list_nodes(args) -> None:
    """`list_nodes` subcommand: print a JSON array of node summaries."""
    from deploysquad_recon_core import list_nodes
    try:
        nodes = list_nodes(
            args.project_dir,
            node_type=args.type,
            status=args.status,
        )
        summaries = [
            {
                "type": node["type"],
                "name": node["name"],
                "status": node["status"],
                "file_path": str(node["file_path"]),
            }
            for node in nodes
        ]
        _ok(summaries)
    except Exception as e:
        _err(str(e))
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def cmd_get_node(args) -> None:
    """`get_node` subcommand: print a node's frontmatter and body as JSON."""
    from deploysquad_recon_core import get_node
    try:
        node = get_node(args.file)
        _ok({
            "frontmatter": node["frontmatter"],
            "body": node["body"],
            "file_path": str(args.file),
        })
    except Exception as e:
        _err(str(e))
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def cmd_create_node(args) -> None:
    """`create_node` subcommand: create a node from JSON passed on the CLI."""
    from deploysquad_recon_core import create_node
    try:
        fields = json.loads(args.data)
        sections = json.loads(args.body_sections) if args.body_sections else None
        created = create_node(args.type, fields, args.project_dir, sections)
        _ok({"file_path": str(created), "status": "created"})
    except Exception as e:
        _err(str(e))
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def cmd_update_node(args) -> None:
    """`update_node` subcommand: merge JSON updates into an existing node."""
    from deploysquad_recon_core import update_node
    try:
        fields = json.loads(args.data)
        sections = json.loads(args.body_sections) if args.body_sections else None
        updated = update_node(args.file, fields, sections)
        _ok({"file_path": str(updated), "status": "updated"})
    except Exception as e:
        _err(str(e))
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
def cmd_resolve_links(args) -> None:
    """`resolve_links` subcommand: report valid/broken wikilink counts."""
    from deploysquad_recon_core import resolve_links
    try:
        report = resolve_links(args.project_dir)
        _ok({
            "valid_count": len(report["valid"]),
            "broken_count": len(report["broken"]),
            "broken": [str(link) for link in report["broken"]],
        })
    except Exception as e:
        _err(str(e))
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
def cmd_build_index(args) -> None:
    """`build_index` subcommand: rebuild and persist the graph index."""
    from deploysquad_recon_core import build_index
    from deploysquad_recon_core.index import write_index
    try:
        index = build_index(args.project_dir)
        write_index(index, Path(args.project_dir))
        _ok({"node_count": len(index.get("nodes", {})), "status": "built"})
    except Exception as e:
        _err(str(e))
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
def cmd_generate_context(args) -> None:
    """`generate_context` subcommand: emit CONTEXT.md content for a feature."""
    from deploysquad_recon_core import generate_context
    try:
        rendered = generate_context(args.feature, args.project_dir)
        _ok({"content": rendered, "feature": args.feature})
    except Exception as e:
        _err(str(e))
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
def cmd_embed_nodes(args) -> None:
    """`embed_nodes` subcommand: embed all nodes for semantic search."""
    from deploysquad_recon_core import embed_nodes
    try:
        summary = embed_nodes(args.project_dir, api_key=args.api_key)
        _ok(summary if isinstance(summary, dict) else {"status": "embedded"})
    except Exception as e:
        _err(str(e))
|
|
115
|
+
|
|
116
|
+
|
|
117
|
+
def cmd_find_similar(args) -> None:
    """`find_similar` subcommand: print semantically similar nodes with scores."""
    from deploysquad_recon_core import find_similar
    try:
        matches = find_similar(
            Path(args.node),
            args.project_dir,
            top_k=args.top_k,
            threshold=args.threshold,
        )
        _ok([{"path": str(path), "score": score} for path, score in matches])
    except Exception as e:
        _err(str(e))
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
def build_parser():
    """Build the argparse parser with one subparser per CLI subcommand."""
    import argparse

    parser = argparse.ArgumentParser(
        prog="deploysquad_recon_core",
        description="recon-core CLI — graph authoring tools",
    )
    sub = parser.add_subparsers(dest="command")

    # install
    sub.add_parser("install", help="Install recon into Claude Code / OpenClaw")

    # list_nodes
    list_p = sub.add_parser("list_nodes", help="List nodes in a project")
    list_p.add_argument("--project-dir", required=True)
    list_p.add_argument("--type", default=None)
    list_p.add_argument("--status", default=None)

    # get_node
    get_p = sub.add_parser("get_node", help="Read a node file")
    get_p.add_argument("--file", required=True, type=Path)

    # create_node
    create_p = sub.add_parser("create_node", help="Create a new node")
    create_p.add_argument("--type", required=True)
    create_p.add_argument("--project-dir", required=True)
    create_p.add_argument("--data", required=True, help="JSON dict of frontmatter fields")
    create_p.add_argument(
        "--body-sections", default=None, help='JSON dict e.g. \'{"## Description": "..."}\''
    )

    # update_node
    update_p = sub.add_parser("update_node", help="Update an existing node")
    update_p.add_argument("--file", required=True, type=Path)
    update_p.add_argument("--data", required=True, help="JSON dict of fields to update")
    update_p.add_argument("--body-sections", default=None)

    # resolve_links
    links_p = sub.add_parser("resolve_links", help="Check all wikilinks")
    links_p.add_argument("--project-dir", required=True)

    # build_index
    index_p = sub.add_parser("build_index", help="Rebuild .graph/index.json")
    index_p.add_argument("--project-dir", required=True)

    # generate_context
    ctx_p = sub.add_parser("generate_context", help="Generate CONTEXT.md for a feature")
    ctx_p.add_argument("--feature", required=True, help="Feature name (not filename)")
    ctx_p.add_argument("--project-dir", required=True)

    # embed_nodes
    embed_p = sub.add_parser("embed_nodes", help="Embed all nodes for semantic search")
    embed_p.add_argument("--project-dir", required=True)
    embed_p.add_argument("--api-key", default=None)

    # find_similar
    similar_p = sub.add_parser("find_similar", help="Find semantically similar nodes")
    similar_p.add_argument("--node", required=True, help="Vault-relative path to node file")
    similar_p.add_argument("--project-dir", required=True)
    similar_p.add_argument("--top-k", type=int, default=5)
    similar_p.add_argument("--threshold", type=float, default=0.75)

    return parser
|