opencode-agenthub 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +373 -0
- package/dist/composer/bootstrap.js +493 -0
- package/dist/composer/builtin-assets.js +139 -0
- package/dist/composer/capabilities.js +20 -0
- package/dist/composer/compose.js +824 -0
- package/dist/composer/defaults.js +10 -0
- package/dist/composer/home-transfer.js +288 -0
- package/dist/composer/install-home.js +5 -0
- package/dist/composer/library/README.md +93 -0
- package/dist/composer/library/bundles/auto.json +18 -0
- package/dist/composer/library/bundles/build.json +17 -0
- package/dist/composer/library/bundles/hr-adapter.json +26 -0
- package/dist/composer/library/bundles/hr-cto.json +24 -0
- package/dist/composer/library/bundles/hr-evaluator.json +26 -0
- package/dist/composer/library/bundles/hr-planner.json +26 -0
- package/dist/composer/library/bundles/hr-sourcer.json +24 -0
- package/dist/composer/library/bundles/hr-verifier.json +26 -0
- package/dist/composer/library/bundles/hr.json +35 -0
- package/dist/composer/library/bundles/plan.json +19 -0
- package/dist/composer/library/instructions/hr-boundaries.md +38 -0
- package/dist/composer/library/instructions/hr-protocol.md +102 -0
- package/dist/composer/library/profiles/auto.json +9 -0
- package/dist/composer/library/profiles/hr.json +9 -0
- package/dist/composer/library/souls/auto.md +29 -0
- package/dist/composer/library/souls/build.md +21 -0
- package/dist/composer/library/souls/hr-adapter.md +64 -0
- package/dist/composer/library/souls/hr-cto.md +57 -0
- package/dist/composer/library/souls/hr-evaluator.md +64 -0
- package/dist/composer/library/souls/hr-planner.md +48 -0
- package/dist/composer/library/souls/hr-sourcer.md +70 -0
- package/dist/composer/library/souls/hr-verifier.md +62 -0
- package/dist/composer/library/souls/hr.md +186 -0
- package/dist/composer/library/souls/plan.md +23 -0
- package/dist/composer/library/workflow/auto-mode.json +139 -0
- package/dist/composer/model-utils.js +39 -0
- package/dist/composer/opencode-profile.js +2299 -0
- package/dist/composer/package-manager.js +75 -0
- package/dist/composer/package-version.js +20 -0
- package/dist/composer/platform.js +48 -0
- package/dist/composer/query.js +133 -0
- package/dist/composer/settings.js +400 -0
- package/dist/plugins/opencode-agenthub.js +310 -0
- package/dist/plugins/opencode-question.js +223 -0
- package/dist/plugins/plan-guidance.js +263 -0
- package/dist/plugins/runtime-config.js +57 -0
- package/dist/skills/agenthub-doctor/SKILL.md +238 -0
- package/dist/skills/agenthub-doctor/diagnose.js +213 -0
- package/dist/skills/agenthub-doctor/fix.js +293 -0
- package/dist/skills/agenthub-doctor/index.js +30 -0
- package/dist/skills/agenthub-doctor/interactive.js +756 -0
- package/dist/skills/hr-assembly/SKILL.md +121 -0
- package/dist/skills/hr-final-check/SKILL.md +98 -0
- package/dist/skills/hr-review/SKILL.md +100 -0
- package/dist/skills/hr-staffing/SKILL.md +85 -0
- package/dist/skills/hr-support/bin/sync_sources.py +560 -0
- package/dist/skills/hr-support/bin/validate_staged_package.py +290 -0
- package/dist/skills/hr-support/bin/vendor_stage_mcps.py +234 -0
- package/dist/skills/hr-support/bin/vendor_stage_skills.py +104 -0
- package/dist/types.js +11 -0
- package/package.json +54 -0
|
@@ -0,0 +1,290 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import os
|
|
5
|
+
import shutil
|
|
6
|
+
import subprocess
|
|
7
|
+
import sys
|
|
8
|
+
import tempfile
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def resolve_agenthub_bin() -> str:
    """Locate the opencode-agenthub executable.

    Resolution order: the OPENCODE_AGENTHUB_BIN env override, then PATH,
    then a repo-local bin/ fallback when running from a source checkout.

    Raises:
        SystemExit: when no executable can be located.
    """
    # Fix: read the env var once instead of the getenv-check / environ-index
    # double lookup in the original.
    override = os.environ.get("OPENCODE_AGENTHUB_BIN")
    if override:
        return override
    found = shutil.which("opencode-agenthub")
    if found:
        return found
    # Portable repo-local fallback: when running from src/skills/hr-support/bin/
    # inside the source tree, try <repo-root>/bin/opencode-agenthub
    this_file = Path(__file__).resolve()
    parts = this_file.parts
    # Expected: .../<repo>/src/skills/hr-support/bin/validate_staged_package.py
    # so repo root is 4 levels up from this file's directory
    if (
        len(parts) >= 6
        and parts[-2] == "bin"
        and parts[-3] == "hr-support"
        and parts[-4] == "skills"
        and parts[-5] == "src"
    ):
        repo_bin = this_file.parents[4] / "bin" / "opencode-agenthub"
        if repo_bin.exists():
            return str(repo_bin)
    raise SystemExit(
        "Could not locate opencode-agenthub.\n"
        " Set OPENCODE_AGENTHUB_BIN to the full path, or add opencode-agenthub to PATH.\n"
        " When running from source, ensure bin/opencode-agenthub exists in the repo root."
    )
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def run(cmd: list[str], env: dict[str, str] | None = None) -> None:
|
|
42
|
+
result = subprocess.run(cmd, capture_output=True, text=True, env=env)
|
|
43
|
+
if result.returncode != 0:
|
|
44
|
+
message = result.stderr.strip() or result.stdout.strip()
|
|
45
|
+
raise SystemExit(f"Command failed: {' '.join(cmd)}\n{message}")
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def resolve_import_root(stage_arg: str) -> Path:
    """Return the importable Agent Hub root for a staged package path.

    Accepts either the root itself or a stage dir containing agenthub-home/.
    Exits when neither layout has both bundles/ and profiles/ present.
    """
    base = Path(stage_arg).resolve()
    for candidate in (base, base / "agenthub-home"):
        if (candidate / "bundles").exists() and (candidate / "profiles").exists():
            return candidate
    raise SystemExit(f"No importable Agent Hub root found under: {base}")
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def read_json(path: Path) -> dict:
    """Parse the UTF-8 JSON document at *path*."""
    with path.open(encoding="utf-8") as handle:
        return json.load(handle)
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def validate_bundle_metadata(import_root: Path) -> None:
    """Reject bundles whose metadata carries keys the runtime does not consume."""
    forbidden = {"optional_skills", "runtime_conditional_skills"}
    offenders: list[str] = []
    for bundle_file in sorted((import_root / "bundles").glob("*.json")):
        metadata = read_json(bundle_file).get("metadata", {})
        # Non-dict metadata is tolerated here; other validators cover shape.
        if not isinstance(metadata, dict):
            continue
        hits = sorted(forbidden.intersection(metadata.keys()))
        if hits:
            offenders.append(f"{bundle_file.name}: {', '.join(hits)}")
    if offenders:
        listing = "\n".join(f"- {entry}" for entry in offenders)
        raise SystemExit(
            "Bundle metadata contains non-runtime semantic keys that the current platform does not consume:\n"
            f"{listing}"
        )
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
def validate_staged_skills(import_root: Path) -> None:
    """Ensure every skill referenced by a bundle is staged under skills/."""
    skills_root = import_root / "skills"
    unstaged = [
        f"{bundle_file.name}: {skill_name}"
        for bundle_file in sorted((import_root / "bundles").glob("*.json"))
        for skill_name in read_json(bundle_file).get("skills", [])
        if not (skills_root / skill_name).exists()
    ]
    if unstaged:
        listing = "\n".join(f"- {entry}" for entry in unstaged)
        raise SystemExit(f"Missing staged skills referenced by bundles:\n{listing}")
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
def validate_staged_mcps(import_root: Path) -> None:
    """Check bundle-referenced MCP configs and their server artifacts are staged.

    Pass 1: every MCP named in a bundle must have a staged mcp/<name>.json.
    Pass 2: every ${LIBRARY_ROOT}/mcp-servers/... path mentioned in a config's
    command must exist relative to the staged import root.
    """
    mcp_root = import_root / "mcp"
    bundles_root = import_root / "bundles"
    if not bundles_root.exists():
        return

    absent_configs = [
        f"{bundle_file.name}: {mcp_name}"
        for bundle_file in sorted(bundles_root.glob("*.json"))
        for mcp_name in read_json(bundle_file).get("mcp", [])
        if not (mcp_root / f"{mcp_name}.json").exists()
    ]
    if absent_configs:
        listing = "\n".join(f"- {entry}" for entry in absent_configs)
        raise SystemExit(f"Missing staged MCP configs referenced by bundles:\n{listing}")

    # No staged configs at all means nothing left to cross-check.
    if not mcp_root.exists():
        return

    absent_servers: list[str] = []
    for config_file in sorted(mcp_root.glob("*.json")):
        command = read_json(config_file).get("command", [])
        if not isinstance(command, list):
            continue
        for arg in command:
            if not isinstance(arg, str) or "${LIBRARY_ROOT}/" not in arg:
                continue
            suffix = arg.split("${LIBRARY_ROOT}/", 1)[1]
            # Only mcp-servers/ artifacts are expected to be vendored here.
            if not suffix.startswith("mcp-servers/"):
                continue
            if not (import_root / suffix).exists():
                absent_servers.append(f"{config_file.name}: {suffix}")

    if absent_servers:
        listing = "\n".join(f"- {entry}" for entry in absent_servers)
        raise SystemExit(
            f"Missing staged MCP server artifacts referenced by MCP configs:\n{listing}"
        )
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
def validate_profile_default_agents(import_root: Path) -> None:
    """Cross-check every profile's bundle refs and defaultAgent against bundles.

    For each profile JSON, verifies that: bundles is a list of non-empty
    strings; each referenced bundle exists and declares agent.name; profiles
    with nativeAgentPolicy 'team-only' set defaultAgent explicitly; and a set
    defaultAgent equals some referenced bundle's agent.name. Exits with a
    targeted message on the first violation found.
    """
    # Parse every staged bundle once, keyed by file stem (the bundle name).
    bundle_specs = {
        bundle_file.stem: read_json(bundle_file)
        for bundle_file in sorted((import_root / "bundles").glob("*.json"))
    }

    for profile_file in sorted((import_root / "profiles").glob("*.json")):
        profile = read_json(profile_file)
        profile_name = profile.get("name") or profile_file.stem
        bundle_names = profile.get("bundles", [])
        if not isinstance(bundle_names, list):
            raise SystemExit(
                f"Profile '{profile_name}' has invalid bundles metadata; expected a list."
            )

        # (bundle_name, agent_name) pairs for every resolvable bundle ref.
        references: list[tuple[str, str]] = []
        missing_bundles: list[str] = []
        for raw_bundle_name in bundle_names:
            if not isinstance(raw_bundle_name, str) or not raw_bundle_name.strip():
                raise SystemExit(
                    f"Profile '{profile_name}' contains an invalid bundle reference: {raw_bundle_name!r}."
                )
            bundle_name = raw_bundle_name.strip()
            bundle = bundle_specs.get(bundle_name)
            if not bundle:
                # Collect all missing refs so the error lists every one.
                missing_bundles.append(bundle_name)
                continue
            agent = bundle.get("agent", {})
            agent_name = agent.get("name") if isinstance(agent, dict) else None
            if not isinstance(agent_name, str) or not agent_name.strip():
                raise SystemExit(
                    f"Bundle '{bundle_name}' is missing required agent.name; profile '{profile_name}' cannot use it."
                )
            references.append((bundle_name, agent_name.strip()))

        if missing_bundles:
            detail = ", ".join(missing_bundles)
            raise SystemExit(
                f"Profile '{profile_name}' references missing bundle(s): {detail}."
            )

        default_agent = profile.get("defaultAgent")
        native_agent_policy = profile.get("nativeAgentPolicy")
        # team-only profiles must state their default agent explicitly.
        if native_agent_policy == "team-only" and default_agent is None:
            raise SystemExit(
                f"Profile '{profile_name}' uses nativeAgentPolicy 'team-only' and must set defaultAgent explicitly."
            )
        if default_agent is None:
            continue
        if not isinstance(default_agent, str) or not default_agent.strip():
            raise SystemExit(
                f"Profile '{profile_name}' has an invalid defaultAgent value: {default_agent!r}."
            )

        normalized_default_agent = default_agent.strip()
        # Detect the common mistake of using a bundle *name* where its
        # agent.name differs, and report the correct value to use instead.
        bundle_match = next(
            (
                agent_name
                for bundle_name, agent_name in references
                if bundle_name == normalized_default_agent
                and agent_name != normalized_default_agent
            ),
            None,
        )
        if bundle_match:
            raise SystemExit(
                f"Profile '{profile_name}' defaultAgent '{normalized_default_agent}' does not match any bundle agent.name. "
                f"Bundle '{normalized_default_agent}' uses bundle agent.name '{bundle_match}'. Set defaultAgent to that value."
            )

        agent_names = {agent_name for _, agent_name in references}
        if normalized_default_agent in agent_names:
            continue

        available = ", ".join(sorted(agent_names)) or "(none)"
        raise SystemExit(
            f"Profile '{profile_name}' defaultAgent '{normalized_default_agent}' does not match any bundle agent.name. "
            f"Available bundle agent.name values: {available}."
        )
|
|
218
|
+
|
|
219
|
+
|
|
220
|
+
def main() -> int:
    """Validate a staged package end-to-end, then assemble every profile.

    Runs the static checks first (bundle metadata, skills, MCPs, default
    agents), then a live check: imports the staged root into a throwaway
    home and runs each profile with --assemble-only under isolated env homes.
    Returns 0 on success; raises SystemExit on any failure.
    """
    if len(sys.argv) != 2:
        raise SystemExit(
            "Usage: validate_staged_package.py <stage-package-root|agenthub-home-root>"
        )

    agenthub_bin = resolve_agenthub_bin()
    import_root = resolve_import_root(sys.argv[1])
    workspace_root = Path.cwd()
    # Static validations: each raises SystemExit on the first violation.
    validate_bundle_metadata(import_root)
    validate_staged_skills(import_root)
    validate_staged_mcps(import_root)
    validate_profile_default_agents(import_root)

    profiles = sorted(p.stem for p in (import_root / "profiles").glob("*.json"))
    if not profiles:
        raise SystemExit("No profiles found in staged import root.")

    # Dynamic validation inside a temp sandbox so real homes stay untouched.
    with tempfile.TemporaryDirectory(prefix="agenthub-stage-validate-") as temp_dir:
        temp_root = Path(temp_dir)
        temp_home = temp_root / "home"
        temp_hr_home = temp_root / "hr-home"
        temp_cfg = temp_root / "config"

        # Bootstrap a minimal home, then import the staged package into it.
        run(
            [
                agenthub_bin,
                "setup",
                "minimal",
                "--target-root",
                str(temp_home),
            ]
        )
        run(
            [
                agenthub_bin,
                "hub-import",
                "--source",
                str(import_root),
                "--target-root",
                str(temp_home),
                "--overwrite",
            ]
        )

        # Point the CLI at the sandbox homes via env overrides.
        env = os.environ.copy()
        env["OPENCODE_AGENTHUB_HOME"] = str(temp_home)
        env["OPENCODE_AGENTHUB_HR_HOME"] = str(temp_hr_home)
        for profile in profiles:
            # --assemble-only exercises composition without launching agents.
            run(
                [
                    agenthub_bin,
                    "run",
                    profile,
                    "--workspace",
                    str(workspace_root),
                    "--config-root",
                    str(temp_cfg / profile),
                    "--assemble-only",
                ],
                env=env,
            )

    print("VALIDATED")
    print(f"- import_root: {import_root}")
    print(f"- profiles: {', '.join(profiles)}")
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
|
|
@@ -0,0 +1,234 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import os
|
|
5
|
+
import shutil
|
|
6
|
+
import subprocess
|
|
7
|
+
import sys
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def resolve_hr_home() -> Path:
    """Resolve the HR home directory, honoring the env-var override."""
    override = os.environ.get("OPENCODE_AGENTHUB_HR_HOME")
    if override:
        return Path(override).resolve()
    # Default: the parent of this script's bin/ directory.
    return Path(__file__).resolve().parent.parent
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def resolve_builtin_mcp_root() -> Path | None:
|
|
19
|
+
this_file = Path(__file__).resolve()
|
|
20
|
+
parts = this_file.parts
|
|
21
|
+
if (
|
|
22
|
+
len(parts) >= 6
|
|
23
|
+
and parts[-2] == "bin"
|
|
24
|
+
and parts[-3] == "hr-support"
|
|
25
|
+
and parts[-4] == "skills"
|
|
26
|
+
and parts[-5] == "src"
|
|
27
|
+
):
|
|
28
|
+
candidate = this_file.parents[3] / "composer" / "library" / "mcp"
|
|
29
|
+
if candidate.exists():
|
|
30
|
+
return candidate
|
|
31
|
+
return None
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
# Module-level paths resolved once at import time.
HR_HOME = resolve_hr_home()  # root of the HR asset store (env-overridable)
WORKERS_ROOT = HR_HOME / "inventory" / "workers"  # worker-card JSON files
# Built-in MCP config dir when running from a source checkout; None otherwise.
BUILTIN_MCP_ROOT = resolve_builtin_mcp_root()
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def resolve_import_root(stage_arg: str) -> Path:
    """Return the importable Agent Hub root under *stage_arg*.

    Tries the path itself, then its agenthub-home/ subdirectory; exits when
    neither contains both bundles/ and profiles/.
    """
    root = Path(stage_arg).resolve()
    for option in (root, root / "agenthub-home"):
        if (option / "bundles").exists() and (option / "profiles").exists():
            return option
    raise SystemExit(f"No importable Agent Hub root found under: {root}")
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def read_json(path: Path) -> dict:
    """Decode the JSON file at *path* (UTF-8)."""
    raw = path.read_text(encoding="utf-8")
    return json.loads(raw)
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def load_worker_cards() -> list[dict]:
    """Load every parseable worker-card JSON under the HR inventory."""
    loaded: list[dict] = []
    for card_path in sorted(WORKERS_ROOT.glob("*.json")):
        try:
            loaded.append(read_json(card_path))
        except json.JSONDecodeError:
            # Skip malformed cards rather than failing the whole run.
            pass
    return loaded
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def find_source_root(mcp_name: str, cards: list[dict]) -> Path | None:
    """Find a cached profile repo that registers *mcp_name* under mcp/.

    Matching against selected_mcps is case-insensitive; relative cached_repo
    paths are anchored at HR_HOME. Returns None when no card qualifies.
    """
    wanted = mcp_name.lower()
    for card in cards:
        if card.get("asset_kind") != "profile":
            continue
        selection = {str(entry).lower() for entry in (card.get("selected_mcps") or [])}
        if wanted not in selection:
            continue
        cached = card.get("artifacts", {}).get("cached_repo")
        if not cached:
            continue
        root = Path(cached)
        if not root.is_absolute():
            root = HR_HOME / root
        # Only accept repos that actually register this MCP.
        if (root / "mcp" / f"{mcp_name}.json").exists():
            return root
    return None
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
def copy_tree_if_exists(source: Path, target: Path) -> None:
    """Copy *source* (file or directory tree) onto *target*; skip when absent."""
    if not source.exists():
        return
    if source.is_dir():
        shutil.copytree(source, target, dirs_exist_ok=True)
        return
    target.parent.mkdir(parents=True, exist_ok=True)
    shutil.copy2(source, target)
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
def resolve_library_root_from_command(command: list[object]) -> str | None:
|
|
93
|
+
for item in command:
|
|
94
|
+
if isinstance(item, str) and "${LIBRARY_ROOT}" in item:
|
|
95
|
+
suffix = item.split("${LIBRARY_ROOT}/", 1)
|
|
96
|
+
if len(suffix) == 2:
|
|
97
|
+
return suffix[1]
|
|
98
|
+
return None
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
def command_available(name: str) -> bool:
    """True when *name* resolves to an executable on PATH."""
    return bool(shutil.which(name))
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
def install_mcp_dependencies(target_root: Path) -> str:
    """Install MCP runtime deps with the first available JS package manager.

    Prefers bun over npm; returns the tool name used. Exits when neither is
    on PATH.
    """
    for tool in ("bun", "npm"):
        if command_available(tool):
            subprocess.run([tool, "install"], cwd=target_root, check=True)
            return tool
    raise SystemExit(
        "MCP dependencies are required but neither 'bun' nor 'npm' is available on PATH."
    )
|
|
115
|
+
|
|
116
|
+
|
|
117
|
+
def main() -> int:
    """Vendor every bundle-referenced MCP config and server into the stage.

    For each MCP named by any staged bundle: copies its registration JSON
    from a cached source repo (or the built-in library as fallback), vendors
    any ${LIBRARY_ROOT}/mcp-servers/... artifacts the config's command
    references, and defers node_modules paths to a bun/npm install against
    the staged mcp-servers/package.json. Exits on any unresolvable asset.
    """
    if len(sys.argv) != 2:
        raise SystemExit(
            "Usage: vendor_stage_mcps.py <stage-package-root|agenthub-home-root>"
        )

    import_root = resolve_import_root(sys.argv[1])
    bundles_root = import_root / "bundles"
    staged_mcp_root = import_root / "mcp"
    staged_mcp_servers_root = import_root / "mcp-servers"
    staged_mcp_root.mkdir(parents=True, exist_ok=True)

    # Union of MCP names referenced by any staged bundle.
    required_mcps: set[str] = set()
    for bundle_file in sorted(bundles_root.glob("*.json")):
        bundle = read_json(bundle_file)
        for mcp_name in bundle.get("mcp", []):
            required_mcps.add(str(mcp_name))

    if not required_mcps:
        print("Vendored MCPs:\n- none required")
        return 0

    cards = load_worker_cards()
    vendored_mcps: list[str] = []
    vendored_servers: set[str] = set()
    # node_modules paths whose presence depends on a later dependency install.
    deferred_install_paths: set[str] = set()

    for mcp_name in sorted(required_mcps):
        source_root = find_source_root(mcp_name, cards)
        if source_root is None:
            raise SystemExit(
                f"Missing cached source repo for MCP '{mcp_name}'. Add a worker card with selected_mcps and cached_repo."
            )

        source_mcp_file = source_root / "mcp" / f"{mcp_name}.json"
        if not source_mcp_file.exists():
            # Fall back to the packaged built-in registration, when present.
            builtin_candidate = (
                BUILTIN_MCP_ROOT / f"{mcp_name}.json" if BUILTIN_MCP_ROOT else None
            )
            if builtin_candidate and builtin_candidate.exists():
                source_mcp_file = builtin_candidate
            else:
                raise SystemExit(
                    f"Missing MCP registration for '{mcp_name}' in {source_root / 'mcp'}"
                )

        target_mcp_file = staged_mcp_root / f"{mcp_name}.json"
        target_mcp_file.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy2(source_mcp_file, target_mcp_file)
        vendored_mcps.append(mcp_name)

        # Vendor every server artifact the config's command references.
        config = read_json(source_mcp_file)
        command = config.get("command") or []
        if not isinstance(command, list):
            continue
        for entry in command:
            if not isinstance(entry, str) or "${LIBRARY_ROOT}/" not in entry:
                continue
            relative = resolve_library_root_from_command([entry])
            if not relative:
                continue
            source_path = source_root / relative
            if "mcp-servers/" not in relative:
                continue
            if relative.startswith("mcp-servers/node_modules/"):
                # Installable dependency: defer to bun/npm when a manifest exists.
                package_manifest = source_root / "mcp-servers" / "package.json"
                if package_manifest.exists():
                    deferred_install_paths.add(relative)
                    continue
            if not source_path.exists():
                raise SystemExit(
                    f"MCP '{mcp_name}' references '{relative}' but it does not exist under {source_root}"
                )
            target_path = import_root / relative
            copy_tree_if_exists(source_path, target_path)
            vendored_servers.add(relative)

        # Always stage the manifest so a later install can run.
        package_manifest = source_root / "mcp-servers" / "package.json"
        if package_manifest.exists():
            copy_tree_if_exists(
                package_manifest, staged_mcp_servers_root / "package.json"
            )
            vendored_servers.add("mcp-servers/package.json")

    install_tool: str | None = None
    if deferred_install_paths:
        missing_after_copy = [
            relative
            for relative in sorted(deferred_install_paths)
            if not (import_root / relative).exists()
        ]
        if missing_after_copy:
            if not (staged_mcp_servers_root / "package.json").exists():
                detail = ", ".join(missing_after_copy)
                raise SystemExit(
                    f"MCP configs reference runtime dependencies ({detail}) but no staged mcp-servers/package.json is available."
                )
            install_tool = install_mcp_dependencies(staged_mcp_servers_root)
            # Verify the install actually produced each required file.
            for relative in missing_after_copy:
                if not (import_root / relative).exists():
                    raise SystemExit(
                        f"Installed MCP dependencies with {install_tool}, but required runtime file is still missing: {relative}"
                    )
                vendored_servers.add(relative)

    print("Vendored MCP configs:")
    for name in vendored_mcps:
        print(f"- {name}")
    print("Vendored MCP server artifacts:")
    for relative in sorted(vendored_servers):
        print(f"- {relative}")
    if install_tool:
        print(f"Installed MCP dependencies with: {install_tool}")
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import shutil
|
|
5
|
+
import sys
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
# HR home is the parent of this script's bin/ directory.
HR_HOME = Path(__file__).resolve().parent.parent
# Worker cards (JSON) describing vendorable assets live here.
WORKERS_ROOT = HR_HOME / "inventory" / "workers"
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def resolve_import_root(stage_arg: str) -> Path:
    """Resolve the Agent Hub import root for a staged package path.

    The path itself wins when it carries bundles/ and profiles/; otherwise
    the agenthub-home/ subdirectory is probed. Exits when neither matches.
    """
    resolved = Path(stage_arg).resolve()
    candidates = [resolved, resolved / "agenthub-home"]
    for cand in candidates:
        if (cand / "bundles").exists() and (cand / "profiles").exists():
            return cand
    raise SystemExit(f"No importable Agent Hub root found under: {resolved}")
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def read_json(path: Path) -> dict:
    """Load and parse the UTF-8 JSON file at *path*."""
    text = path.read_text(encoding="utf-8")
    return json.loads(text)
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def locate_skill_card(skill_name: str) -> dict:
    """Return the worker card describing *skill_name*, or exit.

    A card matches when the requested name equals its candidate_slug, its
    name field, or its file stem; only asset_kind == 'skill' cards count.
    """
    for card_path in sorted(WORKERS_ROOT.glob("*.json")):
        card = read_json(card_path)
        if card.get("asset_kind") != "skill":
            continue
        aliases = {card.get("candidate_slug"), card.get("name"), card_path.stem}
        if skill_name in aliases:
            return card
    raise SystemExit(f"Missing worker card for skill: {skill_name}")
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def resolve_source_dir(card: dict) -> Path:
    """Return the directory holding a skill's vendorable sources.

    Resolves the card's cached_repo (relative paths anchor at HR_HOME), then
    its source_path/source_file. A directory source is returned as-is; a file
    source yields its parent directory.

    Raises:
        SystemExit: when cached_repo or source_path is missing, or the
            resolved source does not exist.
    """
    # Fix: guard on the raw string before constructing a Path. Path("") is
    # PosixPath('.'), which is truthy, so the original `if not cached_repo:`
    # never fired and a missing cached_repo silently resolved to HR_HOME/'.'.
    raw_repo = card.get("artifacts", {}).get("cached_repo", "")
    if not raw_repo:
        raise SystemExit(
            f"Missing cached_repo artifact for skill: {card.get('name', 'unknown')}"
        )
    cached_repo = Path(raw_repo)
    repo_root = cached_repo if cached_repo.is_absolute() else HR_HOME / cached_repo
    source_path = card.get("source_path") or card.get("artifacts", {}).get(
        "source_file"
    )
    if not source_path:
        raise SystemExit(
            f"Missing source_path for skill: {card.get('name', 'unknown')}"
        )
    source_file = repo_root / source_path
    if source_file.is_dir():
        return source_file
    if source_file.exists():
        # A single-file source vendors its containing directory.
        return source_file.parent
    raise SystemExit(
        f"Missing source directory for skill {card.get('name', 'unknown')}: {source_file}"
    )
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def main() -> int:
    """Vendor every bundle-referenced skill into the staged skills/ tree."""
    if len(sys.argv) != 2:
        raise SystemExit(
            "Usage: vendor_stage_skills.py <stage-package-root|agenthub-home-root>"
        )

    import_root = resolve_import_root(sys.argv[1])
    skills_root = import_root / "skills"
    skills_root.mkdir(parents=True, exist_ok=True)

    # Union of skills referenced by any staged bundle.
    required_skills = {
        skill_name
        for bundle_file in sorted((import_root / "bundles").glob("*.json"))
        for skill_name in read_json(bundle_file).get("skills", [])
    }

    vendored: list[str] = []
    skipped: list[str] = []
    for skill_name in sorted(required_skills):
        target_dir = skills_root / skill_name
        if target_dir.exists():
            # Already staged (e.g. by a previous run); do not overwrite.
            skipped.append(skill_name)
            continue

        card = locate_skill_card(skill_name)
        shutil.copytree(resolve_source_dir(card), target_dir, dirs_exist_ok=True)
        vendored.append(skill_name)

    print("Vendored skills:")
    for skill in vendored:
        print(f"- {skill}")
    if skipped:
        print("Already present:")
        for skill in skipped:
            print(f"- {skill}")
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
|
package/dist/types.js
ADDED
package/package.json
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "opencode-agenthub",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Systematic control plane for opencode agents, skills, profiles, and shared runtime assets.",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "./dist/plugins/opencode-agenthub.js",
|
|
7
|
+
"exports": {
|
|
8
|
+
".": "./dist/plugins/opencode-agenthub.js",
|
|
9
|
+
"./question": "./dist/plugins/opencode-question.js"
|
|
10
|
+
},
|
|
11
|
+
"bin": {
|
|
12
|
+
"agenthub": "dist/composer/opencode-profile.js",
|
|
13
|
+
"opencode-agenthub": "dist/composer/opencode-profile.js"
|
|
14
|
+
},
|
|
15
|
+
"scripts": {
|
|
16
|
+
"build": "node scripts/build.mjs",
|
|
17
|
+
"test:smoke": "bun test test/smoke-*.test.ts",
|
|
18
|
+
"prepublishOnly": "npm run build"
|
|
19
|
+
},
|
|
20
|
+
"files": [
|
|
21
|
+
"LICENSE",
|
|
22
|
+
"README.md",
|
|
23
|
+
"dist/"
|
|
24
|
+
],
|
|
25
|
+
"keywords": [
|
|
26
|
+
"opencode",
|
|
27
|
+
"agents",
|
|
28
|
+
"agenthub",
|
|
29
|
+
"skills",
|
|
30
|
+
"mcp",
|
|
31
|
+
"workflow",
|
|
32
|
+
"llm",
|
|
33
|
+
"ai-agents"
|
|
34
|
+
],
|
|
35
|
+
"engines": {
|
|
36
|
+
"node": ">=18.0.0"
|
|
37
|
+
},
|
|
38
|
+
"license": "MIT",
|
|
39
|
+
"repository": {
|
|
40
|
+
"type": "git",
|
|
41
|
+
"url": "git+https://github.com/sdwolf4103/opencode-agenthub.git"
|
|
42
|
+
},
|
|
43
|
+
"homepage": "https://github.com/sdwolf4103/opencode-agenthub#readme",
|
|
44
|
+
"bugs": {
|
|
45
|
+
"url": "https://github.com/sdwolf4103/opencode-agenthub/issues"
|
|
46
|
+
},
|
|
47
|
+
"publishConfig": {
|
|
48
|
+
"access": "public"
|
|
49
|
+
},
|
|
50
|
+
"packageManager": "bun@1.3.9",
|
|
51
|
+
"devDependencies": {
|
|
52
|
+
"esbuild": "^0.25.9"
|
|
53
|
+
}
|
|
54
|
+
}
|