opencode-agenthub 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. package/LICENSE +21 -0
  2. package/README.md +373 -0
  3. package/dist/composer/bootstrap.js +493 -0
  4. package/dist/composer/builtin-assets.js +139 -0
  5. package/dist/composer/capabilities.js +20 -0
  6. package/dist/composer/compose.js +824 -0
  7. package/dist/composer/defaults.js +10 -0
  8. package/dist/composer/home-transfer.js +288 -0
  9. package/dist/composer/install-home.js +5 -0
  10. package/dist/composer/library/README.md +93 -0
  11. package/dist/composer/library/bundles/auto.json +18 -0
  12. package/dist/composer/library/bundles/build.json +17 -0
  13. package/dist/composer/library/bundles/hr-adapter.json +26 -0
  14. package/dist/composer/library/bundles/hr-cto.json +24 -0
  15. package/dist/composer/library/bundles/hr-evaluator.json +26 -0
  16. package/dist/composer/library/bundles/hr-planner.json +26 -0
  17. package/dist/composer/library/bundles/hr-sourcer.json +24 -0
  18. package/dist/composer/library/bundles/hr-verifier.json +26 -0
  19. package/dist/composer/library/bundles/hr.json +35 -0
  20. package/dist/composer/library/bundles/plan.json +19 -0
  21. package/dist/composer/library/instructions/hr-boundaries.md +38 -0
  22. package/dist/composer/library/instructions/hr-protocol.md +102 -0
  23. package/dist/composer/library/profiles/auto.json +9 -0
  24. package/dist/composer/library/profiles/hr.json +9 -0
  25. package/dist/composer/library/souls/auto.md +29 -0
  26. package/dist/composer/library/souls/build.md +21 -0
  27. package/dist/composer/library/souls/hr-adapter.md +64 -0
  28. package/dist/composer/library/souls/hr-cto.md +57 -0
  29. package/dist/composer/library/souls/hr-evaluator.md +64 -0
  30. package/dist/composer/library/souls/hr-planner.md +48 -0
  31. package/dist/composer/library/souls/hr-sourcer.md +70 -0
  32. package/dist/composer/library/souls/hr-verifier.md +62 -0
  33. package/dist/composer/library/souls/hr.md +186 -0
  34. package/dist/composer/library/souls/plan.md +23 -0
  35. package/dist/composer/library/workflow/auto-mode.json +139 -0
  36. package/dist/composer/model-utils.js +39 -0
  37. package/dist/composer/opencode-profile.js +2299 -0
  38. package/dist/composer/package-manager.js +75 -0
  39. package/dist/composer/package-version.js +20 -0
  40. package/dist/composer/platform.js +48 -0
  41. package/dist/composer/query.js +133 -0
  42. package/dist/composer/settings.js +400 -0
  43. package/dist/plugins/opencode-agenthub.js +310 -0
  44. package/dist/plugins/opencode-question.js +223 -0
  45. package/dist/plugins/plan-guidance.js +263 -0
  46. package/dist/plugins/runtime-config.js +57 -0
  47. package/dist/skills/agenthub-doctor/SKILL.md +238 -0
  48. package/dist/skills/agenthub-doctor/diagnose.js +213 -0
  49. package/dist/skills/agenthub-doctor/fix.js +293 -0
  50. package/dist/skills/agenthub-doctor/index.js +30 -0
  51. package/dist/skills/agenthub-doctor/interactive.js +756 -0
  52. package/dist/skills/hr-assembly/SKILL.md +121 -0
  53. package/dist/skills/hr-final-check/SKILL.md +98 -0
  54. package/dist/skills/hr-review/SKILL.md +100 -0
  55. package/dist/skills/hr-staffing/SKILL.md +85 -0
  56. package/dist/skills/hr-support/bin/sync_sources.py +560 -0
  57. package/dist/skills/hr-support/bin/validate_staged_package.py +290 -0
  58. package/dist/skills/hr-support/bin/vendor_stage_mcps.py +234 -0
  59. package/dist/skills/hr-support/bin/vendor_stage_skills.py +104 -0
  60. package/dist/types.js +11 -0
  61. package/package.json +54 -0
@@ -0,0 +1,560 @@
1
+ #!/usr/bin/env python3
2
+
3
+ import fnmatch
4
+ import json
5
+ import re
6
+ import subprocess
7
+ import sys
8
+ import urllib.request
9
+ from datetime import datetime, timezone
10
+ from pathlib import Path
11
+
12
+
13
# Filesystem layout: this script lives in <HR_HOME>/bin/, so resolving two
# levels up from this file yields the skill's home directory.
HR_HOME = Path(__file__).resolve().parent.parent
CONFIG_PATH = HR_HOME / "hr-config.json"  # source/catalog configuration
INVENTORY_ROOT = HR_HOME / "inventory"  # generated inventory artifacts
WORKERS_ROOT = INVENTORY_ROOT / "workers"  # one JSON "worker card" per discovered asset
MODELS_ROOT = INVENTORY_ROOT / "models"  # model-catalog outputs
CACHE_ROOT = HR_HOME / "sources" / "github"  # shallow git clones are cached here
STATUS_PATH = HR_HOME / "source-status.json"  # last-sync bookkeeping
MODEL_CATALOG_PATH = MODELS_ROOT / "catalog.json"
MODEL_IDS_PATH = MODELS_ROOT / "valid-model-ids.txt"  # flat "provider/model" id list
22
+
23
+
24
def now_iso() -> str:
    """Return the current UTC time as an ISO-8601 string with whole-second precision."""
    stamp = datetime.now(timezone.utc)
    return stamp.replace(microsecond=0).isoformat()
26
+
27
+
28
def slugify(value: str) -> str:
    """Lower-case *value* and collapse each non-alphanumeric run into a single hyphen."""
    collapsed = re.sub(r"[^a-z0-9]+", "-", value.lower())
    return collapsed.strip("-")
32
+
33
+
34
def read_text(path: Path) -> str:
    """Read *path* as UTF-8, retrying with undecodable bytes dropped on failure."""
    try:
        return path.read_text(encoding="utf-8")
    except UnicodeDecodeError:
        pass
    return path.read_text(encoding="utf-8", errors="ignore")
39
+
40
+
41
def read_json(path: Path) -> dict:
    """Parse *path* as UTF-8 JSON and return the decoded value."""
    raw = path.read_text(encoding="utf-8")
    return json.loads(raw)
43
+
44
+
45
def write_json(path: Path, payload: dict) -> None:
    """Write *payload* as pretty-printed ASCII JSON at *path*, creating parent dirs."""
    path.parent.mkdir(parents=True, exist_ok=True)
    text = json.dumps(payload, indent=2, ensure_ascii=True)
    path.write_text(text + "\n", encoding="utf-8")
50
+
51
+
52
+ def run(cmd: list[str], cwd: Path | None = None) -> str:
53
+ result = subprocess.run(
54
+ cmd,
55
+ cwd=str(cwd) if cwd else None,
56
+ capture_output=True,
57
+ text=True,
58
+ )
59
+ if result.returncode != 0:
60
+ raise RuntimeError(f"command failed: {' '.join(cmd)}\n{result.stderr.strip()}")
61
+ return result.stdout.strip()
62
+
63
+
64
def parse_frontmatter(text: str) -> dict[str, str]:
    """Parse a leading '---'-fenced frontmatter block into flat string pairs.

    Handles simple `key: value` lines plus `>`/`|` block scalars, whose
    continuation lines are joined with single spaces.  Returns {} when *text*
    does not open with a '---' fence.
    """
    lines = text.splitlines()
    # A block needs at least: opening fence, one content line, closing fence.
    if len(lines) < 3 or lines[0].strip() != "---":
        return {}

    data: dict[str, str] = {}
    i = 1
    while i < len(lines):
        line = lines[i]
        if line.strip() == "---":
            break  # closing fence ends the frontmatter
        if ":" not in line:
            i += 1
            continue
        key, value = line.split(":", 1)
        key = key.strip()
        value = value.strip().strip('"')
        if value in {">", "|"}:
            # Block scalar: collect lines until the closing fence or the next
            # unindented `key:` line, joining them into one string.
            block: list[str] = []
            i += 1
            while i < len(lines):
                nested = lines[i]
                if nested.strip() == "---":
                    i -= 1  # step back so the outer loop sees the fence
                    break
                if nested and not nested.startswith((" ", "\t")) and ":" in nested:
                    i -= 1  # step back so the outer loop re-parses this key
                    break
                if nested.strip():
                    block.append(nested.strip())
                i += 1
            value = " ".join(block).strip()
        data[key] = value
        i += 1
    return data
99
+
100
+
101
def infer_name(path: Path, text: str) -> str:
    """Best-effort asset name: frontmatter `name`, then heading heuristics, then file stem."""
    declared = parse_frontmatter(text).get("name", "").strip()
    if declared:
        return declared

    # Fall back to scanning the first 40 lines for a title-like marker.
    for raw in text.splitlines()[:40]:
        candidate = raw.strip()
        if candidate.startswith("# Agent:"):
            return candidate.split(":", 1)[1].strip()
        if candidate.startswith("# "):
            return candidate[2:].strip()
        if candidate.startswith("name:"):
            return candidate.split(":", 1)[1].strip().strip('"')
    return path.stem
116
+
117
+
118
def infer_summary(text: str, fallback: str) -> str:
    """Best-effort one-line summary (max 280 chars).

    Prefers the frontmatter `description`; otherwise returns the first line
    that is neither blank nor metadata-shaped, else *fallback*.
    """
    described = parse_frontmatter(text).get("description", "").strip()
    if described and described not in {">", "|"}:
        return described[:280]

    metadata_prefixes = (
        "#",
        "---",
        "name:",
        "description:",
        "audience:",
        "license:",
        "compatibility:",
        "metadata:",
    )
    for raw in text.splitlines():
        candidate = raw.strip()
        if not candidate or candidate.startswith(metadata_prefixes):
            continue
        return candidate[:280]
    return fallback
143
+
144
+
145
def match_any(path: str, patterns: list[str]) -> bool:
    """Return True when *path* matches at least one glob in *patterns*."""
    for pattern in patterns:
        if fnmatch.fnmatch(path, pattern):
            return True
    return False
147
+
148
+
149
def load_config() -> dict:
    """Load hr-config.json, exiting with a clear message when it is absent."""
    if CONFIG_PATH.exists():
        return read_json(CONFIG_PATH)
    raise SystemExit(f"Missing HR config: {CONFIG_PATH}")
153
+
154
+
155
def github_sources(config: dict) -> list[dict]:
    """Normalize the GitHub source entries found in *config*.

    Sources may appear under `sources` (either a flat list or a dict with a
    `github` list) and under a top-level `github` list.  Each entry is either
    a bare "owner/repo" string or a dict with at least a string `repo` key;
    anything else is skipped.  Returns dicts with repo/branch/include/exclude/
    source_id/sync_depth fully populated.
    """
    # Single definition of the default ignore globs (the original duplicated
    # this list in both branches).  Copied per entry so no source shares a
    # mutable list with another.
    default_exclude = [
        ".git/**",
        "node_modules/**",
        "dist/**",
        "build/**",
        "coverage/**",
    ]

    raw_sources: list = []
    top_sources = config.get("sources", {})
    if isinstance(top_sources, list):
        raw_sources.extend(top_sources)
    elif isinstance(top_sources, dict):
        raw_sources.extend(top_sources.get("github", []))

    if isinstance(config.get("github"), list):
        raw_sources.extend(config["github"])

    settings = (
        config.get("settings", {}) if isinstance(config.get("settings"), dict) else {}
    )
    # Global default clone depth; individual entries may override it.
    sync_depth = settings.get("sync_depth", 1)

    sources: list[dict] = []
    for entry in raw_sources:
        if isinstance(entry, str):
            # Bare "owner/repo" shorthand: fill in every default.
            sources.append(
                {
                    "repo": entry,
                    "branch": "main",
                    "include": ["**/*"],
                    "exclude": list(default_exclude),
                    "source_id": slugify(entry),
                    "sync_depth": sync_depth,
                }
            )
        elif isinstance(entry, dict) and isinstance(entry.get("repo"), str):
            repo = entry["repo"]
            sources.append(
                {
                    "repo": repo,
                    "branch": entry.get("branch", "main"),
                    "include": entry.get("include", ["**/*"]),
                    "exclude": entry.get("exclude", list(default_exclude)),
                    "source_id": entry.get("source_id", slugify(repo)),
                    "sync_depth": entry.get("sync_depth", sync_depth),
                }
            )
        # Any other shape is silently ignored, matching the original behavior.
    return sources
211
+
212
+
213
def model_catalog_sources(config: dict) -> list[dict]:
    """Normalize model-catalog sources, defaulting to the public models.dev feed.

    Entries may live under `sources.models` or a top-level `models` list and
    may be bare URL strings or dicts with at least a string `url` key.
    """
    raw_sources: list = []
    top_sources = config.get("sources", {})
    if isinstance(top_sources, dict):
        raw_sources.extend(top_sources.get("models", []))

    if isinstance(config.get("models"), list):
        raw_sources.extend(config["models"])

    if not raw_sources:
        # Nothing configured: fall back to the canonical models.dev catalog.
        raw_sources = [
            {
                "source_id": "models-dev",
                "url": "https://models.dev/api.json",
                "format": "models.dev",
            }
        ]

    normalized: list[dict] = []
    for entry in raw_sources:
        if isinstance(entry, str):
            normalized.append(
                {"source_id": slugify(entry), "url": entry, "format": "models.dev"}
            )
        elif isinstance(entry, dict) and isinstance(entry.get("url"), str):
            url = entry["url"]
            normalized.append(
                {
                    "source_id": entry.get("source_id", slugify(url)),
                    "url": url,
                    "format": entry.get("format", "models.dev"),
                }
            )
        # Malformed entries are skipped.
    return normalized
249
+
250
+
251
def git_cache_dir(repo_slug: str) -> Path:
    """Map an "owner/repo" slug to its local cache directory under CACHE_ROOT."""
    owner, repo = repo_slug.split("/", 1)
    return CACHE_ROOT / "--".join((owner, repo))
254
+
255
+
256
def ensure_repo(source: dict) -> tuple[Path, str]:
    """Clone or refresh the cached shallow checkout for *source*.

    Returns (cache_dir, head_commit) after syncing the requested branch.
    """
    repo_slug = source["repo"]
    branch = source.get("branch", "main")
    depth = str(source.get("sync_depth", 1))
    repo_dir = git_cache_dir(repo_slug)
    remote_url = f"https://github.com/{repo_slug}.git"
    repo_dir.parent.mkdir(parents=True, exist_ok=True)

    if repo_dir.exists():
        # Refresh path: repoint origin (the repo may have moved), fetch the
        # branch shallowly, then hard-sync the local branch to the remote tip.
        run(["git", "remote", "set-url", "origin", remote_url], cwd=repo_dir)
        run(["git", "fetch", "--depth", depth, "origin", branch], cwd=repo_dir)
        run(["git", "checkout", "-B", branch, f"origin/{branch}"], cwd=repo_dir)
    else:
        clone_cmd = [
            "git",
            "clone",
            "--depth",
            depth,
            "--branch",
            branch,
            remote_url,
            str(repo_dir),
        ]
        run(clone_cmd)

    head = run(["git", "rev-parse", "HEAD"], cwd=repo_dir)
    return repo_dir, head
284
+
285
+
286
def should_keep_candidate(rel_path: str, text: str) -> bool:
    """Decide whether a repo file looks like an agent/skill asset worth cataloging."""
    lower_rel = rel_path.lower()
    as_path = Path(lower_rel)
    # Only markdown/JSON/TOML files can be assets at all.
    if as_path.suffix not in {".md", ".json", ".toml"}:
        return False
    # Well-known asset filenames are always kept.
    known_names = {"skill.md", "skill.toml", "claude.md", "agents.md", "conductor.json"}
    if as_path.name in known_names:
        return True
    # Files under a souls/, skills/, or agents/ directory are kept too.
    if {"souls", "skills", "agents"} & set(lower_rel.split("/")):
        return True
    # Otherwise keep only content that carries agent markers.
    lowered = text.lower()
    if "# agent:" in lowered or "required attached skills" in lowered:
        return True
    return lowered.startswith("---\nname:")
301
+
302
+
303
def classify_asset(rel_path: str, text: str) -> tuple[str, str, str, str]:
    """Classify an asset as (asset_kind, agent_class, deployment_role, compatibility)."""
    lower_rel = rel_path.lower()
    if Path(lower_rel).name in {"skill.md", "skill.toml"} or "/skills/" in lower_rel:
        return ("skill", "not-applicable", "skill-attachment", "skill-only")
    lower_text = text.lower()
    has_skill_marker = (
        "required attached skills" in lower_text or "mixed soul+skill" in lower_text
    )
    if has_skill_marker:
        return ("agent", "mixed-soul-skill", "subagent-preferred", "needs-adaptation")
    return ("agent", "pure-soul", "primary-capable", "needs-adaptation")
314
+
315
+
316
def fetch_json(url: str) -> dict:
    """Download *url* and parse it as a JSON object; reject non-dict payloads."""
    with urllib.request.urlopen(url, timeout=30) as response:
        encoding = response.headers.get_content_charset("utf-8")
        body = response.read().decode(encoding)
    parsed = json.loads(body)
    if isinstance(parsed, dict):
        return parsed
    raise RuntimeError(f"expected JSON object from {url}")
324
+
325
+
326
def build_model_catalog(source: dict, payload: dict) -> dict:
    """Flatten a models.dev-style payload into the local catalog schema.

    *payload* maps provider keys to provider dicts, each with an inner
    `models` mapping.  Providers without a valid `models` dict are skipped,
    and the returned `providers` list / `provider_count` include only
    providers that contributed at least one model.
    """
    providers: list[dict] = []
    model_entries: list[dict] = []
    model_ids: list[str] = []

    for provider_key, provider_data in sorted(payload.items()):
        if not isinstance(provider_data, dict):
            continue
        provider_id = provider_data.get("id")
        if not isinstance(provider_id, str) or not provider_id.strip():
            provider_id = str(provider_key)  # fall back to the mapping key
        provider_name = (
            provider_data.get("name")
            if isinstance(provider_data.get("name"), str)
            else provider_id
        )
        models = provider_data.get("models")
        if not isinstance(models, dict):
            continue

        provider_model_count = 0
        for model_key, model_data in sorted(models.items()):
            if not isinstance(model_data, dict):
                continue
            model_id = model_data.get("id")
            if not isinstance(model_id, str) or not model_id.strip():
                model_id = str(model_key)  # fall back to the mapping key
            normalized_id = f"{provider_id}/{model_id}"
            model_name = (
                model_data.get("name")
                if isinstance(model_data.get("name"), str)
                else model_id
            )
            model_entries.append(
                {
                    "provider": provider_id,
                    "provider_name": provider_name,
                    "model": model_id,
                    "model_name": model_name,
                    "id": normalized_id,
                }
            )
            model_ids.append(normalized_id)
            provider_model_count += 1

        providers.append(
            {
                "id": provider_id,
                "name": provider_name,
                "model_count": provider_model_count,
            }
        )

    # Filter once instead of twice (the original recomputed this list for the
    # count and then again for the providers field).
    active_providers = [p for p in providers if p["model_count"] > 0]
    unique_model_ids = sorted(set(model_ids))
    return {
        "schema_version": "1.0",
        "generated_at": now_iso(),
        "source": {
            "source_id": source["source_id"],
            "url": source["url"],
            "format": source.get("format", "models.dev"),
        },
        "provider_count": len(active_providers),
        "model_count": len(unique_model_ids),
        "providers": active_providers,
        "models": model_entries,
    }
397
+
398
+
399
def sync_model_catalog(source: dict) -> tuple[int, int]:
    """Fetch one model catalog and write catalog.json plus the flat id list.

    Returns (provider_count, model_count).  Only the "models.dev" format is
    supported; anything else raises RuntimeError.
    """
    catalog_format = source.get("format", "models.dev")
    if catalog_format != "models.dev":
        raise RuntimeError(f"unsupported model catalog format: {catalog_format}")

    catalog = build_model_catalog(source, fetch_json(source["url"]))
    ids = [entry["id"] for entry in catalog["models"]]

    MODELS_ROOT.mkdir(parents=True, exist_ok=True)
    write_json(MODEL_CATALOG_PATH, catalog)
    MODEL_IDS_PATH.write_text("\n".join(ids) + "\n", encoding="utf-8")
    return catalog["provider_count"], catalog["model_count"]
412
+
413
+
414
def scan_source(source: dict, repo_dir: Path, commit: str) -> tuple[int, int]:
    """Scan a cached repo for candidate assets and upsert their worker cards.

    Returns (discovered, updated): files that passed the candidate filter and
    cards written.  Fields that may carry human review state (inventory_status,
    risk_tier, flags, ...) are preserved from any existing card.
    """
    include = source.get("include", ["**/*"])
    exclude = source.get("exclude", [])
    repo_slug = source["repo"]
    source_id = source["source_id"]
    discovered = 0
    updated = 0

    for path in sorted(repo_dir.rglob("*")):
        if not path.is_file():
            continue
        rel_path = path.relative_to(repo_dir).as_posix()
        # Apply include globs first, then exclude globs.
        if not match_any(rel_path, include):
            continue
        if exclude and match_any(rel_path, exclude):
            continue

        text = read_text(path)
        if not should_keep_candidate(rel_path, text):
            continue

        discovered += 1
        # Stable slug: source id plus the path (suffix stripped, '/' -> '-').
        candidate_slug = slugify(
            f"{source_id}-{Path(rel_path).with_suffix('').as_posix().replace('/', '-')}"
        )
        worker_path = WORKERS_ROOT / f"{candidate_slug}.json"
        # Load any existing card so curated fields survive the re-sync.
        existing = read_json(worker_path) if worker_path.exists() else {}
        name = infer_name(path, text)
        summary = infer_summary(text, f"Imported from {repo_slug}:{rel_path}")
        asset_kind, agent_class, deployment_role, compatibility = classify_asset(
            rel_path, text
        )

        card = {
            "schema_version": "1.1",
            "candidate_slug": candidate_slug,
            "worker_id": f"{source_id}:{rel_path}",
            "name": name,
            "summary": summary,
            "source_id": f"github:{repo_slug}",
            "source_path": rel_path,
            "source_commit": commit,
            # Fields below default from the fresh classification but keep any
            # previously recorded (possibly human-edited) value.
            "inventory_status": existing.get("inventory_status", "draft"),
            "asset_kind": asset_kind,
            "agent_class": agent_class,
            "deployment_role": deployment_role,
            "host_requirement": existing.get(
                "host_requirement",
                "requires-host-agent" if asset_kind == "skill" else "none",
            ),
            "self_contained": existing.get("self_contained", asset_kind != "skill"),
            "compatibility": existing.get("compatibility", compatibility),
            "risk_tier": existing.get("risk_tier", "unknown"),
            "testing_readiness": existing.get("testing_readiness", "unknown"),
            "description_clarity": existing.get(
                "description_clarity", "needs-clarification"
            ),
            "recommended_hosts": existing.get("recommended_hosts", []),
            "flags": existing.get("flags", []),
            "artifacts": {
                "cached_repo": str(repo_dir.relative_to(HR_HOME)),
                "source_file": rel_path,
                "review_notes": existing.get("artifacts", {}).get("review_notes"),
            },
        }
        write_json(worker_path, card)
        updated += 1

    return discovered, updated
483
+
484
+
485
def main() -> int:
    """Sync all configured GitHub sources and model catalogs.

    Updates worker cards, source-status.json, and inventory/SUMMARY.md,
    prints the summary to stdout, and returns 0.
    """
    config = load_config()
    sources = github_sources(config)
    model_sources = model_catalog_sources(config)
    WORKERS_ROOT.mkdir(parents=True, exist_ok=True)
    MODELS_ROOT.mkdir(parents=True, exist_ok=True)

    # Fresh status skeleton, replaced by the on-disk file when it parses.
    status = {
        "schema_version": "1.1",
        "generated_at": now_iso(),
        "sources": {},
        "model_catalogs": {},
    }
    if STATUS_PATH.exists():
        try:
            status = read_json(STATUS_PATH)
            # Older status files may be missing newer keys; backfill defaults.
            status.setdefault("schema_version", "1.1")
            status.setdefault("sources", {})
            status.setdefault("model_catalogs", {})
        except json.JSONDecodeError:
            pass  # corrupt status file: fall back to the fresh skeleton
    status["generated_at"] = now_iso()

    summary = []
    for source in sources:
        repo_dir, commit = ensure_repo(source)
        discovered, updated = scan_source(source, repo_dir, commit)
        # Keep the prior commit so the status file shows what changed.
        previous = status["sources"].get(source["source_id"], {})
        status["sources"][source["source_id"]] = {
            "repo": source["repo"],
            "branch": source.get("branch", "main"),
            "cached_path": str(repo_dir.relative_to(HR_HOME)),
            "last_checked": now_iso(),
            "commit": commit,
            "discovered_count": discovered,
            "updated_cards": updated,
            "previous_commit": previous.get("commit"),
        }
        summary.append(
            f"- {source['source_id']}: {source['repo']} @ {commit[:12]} ({updated} cards)"
        )

    for model_source in model_sources:
        provider_count, model_count = sync_model_catalog(model_source)
        previous = status["model_catalogs"].get(model_source["source_id"], {})
        status["model_catalogs"][model_source["source_id"]] = {
            "url": model_source["url"],
            "format": model_source.get("format", "models.dev"),
            "last_checked": now_iso(),
            "provider_count": provider_count,
            "model_count": model_count,
            "catalog_path": str(MODEL_CATALOG_PATH.relative_to(HR_HOME)),
            "model_ids_path": str(MODEL_IDS_PATH.relative_to(HR_HOME)),
            "previous_model_count": previous.get("model_count"),
        }
        summary.append(
            f"- {model_source['source_id']}: {model_source['url']} ({model_count} models across {provider_count} providers)"
        )

    write_json(STATUS_PATH, status)
    summary_path = INVENTORY_ROOT / "SUMMARY.md"
    if summary:
        summary_text = "# Inventory Sync Summary\n\n" + "\n".join(summary) + "\n"
    else:
        summary_text = "# Inventory Sync Summary\n\n- No GitHub sources configured in hr-config.json\n"
    summary_path.write_text(summary_text, encoding="utf-8")
    print(summary_text)
    return 0
553
+
554
+
555
+ if __name__ == "__main__":
556
+ try:
557
+ raise SystemExit(main())
558
+ except Exception as exc:
559
+ print(f"sync failed: {exc}", file=sys.stderr)
560
+ raise SystemExit(1)