arkaos 2.2.2 → 2.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/VERSION +1 -1
- package/arka/skills/conclave/SKILL.md +194 -0
- package/arka/skills/human-writing/SKILL.md +143 -0
- package/config/agent-memory-template.md +28 -0
- package/config/disc-profiles.json +108 -0
- package/config/disc-team-validator.sh +94 -0
- package/config/gotchas-fixes.json +148 -0
- package/config/profile-template.json +12 -0
- package/config/providers-registry.json +56 -0
- package/config/settings-template.json +42 -0
- package/config/standards/communication.md +64 -0
- package/config/standards/orchestration.md +91 -0
- package/config/statusline-v2.sh +101 -0
- package/config/statusline.sh +139 -0
- package/config/system-prompt.sh +190 -0
- package/dashboard/LICENSE +21 -0
- package/dashboard/README.md +64 -0
- package/dashboard/app/app.config.ts +8 -0
- package/dashboard/app/app.vue +42 -0
- package/dashboard/app/assets/css/main.css +18 -0
- package/dashboard/app/composables/useApi.ts +8 -0
- package/dashboard/app/composables/useDashboard.ts +19 -0
- package/dashboard/app/error.vue +24 -0
- package/dashboard/app/layouts/default.vue +114 -0
- package/dashboard/app/pages/agents/[id].vue +506 -0
- package/dashboard/app/pages/agents/index.vue +225 -0
- package/dashboard/app/pages/budget.vue +132 -0
- package/dashboard/app/pages/commands.vue +180 -0
- package/dashboard/app/pages/health.vue +98 -0
- package/dashboard/app/pages/index.vue +126 -0
- package/dashboard/app/pages/knowledge.vue +729 -0
- package/dashboard/app/pages/personas.vue +597 -0
- package/dashboard/app/pages/settings.vue +146 -0
- package/dashboard/app/pages/tasks.vue +203 -0
- package/dashboard/app/types/index.d.ts +181 -0
- package/dashboard/app/utils/index.ts +7 -0
- package/dashboard/nuxt.config.ts +39 -0
- package/dashboard/package.json +37 -0
- package/dashboard/pnpm-workspace.yaml +7 -0
- package/dashboard/tsconfig.json +10 -0
- package/installer/cli.js +0 -0
- package/installer/index.js +262 -62
- package/knowledge/INDEX.md +34 -0
- package/knowledge/agents-registry.json +254 -0
- package/knowledge/channels-config.json +6 -0
- package/knowledge/commands-keywords.json +466 -0
- package/knowledge/commands-registry.json +2791 -0
- package/knowledge/commands-registry.json.bak +2791 -0
- package/knowledge/ecosystems.json +7 -0
- package/knowledge/obsidian-config.json +112 -0
- package/package.json +10 -6
- package/pyproject.toml +1 -1
- package/scripts/check-version.js +13 -0
- package/scripts/dashboard-api.py +636 -0
- package/scripts/knowledge-index.py +113 -0
- package/scripts/skill_validator.py +217 -0
- package/scripts/start-dashboard.sh +54 -0
- package/scripts/synapse-bridge.py +199 -0
- package/scripts/tools/brand_voice_analyzer.py +192 -0
- package/scripts/tools/dcf_calculator.py +168 -0
- package/scripts/tools/headline_scorer.py +215 -0
- package/scripts/tools/okr_cascade.py +207 -0
- package/scripts/tools/rice_prioritizer.py +230 -0
- package/scripts/tools/saas_metrics.py +234 -0
- package/scripts/tools/seo_checker.py +197 -0
- package/scripts/tools/tech_debt_analyzer.py +206 -0
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""Knowledge Indexer — index markdown files into vector store.
|
|
3
|
+
|
|
4
|
+
Usage:
|
|
5
|
+
python scripts/knowledge-index.py --vault ~/Documents/Personal
|
|
6
|
+
python scripts/knowledge-index.py --dir departments/ --db /tmp/test.db
|
|
7
|
+
python scripts/knowledge-index.py --stats
|
|
8
|
+
python scripts/knowledge-index.py --search "security vulnerability"
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
import argparse
|
|
12
|
+
import json
|
|
13
|
+
import os
|
|
14
|
+
import sys
|
|
15
|
+
from pathlib import Path
|
|
16
|
+
|
|
17
|
+
ARKAOS_ROOT = Path(os.environ.get("ARKAOS_ROOT", Path(__file__).resolve().parent.parent))
|
|
18
|
+
sys.path.insert(0, str(ARKAOS_ROOT))
|
|
19
|
+
|
|
20
|
+
DEFAULT_DB = Path.home() / ".arkaos" / "knowledge.db"
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def main() -> int:
    """CLI entry point for the knowledge indexer.

    Modes (first match wins): --clear, --stats, --search, otherwise index
    the directory given by --vault/--dir (falling back to the vault path
    configured in knowledge/obsidian-config.json).

    Returns:
        Process exit code: 0 on success, 2 when no valid directory is
        available for indexing.
    """
    parser = argparse.ArgumentParser(description="ArkaOS Knowledge Indexer")
    parser.add_argument("--vault", type=str, help="Obsidian vault path to index")
    parser.add_argument("--dir", type=str, help="Directory to index")
    parser.add_argument("--db", type=str, default=str(DEFAULT_DB), help="Vector DB path")
    parser.add_argument("--search", type=str, help="Search query")
    parser.add_argument("--stats", action="store_true", help="Show DB statistics")
    parser.add_argument("--clear", action="store_true", help="Clear all indexed data")
    parser.add_argument("--json", action="store_true", dest="json_output", help="JSON output")
    args = parser.parse_args()

    # Imported lazily so argument parsing / --help work without project deps.
    from core.knowledge.vector_store import VectorStore

    db_path = Path(args.db)
    db_path.parent.mkdir(parents=True, exist_ok=True)
    store = VectorStore(db_path)

    try:
        if args.clear:
            store.clear()
            print("Knowledge base cleared." if not args.json_output else json.dumps({"cleared": True}))
            return 0

        if args.stats:
            stats = store.get_stats()
            if args.json_output:
                print(json.dumps(stats, indent=2))
            else:
                print(f"Chunks: {stats['total_chunks']}")
                print(f"Files: {stats['total_files']}")
                print(f"VSS: {'enabled' if stats['vss_available'] else 'disabled (keyword fallback)'}")
                print(f"DB: {stats['db_path']}")
            return 0

        if args.search:
            results = store.search(args.search, top_k=5)
            if args.json_output:
                print(json.dumps(results, indent=2))
            else:
                if not results:
                    print("No results found.")
                for i, r in enumerate(results, 1):
                    src = Path(r.get("source", "")).name if r.get("source") else "unknown"
                    print(f"\n[{i}] Score: {r['score']:.3f} | {src}")
                    if r.get("heading"):
                        print(f" Heading: {r['heading']}")
                    print(f" {r['text'][:200]}...")
            return 0

        # Index mode
        directory = args.vault or args.dir
        if not directory:
            # Auto-detect vault from the Obsidian config, if present.
            config_path = ARKAOS_ROOT / "knowledge" / "obsidian-config.json"
            if config_path.exists():
                config = json.loads(config_path.read_text())
                vault = config.get("vault_path", "")
                if vault and Path(vault).exists():
                    directory = vault

        if not directory:
            print("No directory specified. Use --vault or --dir.", file=sys.stderr)
            return 2

        if not Path(directory).exists():
            print(f"Directory not found: {directory}", file=sys.stderr)
            return 2

        from core.knowledge.indexer import index_directory

        def progress(current, total, name):
            # Inline progress line; suppressed in JSON mode to keep output clean.
            if not args.json_output:
                print(f"\r [{current}/{total}] {name[:50]}...", end="", flush=True)

        # Was printed unconditionally, which emitted a stray blank stderr
        # line in JSON mode.
        if not args.json_output:
            print(f"Indexing: {directory}", file=sys.stderr)
        result = index_directory(directory, store, on_progress=progress)

        if not args.json_output:
            print()  # newline after progress
            print(f"\nFiles scanned: {result['files_scanned']}")
            print(f"Files indexed: {result['files_indexed']}")
            print(f"Files skipped: {result['files_skipped']}")
            print(f"Chunks created: {result['chunks_created']}")
        else:
            print(json.dumps(result, indent=2))
        return 0
    finally:
        # Previously only closed at the end of index mode; the --clear,
        # --stats and --search paths returned with the store still open.
        store.close()
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
if __name__ == "__main__":
|
|
113
|
+
sys.exit(main())
|
|
@@ -0,0 +1,217 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""ArkaOS v2 Skill Validator — validates SKILL.md files against project standards.
|
|
3
|
+
|
|
4
|
+
Exit codes: 0 = all passed, 1 = warnings only, 2 = failures found.
|
|
5
|
+
"""
|
|
6
|
+
from __future__ import annotations
|
|
7
|
+
|
|
8
|
+
import argparse
|
|
9
|
+
import json
|
|
10
|
+
import os
|
|
11
|
+
import re
|
|
12
|
+
import sys
|
|
13
|
+
from dataclasses import dataclass, field
|
|
14
|
+
from pathlib import Path
|
|
15
|
+
|
|
16
|
+
# Point weights for each validation check: a failed check deducts its weight
# from the skill's starting score of 100 (see SkillResult.deduct).
WEIGHTS = {
    "skill_md_exists": 20, "frontmatter_present": 15, "required_fields": 15,
    "name_format": 10, "allowed_tools_list": 5, "has_h1": 10, "has_h2": 5,
    "line_count": 5, "agent_attribution": 10, "output_section": 5,
}
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
@dataclass
class SkillResult:
    """Validation result for a single skill.

    Starts at a perfect score of 100; checks call deduct() to subtract
    points and record an issue, then finalize() maps the score to a level.
    """
    name: str
    score: int = 100
    level: str = "EXCELLENT"
    issues: list[str] = field(default_factory=list)

    def deduct(self, points: int, reason: str) -> None:
        """Record *reason* and lower the score by *points*, floored at 0."""
        self.issues.append(reason)
        self.score = max(0, self.score - points)

    def finalize(self) -> None:
        """Map the final score onto its level label."""
        for threshold, label in ((90, "EXCELLENT"), (70, "GOOD"), (50, "WARN")):
            if self.score >= threshold:
                self.level = label
                return
        self.level = "FAIL"

    @property
    def passed(self) -> bool:
        """True for EXCELLENT or GOOD results."""
        return self.level in ("EXCELLENT", "GOOD")

    @property
    def is_warning(self) -> bool:
        """True when the result sits in the WARN band."""
        return self.level == "WARN"

    @property
    def is_failure(self) -> bool:
        """True when the result fell below the WARN band."""
        return self.level == "FAIL"

    def to_dict(self) -> dict:
        """Return a JSON-serializable view of this result."""
        return {
            "name": self.name,
            "score": self.score,
            "level": self.level,
            "issues": self.issues,
        }
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def parse_frontmatter(content: str) -> dict[str, str | list[str]] | None:
    """Extract YAML frontmatter between --- markers (no PyYAML needed).

    Supports scalar values, inline "[a, b]" lists, and folded/literal block
    scalars (">" / "|") whose indented continuation lines are joined with
    spaces onto the most recently seen key.

    Returns:
        The parsed key/value mapping, or None when *content* has no
        leading frontmatter block.
    """
    fm_block = re.match(r"^---\s*\n(.*?)\n---", content, re.DOTALL)
    if fm_block is None:
        return None

    parsed: dict[str, str | list[str]] = {}
    for raw_line in fm_block.group(1).split("\n"):
        kv = re.match(r"^(\w[\w-]*)\s*:\s*(.*)", raw_line)
        if kv:
            key = kv.group(1)
            value = kv.group(2).strip()
            inline_list = re.match(r"^\[(.+)]$", value)
            if inline_list:
                parsed[key] = [item.strip() for item in inline_list.group(1).split(",")]
            elif value in (">", "|"):
                # Block scalar marker: the text arrives on continuation lines.
                parsed[key] = ""
            else:
                parsed[key] = value
        elif raw_line.startswith(" ") and parsed:
            # Indented continuation: fold into the most recent scalar key.
            last_key = next(reversed(parsed))
            if isinstance(parsed[last_key], str):
                parsed[last_key] = (parsed[last_key] + " " + raw_line.strip()).strip()
    return parsed
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
def validate_skill(skill_dir: Path) -> SkillResult:
    """Validate a single skill directory against ArkaOS v2 standards.

    Scores the directory's SKILL.md downward from 100 using WEIGHTS. The
    result name is "<department>/<dir-name>", where the department is the
    path segment after "departments" (or "unknown" when absent).
    """
    path_parts = skill_dir.resolve().parts
    try:
        department = path_parts[path_parts.index("departments") + 1]
    except (ValueError, IndexError):
        department = "unknown"
    result = SkillResult(name=f"{department}/{skill_dir.name}")

    skill_md = skill_dir / "SKILL.md"
    if not skill_md.is_file():
        result.deduct(WEIGHTS["skill_md_exists"], "missing SKILL.md")
        result.finalize()
        return result

    content = skill_md.read_text(encoding="utf-8")
    line_count = content.count("\n") + 1

    # --- Frontmatter checks -------------------------------------------------
    fm = parse_frontmatter(content)
    if fm is None:
        # No frontmatter at all: deduct every frontmatter-related weight.
        for key in ("frontmatter_present", "required_fields", "name_format", "allowed_tools_list"):
            result.deduct(WEIGHTS[key], f"no {key.replace('_', ' ')} (no frontmatter)")
    else:
        missing = [f for f in ("name", "description", "allowed-tools") if f not in fm]
        if missing:
            result.deduct(WEIGHTS["required_fields"], f"missing fields: {', '.join(missing)}")
        name_val = fm.get("name", "")
        if isinstance(name_val, str) and not re.match(r"^[a-z][\w-]*/[\w-]+$", name_val):
            result.deduct(WEIGHTS["name_format"], f"name '{name_val}' does not match dept/slug format")
        tools = fm.get("allowed-tools")
        if tools is not None and not isinstance(tools, list):
            result.deduct(WEIGHTS["allowed_tools_list"], "allowed-tools is not a list")

    # --- Content checks (frontmatter stripped) ------------------------------
    body = re.sub(r"^---.*?---\s*", "", content, count=1, flags=re.DOTALL)
    for pattern, weight_key, message in (
        (r"^# ", "has_h1", "no H1 heading found"),
        (r"^## ", "has_h2", "no H2 heading found"),
    ):
        if re.search(pattern, body, re.MULTILINE) is None:
            result.deduct(WEIGHTS[weight_key], message)

    # Line count: full deduction outside 30-200, half outside ideal 60-120.
    if not 30 <= line_count <= 200:
        result.deduct(WEIGHTS["line_count"], f"line count {line_count} outside 30-200 range")
    elif not 60 <= line_count <= 120:
        result.deduct(WEIGHTS["line_count"] // 2, f"line count {line_count} outside ideal 60-120 range")

    for pattern, weight_key, message in (
        (r"^>\s*\*\*Agent:", "agent_attribution", "missing agent attribution (> **Agent:** line)"),
        (r"^##\s+Output", "output_section", "missing ## Output section"),
    ):
        if re.search(pattern, body, re.MULTILINE) is None:
            result.deduct(WEIGHTS[weight_key], message)

    result.finalize()
    return result
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
def discover_skills(root: Path) -> list[Path]:
    """Recursively find all directories containing a SKILL.md, sorted by path."""
    found = []
    for dirpath, _dirnames, filenames in os.walk(root):
        if "SKILL.md" in filenames:
            found.append(Path(dirpath))
    return sorted(found, key=str)
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
def _counts(results: list[SkillResult]) -> tuple[int, int, int]:
|
|
149
|
+
passed = sum(1 for r in results if r.passed)
|
|
150
|
+
warnings = sum(1 for r in results if r.is_warning)
|
|
151
|
+
failures = sum(1 for r in results if r.is_failure)
|
|
152
|
+
return passed, warnings, failures
|
|
153
|
+
|
|
154
|
+
|
|
155
|
+
def print_text(results: list[SkillResult], summary_only: bool = False) -> None:
    """Print human-readable validation output.

    Emits one icon-prefixed line per skill (unless *summary_only*),
    followed by a one-line totals summary.
    """
    if not summary_only:
        for result in results:
            if result.passed:
                icon = "\u2713"
            elif result.is_warning:
                icon = "\u26A0"
            else:
                icon = "\u2717"
            issue_suffix = f" ({', '.join(result.issues)})" if result.issues else ""
            print(f"{icon} {result.name} \u2014 {result.score}/100 {result.level}{issue_suffix}")
        print()
    passed, warnings, failures = _counts(results)
    print(f"Summary: {len(results)} skills validated, {passed} passed, {warnings} warnings, {failures} failures")
|
|
165
|
+
|
|
166
|
+
|
|
167
|
+
def print_json(results: list[SkillResult]) -> None:
    """Print validation results as an indented JSON document on stdout."""
    passed, warnings, failures = _counts(results)
    payload = {
        "total": len(results),
        "passed": passed,
        "warnings": warnings,
        "failures": failures,
        "skills": [result.to_dict() for result in results],
    }
    print(json.dumps(payload, indent=2))
|
|
175
|
+
|
|
176
|
+
|
|
177
|
+
def main() -> int:
    """Entry point for the skill validator CLI.

    Returns:
        0 when every skill passed, 1 when there are warnings but no
        failures, 2 on failures or usage errors.
    """
    parser = argparse.ArgumentParser(
        description="ArkaOS v2 Skill Validator — validate SKILL.md files against project standards.",
        epilog="Exit codes: 0 = all passed, 1 = warnings only, 2 = failures found.",
    )
    parser.add_argument("path", type=Path, help="Skill directory or parent to scan recursively.")
    parser.add_argument("--json", action="store_true", dest="json_output", help="Output as JSON.")
    parser.add_argument("--summary", action="store_true", help="Print only summary totals.")
    args = parser.parse_args()
    target: Path = args.path.resolve()

    if not target.exists():
        print(f"Error: path does not exist: {target}", file=sys.stderr)
        return 2

    # A directory that itself holds SKILL.md is validated directly;
    # otherwise scan it recursively for skill directories.
    if (target / "SKILL.md").is_file():
        skill_dirs = [target]
    elif target.is_dir():
        skill_dirs = discover_skills(target)
    else:
        print(f"Error: {target} is not a directory", file=sys.stderr)
        return 2

    if not skill_dirs:
        print("No SKILL.md files found.", file=sys.stderr)
        return 2

    results = [validate_skill(d) for d in skill_dirs]
    if args.json_output:
        print_json(results)
    else:
        print_text(results, summary_only=args.summary)

    if any(r.is_failure for r in results):
        return 2
    # Honor the documented contract ("1 = warnings only"): previously a
    # warnings-only run fell through and returned 0.
    if any(r.is_warning for r in results):
        return 1
    return 0
|
|
214
|
+
|
|
215
|
+
|
|
216
|
+
if __name__ == "__main__":
|
|
217
|
+
sys.exit(main())
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
#!/usr/bin/env bash
# ArkaOS Dashboard — Start FastAPI + Nuxt servers.
# Writes the API PID (line 1) and UI PID (line 2, if started) to $PID_FILE.
set -euo pipefail

ARKAOS_ROOT="${ARKAOS_ROOT:-$(cd "$(dirname "$0")/.." && pwd)}"
DASHBOARD_DIR="${ARKAOS_ROOT}/dashboard"
PID_FILE="$HOME/.arkaos/dashboard.pid"
API_PORT="${ARKAOS_DASHBOARD_API_PORT:-3334}"
UI_PORT="${ARKAOS_DASHBOARD_UI_PORT:-3333}"

mkdir -p "$HOME/.arkaos"

# Check if already running (liveness is judged by the API PID only)
if [ -f "$PID_FILE" ]; then
    api_pid=$(head -1 "$PID_FILE" 2>/dev/null || echo "")
    ui_pid=$(tail -1 "$PID_FILE" 2>/dev/null || echo "")
    if [ -n "$api_pid" ] && kill -0 "$api_pid" 2>/dev/null; then
        echo "Dashboard already running (API PID: $api_pid, UI PID: $ui_pid)"
        echo " API: http://localhost:${API_PORT}"
        echo " UI: http://localhost:${UI_PORT}"
        exit 0
    fi
    # Stale PID file from a dead process
    rm -f "$PID_FILE"
fi

# Start FastAPI backend
echo "Starting API server on :${API_PORT}..."
ARKAOS_ROOT="$ARKAOS_ROOT" python3 "${ARKAOS_ROOT}/scripts/dashboard-api.py" --port "$API_PORT" &
API_PID=$!

# Start the Nuxt UI: production build if present, dev server as fallback
if [ -d "${DASHBOARD_DIR}/.output" ]; then
    echo "Starting dashboard on :${UI_PORT}..."
    PORT="$UI_PORT" node "${DASHBOARD_DIR}/.output/server/index.mjs" &
    UI_PID=$!
elif [ -d "${DASHBOARD_DIR}/node_modules" ]; then
    echo "Starting dashboard (dev mode) on :${UI_PORT}..."
    cd "$DASHBOARD_DIR" && npx nuxt dev --port "$UI_PORT" &
    UI_PID=$!
else
    echo "Dashboard not built. Run: cd dashboard && npm install && npm run build"
    UI_PID=""
fi

# Save PIDs.
# NOTE: the original used `[ -n "${UI_PID:-}" ] && ...` command lists here;
# under `set -e` those abort the entire script when UI_PID is empty (the
# "dashboard not built" path), skipping the PID file footer and messages.
echo "$API_PID" > "$PID_FILE"
if [ -n "${UI_PID:-}" ]; then
    echo "$UI_PID" >> "$PID_FILE"
fi

echo ""
echo "ArkaOS Dashboard running:"
echo " API: http://localhost:${API_PORT}/api/overview"
if [ -n "${UI_PID:-}" ]; then
    echo " UI: http://localhost:${UI_PORT}"
fi
echo ""
echo "Stop with: kill \$(cat $PID_FILE)"
|
|
@@ -0,0 +1,199 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""Synapse Bridge — standalone script for hook integration.
|
|
3
|
+
|
|
4
|
+
Reads JSON from stdin, runs the 8-layer Synapse engine, outputs JSON to stdout.
|
|
5
|
+
Loads constitution, agent registry, and command registry automatically.
|
|
6
|
+
|
|
7
|
+
Usage:
|
|
8
|
+
echo '{"user_input":"validate my saas idea"}' | python3 scripts/synapse-bridge.py
|
|
9
|
+
echo '{}' | python3 scripts/synapse-bridge.py --layers-only
|
|
10
|
+
|
|
11
|
+
Exit codes: 0 = success, 1 = degraded (partial layers), 2 = error
|
|
12
|
+
"""
|
|
13
|
+
|
|
14
|
+
import argparse
|
|
15
|
+
import json
|
|
16
|
+
import os
|
|
17
|
+
import sys
|
|
18
|
+
import time
|
|
19
|
+
from pathlib import Path
|
|
20
|
+
|
|
21
|
+
# Resolve ArkaOS root from environment or script location
|
|
22
|
+
ARKAOS_ROOT = Path(os.environ.get("ARKAOS_ROOT", Path(__file__).resolve().parent.parent))
|
|
23
|
+
sys.path.insert(0, str(ARKAOS_ROOT))
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def load_constitution(root: Path) -> str:
    """Load and compress constitution rules.

    Returns a compact "LEVEL: id, id | LEVEL: id" summary string built from
    the rule ids of each severity level, or "" when the config file is
    missing or anything fails to import/parse (best-effort by design).
    """
    config_path = root / "config" / "constitution.yaml"
    if not config_path.exists():
        return ""
    try:
        from core.governance.constitution import load_constitution as _load

        const = _load(str(config_path))
        summaries = []
        for level_name in ("non_negotiable", "quality_gate", "must", "should"):
            level = getattr(const, level_name, None)
            if level is None or not hasattr(level, "rules"):
                continue
            rule_ids = [rule.id for rule in level.rules]
            if rule_ids:
                summaries.append(f"{level_name.upper()}: {', '.join(rule_ids)}")
        return " | ".join(summaries) if summaries else ""
    except Exception:
        # Degrade to "no constitution" rather than break hook execution.
        return ""
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def load_agents_registry(root: Path) -> dict:
    """Load the agents registry JSON, keyed by agent id.

    Returns {} when the file is missing, unreadable, or malformed.
    Entries without a truthy "id" are skipped.
    """
    registry_path = root / "knowledge" / "agents-registry-v2.json"
    if not registry_path.exists():
        return {}
    try:
        payload = json.loads(registry_path.read_text())
        return {agent["id"]: agent for agent in payload.get("agents", []) if agent.get("id")}
    except Exception:
        return {}
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def load_commands_registry(root: Path) -> list:
    """Load the commands registry JSON.

    Returns the "commands" list, or [] when the file is missing,
    unreadable, or malformed.
    """
    registry_path = root / "knowledge" / "commands-registry-v2.json"
    if not registry_path.exists():
        return []
    try:
        return json.loads(registry_path.read_text()).get("commands", [])
    except Exception:
        return []
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
def main() -> int:
    """Run the Synapse engine on stdin input and emit JSON context to stdout.

    Returns:
        0 on success, 1 when project imports are unavailable (degraded:
        empty context emitted), 2 on any other engine error.
    """
    parser = argparse.ArgumentParser(description="Synapse Bridge — context injection for hooks")
    parser.add_argument("--layers-only", action="store_true", help="Output layer breakdown instead of context string")
    parser.add_argument("--root", type=str, default=str(ARKAOS_ROOT), help="ArkaOS root directory")
    args = parser.parse_args()

    root = Path(args.root)
    start = time.time()

    # Read input from stdin. `raw` is pre-initialized so the plain-text
    # fallback below cannot hit a NameError when stdin reading itself
    # raises (previously `raw` was only assigned inside the try block).
    raw = ""
    try:
        raw = sys.stdin.read().strip()
        input_data = json.loads(raw) if raw else {}
    except Exception:  # json.JSONDecodeError is already an Exception subclass
        input_data = {}
    if not isinstance(input_data, dict):
        # A bare JSON scalar or array parses fine but has no fields;
        # previously this crashed on input_data.get(...) below.
        input_data = {}

    # Extract context fields
    user_input = input_data.get("user_input", "")
    if not user_input:
        # Claude Code hook format: the full input is the user message
        user_input = raw if raw and not raw.startswith("{") else ""

    # Load registries (each degrades to empty on failure)
    constitution = load_constitution(root)
    agents = load_agents_registry(root)
    commands = load_commands_registry(root)

    # Load vector store (optional — graceful if unavailable)
    vector_store = None
    kb_db = Path(os.environ.get("ARKAOS_KNOWLEDGE_DB", Path.home() / ".arkaos" / "knowledge.db"))
    if kb_db.exists():
        try:
            from core.knowledge.vector_store import VectorStore
            vector_store = VectorStore(kb_db)
        except Exception:
            pass

    # Build engine
    try:
        from core.synapse.engine import create_default_engine
        from core.synapse.layers import PromptContext

        engine = create_default_engine(
            constitution_compressed=constitution,
            commands=commands,
            agents_registry=agents,
            vector_store=vector_store,
        )

        # Current git branch (best-effort, 2s timeout)
        import subprocess
        try:
            branch = subprocess.run(
                ["git", "rev-parse", "--abbrev-ref", "HEAD"],
                capture_output=True, text=True, timeout=2,
                cwd=input_data.get("cwd", os.getcwd()),
            ).stdout.strip()
        except Exception:
            branch = ""

        ctx = PromptContext(
            user_input=user_input,
            cwd=input_data.get("cwd", os.getcwd()),
            git_branch=branch,
            project_name=input_data.get("project_name", ""),
            project_stack=input_data.get("project_stack", ""),
            active_agent=input_data.get("active_agent", ""),
            runtime_id=input_data.get("runtime_id", "claude-code"),
        )

        result = engine.inject(ctx)
        total_ms = int((time.time() - start) * 1000)

        # Record token usage in budget tracker (best-effort)
        try:
            from core.budget.manager import BudgetManager
            budget_mgr = BudgetManager(storage_path=Path.home() / ".arkaos" / "budget-usage.json")
            # Department is carried in the L1 layer tag, e.g. "[dept:eng]"
            dept = ""
            for lr in result.layers:
                if lr.layer_id == "L1" and lr.tag:
                    dept = lr.tag.replace("[dept:", "").replace("]", "")
                    break
            budget_mgr.record_usage(
                agent_id=ctx.active_agent or "system",
                tokens=result.total_tokens_est,
                tier=2,
                department=dept,
                description="synapse-context-injection",
            )
        except Exception:
            pass  # Never block on budget tracking

        if args.layers_only:
            output = {
                "context_string": result.context_string,
                "layers": [
                    {"id": lr.layer_id, "tag": lr.tag, "tokens": lr.tokens_est, "cached": lr.cached, "ms": lr.compute_ms}
                    for lr in result.layers
                ],
                "total_ms": total_ms,
                "total_tokens": result.total_tokens_est,
                "cache_stats": result.cache_stats,
                "layers_skipped": result.layers_skipped,
            }
        else:
            output = {"context_string": result.context_string, "total_ms": total_ms}

        print(json.dumps(output))
        return 0

    except ImportError as e:
        # Python dependencies not available — output minimal context
        sys.stderr.write(f"synapse-bridge: import error: {e}\n")
        print(json.dumps({"context_string": "", "error": str(e)}))
        return 1

    except Exception as e:
        sys.stderr.write(f"synapse-bridge: error: {e}\n")
        print(json.dumps({"context_string": "", "error": str(e)}))
        return 2
|
|
196
|
+
|
|
197
|
+
|
|
198
|
+
if __name__ == "__main__":
|
|
199
|
+
sys.exit(main())
|