ultimate-pi 0.2.3 → 0.2.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,294 @@
1
#!/usr/bin/env bash
# harness-cli-verify — install and smoke-test harness global CLI tools; fix common Linux deps.
# Used by /harness-setup Step 2. Exit 0 only if all required tools pass verification.

set -u
set -o pipefail

# Flag parsing: --force reinstalls npm globals even when already on PATH.
FORCE=false
for arg in "$@"; do
  if [ "$arg" = "--force" ]; then
    FORCE=true
  elif [ "$arg" = "-h" ] || [ "$arg" = "--help" ]; then
    echo "Usage: $0 [--force]"
    echo " Installs missing npm globals, fixes Linux browser libs, runs smoke tests."
    exit 0
  else
    echo "Unknown argument: $arg" >&2
    exit 2
  fi
done
24
# Make user-local and npm global binaries reachable for the checks below.
# Guard the npm lookup: when npm is missing, the old unconditional
# "$(npm prefix -g)/bin" appended a bogus ":/bin" entry to PATH.
if npm_bin_prefix="$(npm prefix -g 2>/dev/null)" && [ -n "$npm_bin_prefix" ]; then
  export PATH="${HOME}/.local/bin:${PATH}:${npm_bin_prefix}/bin"
else
  export PATH="${HOME}/.local/bin:${PATH}"
fi
unset npm_bin_prefix

ROOT="$(pwd)"  # only used for the report header
FAILURES=0     # required-tool failures; any > 0 makes the script exit 1
WARNINGS=0     # optional-tool / auth warnings; script still exits 0

# Output helpers. warn/fail also bump their counters.
log() { printf '%s\n' "$*"; }
pass() { log " ✓ $1"; }
warn() { log " ! $1"; WARNINGS=$((WARNINGS + 1)); }
fail() { log " ✗ $1"; FAILURES=$((FAILURES + 1)); }

# True when $1 resolves to an executable command.
have_cmd() { command -v "$1" &>/dev/null; }
37
# Install an npm package globally unless its binary already exists.
# $1 = npm package spec, $2 = expected binary name. $FORCE=true reinstalls.
npm_global_install() {
  local spec bin
  spec="$1"
  bin="$2"
  if have_cmd "$bin" && [ "$FORCE" != true ]; then
    return 0
  fi
  log " installing $spec..."
  npm install -g "$spec" || return 1
  return 0
}
46
# Print the preferred Debian-style package manager (apt-get first, then apt);
# prints nothing when neither exists. Always exits 0.
apt_get_cmd() {
  local candidate
  for candidate in apt-get apt; do
    if command -v "$candidate" 2>/dev/null; then
      return 0
    fi
  done
  return 0
}
50
# Install apt packages when available (WSL/Debian/Ubuntu). Best-effort if sudo works.
# Returns: 0 success or nothing to do, 1 apt install failure,
#          2 no apt/apt-get or no passwordless sudo (caller decides severity).
linux_apt_install() {
  local pkgs=("$@")
  [ ${#pkgs[@]} -eq 0 ] && return 0
  local apt_cmd
  apt_cmd="$(apt_get_cmd)"
  if [ -z "$apt_cmd" ]; then
    return 2
  fi
  # Only queue packages dpkg does not already report as installed.
  # (fix: dropped redundant "2>&1" after "&>/dev/null"; made loop var local)
  local missing=()
  local p
  for p in "${pkgs[@]}"; do
    dpkg -s "$p" &>/dev/null || missing+=("$p")
  done
  [ ${#missing[@]} -eq 0 ] && return 0
  log " installing apt packages: ${missing[*]}"
  # sudo -n: never prompt for a password; fall through to rc 2 instead.
  if sudo -n true 2>/dev/null; then
    sudo DEBIAN_FRONTEND=noninteractive "$apt_cmd" update -qq
    sudo DEBIAN_FRONTEND=noninteractive "$apt_cmd" install -y "${missing[@]}" || {
      warn "apt install failed — run: sudo apt-get install -y ${missing[*]}"
      return 1
    }
  else
    return 2
  fi
  return 0
}
77
# Best-effort install of the system libraries headless Chrome needs on Linux.
# Tries Debian/Ubuntu (apt) first, then RHEL/Fedora (dnf), then Arch (pacman).
# Returns: 0 installed/already present, 1 apt failure (via linux_apt_install),
#          2 no supported package manager or no passwordless sudo.
linux_pkg_install() {
  # Debian/Ubuntu, RHEL/Fedora, or Arch — best-effort system deps for headless Chrome.
  local pkgs_deb=(
    libnss3 libnspr4 libatk1.0-0 libatk-bridge2.0-0 libcups2 libdrm2
    libxkbcommon0 libxcomposite1 libxdamage1 libxfixes3 libxrandr2 libgbm1
    libasound2 libpango-1.0-0 libcairo2 libx11-6 libxcb1 libxext6 fonts-liberation
  )
  if [ -n "$(apt_get_cmd)" ]; then
    linux_apt_install "${pkgs_deb[@]}"
    return $?
  fi
  # dnf/pacman paths require passwordless sudo; install errors are suppressed
  # because this is opportunistic — callers show manual instructions on rc 2.
  if command -v dnf &>/dev/null && sudo -n true 2>/dev/null; then
    sudo dnf install -y nss nspr atk at-spi2-atk cups-libs libdrm libxkbcommon \
      libXcomposite libXdamage libXfixes libXrandr mesa-libgbm alsa-lib \
      pango cairo libX11 libxcb libXext liberation-fonts 2>/dev/null && return 0
    return 2
  fi
  if command -v pacman &>/dev/null && sudo -n true 2>/dev/null; then
    sudo pacman -S --noconfirm nss nspr atk at-spi2-atk cups libdrm libxkbcommon \
      libxcomposite libxdamage libxfixes libxrandr mesa gbm alsa-lib \
      pango cairo libx11 libxcb libxext ttf-liberation 2>/dev/null && return 0
    return 2
  fi
  return 2
}
103
# Playwright / agent-browser Chrome on Linux (libnspr4.so, etc.)
# No-op off Linux. Only the "cannot auto-install" state (rc 2) is propagated;
# an apt failure is treated as success since Chrome may still run.
ensure_linux_browser_deps() {
  local rc
  if [ "$(uname -s)" != "Linux" ]; then
    return 0
  fi
  rc=0
  linux_pkg_install || rc=$?
  [ "$rc" -eq 2 ] && return 2
  return 0
}
114
# Install agent-browser, make sure its Chrome can actually launch on Linux
# (auto-installing system libraries when sudo allows), and seed the default
# .pi/harness/browser.json. npm-install or runtime failures count via fail();
# everything else degrades to warnings.
verify_agent_browser() {
  log "[agent-browser]"
  npm_global_install "agent-browser" "agent-browser" || { fail "agent-browser npm install"; return; }

  # deps_rc=2 means "cannot auto-install OS libs" (no package manager or no sudo).
  local deps_rc=0
  ensure_linux_browser_deps || deps_rc=$?
  if ! agent-browser install 2>/dev/null; then
    warn "agent-browser install (Chrome binary) failed — may still work with system Chrome"
  fi

  # Smoke-launch; capture combined output to detect missing shared libraries.
  local out
  out="$(agent-browser open "about:blank" 2>&1)" || true
  if echo "$out" | grep -qiE 'shared libraries|libnspr4|cannot open shared object'; then
    warn "Chrome missing system libs — installing OS packages"
    if [ "$deps_rc" -eq 2 ] || ! ensure_linux_browser_deps; then
      warn "Could not auto-install OS packages (need sudo). Debian/Ubuntu: sudo apt-get install -y libnss3 libnspr4 libgbm1 libatk1.0-0 libx11-6"
    fi
    if sudo -n true 2>/dev/null; then
      agent-browser install --with-deps 2>/dev/null || true
    else
      warn "Run manually: agent-browser install --with-deps"
    fi
    # Retry the launch after the repair attempt.
    out="$(agent-browser open "about:blank" 2>&1)" || true
  fi

  if echo "$out" | grep -qiE 'shared libraries|libnspr4|Auto-launch failed'; then
    if [ "$deps_rc" -eq 2 ]; then
      # Nothing could be done automatically — manual instructions, warning only.
      warn "agent-browser needs Linux system libs (manual): sudo apt-get install -y libnss3 libnspr4 libgbm1 && agent-browser install --with-deps"
    else
      fail "agent-browser runtime failed after dep install — see stderr above"
    fi
  else
    pass "agent-browser $(agent-browser --version 2>/dev/null | head -1)"
    agent-browser close 2>/dev/null || true
  fi

  # Seed default browser config once; never overwrite an existing file.
  mkdir -p .pi/harness
  if [ ! -f .pi/harness/browser.json ]; then
    echo '{"headless": true, "timeout": 30000, "viewport": {"width": 1280, "height": 720}}' >.pi/harness/browser.json
  fi
}
156
# Install firecrawl-cli and confirm it is usable/authenticated via --status.
verify_firecrawl() {
  log "[firecrawl-cli]"
  if ! npm_global_install "firecrawl-cli@latest" "firecrawl"; then
    fail "firecrawl-cli npm install"
    return
  fi
  if ! firecrawl --status &>/dev/null; then
    fail "firecrawl --status failed (run: firecrawl login)"
    return
  fi
  pass "firecrawl $(firecrawl --status 2>/dev/null | head -1 || echo ok)"
}
166
# Install ctx7 and confirm the binary responds to --help.
verify_ctx7() {
  log "[ctx7]"
  if ! npm_global_install "ctx7@latest" "ctx7"; then
    fail "ctx7 npm install"
    return
  fi
  if ctx7 --help &>/dev/null; then
    pass "ctx7"
  else
    fail "ctx7 --help failed"
  fi
}
176
# Install @beaconbay/ck-search and run a fast grep-mode smoke search
# (avoids downloading an embedding model). An empty result is only a warning.
verify_ck() {
  log "[ck-search]"
  if ! npm_global_install "@beaconbay/ck-search" "ck"; then
    fail "ck-search npm install"
    return
  fi
  if ! ck --version &>/dev/null; then
    fail "ck --version failed"
    return
  fi
  # Fast grep-mode smoke (no embedding model download)
  local target
  if [ -d .pi ]; then
    target=".pi"
  else
    target="."
  fi
  if ck -l 1 "export" "$target" 2>/dev/null | head -1 | grep -q .; then
    pass "ck $(ck --version 2>/dev/null | head -1)"
  elif ck --status "$target" 2>/dev/null | head -1 | grep -q .; then
    pass "ck $(ck --version 2>/dev/null | head -1) (index status ok)"
  else
    warn "ck installed but smoke search empty"
  fi
}
195
# Install @biomejs/biome and confirm the binary reports a version.
verify_biome() {
  log "[biome]"
  if ! npm_global_install "@biomejs/biome" "biome"; then
    fail "biome npm install"
    return
  fi
  if ! biome --version &>/dev/null; then
    fail "biome --version failed"
    return
  fi
  pass "biome $(biome --version 2>/dev/null | head -1)"
}
205
# Install @ast-grep/cli and smoke-test a pattern scan; a working binary is
# enough to pass even when the scan finds nothing.
verify_sg() {
  log "[ast-grep]"
  if ! npm_global_install "@ast-grep/cli@latest" "sg"; then
    fail "ast-grep npm install"
    return
  fi
  if ! sg --version &>/dev/null; then
    fail "sg --version failed"
    return
  fi
  local ver
  ver="$(sg --version 2>/dev/null | head -1)"
  if sg -p 'export' -l ts .pi 2>/dev/null | head -1 | grep -q .; then
    pass "ast-grep $ver"
  else
    # Still pass if binary works
    pass "ast-grep $ver (pattern scan skipped)"
  fi
}
220
# Verify GitHub CLI: auto-install via apt when possible, check auth state, and
# (inside a git repo) pre-create the harness issue labels. gh is optional, so
# problems here are warnings rather than failures.
# (fix: dropped redundant "2>&1" after "&>/dev/null"; hoisted apt_get_cmd)
verify_gh() {
  log "[gh]"
  local apt_cmd
  if ! have_cmd gh; then
    apt_cmd="$(apt_get_cmd)"
    if [ -n "$apt_cmd" ] && sudo -n true 2>/dev/null; then
      log " installing gh via apt..."
      sudo DEBIAN_FRONTEND=noninteractive "$apt_cmd" update -qq
      sudo DEBIAN_FRONTEND=noninteractive "$apt_cmd" install -y gh 2>/dev/null || true
    fi
  fi
  if have_cmd gh && gh --version &>/dev/null; then
    pass "gh $(gh --version 2>/dev/null | head -1)"
    if gh auth status &>/dev/null; then
      pass "gh authenticated"
      # Labels used by the /harness-* commands; idempotent (errors ignored).
      if [ -d .git ]; then
        gh label create "harness" --color "0366d6" --description "Agentic harness managed" 2>/dev/null || true
        gh label create "harness-spec" --color "0e8a16" --description "Hardened specification" 2>/dev/null || true
        gh label create "harness-plan" --color "fbca04" --description "Structured plan generated" 2>/dev/null || true
        gh label create "harness-critic" --color "d73a4a" --description "Adversarial review" 2>/dev/null || true
      fi
    else
      warn "gh not authenticated (run: gh auth login)"
    fi
  else
    warn "gh not installed — https://cli.github.com/ (optional for issue specs)"
  fi
}
247
# Install and verify sentrux. Installed via its upstream install script; the
# version check is required, everything after it is best-effort.
verify_sentrux() {
  log "[sentrux]"
  if ! have_cmd sentrux || [ "$FORCE" = true ]; then
    # NOTE(review): piping a remote script into sh trusts github.com/sentrux at
    # install time — consider pinning a release tag or verifying a checksum.
    if curl -fsSL https://raw.githubusercontent.com/sentrux/sentrux/main/install.sh | sh; then
      export PATH="${HOME}/.local/bin:${PATH}"
    else
      fail "sentrux install script failed"
      return
    fi
  fi
  if ! sentrux --version &>/dev/null; then
    fail "sentrux --version failed"
    return
  fi
  sentrux plugin add-standard 2>/dev/null || warn "sentrux plugin add-standard skipped"
  # Sync project rules only when the repo defines the npm script.
  if [ -f package.json ] && grep -q harness:sentrux-sync package.json 2>/dev/null; then
    npm run harness:sentrux-sync 2>/dev/null || warn "npm run harness:sentrux-sync failed (needs package.json scripts)"
  fi
  if sentrux check . &>/dev/null; then
    pass "sentrux $(sentrux --version 2>/dev/null | head -1)"
  else
    warn "sentrux check . failed (rules may need manifest sync)"
  fi
}
272
# --- main -------------------------------------------------------------------
log "Harness CLI verification (cwd: $ROOT)"
log ""

# Run every tool check in order; each reports via pass/warn/fail counters.
for check in \
  verify_firecrawl \
  verify_ctx7 \
  verify_agent_browser \
  verify_ck \
  verify_biome \
  verify_sg \
  verify_gh \
  verify_sentrux; do
  "$check"
done

log ""
if [ "$FAILURES" -gt 0 ]; then
  log "FAILED: $FAILURES required tool(s). Fix errors above and re-run."
  exit 1
fi
if [ "$WARNINGS" -gt 0 ]; then
  log "OK with $WARNINGS warning(s) (optional tools or auth)."
else
  log "All harness CLI tools verified."
fi
exit 0
@@ -0,0 +1,151 @@
1
#!/usr/bin/env bash
# harness-graphify-bootstrap — install graphify and build graphify-out for the current repo.
# Used by /harness-setup. Do not use deprecated `graphify . --wiki` (invalid CLI).

set -euo pipefail

# --force: rebuild even when a valid graphify-out/graph.json already exists.
FORCE=false
for arg in "$@"; do
  if [ "$arg" = "--force" ]; then
    FORCE=true
  elif [ "$arg" = "-h" ] || [ "$arg" = "--help" ]; then
    echo "Usage: $0 [--force]"
    echo " --force rebuild even when graphify-out/graph.json already exists"
    exit 0
  else
    echo "Unknown argument: $arg" >&2
    exit 2
  fi
done

export PATH="${HOME}/.local/bin:${PATH}"

log() { printf '%s\n' "$*"; }
die() { printf 'error: %s\n' "$*" >&2; exit 1; }
28
# Python 3.10+ is required (graphify is a Python tool).
if ! python3 --version 2>/dev/null | grep -qE 'Python 3\.(1[0-9]|[2-9][0-9])'; then
  die "Python 3.10+ required (got: $(python3 --version 2>/dev/null || echo missing))"
fi
log "✓ Python 3.10+"

# Prefer pip, then pip3; left empty when neither exists (uv may still work).
PIP_CMD=""
if command -v pip &>/dev/null; then
  PIP_CMD=pip
elif command -v pip3 &>/dev/null; then
  PIP_CMD=pip3
fi
38
# True (0) when graphify appears installed by any supported mechanism:
# on PATH, via pip/uv (PyPI package name is "graphifyy"), or as a system package.
# (fix: dropped redundant "2>&1" after "&>/dev/null" on the pip/uv probes)
graphify_installed() {
  command -v graphify &>/dev/null && return 0
  [ -n "$PIP_CMD" ] && $PIP_CMD show graphifyy &>/dev/null && return 0
  command -v uv &>/dev/null && uv pip show graphifyy &>/dev/null && return 0
  command -v uv &>/dev/null && uv tool list 2>/dev/null | grep -qE '(^|[[:space:]])graphifyy([[:space:]]|$)' && return 0
  dpkg -l 2>/dev/null | grep -qE '^ii[[:space:]]+(python3-)?graphify' && return 0
  apt list --installed 2>/dev/null | grep -qiE '(^|/)python3?-?graphify' && return 0
  return 1
}
48
# Install the graphifyy package (provides the `graphify` CLI), preferring uv.
# Dies when no installer is available or the binary is still missing afterwards.
install_graphify() {
  if command -v uv &>/dev/null; then
    uv tool install graphifyy
  else
    if [ -z "$PIP_CMD" ]; then
      die "Need uv, pip, or pip3 to install graphifyy"
    fi
    $PIP_CMD install --user graphifyy
  fi
  # uv and `pip --user` both drop binaries into ~/.local/bin.
  export PATH="${HOME}/.local/bin:${PATH}"
  if ! command -v graphify &>/dev/null; then
    die "graphify not on PATH after install (try: export PATH=\"\$HOME/.local/bin:\$PATH\")"
  fi
}
60
# Best-effort registration of graphify with detected agent platforms.
# Every call is optional; failures are silenced so bootstrap never aborts here.
graphify_platform_install() {
  if ! graphify install --platform pi 2>/dev/null; then
    graphify pi install 2>/dev/null || true
  fi
  [ -d .cursor ] && { graphify cursor install 2>/dev/null || true; }
  { [ -f AGENTS.md ] || [ -d .pi ]; } && { graphify codex install 2>/dev/null || true; }
  return 0
}
70
# Validate graphify-out: graph.json must parse and contain at least one node,
# and GRAPH_REPORT.md must exist. On success prints "nodes=N edges=M".
graph_is_valid() {
  python3 - <<'PY'
import json
import sys
from pathlib import Path

out_dir = Path("graphify-out")
graph_path = out_dir / "graph.json"
report_path = out_dir / "GRAPH_REPORT.md"
if not (graph_path.is_file() and report_path.is_file()):
    sys.exit(1)
payload = json.loads(graph_path.read_text(encoding="utf-8"))
node_list = payload.get("nodes") or []
if not node_list:
    sys.exit(1)
edge_list = payload.get("edges") or payload.get("links") or []
print(f"nodes={len(node_list)} edges={len(edge_list)}")
PY
}
90
# True when any supported LLM provider API key variable is non-empty
# (enables the semantic `graphify extract` pass).
has_llm_key() {
  local key
  for key in GEMINI_API_KEY GOOGLE_API_KEY MOONSHOT_API_KEY ANTHROPIC_API_KEY OPENAI_API_KEY; do
    if [ -n "${!key:-}" ]; then
      return 0
    fi
  done
  return 1
}
96
# --- main -------------------------------------------------------------------

mkdir -p graphify-out ./raw

# Ensure the graphify CLI is present (PyPI package name is "graphifyy").
if ! graphify_installed; then
  log "Installing graphifyy..."
  install_graphify
fi

command -v graphify &>/dev/null || die "graphify CLI not found after install"
log "✓ graphify ($(command -v graphify))"

graphify_platform_install

# Skip the build when a valid graph already exists, unless --force was given.
NEED_BUILD=true
if [ "$FORCE" = false ] && graph_is_valid 2>/dev/null; then
  NEED_BUILD=false
  log "✓ Existing graphify-out/graph.json ($(graph_is_valid))"
fi

export GRAPHIFY_VIZ_NODE_LIMIT="${GRAPHIFY_VIZ_NODE_LIMIT:-200000}"

if [ "$NEED_BUILD" = true ] || [ "$FORCE" = true ]; then
  # Full build: AST pass first, then semantic extract when an LLM key exists.
  log "Building knowledge graph for codebase (graphify update .)..."
  if ! graphify update .; then
    die "graphify update . failed — graphify-out was not created"
  fi
  if ! graph_is_valid 2>/dev/null; then
    die "graphify update . finished but graphify-out/graph.json is missing or empty"
  fi
  log "✓ Code graph built ($(graph_is_valid))"
  if has_llm_key; then
    log "LLM API key detected — running full semantic extract (graphify extract .)..."
    if graphify extract .; then
      if graph_is_valid 2>/dev/null; then
        log "✓ Full graph built ($(graph_is_valid))"
      else
        log "! graphify extract finished but graph validation failed; code-only graph remains"
      fi
    else
      log "! graphify extract failed; keeping code-only graph from graphify update ."
    fi
  else
    log "No LLM API key — code-only graph (AST). Set GEMINI_API_KEY or OPENAI_API_KEY and re-run with --force for semantic extraction."
  fi
else
  log "Refreshing code graph (graphify update .)..."
  graphify update . || die "graphify update . failed"
fi

# Keep the graph fresh on commits when possible.
# (fix: replaced `cmd && log ok || log bad` — which also logged "bad" if the
# success log itself failed — with an explicit if/else)
if [ -d .git ]; then
  if graphify hook install 2>/dev/null; then
    log "✓ graphify git hooks installed"
  else
    log "! graphify hook install skipped or failed"
  fi
else
  log "! Not a git repo — skipped graphify hook install"
fi

log "Graph output: graphify-out/"
ls -la graphify-out/ 2>/dev/null | head -20 || true
@@ -1,95 +0,0 @@
1
- {
2
- "defaultProfile": "auto",
3
- "debug": false,
4
- "classifierModel": "opencode-go/qwen3.6-plus",
5
- "phaseBias": 0.5,
6
- "maxSessionBudget": 1.0,
7
- "largeContextThreshold": 100000,
8
- "rules": [
9
- {
10
- "matches": [
11
- "deploy",
12
- "production",
13
- "release"
14
- ],
15
- "tier": "high",
16
- "reason": "Safety check for production tasks"
17
- },
18
- {
19
- "matches": "changelog",
20
- "tier": "low"
21
- }
22
- ],
23
- "profiles": {
24
- "auto": {
25
- "high": {
26
- "model": "opencode-go/deepseek-v4-pro",
27
- "thinking": "high",
28
- "fallbacks": [
29
- "opencode-go/qwen3.6-plus",
30
- "opencode-go/kimi-k2.6"
31
- ]
32
- },
33
- "medium": {
34
- "model": "opencode-go/qwen3.6-plus",
35
- "thinking": "medium",
36
- "fallbacks": [
37
- "opencode-go/deepseek-v4-pro"
38
- ]
39
- },
40
- "low": {
41
- "model": "opencode-go/deepseek-v4-flash",
42
- "thinking": "low",
43
- "fallbacks": [
44
- "opencode-go/qwen3.5-plus"
45
- ]
46
- }
47
- },
48
- "cheap": {
49
- "high": {
50
- "model": "opencode-go/qwen3.6-plus",
51
- "thinking": "low",
52
- "fallbacks": [
53
- "opencode-go/qwen3.5-plus"
54
- ]
55
- },
56
- "medium": {
57
- "model": "opencode-go/qwen3.5-plus",
58
- "thinking": "off",
59
- "fallbacks": [
60
- "opencode-go/deepseek-v4-flash"
61
- ]
62
- },
63
- "low": {
64
- "model": "opencode-go/deepseek-v4-flash",
65
- "thinking": "off",
66
- "fallbacks": [
67
- "opencode-go/qwen3.5-plus"
68
- ]
69
- }
70
- },
71
- "deep": {
72
- "high": {
73
- "model": "opencode-go/deepseek-v4-pro",
74
- "thinking": "xhigh",
75
- "fallbacks": [
76
- "opencode-go/kimi-k2.6"
77
- ]
78
- },
79
- "medium": {
80
- "model": "opencode-go/kimi-k2.6",
81
- "thinking": "medium",
82
- "fallbacks": [
83
- "opencode-go/deepseek-v4-pro"
84
- ]
85
- },
86
- "low": {
87
- "model": "opencode-go/qwen3.6-plus",
88
- "thinking": "low",
89
- "fallbacks": [
90
- "opencode-go/deepseek-v4-flash"
91
- ]
92
- }
93
- }
94
- }
95
- }