aidevops 3.13.95 → 3.14.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,1688 +0,0 @@
1
- #!/usr/bin/env bash
2
- # SPDX-License-Identifier: MIT
3
- # SPDX-FileCopyrightText: 2025-2026 Marcus Quinn
4
- # Migration functions: migrate_* and cleanup_* functions
5
- # Part of aidevops setup.sh modularization (t316.3)
6
-
7
- # Shell safety baseline
8
- set -Eeuo pipefail
9
- IFS=$'\n\t'
10
- # shellcheck disable=SC2154 # rc is assigned by $? in the trap string
11
- trap 'rc=$?; echo "[ERROR] ${BASH_SOURCE[0]}:${LINENO} exit $rc" >&2' ERR
12
- shopt -s inherit_errexit 2>/dev/null || true
13
-
14
- cleanup_deprecated_paths() {
15
- local agents_dir="$HOME/.aidevops/agents"
16
- local cleaned=0
17
-
18
- # List of deprecated paths (add new ones here when reorganizing)
19
- local deprecated_paths=(
20
- # v2.40.7: wordpress moved from root to tools/wordpress
21
- "$agents_dir/wordpress.md"
22
- "$agents_dir/wordpress"
23
- # v2.41.0: build-agent and build-mcp moved from root to tools/
24
- "$agents_dir/build-agent.md"
25
- "$agents_dir/build-agent"
26
- "$agents_dir/build-mcp.md"
27
- "$agents_dir/build-mcp"
28
- # v2.93.3: moltbot renamed to openclaw (formerly clawdbot)
29
- "$agents_dir/tools/ai-assistants/clawdbot.md"
30
- "$agents_dir/tools/ai-assistants/moltbot.md"
31
- # Removed non-OpenCode AI tool docs (focus on OpenCode only)
32
- "$agents_dir/tools/ai-assistants/windsurf.md"
33
- "$agents_dir/tools/ai-assistants/configuration.md"
34
- "$agents_dir/tools/ai-assistants/status.md"
35
- # Removed oh-my-opencode integration (no longer supported)
36
- "$agents_dir/tools/opencode/oh-my-opencode.md"
37
- # t199.8: youtube moved from root to content/distribution/youtube/
38
- "$agents_dir/youtube.md"
39
- "$agents_dir/youtube"
40
- # osgrep removed — disproportionate CPU/disk cost vs rg + LLM comprehension
41
- "$agents_dir/tools/context/osgrep.md"
42
- # GH#5155: scripts archived upstream but orphaned in deployed installs
43
- # (rsync only adds/overwrites, doesn't delete removed files)
44
- "$agents_dir/scripts/pattern-tracker-helper.sh"
45
- "$agents_dir/scripts/quality-sweep-helper.sh"
46
- "$agents_dir/scripts/quality-loop-helper.sh"
47
- "$agents_dir/scripts/review-pulse-helper.sh"
48
- "$agents_dir/scripts/self-improve-helper.sh"
49
- "$agents_dir/scripts/coderabbit-pulse-helper.sh"
50
- "$agents_dir/scripts/coderabbit-task-creator-helper.sh"
51
- "$agents_dir/scripts/audit-task-creator-helper.sh"
52
- "$agents_dir/scripts/batch-cleanup-helper.sh"
53
- "$agents_dir/scripts/coordinator-helper.sh"
54
- "$agents_dir/scripts/finding-to-task-helper.sh"
55
- "$agents_dir/scripts/objective-runner-helper.sh"
56
- "$agents_dir/scripts/ralph-loop-helper.sh"
57
- "$agents_dir/scripts/stale-pr-helper.sh"
58
- )
59
-
60
- for path in "${deprecated_paths[@]}"; do
61
- if [[ -e "$path" ]]; then
62
- rm -rf "$path"
63
- ((++cleaned))
64
- fi
65
- done
66
-
67
- if [[ $cleaned -gt 0 ]]; then
68
- print_info "Cleaned up $cleaned deprecated agent path(s)"
69
- fi
70
-
71
- # Remove oh-my-opencode remnants (no longer supported) — but respect user preference.
72
- # Default: preserve user files. Override with --overwrite flag or settings.json.
73
- # See: ~/.config/aidevops/settings.json { "preserve_oh_my_opencode": true }
74
- local omo_config="$HOME/.config/opencode/oh-my-opencode.json"
75
- if [[ -f "$omo_config" ]]; then
76
- if should_cleanup_oh_my_opencode_artifacts "oh-my-opencode config ($omo_config)"; then
77
- rm -f "$omo_config"
78
- print_info "Removed oh-my-opencode config"
79
- fi
80
- fi
81
-
82
- # Remove osgrep — disproportionate CPU/disk cost (74GB indexes, 4 CPU cores on startup)
83
- # rg + fd + LLM comprehension covers the same ground at zero resource cost
84
- cleanup_osgrep
85
-
86
- # Remove opencode-antigravity-auth — third-party Google OAuth plugin removed from aidevops.
87
- # When present but unresolvable it breaks the OpenCode plugin chain, preventing the
88
- # aidevops pool from injecting tokens and causing "API key missing" errors for all providers.
89
- cleanup_antigravity_plugin
90
-
91
- # Remove oh-my-opencode from plugin array if present — guarded by same setting
92
- local opencode_config
93
- opencode_config=$(find_opencode_config 2>/dev/null) || true
94
- if [[ -n "$opencode_config" ]] && [[ -f "$opencode_config" ]] && command -v jq &>/dev/null; then
95
- if jq -e '.plugin | index("oh-my-opencode")' "$opencode_config" >/dev/null 2>&1; then
96
- if should_cleanup_oh_my_opencode_artifacts "oh-my-opencode plugin entry in OpenCode config"; then
97
- local tmp_file
98
- tmp_file=$(mktemp)
99
- trap 'rm -f "${tmp_file:-}"' RETURN
100
- jq '.plugin = [.plugin[] | select(. != "oh-my-opencode")]' "$opencode_config" >"$tmp_file" && mv "$tmp_file" "$opencode_config"
101
- print_info "Removed oh-my-opencode from OpenCode plugin list"
102
- fi
103
- fi
104
- fi
105
-
106
- return 0
107
- }
108
-
109
- # Backward-compatibility guard for oh-my-opencode cleanup migration.
110
- # setup.sh no longer defines should_overwrite_user_file() in current runtime.
111
- # Preserve user files by default when the legacy helper is unavailable.
112
- should_cleanup_oh_my_opencode_artifacts() {
113
- local description="$1"
114
-
115
- if type should_overwrite_user_file &>/dev/null; then
116
- should_overwrite_user_file "preserve_oh_my_opencode" "$description"
117
- return $?
118
- fi
119
-
120
- return 1
121
- }
122
-
123
- # Remove osgrep completely — one-time cleanup for all aidevops users
124
- # osgrep consumed 74GB disk (lancedb indexes) and 4 CPU cores on startup.
125
- # rg + fd + LLM comprehension covers the same ground at zero resource cost.
126
- cleanup_osgrep() {
127
- local cleaned=false
128
-
129
- # 0. Kill running osgrep processes first (MCP servers, indexers)
130
- # These are Node.js processes already loaded in memory — removing the
131
- # binary and data won't stop them, and they may try to rebuild indexes.
132
- if pgrep -f 'osgrep' >/dev/null; then
133
- print_info "Killing running osgrep processes..."
134
- pkill -f 'osgrep' || true
135
- # Give processes a moment to exit gracefully
136
- sleep 1
137
- # Force-kill any stragglers
138
- pkill -9 -f 'osgrep' || true
139
- cleaned=true
140
- fi
141
-
142
- # 1. Uninstall npm package (global)
143
- if command -v osgrep &>/dev/null; then
144
- print_info "Removing osgrep npm package..."
145
- npm uninstall -g osgrep >/dev/null 2>&1 || true
146
- cleaned=true
147
- fi
148
-
149
- # 2. Remove indexes, models, and config (~74GB)
150
- if [[ -d "$HOME/.osgrep" ]]; then
151
- print_info "Removing osgrep data directory (~74GB indexes)..."
152
- rm -rf "$HOME/.osgrep"
153
- cleaned=true
154
- fi
155
-
156
- # 3. Remove osgrep from OpenCode MCP config
157
- local opencode_config
158
- opencode_config=$(find_opencode_config 2>/dev/null) || true
159
- if [[ -n "$opencode_config" ]] && [[ -f "$opencode_config" ]] && command -v jq &>/dev/null; then
160
- local osgrep_mcp="osgrep"
161
- local osgrep_tool="osgrep_*"
162
- if jq -e --arg mcp "$osgrep_mcp" '.mcp[$mcp]' "$opencode_config" >/dev/null 2>&1; then
163
- local tmp_file
164
- tmp_file=$(mktemp)
165
- if jq --arg mcp "$osgrep_mcp" --arg tool "$osgrep_tool" 'del(.mcp[$mcp]) | del(.tools[$tool])' "$opencode_config" >"$tmp_file" 2>/dev/null; then
166
- mv "$tmp_file" "$opencode_config"
167
- print_info "Removed osgrep from OpenCode MCP config"
168
- else
169
- rm -f "$tmp_file"
170
- fi
171
- cleaned=true
172
- fi
173
- fi
174
-
175
- # 4. Remove osgrep from Claude Code settings
176
- local claude_settings="$HOME/.claude/settings.json"
177
- if [[ -f "$claude_settings" ]] && command -v jq &>/dev/null; then
178
- if jq -e '.mcpServers["osgrep"] // .enabledPlugins["osgrep@osgrep"]' "$claude_settings" >/dev/null 2>&1; then
179
- local tmp_file
180
- tmp_file=$(mktemp)
181
- if jq 'del(.mcpServers["osgrep"]) | del(.enabledPlugins["osgrep@osgrep"])' "$claude_settings" >"$tmp_file" 2>/dev/null; then
182
- mv "$tmp_file" "$claude_settings"
183
- print_info "Removed osgrep from Claude Code settings"
184
- else
185
- rm -f "$tmp_file"
186
- fi
187
- cleaned=true
188
- fi
189
- fi
190
-
191
- # 5. Remove per-repo .osgrep directories in registered repos
192
- local repos_file="$HOME/.config/aidevops/repos.json"
193
- if [[ -f "$repos_file" ]] && command -v jq &>/dev/null; then
194
- while IFS= read -r repo_path; do
195
- [[ -z "$repo_path" ]] && continue
196
- [[ ! -d "$repo_path" ]] && continue
197
- if [[ -d "$repo_path/.osgrep" ]]; then
198
- rm -rf "$repo_path/.osgrep"
199
- fi
200
- done < <(jq -r '.[]' "$repos_file" 2>/dev/null)
201
- fi
202
-
203
- if [[ "$cleaned" == "true" ]]; then
204
- print_success "osgrep removed (freed CPU cores and disk space)"
205
- fi
206
-
207
- return 0
208
- }
209
-
210
- # Remove opencode-antigravity-auth plugin — third-party Google OAuth plugin removed from aidevops.
211
- # When present but unresolvable it breaks the OpenCode plugin chain, preventing the aidevops
212
- # pool from injecting tokens and causing "API key missing" errors for all providers.
213
- # Affects: opencode.json plugin array, Claude Code settings enabledPlugins.
214
- cleanup_antigravity_plugin() {
215
- local cleaned=false
216
- local plugin_id="opencode-antigravity-auth"
217
-
218
- # 1. Remove from OpenCode config plugin array
219
- local opencode_config
220
- opencode_config=$(find_opencode_config 2>/dev/null) || true
221
- if [[ -n "$opencode_config" ]] && [[ -f "$opencode_config" ]] && command -v jq &>/dev/null; then
222
- # Plugin may appear as bare name or with @version suffix
223
- if jq -e --arg p "$plugin_id" '.plugin // [] | map(. | startswith($p)) | any' "$opencode_config" >/dev/null 2>&1; then
224
- local tmp_file
225
- tmp_file=$(mktemp)
226
- if jq --arg p "$plugin_id" '.plugin = [(.plugin // [])[] | select(startswith($p) | not)]' \
227
- "$opencode_config" >"$tmp_file" 2>/dev/null; then
228
- mv "$tmp_file" "$opencode_config"
229
- print_success "Removed ${plugin_id} from OpenCode plugin list"
230
- cleaned=true
231
- else
232
- rm -f "$tmp_file"
233
- fi
234
- fi
235
- fi
236
-
237
- # 2. Remove from Claude Code settings enabledPlugins (if present)
238
- local claude_settings="$HOME/.claude/settings.json"
239
- if [[ -f "$claude_settings" ]] && command -v jq &>/dev/null; then
240
- if jq -e --arg p "$plugin_id" '.enabledPlugins // {} | keys[] | startswith($p)' \
241
- "$claude_settings" >/dev/null 2>&1; then
242
- local tmp_file
243
- tmp_file=$(mktemp)
244
- if jq --arg p "$plugin_id" \
245
- 'del(.enabledPlugins[(.enabledPlugins // {} | keys[] | select(startswith($p)))])' \
246
- "$claude_settings" >"$tmp_file" 2>/dev/null; then
247
- mv "$tmp_file" "$claude_settings"
248
- print_success "Removed ${plugin_id} from Claude Code settings"
249
- cleaned=true
250
- else
251
- rm -f "$tmp_file"
252
- fi
253
- fi
254
- fi
255
-
256
- if [[ "$cleaned" == "false" ]]; then
257
- print_info "${plugin_id} not present — nothing to remove"
258
- fi
259
-
260
- return 0
261
- }
262
-
263
- # Remove stale bun-installed opencode if npm version exists (v2.123.5)
264
- # Prior to v2.123.1, tool-version-check.sh used `bun install -g opencode-ai`.
265
- # This left a binary at ~/.bun/bin/opencode that shadows the npm install
266
- # if ~/.bun/bin is earlier in PATH than the npm bin directory.
267
- cleanup_stale_bun_opencode() {
268
- local bun_opencode="$HOME/.bun/bin/opencode"
269
- local bun_modules="$HOME/.bun/install/global/node_modules/opencode-ai"
270
-
271
- # Only clean up if the stale bun binary exists
272
- if [[ ! -f "$bun_opencode" ]] && [[ ! -d "$bun_modules" ]]; then
273
- return 0
274
- fi
275
-
276
- # Only clean up if npm version is installed (don't leave user without opencode)
277
- local npm_opencode
278
- npm_opencode=$(npm list -g opencode-ai --json 2>/dev/null | grep -c '"opencode-ai"' || true)
279
- if [[ "$npm_opencode" -eq 0 ]]; then
280
- # npm version not installed — install it first, then clean up bun
281
- if command -v npm >/dev/null 2>&1; then
282
- local pin_ver="${OPENCODE_PINNED_VERSION:-latest}"
283
- print_info "Installing opencode via npm (replacing bun install)..."
284
- npm_global_install "opencode-ai@${pin_ver}" >/dev/null 2>&1 || true
285
- else
286
- # Can't install npm version — leave bun version in place
287
- return 0
288
- fi
289
- fi
290
-
291
- # Remove stale bun binary and modules
292
- if [[ -f "$bun_opencode" ]]; then
293
- rm -f "$bun_opencode"
294
- print_info "Removed stale bun opencode binary: $bun_opencode"
295
- fi
296
-
297
- if [[ -d "$bun_modules" ]]; then
298
- rm -rf "$bun_modules"
299
- print_info "Removed stale bun opencode modules: $bun_modules"
300
- fi
301
-
302
- print_success "Cleaned up stale bun opencode install (npm version is canonical)"
303
-
304
- return 0
305
- }
306
-
307
- # Register the setup caller's linked worktree as owned before setup performs
308
- # deployment work that may restart the pulse or trigger cleanup routines.
309
- protect_current_setup_worktree() {
310
- command -v git &>/dev/null || return 0
311
- declare -F register_worktree >/dev/null 2>&1 || return 0
312
-
313
- local current_root=""
314
- local git_dir=""
315
- local common_dir=""
316
- local branch=""
317
-
318
- current_root=$(git -C "${INSTALL_DIR:-.}" rev-parse --show-toplevel 2>/dev/null || true)
319
- [[ -n "$current_root" ]] || return 0
320
- current_root=$(cd "$current_root" 2>/dev/null && pwd -P) || return 0
321
-
322
- git_dir=$(git -C "$current_root" rev-parse --git-dir 2>/dev/null) || return 0
323
- common_dir=$(git -C "$current_root" rev-parse --git-common-dir 2>/dev/null) || return 0
324
- [[ "$git_dir" = /* ]] || git_dir="$current_root/$git_dir"
325
- [[ "$common_dir" = /* ]] || common_dir="$current_root/$common_dir"
326
- git_dir=$(cd "$git_dir" 2>/dev/null && pwd -P) || git_dir=""
327
- common_dir=$(cd "$common_dir" 2>/dev/null && pwd -P) || common_dir=""
328
- [[ -n "$git_dir" && -n "$common_dir" && "$git_dir" != "$common_dir" ]] || return 0
329
-
330
- branch=$(git -C "$current_root" rev-parse --abbrev-ref HEAD 2>/dev/null || true)
331
- [[ -n "$branch" ]] || branch="HEAD"
332
- register_worktree "$current_root" "$branch" --task "setup-noninteractive" --session "setup:${OPENCODE_SESSION_ID:-${CLAUDE_SESSION_ID:-manual}}" >/dev/null 2>&1 || true
333
- print_info "Protected current setup worktree from cleanup: $current_root"
334
- return 0
335
- }
336
-
337
- # t1929: Remove stale contributor/legacy health issue cache files and close
338
- # the corresponding GitHub issues. One-time migration — the root cause
339
- # (API failure in _get_runner_role defaulting to "contributor") is fixed
340
- # by the 4-layer role resolution in stats-functions.sh.
341
- #
342
- # Gated by a flag file so it runs exactly once per install.
343
- cleanup_worktree_entries_in_repos_json() {
344
- # t2250: `find ~/Git -name .aidevops.json` during auto-discovery picks up
345
- # files that exist inside linked worktrees (because worktrees inherit the
346
- # working tree). Before the register_repo guard, each worktree ended up as
347
- # a separate entry in repos.json — confusing tabby-profile-sync, pulse,
348
- # cross-repo tooling, and anything that enumerates `initialized_repos`.
349
- #
350
- # One-shot migration: scan `initialized_repos[].path`, detect entries that
351
- # are linked worktrees (git rev-parse --git-dir != --git-common-dir), and
352
- # remove them. Safe to re-run; a flag file suppresses re-execution once
353
- # the cleanup has been done on this machine.
354
- local flag_file="${HOME}/.aidevops/logs/.migrated-worktree-repos-json-t2250"
355
- [[ -f "$flag_file" ]] && return 0
356
-
357
- local repos_json="${HOME}/.config/aidevops/repos.json"
358
- [[ -f "$repos_json" ]] || return 0
359
-
360
- command -v jq &>/dev/null || return 0
361
- command -v git &>/dev/null || return 0
362
-
363
- local stale_paths=()
364
- local skipped_current_paths=()
365
- local current_worktree=""
366
- local current_physical_dir=""
367
- local path git_dir common_dir resolved_path
368
-
369
- current_physical_dir=$(pwd -P 2>/dev/null || pwd)
370
- current_worktree=$(git rev-parse --show-toplevel 2>/dev/null || true)
371
- if [[ -n "$current_worktree" ]]; then
372
- current_worktree=$(cd "$current_worktree" 2>/dev/null && pwd -P) || current_worktree=""
373
- fi
374
-
375
- while IFS= read -r path; do
376
- [[ -n "$path" && -d "$path" ]] || continue
377
- resolved_path=$(cd "$path" 2>/dev/null && pwd -P) || resolved_path=""
378
- git_dir=$(git -C "$path" rev-parse --git-dir 2>/dev/null) || continue
379
- common_dir=$(git -C "$path" rev-parse --git-common-dir 2>/dev/null) || continue
380
- # Normalise to absolute paths for comparison.
381
- [[ "$git_dir" = /* ]] || git_dir="$path/$git_dir"
382
- [[ "$common_dir" = /* ]] || common_dir="$path/$common_dir"
383
- git_dir=$(cd "$git_dir" 2>/dev/null && pwd -P) || git_dir=""
384
- common_dir=$(cd "$common_dir" 2>/dev/null && pwd -P) || common_dir=""
385
- if [[ -n "$git_dir" && -n "$common_dir" && "$git_dir" != "$common_dir" ]]; then
386
- if [[ -n "$current_worktree" && -n "$resolved_path" && "$resolved_path" == "$current_worktree" ]] \
387
- || [[ -n "$resolved_path" && "$current_physical_dir" == "$resolved_path"/* ]]; then
388
- skipped_current_paths+=("$path")
389
- continue
390
- fi
391
- stale_paths+=("$path")
392
- fi
393
- done < <(jq -r '.initialized_repos[].path // empty' "$repos_json" 2>/dev/null)
394
-
395
- if [[ ${#stale_paths[@]} -gt 0 ]]; then
396
- local temp_file="${repos_json}.tmp"
397
- local paths_json
398
- paths_json=$(printf '%s\n' "${stale_paths[@]}" | jq -R . | jq -s .)
399
- jq --argjson stale "$paths_json" \
400
- '.initialized_repos |= map(select(.path as $p | ($stale | index($p)) | not))' \
401
- "$repos_json" >"$temp_file" && mv "$temp_file" "$repos_json"
402
- print_info "Removed ${#stale_paths[@]} worktree entry/entries from repos.json (t2250):"
403
- local p
404
- for p in "${stale_paths[@]}"; do
405
- print_info " - $p"
406
- done
407
- fi
408
-
409
- if [[ ${#skipped_current_paths[@]} -gt 0 ]]; then
410
- print_warning "Skipped ${#skipped_current_paths[@]} active current worktree entry/entries in repos.json (t2250):"
411
- local skipped_path
412
- for skipped_path in "${skipped_current_paths[@]}"; do
413
- print_warning " - $skipped_path"
414
- done
415
- print_warning "Run setup.sh from the canonical worktree later to finish the one-shot cleanup."
416
- return 0
417
- fi
418
-
419
- mkdir -p "$(dirname "$flag_file")"
420
- date -u +"%Y-%m-%dT%H:%M:%SZ" >"$flag_file"
421
- return 0
422
- }
423
-
424
- cleanup_stale_health_issue_caches() {
425
- local flag_file="${HOME}/.aidevops/logs/.migrated-health-issue-caches-t1929"
426
- [[ -f "$flag_file" ]] && return 0
427
-
428
- local cache_dir="${HOME}/.aidevops/logs"
429
- [[ -d "$cache_dir" ]] || return 0
430
-
431
- local cleaned=0
432
-
433
- # 1. Remove contributor cache files (the duplicates).
434
- # The correct files are health-issue-{user}-supervisor-{slug}.
435
- local contributor_cache
436
- for contributor_cache in "${cache_dir}"/health-issue-*-contributor-*; do
437
- [[ -f "$contributor_cache" ]] || continue
438
- local stale_num
439
- stale_num=$(cat "$contributor_cache" 2>/dev/null || echo "")
440
- # Extract slug from filename: health-issue-{user}-contributor-{slug}
441
- # Best-effort close via gh if available and authenticated
442
- if [[ -n "$stale_num" ]] && command -v gh &>/dev/null && gh auth status &>/dev/null 2>&1; then
443
- local fname
444
- fname=$(basename "$contributor_cache")
445
- local slug_safe="${fname##*-contributor-}"
446
- local supervisor_cache="${cache_dir}/health-issue-${fname%-contributor-*}-supervisor-${slug_safe}"
447
- # Only close if there IS a supervisor counterpart (confirms it's a duplicate)
448
- if [[ -f "$supervisor_cache" ]]; then
449
- # Resolve actual slug from repos.json — the slug-safe format
450
- # (hyphens replacing /) is lossy for owners/repos containing hyphens.
451
- local repos_json="${HOME}/.config/aidevops/repos.json"
452
- local repo_slug=""
453
- if [[ -f "$repos_json" ]]; then
454
- repo_slug=$(jq -r --arg ss "$slug_safe" \
455
- '.initialized_repos[] | select((.slug // "") | gsub("/"; "-") == $ss) | .slug' \
456
- "$repos_json" 2>/dev/null | head -1)
457
- fi
458
- if [[ -n "$repo_slug" ]]; then
459
- # Remove "persistent" label first — a GitHub Actions workflow
460
- # auto-reopens issues with this label (health issues get it on creation).
461
- gh issue edit "$stale_num" --repo "$repo_slug" \
462
- --remove-label persistent 2>/dev/null || true
463
- gh issue close "$stale_num" --repo "$repo_slug" \
464
- --comment "Closing duplicate contributor health issue (t1929 migration)." 2>/dev/null || true
465
- fi
466
- fi
467
- fi
468
- rm -f "$contributor_cache"
469
- cleaned=$((cleaned + 1))
470
- done
471
-
472
- # 2. Remove legacy cache files (no role prefix, pre-role-naming).
473
- # Pattern: health-issue-{user}-{slug} where {slug} has no "supervisor" or "contributor".
474
- local legacy_cache
475
- for legacy_cache in "${cache_dir}"/health-issue-*; do
476
- [[ -f "$legacy_cache" ]] || continue
477
- local fname
478
- fname=$(basename "$legacy_cache")
479
- # Skip files that already have a role prefix (they're the correct format)
480
- [[ "$fname" == *-supervisor-* || "$fname" == *-contributor-* ]] && continue
481
- rm -f "$legacy_cache"
482
- cleaned=$((cleaned + 1))
483
- done
484
-
485
- # Write flag file
486
- mkdir -p "$(dirname "$flag_file")"
487
- date -u +"%Y-%m-%dT%H:%M:%SZ" >"$flag_file"
488
-
489
- if [[ "$cleaned" -gt 0 ]]; then
490
- print_info "Cleaned up $cleaned stale health issue cache file(s) (t1929)"
491
- fi
492
- return 0
493
- }
494
-
495
- # Migrate legacy .agent symlink/directory to .agents in a single repo.
496
- # Args: $1 = repo_path
497
- # Prints: info messages for each migration action
498
- # Returns: 0 on success; sets _migrate_count to number of items migrated
499
- _migrate_repo_agent_symlinks() {
500
- local repo_path="$1"
501
- _migrate_count=0
502
-
503
- # Migrate legacy .agent symlink/directory to .agents real directory
504
- if [[ -L "$repo_path/.agent" ]]; then
505
- rm -f "$repo_path/.agent"
506
- if [[ ! -d "$repo_path/.agents" ]]; then
507
- mkdir -p "$repo_path/.agents"
508
- fi
509
- print_info " Removed legacy .agent symlink in $(basename "$repo_path")"
510
- ((++_migrate_count))
511
- elif [[ -d "$repo_path/.agent" && ! -L "$repo_path/.agent" ]]; then
512
- # Real directory (not symlink) - rename it
513
- # Handle mixed state: .agents may be a legacy symlink blocking the rename
514
- if [[ -L "$repo_path/.agents" ]]; then
515
- rm -f "$repo_path/.agents"
516
- print_info " Removed legacy .agents symlink in $(basename "$repo_path")"
517
- ((++_migrate_count))
518
- fi
519
- if [[ ! -e "$repo_path/.agents" ]]; then
520
- mv "$repo_path/.agent" "$repo_path/.agents"
521
- print_info " Renamed directory: $repo_path/.agent -> .agents"
522
- ((++_migrate_count))
523
- fi
524
- fi
525
-
526
- # Migrate legacy .agents symlink to real directory
527
- if [[ -L "$repo_path/.agents" ]]; then
528
- rm -f "$repo_path/.agents"
529
- mkdir -p "$repo_path/.agents"
530
- print_info " Replaced .agents symlink with real directory in $(basename "$repo_path")"
531
- ((++_migrate_count))
532
- fi
533
-
534
- return 0
535
- }
536
-
537
- # Update .gitignore in a repo: remove legacy entries, add runtime artifact ignores.
538
- # Args: $1 = repo_path
539
- # SKIP in non-interactive mode to avoid leaving uncommitted changes (issue #2570 bug 1).
540
- _migrate_repo_gitignore() {
541
- local repo_path="$1"
542
- local gitignore="$repo_path/.gitignore"
543
-
544
- if [[ "${NON_INTERACTIVE:-false}" == "true" ]]; then
545
- if [[ -f "$gitignore" ]]; then
546
- local needs_gitignore_update=false
547
- local agents_loop_state_pattern="^\.agents/""loop-state/"
548
- if grep -q -e "^\.agents$" -e "^\.agent$" -e "^\.agent/loop-state/" "$gitignore" 2>/dev/null ||
549
- ! grep -q "$agents_loop_state_pattern" "$gitignore" 2>/dev/null; then
550
- needs_gitignore_update=true
551
- fi
552
- if [[ "$needs_gitignore_update" == "true" ]]; then
553
- print_warning " $(basename "$repo_path")/.gitignore needs migration (skipped in non-interactive mode)"
554
- print_info " Run 'aidevops init' in $(basename "$repo_path") or 'setup.sh -i' to apply"
555
- fi
556
- fi
557
- return 0
558
- fi
559
-
560
- if [[ ! -f "$gitignore" ]]; then
561
- return 0
562
- fi
563
-
564
- # Remove legacy bare ".agents" and ".agent" entries (added by older versions)
565
- # .agents/ is now a real committed directory, not a symlink to ignore
566
- if grep -q "^\.agents$" "$gitignore" 2>/dev/null; then
567
- sed -i '' '/^\.agents$/d' "$gitignore" 2>/dev/null ||
568
- sed -i '/^\.agents$/d' "$gitignore" 2>/dev/null || true
569
- print_info " Removed legacy bare .agents from .gitignore in $(basename "$repo_path")"
570
- fi
571
- if grep -q "^\.agent$" "$gitignore" 2>/dev/null; then
572
- sed -i '' '/^\.agent$/d' "$gitignore" 2>/dev/null ||
573
- sed -i '/^\.agent$/d' "$gitignore" 2>/dev/null || true
574
- fi
575
-
576
- # Migrate .agent/loop-state/ -> .agents/loop-state/
577
- if grep -q "^\.agent/loop-state/" "$gitignore" 2>/dev/null; then
578
- sed -i '' 's|^\.agent/loop-state/|.agents/loop-state/|' "$gitignore" 2>/dev/null ||
579
- sed -i 's|^\.agent/loop-state/|.agents/loop-state/|' "$gitignore" 2>/dev/null || true
580
- fi
581
-
582
- # Add runtime artifact ignores if not present
583
- if ! grep -q "^\.agents/loop-state/" "$gitignore" 2>/dev/null; then
584
- # Ensure trailing newline before appending (prevents malformed entries like *.zip.agents/loop-state/)
585
- [[ -s "$gitignore" && $(tail -c1 "$gitignore" | wc -l) -eq 0 ]] && printf '\n' >>"$gitignore"
586
- {
587
- echo ""
588
- echo "# aidevops runtime artifacts"
589
- echo ".agents/loop-state/"
590
- echo ".agents/tmp/"
591
- echo ".agents/memory/"
592
- } >>"$gitignore"
593
- print_info " Added .agents/ runtime artifact ignores in $(basename "$repo_path")"
594
- fi
595
-
596
- return 0
597
- }
598
-
599
- # Scan ~/Git/ for .agent symlinks or directories not covered by repos.json.
600
- # Sets _migrate_count to number of items migrated.
601
- _migrate_git_dir_agent_paths() {
602
- _migrate_count=0
603
-
604
- if [[ ! -d "$HOME/Git" ]]; then
605
- return 0
606
- fi
607
-
608
- while IFS= read -r -d '' agent_path; do
609
- local repo_dir
610
- repo_dir=$(dirname "$agent_path")
611
-
612
- if [[ -L "$agent_path" ]]; then
613
- # Symlink: remove and create real directory
614
- rm -f "$agent_path"
615
- if [[ ! -d "$repo_dir/.agents" ]]; then
616
- mkdir -p "$repo_dir/.agents"
617
- fi
618
- print_info " Removed legacy .agent symlink: $agent_path"
619
- ((++_migrate_count))
620
- elif [[ -d "$agent_path" ]]; then
621
- # Directory: rename to .agents if .agents doesn't exist
622
- if [[ ! -e "$repo_dir/.agents" ]]; then
623
- mv "$agent_path" "$repo_dir/.agents"
624
- print_info " Renamed directory: $agent_path -> .agents"
625
- ((++_migrate_count))
626
- fi
627
- fi
628
- done < <(find "$HOME/Git" -maxdepth 3 -name ".agent" \( -type l -o -type d \) -print0 2>/dev/null)
629
-
630
- return 0
631
- }
632
-
633
- # Update AI assistant config files and session greeting cache that reference .agent/.
634
- # Sets _migrate_count to number of files updated.
635
- _migrate_ai_config_agent_refs() {
636
- _migrate_count=0
637
-
638
- local ai_config_files=(
639
- "$HOME/.config/opencode/agent/AGENTS.md"
640
- "$HOME/.config/Claude/AGENTS.md"
641
- "$HOME/.claude/commands/AGENTS.md"
642
- "$HOME/.opencode/AGENTS.md"
643
- )
644
-
645
- for config_file in "${ai_config_files[@]}"; do
646
- if [[ -f "$config_file" ]]; then
647
- if grep -q '\.agent/' "$config_file" 2>/dev/null; then
648
- sed -i '' 's|\.agent/|.agents/|g' "$config_file" 2>/dev/null ||
649
- sed -i 's|\.agent/|.agents/|g' "$config_file" 2>/dev/null || true
650
- print_info " Updated references in $config_file"
651
- ((++_migrate_count))
652
- fi
653
- fi
654
- done
655
-
656
- # Update session greeting cache if it references .agent/
657
- local greeting_cache="$HOME/.aidevops/cache/session-greeting.txt"
658
- if [[ -f "$greeting_cache" ]]; then
659
- if grep -q '\.agent/' "$greeting_cache" 2>/dev/null; then
660
- sed -i '' 's|\.agent/|.agents/|g' "$greeting_cache" 2>/dev/null ||
661
- sed -i 's|\.agent/|.agents/|g' "$greeting_cache" 2>/dev/null || true
662
- ((++_migrate_count))
663
- fi
664
- fi
665
-
666
- return 0
667
- }
668
-
669
- # Migrate .agent -> .agents in user projects and local config
670
- # v2.104.0: Industry converging on .agents/ folder convention (aligning with AGENTS.md)
671
- # This migrates:
672
- # 1. .agent symlinks in user projects -> .agents
673
- # 2. .agent/loop-state/ -> .agents/loop-state/ in user projects
674
- # 3. .gitignore entries in user projects
675
- # 4. References in user's AI assistant configs
676
- # 5. References in ~/.aidevops/ config files
677
- #
678
- # Guarded by a sentinel file: on a converged system the function does
679
- # a repos.json scan and a find(1) scan of ~/Git, both of which cost
680
- # several seconds per run (t3221).
681
- migrate_agent_to_agents_folder() {
682
- local _sentinel="${HOME}/.aidevops/.migrations/agent-to-agents-done"
683
- if [[ -f "$_sentinel" ]]; then
684
- return 0
685
- fi
686
-
687
- print_info "Checking for .agent -> .agents migration..."
688
-
689
- local migrated=0
690
-
691
- # 1. Migrate .agent symlinks and .gitignore in registered repos
692
- local repos_file="$HOME/.config/aidevops/repos.json"
693
- if [[ -f "$repos_file" ]] && command -v jq &>/dev/null; then
694
- while IFS= read -r repo_path; do
695
- [[ -z "$repo_path" ]] && continue
696
- [[ ! -d "$repo_path" ]] && continue
697
-
698
- _migrate_repo_agent_symlinks "$repo_path"
699
- migrated=$((migrated + _migrate_count))
700
-
701
- _migrate_repo_gitignore "$repo_path"
702
- done < <(jq -r '.initialized_repos[].path' "$repos_file" 2>/dev/null)
703
- fi
704
-
705
- # 2. Scan ~/Git/ for .agent paths not in repos.json
706
- _migrate_git_dir_agent_paths
707
- migrated=$((migrated + _migrate_count))
708
-
709
- # 3. Update AI assistant config files and greeting cache
710
- _migrate_ai_config_agent_refs
711
- migrated=$((migrated + _migrate_count))
712
-
713
- if [[ $migrated -gt 0 ]]; then
714
- print_success "Migrated $migrated .agent -> .agents reference(s)"
715
- else
716
- print_info "No .agent -> .agents migration needed"
717
- fi
718
-
719
- # Write sentinel so subsequent setup runs skip the repos+find scans (t3221)
720
- mkdir -p "$(dirname "$_sentinel")"
721
- date -u +%Y-%m-%dT%H:%M:%SZ >"$_sentinel"
722
- return 0
723
- }
724
-
725
# Remove deprecated MCP and tool entries from a config file.
# Args: $1 = path to tmp config file to modify in-place
# Globals: sets _cleanup_count to the number of entries removed.
# Returns: always 0 (best-effort cleanup; missing keys are skipped).
_remove_deprecated_mcp_entries() {
  local tmp_config="$1"
  _cleanup_count=0

  # MCPs replaced by curl subagents in v2.79.0
  local deprecated_mcps=(
    "auggie-mcp"
    "augment-context-engine"
    "hetzner-webapp"
    "hetzner-brandlight"
    "hetzner-marcusquinn"
    "hetzner-storagebox"
    "ahrefs"
    "serper"
    "dataforseo"
    "hostinger-api"
    "shadcn"
    "repomix"
  )

  # Tool rules to remove (for MCPs that no longer exist)
  local auggie_tool="auggie-mcp_*"
  local augment_tool="augment-context-engine_*"
  local deprecated_tools=(
    "$auggie_tool"
    "$augment_tool"
    "hetzner-*"
    "hostinger-api_*"
    "ahrefs_*"
    "dataforseo_*"
    "serper_*"
    "shadcn_*"
    "repomix_*"
  )

  for mcp in "${deprecated_mcps[@]}"; do
    if jq -e --arg mcp "$mcp" '.mcp[$mcp]' "$tmp_config" >/dev/null 2>&1; then
      jq --arg mcp "$mcp" 'del(.mcp[$mcp])' "$tmp_config" >"${tmp_config}.new" && mv "${tmp_config}.new" "$tmp_config"
      ((++_cleanup_count))
    fi
  done

  # Pass the pattern via --arg (consistent with the .mcp loop above) instead of
  # splicing "$tool" into the jq program text — interpolation would break if a
  # pattern ever contained quoting metacharacters.
  for tool in "${deprecated_tools[@]}"; do
    if jq -e --arg tool "$tool" '.tools[$tool]' "$tmp_config" >/dev/null 2>&1; then
      jq --arg tool "$tool" 'del(.tools[$tool])' "$tmp_config" >"${tmp_config}.new" &&
        mv "${tmp_config}.new" "$tmp_config" &&
        ((++_cleanup_count))
    fi
  done

  # Also remove deprecated tool refs from agents
  local ahrefs_tool="ahrefs_*"
  if jq -e --arg ahrefs_tool "$ahrefs_tool" '(.agent.SEO.tools // {}) | keys[]? | select(. == "dataforseo_*" or . == "serper_*" or . == $ahrefs_tool)' \
    "$tmp_config" >/dev/null 2>&1; then
    jq --arg ahrefs_tool "$ahrefs_tool" 'del(.agent.SEO.tools["dataforseo_*"]) | del(.agent.SEO.tools["serper_*"]) | del(.agent.SEO.tools[$ahrefs_tool])' \
      "$tmp_config" >"${tmp_config}.new" &&
      mv "${tmp_config}.new" "$tmp_config" &&
      ((++_cleanup_count))
  fi

  if jq -e --arg auggie_tool "$auggie_tool" --arg augment_tool "$augment_tool" '(.agent // {}) | to_entries[]? | (.value.tools // {}) | keys[]? | select(. == $auggie_tool or . == $augment_tool)' \
    "$tmp_config" >/dev/null 2>&1; then
    jq --arg auggie_tool "$auggie_tool" --arg augment_tool "$augment_tool" '(.agent // {}) as $agents | reduce ($agents | keys[]) as $name (. ; del(.agent[$name].tools[$auggie_tool]) | del(.agent[$name].tools[$augment_tool]))' \
      "$tmp_config" >"${tmp_config}.new" &&
      mv "${tmp_config}.new" "$tmp_config" &&
      ((++_cleanup_count))
  fi

  return 0
}
798
-
799
# Migrate npx/pipx/bunx MCP commands to full binary paths (faster startup).
# Args: $1 = path to tmp config file to modify in-place
# Globals: sets _cleanup_count to number of entries migrated.
# Calls: resolve_mcp_binary_path (defined elsewhere in setup.sh) to map a
#   binary name to an absolute path; entries are skipped when it returns empty.
# Returns: always 0.
_migrate_mcp_npx_to_binary() {
  local tmp_config="$1"
  _cleanup_count=0

  # Early return if config has no .mcp key — nothing to migrate (GH#14220)
  if ! jq -e '.mcp' "$tmp_config" >/dev/null 2>&1; then
    return 0
  fi

  # Parallel arrays avoid bash associative array issues with @ in package names
  local -a mcp_pkgs=(
    "chrome-devtools-mcp"
    "mcp-server-gsc"
    "playwriter"
    "@steipete/macos-automator-mcp"
    "@steipete/claude-code-mcp"
    "analytics-mcp"
  )
  # mcp_bins[i] is the executable installed by mcp_pkgs[i] (scoped packages
  # install an unscoped binary name).
  local -a mcp_bins=(
    "chrome-devtools-mcp"
    "mcp-server-gsc"
    "playwriter"
    "macos-automator-mcp"
    "claude-code-mcp"
    "analytics-mcp"
  )

  local i
  for i in "${!mcp_pkgs[@]}"; do
    local pkg="${mcp_pkgs[$i]}"
    local bin_name="${mcp_bins[$i]}"
    # Find MCP key using npx/bunx/pipx for this package (single query)
    # Use (.mcp // {}) for null-safety — .mcp may not exist in minimal configs (GH#14220)
    # NOTE(review): $pkg is spliced into a jq regex, so "." and "@" act as
    # regex metacharacters — assumed acceptable for these fixed names.
    local mcp_key
    mcp_key=$(jq -r --arg pkg "$pkg" '(.mcp // {}) | to_entries[]? | select(.value.command != null) | select(.value.command | join(" ") | test("npx.*" + $pkg + "|bunx.*" + $pkg + "|pipx.*run.*" + $pkg)) | .key' "$tmp_config" 2>/dev/null | head -1)

    if [[ -n "$mcp_key" ]]; then
      # Resolve full path for the binary
      local full_path
      full_path=$(resolve_mcp_binary_path "$bin_name")
      if [[ -n "$full_path" ]]; then
        jq --arg k "$mcp_key" --arg p "$full_path" '.mcp[$k].command = [$p]' "$tmp_config" >"${tmp_config}.new" && mv "${tmp_config}.new" "$tmp_config"
        ((++_cleanup_count))
      fi
    fi
  done

  # Migrate outscraper from bash -c wrapper to full binary path
  if jq -e '.mcp.outscraper.command | join(" ") | test("bash.*outscraper")' "$tmp_config" >/dev/null 2>&1; then
    local outscraper_path
    outscraper_path=$(resolve_mcp_binary_path "outscraper-mcp-server")
    if [[ -n "$outscraper_path" ]]; then
      # Source the API key and set it in environment
      # (sourced in a subshell so credentials don't leak into this process)
      local outscraper_key=""
      if [[ -f "$HOME/.config/aidevops/credentials.sh" ]]; then
        # shellcheck source=/dev/null
        outscraper_key=$(source "$HOME/.config/aidevops/credentials.sh" && echo "${OUTSCRAPER_API_KEY:-}")
      fi
      jq --arg p "$outscraper_path" --arg key "$outscraper_key" '.mcp.outscraper.command = [$p] | .mcp.outscraper.environment = {"OUTSCRAPER_API_KEY": $key}' "$tmp_config" >"${tmp_config}.new" && mv "${tmp_config}.new" "$tmp_config"
      ((++_cleanup_count))
    fi
  fi

  return 0
}
867
-
868
# Remove deprecated MCP entries from opencode.json
# These MCPs have been replaced by curl-based subagents (zero context cost)
#
# The one-time cleanup (remove deprecated entries + migrate npx→binary) is
# guarded by a versioned sentinel (t3221). Bump the sentinel version when new
# deprecated MCPs are added to _remove_deprecated_mcp_entries.
# The recurring update_mcp_paths_in_opencode call is NOT guarded — it resolves
# stale binary paths on every run (paths can change after package upgrades).
# Returns: always 0 (silently no-ops when config or jq is missing).
cleanup_deprecated_mcps() {
  local opencode_config
  opencode_config=$(find_opencode_config) || return 0

  if [[ ! -f "$opencode_config" ]]; then
    return 0
  fi

  if ! command -v jq &>/dev/null; then
    return 0
  fi

  # One-time cleanup: remove deprecated MCPs and migrate npx→binary paths.
  # Sentinel version must be bumped whenever new deprecated MCPs are added.
  local _sentinel="${HOME}/.aidevops/.migrations/cleanup-deprecated-mcps-v2"
  if [[ ! -f "$_sentinel" ]]; then
    local cleaned=0
    local tmp_config
    tmp_config=$(mktemp)
    # RETURN trap cleans up the temp copy on any exit path; it is a harmless
    # no-op after a successful mv (the temp path no longer exists then).
    trap 'rm -f "${tmp_config:-}"' RETURN

    cp "$opencode_config" "$tmp_config"

    # Remove deprecated MCP and tool entries
    _remove_deprecated_mcp_entries "$tmp_config"
    cleaned=$((cleaned + _cleanup_count))

    # Migrate npx/pipx commands to full binary paths (faster startup, PATH-independent)
    _migrate_mcp_npx_to_binary "$tmp_config"
    cleaned=$((cleaned + _cleanup_count))

    if [[ $cleaned -gt 0 ]]; then
      # Back up the live config before replacing it with the edited copy.
      create_backup_with_rotation "$opencode_config" "opencode"
      mv "$tmp_config" "$opencode_config"
      print_info "Updated $cleaned MCP entry/entries in opencode.json (using full binary paths)"
    else
      rm -f "$tmp_config"
    fi

    # Write sentinel
    mkdir -p "$(dirname "$_sentinel")"
    date -u +%Y-%m-%dT%H:%M:%SZ >"$_sentinel"
  fi

  # Always resolve bare binary names to full paths (fixes PATH-dependent startup)
  update_mcp_paths_in_opencode

  return 0
}
925
-
926
# Disable rarely-used MCPs globally so they only load on-demand via subagents.
# Keeping them off at startup removes their per-session context cost:
# - playwriter: ~3K tokens - enable via @playwriter subagent
# - gh_grep: ~600 tokens - replaced by @github-search subagent (uses rg/bash)
# - google-analytics-mcp: ~800 tokens - enable via @google-analytics subagent
# - context7: ~800 tokens - enable via @context7 subagent (for library docs lookup)
# Also strips invalid MCP entries left behind by the v2.100.16 bug.
# Returns: always 0 (no-ops when config or jq is unavailable).
disable_ondemand_mcps() {
  local opencode_config
  opencode_config=$(find_opencode_config) || return 0
  [[ -f "$opencode_config" ]] || return 0
  command -v jq &>/dev/null || return 0

  # All MCPs disabled by default — activate on-demand via subagents.
  # This reduces idle process/connection overhead to zero.
  # Note: use exact MCP key names from opencode.json
  local -a ondemand_mcps=(
    "cloudflare-api"
    "context7"
    "gh_grep"
    "google-analytics-mcp"
    "grep_app"
    "playwright"
    "playwriter"
    "shadcn"
    "macos-automator"
    "websearch"
  )

  local disabled=0 changed=0
  local tmp_config
  tmp_config=$(mktemp)
  trap 'rm -f "${tmp_config:-}"' RETURN

  # Edit a scratch copy; the live config is only replaced if something changed.
  cp "$opencode_config" "$tmp_config"

  for mcp in "${ondemand_mcps[@]}"; do
    # Only disable MCPs that exist in the config
    # Don't add fake entries - they break OpenCode's config validation
    jq -e ".mcp[\"$mcp\"]" "$tmp_config" >/dev/null 2>&1 || continue
    local current_enabled
    current_enabled=$(jq -r ".mcp[\"$mcp\"].enabled // \"true\"" "$tmp_config")
    [[ "$current_enabled" == "false" ]] && continue
    jq ".mcp[\"$mcp\"].enabled = false" "$tmp_config" >"${tmp_config}.new" && mv "${tmp_config}.new" "$tmp_config"
    disabled=$((disabled + 1))
  done

  # Remove invalid MCP entries added by v2.100.16 bug
  # These have type "stdio" (invalid - only "local" or "remote" are valid)
  # or command ["echo", "disabled"] which breaks OpenCode
  local -a invalid_mcps=("grep_app" "websearch" "context7")
  for mcp in "${invalid_mcps[@]}"; do
    # Check for invalid type "stdio" or dummy command
    jq -e ".mcp[\"$mcp\"].type == \"stdio\" or .mcp[\"$mcp\"].command[0] == \"echo\"" "$tmp_config" >/dev/null 2>&1 || continue
    jq "del(.mcp[\"$mcp\"])" "$tmp_config" >"${tmp_config}.new" && mv "${tmp_config}.new" "$tmp_config"
    print_info "Removed invalid MCP entry: $mcp"
    changed=1
  done

  # Note: the v2.100.16-17 context7 re-enable migration was removed in v3.1.312.
  # All MCPs are now disabled by default — subagents enable them on-demand.

  if ((disabled > 0 || changed > 0)); then
    create_backup_with_rotation "$opencode_config" "opencode"
    mv "$tmp_config" "$opencode_config"
    if ((disabled > 0)); then
      print_info "Disabled $disabled MCP(s) globally (use subagents to enable on-demand)"
    fi
  else
    rm -f "$tmp_config"
  fi

  return 0
}
1009
-
1010
# Probe `opencode --version` with a timeout to detect config schema errors (GH#22079).
# Returns 0 when the config looks valid, when opencode is not installed, or
# when the probe timed out; returns 1 only when opencode explicitly prints
# "Configuration is invalid".
# Env: AIDEVOPS_OPENCODE_VERSION_TIMEOUT (seconds, default 5) bounds the probe
# so a hung Node.js process cannot stall the non-interactive deploy path.
_validate_opencode_config_schema() {
  command -v opencode &>/dev/null || return 0

  local probe_out probe_rc probe_timeout
  probe_timeout="${AIDEVOPS_OPENCODE_VERSION_TIMEOUT:-5}"
  probe_rc=0

  # Prefer the shared helper sourced from _services.sh (always available when
  # called from setup.sh); fall back to system timeout; last resort: unbounded.
  if declare -F _setup_opencode_timeout_cmd >/dev/null 2>&1; then
    probe_out=$(_setup_opencode_timeout_cmd "$probe_timeout" opencode --version 2>&1) || probe_rc=$?
  elif command -v timeout >/dev/null 2>&1; then
    probe_out=$(timeout "$probe_timeout" opencode --version 2>&1) || probe_rc=$?
  else
    probe_out=$(opencode --version 2>&1) || probe_rc=$?
  fi

  # Exit 0 = healthy; exit 124 = timed out — neither is a config-invalid signal.
  case "$probe_rc" in
    0 | 124) return 0 ;;
  esac
  [[ "$probe_out" == *"Configuration is invalid"* ]] && return 1
  return 0
}
1037
-
1038
# Validate and repair OpenCode config schema
# Fixes common issues from manual editing or AI-generated configs:
# - MCP entries missing "type": "local" field
# - tools entries as objects {} instead of booleans
# If invalid, backs up and regenerates using the generator script
# Returns: always 0 (repairs are best-effort; failures are reported, not fatal).
validate_opencode_config() {
  local opencode_config
  opencode_config=$(find_opencode_config) || return 0

  if [[ ! -f "$opencode_config" ]]; then
    return 0
  fi

  if ! command -v jq &>/dev/null; then
    return 0
  fi

  local needs_repair=false
  local issues=""

  # Check 0: Remove deprecated top-level keys that OpenCode no longer recognizes
  # "compaction" was removed in OpenCode v1.1.x - causes "Unrecognized key" error
  local deprecated_keys=("compaction")
  for key in "${deprecated_keys[@]}"; do
    if jq -e ".[\"$key\"]" "$opencode_config" >/dev/null 2>&1; then
      local tmp_fix
      tmp_fix=$(mktemp)
      # NOTE(review): this RETURN trap is re-set on each loop iteration, so
      # only the last tmp_fix path is covered by it — earlier iterations rely
      # on the explicit mv/rm below. Confirm if multiple keys are ever added.
      trap 'rm -f "${tmp_fix:-}"' RETURN
      if jq "del(.[\"$key\"])" "$opencode_config" >"$tmp_fix" 2>/dev/null; then
        create_backup_with_rotation "$opencode_config" "opencode"
        mv "$tmp_fix" "$opencode_config"
        print_info "Removed deprecated '$key' key from OpenCode config"
      else
        rm -f "$tmp_fix"
      fi
    fi
  done

  # Check 1: MCP entries must have "type" field (usually "local")
  # Invalid: {"mcp": {"foo": {"command": "..."}}}
  # Valid:   {"mcp": {"foo": {"type": "local", "command": "..."}}}
  local mcps_without_type
  mcps_without_type=$(jq -r '.mcp // {} | to_entries[] | select(.value.type == null and .value.command != null) | .key' "$opencode_config" 2>/dev/null | head -5)
  if [[ -n "$mcps_without_type" ]]; then
    needs_repair=true
    issues="${issues}\n  - MCP entries missing 'type' field: $(echo "$mcps_without_type" | tr '\n' ', ' | sed 's/,$//')"
  fi

  # Check 2: tools entries must be booleans, not objects
  # Invalid: {"tools": {"gh_grep": {}}}
  # Valid:   {"tools": {"gh_grep": true}}
  local tools_as_objects
  tools_as_objects=$(jq -r '.tools // {} | to_entries[] | select(.value | type == "object") | .key' "$opencode_config" 2>/dev/null | head -5)
  if [[ -n "$tools_as_objects" ]]; then
    needs_repair=true
    issues="${issues}\n  - tools entries as objects instead of booleans: $(echo "$tools_as_objects" | tr '\n' ', ' | sed 's/,$//')"
  fi

  # Check 3: bounded opencode --version probe to catch other schema issues (GH#22079).
  if ! _validate_opencode_config_schema; then
    needs_repair=true
    issues="${issues}\n  - OpenCode reports invalid configuration"
  fi

  if [[ "$needs_repair" == "true" ]]; then
    # NOTE(review): $issues embeds literal "\n" sequences — assumes
    # print_warning expands backslash escapes (echo -e style); confirm.
    print_warning "OpenCode config has schema issues:$issues"

    # Backup the invalid config
    create_backup_with_rotation "$opencode_config" "opencode"
    print_info "Backed up invalid config"

    # Remove the invalid config so generator creates fresh one
    rm -f "$opencode_config"

    # Regenerate using the generator script
    local generator_script="$HOME/.aidevops/agents/scripts/generate-opencode-agents.sh"
    if [[ -x "$generator_script" ]]; then
      print_info "Regenerating OpenCode config with correct schema..."
      if "$generator_script" >/dev/null 2>&1; then
        print_success "OpenCode config regenerated successfully"
      else
        print_warning "Config regeneration failed - run manually: $generator_script"
      fi
    else
      print_warning "Generator script not found - run setup.sh again after agents are deployed"
    fi
  fi

  return 0
}
1128
-
1129
# Migrate mcp-env.sh to credentials.sh (v2.105.0)
# Renames the credential file (root-level and per-tenant) and creates a
# backward-compatible symlink at the old name; also rewrites shell rc files
# that source the old path.
# Returns: always 0.
migrate_mcp_env_to_credentials() {
  local config_dir="$HOME/.config/aidevops"
  local old_file="$config_dir/mcp-env.sh"
  local new_file="$config_dir/credentials.sh"
  local migrated=0

  # Migrate root-level mcp-env.sh -> credentials.sh
  # The -L guard skips files that are already compatibility symlinks.
  if [[ -f "$old_file" && ! -L "$old_file" ]]; then
    if [[ ! -f "$new_file" ]]; then
      mv "$old_file" "$new_file"
      chmod 600 "$new_file"
      ((++migrated))
      print_info "Renamed mcp-env.sh to credentials.sh"
    fi
    # Create backward-compatible symlink
    if [[ ! -L "$old_file" ]]; then
      ln -sf "credentials.sh" "$old_file"
      print_info "Created symlink mcp-env.sh -> credentials.sh"
    fi
  fi

  # Migrate tenant-level mcp-env.sh -> credentials.sh
  local tenants_dir="$config_dir/tenants"
  if [[ -d "$tenants_dir" ]]; then
    for tenant_dir in "$tenants_dir"/*/; do
      [[ -d "$tenant_dir" ]] || continue
      local tenant_old="$tenant_dir/mcp-env.sh"
      local tenant_new="$tenant_dir/credentials.sh"
      if [[ -f "$tenant_old" && ! -L "$tenant_old" ]]; then
        if [[ ! -f "$tenant_new" ]]; then
          mv "$tenant_old" "$tenant_new"
          chmod 600 "$tenant_new"
          ((++migrated))
        fi
        if [[ ! -L "$tenant_old" ]]; then
          ln -sf "credentials.sh" "$tenant_old"
        fi
      fi
    done
  fi

  # Update shell rc files that source the old path.
  # BSD sed needs `-i ''`; GNU sed rejects that form, so the first invocation
  # fails silently on Linux and the plain `-i` fallback runs instead.
  for rc_file in "$HOME/.zshrc" "$HOME/.bashrc" "$HOME/.bash_profile"; do
    if [[ -f "$rc_file" ]] && grep -q 'source.*mcp-env\.sh' "$rc_file" 2>/dev/null; then
      # shellcheck disable=SC2016
      sed -i '' 's|source.*\.config/aidevops/mcp-env\.sh|source "$HOME/.config/aidevops/credentials.sh"|g' "$rc_file" 2>/dev/null ||
        sed -i 's|source.*\.config/aidevops/mcp-env\.sh|source "$HOME/.config/aidevops/credentials.sh"|g' "$rc_file" 2>/dev/null || true
      ((++migrated))
      print_info "Updated $rc_file to source credentials.sh"
    fi
  done

  if [[ $migrated -gt 0 ]]; then
    print_success "Migrated $migrated mcp-env.sh -> credentials.sh reference(s)"
  fi

  return 0
}
1189
-
1190
# Migrate old config-backups to new per-type backup structure
# This runs once to clean up the legacy backup directory.
# Globals: BACKUP_KEEP_COUNT — number of recent backups to retain.
# Returns: always 0.
migrate_old_backups() {
  local old_backup_dir="$HOME/.aidevops/config-backups"

  # Skip if old directory doesn't exist
  if [[ ! -d "$old_backup_dir" ]]; then
    return 0
  fi

  # Count old backups
  local old_count
  old_count=$(find "$old_backup_dir" -maxdepth 1 -type d -name "20*" 2>/dev/null | wc -l | tr -d ' ')

  if [[ $old_count -eq 0 ]]; then
    # Empty directory, just remove it
    rm -rf "$old_backup_dir"
    return 0
  fi

  print_info "Migrating $old_count old backups to new structure..."

  # Create new backup directories
  mkdir -p "$HOME/.aidevops/agents-backups"
  mkdir -p "$HOME/.aidevops/opencode-backups"

  # Move the most recent backups (up to BACKUP_KEEP_COUNT) to new locations.
  # Old backups contained mixed content, so we'll just keep the newest ones as agents backups.
  # Fix: iterate line-by-line instead of word-splitting $(find ...) — the
  # unquoted expansion broke on paths containing spaces (e.g. $HOME with a
  # space on macOS). Paths cannot contain newlines here, so line-delimited
  # reading is safe.
  local migrated=0
  local backup
  while IFS= read -r backup; do
    [[ -n "$backup" ]] || continue
    local backup_name
    backup_name=$(basename "$backup")

    # Check if it contains agents folder (most common)
    if [[ -d "$backup/agents" ]]; then
      mv "$backup" "$HOME/.aidevops/agents-backups/$backup_name"
      ((++migrated))
    # Check if it contains opencode.json
    elif [[ -f "$backup/opencode.json" ]]; then
      mv "$backup" "$HOME/.aidevops/opencode-backups/$backup_name"
      ((++migrated))
    fi
  done < <(find "$old_backup_dir" -maxdepth 1 -type d -name "20*" 2>/dev/null | sort -r | head -n "$BACKUP_KEEP_COUNT")

  # Remove remaining old backups and the old directory
  rm -rf "$old_backup_dir"

  if [[ $migrated -gt 0 ]]; then
    print_success "Migrated $migrated recent backups, removed $((old_count - migrated)) old backups"
  else
    print_info "Cleaned up $old_count old backups"
  fi

  return 0
}
1245
-
1246
# Migrate loop state from .claude/ to .agents/loop-state/ in user projects
# Also migrates from legacy .agent/loop-state/ to .agents/loop-state/
# The migration is non-destructive: moves files, doesn't delete originals until confirmed
#
# Guarded by a sentinel file: on a converged system the function does a
# find(1) scan of ~/Git which costs several seconds per run (t3221).
# Returns: always 0.
migrate_loop_state_directories() {
  local _sentinel="${HOME}/.aidevops/.migrations/loop-state-dirs-migrated"
  if [[ -f "$_sentinel" ]]; then
    return 0
  fi

  print_info "Checking for legacy loop state directories..."

  local migrated=0
  local git_dirs=()

  # Find Git repositories in common locations
  # Check ~/Git/ and current directory's parent
  for search_dir in "$HOME/Git" "$(dirname "$(pwd)")"; do
    if [[ -d "$search_dir" ]]; then
      while IFS= read -r -d '' git_dir; do
        git_dirs+=("$(dirname "$git_dir")")
      done < <(find "$search_dir" -maxdepth 3 -type d -name ".git" -print0 2>/dev/null)
    fi
  done

  for repo_dir in "${git_dirs[@]}"; do
    local old_state_dir="$repo_dir/.claude"
    local legacy_state_dir="$repo_dir/.agent/loop-state"
    local new_state_dir="$repo_dir/.agents/loop-state"

    # Migrate from .claude/ (oldest legacy path)
    if [[ -d "$old_state_dir" ]]; then
      local has_loop_state=false
      if [[ -f "$old_state_dir/ralph-loop.local.state" ]] ||
        [[ -f "$old_state_dir/loop-state.json" ]] ||
        [[ -d "$old_state_dir/receipts" ]]; then
        has_loop_state=true
      fi

      if [[ "$has_loop_state" == "true" ]]; then
        print_info "Found legacy loop state in: $repo_dir/.claude/"
        mkdir -p "$new_state_dir"

        for file in ralph-loop.local.state loop-state.json re-anchor.md guardrails.md; do
          if [[ -f "$old_state_dir/$file" ]]; then
            mv "$old_state_dir/$file" "$new_state_dir/"
            print_info " Moved $file"
          fi
        done

        if [[ -d "$old_state_dir/receipts" ]]; then
          mv "$old_state_dir/receipts" "$new_state_dir/"
          print_info " Moved receipts/"
        fi

        # Only remove .claude/ when nothing unrelated remains in it.
        local remaining
        remaining=$(find "$old_state_dir" -mindepth 1 -maxdepth 1 2>/dev/null | wc -l | tr -d ' ')

        if [[ "$remaining" -eq 0 ]]; then
          rmdir "$old_state_dir" 2>/dev/null && print_info " Removed empty .claude/"
        else
          print_warning " .claude/ has other files, not removing"
        fi

        ((++migrated))
      fi
    fi

    # Migrate from .agent/loop-state/ (v2.51.0-v2.103.0 path) to .agents/loop-state/
    if [[ -d "$legacy_state_dir" ]] && [[ "$legacy_state_dir" != "$new_state_dir" ]]; then
      print_info "Found legacy loop state in: $repo_dir/.agent/loop-state/"
      mkdir -p "$new_state_dir"

      # Move all files from old to new
      if [[ -n "$(ls -A "$legacy_state_dir" 2>/dev/null)" ]]; then
        cp -R "$legacy_state_dir"/* "$new_state_dir/" 2>/dev/null || true
        rm -rf "$legacy_state_dir"
        # Fix: message previously said ".agents/ -> .agents/"; the source of
        # this migration is the singular .agent/loop-state/ path.
        print_info " Migrated .agent/loop-state/ -> .agents/loop-state/"
        ((++migrated))
      fi
    fi

    # Update .gitignore if needed
    local gitignore="$repo_dir/.gitignore"
    if [[ -f "$gitignore" ]]; then
      if ! grep -q "^\.agents/loop-state/" "$gitignore" 2>/dev/null; then
        # Ensure trailing newline before appending (prevents malformed entries)
        [[ -s "$gitignore" && $(tail -c1 "$gitignore" | wc -l) -eq 0 ]] && printf '\n' >>"$gitignore"
        echo ".agents/loop-state/" >>"$gitignore"
        print_info " Added .agents/loop-state/ to .gitignore"
      fi
    fi
  done

  if [[ $migrated -gt 0 ]]; then
    print_success "Migrated loop state in $migrated repositories"
  else
    print_info "No legacy loop state directories found"
  fi

  # Write sentinel so subsequent setup runs skip the find scan (t3221)
  mkdir -p "$(dirname "$_sentinel")"
  date -u +%Y-%m-%dT%H:%M:%SZ >"$_sentinel"
  return 0
}
1353
-
1354
# Migrate pulse-repos.json into repos.json
# pulse-repos.json had slug/path/priority for supervisor-managed repos.
# Now repos.json is the single source of truth with slug, pulse, and priority fields.
# On success the old file is renamed to pulse-repos.json.migrated so it is
# never re-read; it is kept as a backup.
# Returns: always 0.
migrate_pulse_repos_to_repos_json() {
  local pulse_file="$HOME/.config/aidevops/pulse-repos.json"
  local repos_file="$HOME/.config/aidevops/repos.json"

  if [[ ! -f "$pulse_file" ]]; then
    return 0
  fi

  if ! command -v jq &>/dev/null; then
    print_warning "jq not installed — skipping pulse-repos.json migration"
    return 0
  fi

  if [[ ! -f "$repos_file" ]]; then
    print_warning "repos.json not found — skipping pulse-repos.json migration"
    return 0
  fi

  local migrated=0
  local slug repo_path priority

  # Read each entry from pulse-repos.json and merge into repos.json
  # Note: avoid 'path' as variable name — in zsh, lowercase 'path' is tied to PATH array
  while IFS=$'\t' read -r slug repo_path priority; do
    [[ -z "$slug" ]] && continue
    # Expand ~ in path
    local expanded_path="${repo_path/#\~/$HOME}"

    # Check if this repo exists in repos.json by path
    # NOTE(review): --arg passes priority as a JSON string even if the source
    # value was a number — confirm downstream consumers accept string priority.
    if jq -e --arg path "$expanded_path" '.initialized_repos[] | select(.path == $path)' "$repos_file" &>/dev/null; then
      # Update existing entry: add slug, pulse, priority
      local temp_file="${repos_file}.tmp"
      jq --arg path "$expanded_path" --arg slug "$slug" --arg priority "$priority" \
        '(.initialized_repos[] | select(.path == $path)) |= . + {slug: $slug, pulse: true, priority: $priority}' \
        "$repos_file" >"$temp_file" && mv "$temp_file" "$repos_file"
      ((++migrated))
    else
      # Add new entry from pulse-repos.json
      local temp_file="${repos_file}.tmp"
      jq --arg path "$expanded_path" --arg slug "$slug" --arg priority "$priority" \
        '.initialized_repos += [{path: $path, slug: $slug, pulse: true, priority: $priority}]' \
        "$repos_file" >"$temp_file" && mv "$temp_file" "$repos_file"
      ((++migrated))
    fi
  done < <(jq -r '(.repos? // .)[] | [.slug, .path, .priority] | @tsv' "$pulse_file" 2>/dev/null)

  if [[ $migrated -gt 0 ]]; then
    print_success "Migrated $migrated repo(s) from pulse-repos.json into repos.json"
    # Rename old file so it's not read again, but keep as backup
    mv "$pulse_file" "${pulse_file}.migrated"
    print_info "Renamed pulse-repos.json to pulse-repos.json.migrated"
  fi

  return 0
}
1412
-
1413
# Migrate orphaned supervisor files from deployed installs (GH#5147)
# After the supervisor-to-pulse-wrapper migration (PR #2291, PR #2475), and
# subsequent removal of archived dirs from the repo, deployed installs may
# retain orphaned files that rsync doesn't clean up:
#   - ~/.aidevops/agents/scripts/supervisor-helper.sh (old entry point)
#   - ~/.aidevops/agents/scripts/supervisor/ (old module directory)
#   - ~/.aidevops/agents/scripts/archived/ (removed from repo)
#   - ~/.aidevops/agents/scripts/supervisor-archived/ (removed from repo)
#   - cron/launchd entries invoking supervisor-helper.sh pulse
# This migration removes all orphaned files and rewrites scheduler entries.
# Returns: always 0.
migrate_orphaned_supervisor() {
  local agents_dir="$HOME/.aidevops/agents"
  local scripts_dir="$agents_dir/scripts"
  local cleaned=0

  # 1. Remove orphaned supervisor-helper.sh from deployed scripts
  if [[ -f "$scripts_dir/supervisor-helper.sh" ]]; then
    rm -f "$scripts_dir/supervisor-helper.sh"
    print_info "Removed orphaned supervisor-helper.sh from deployed scripts"
    ((++cleaned))
  fi

  # 2. Remove orphaned supervisor/ module directory
  # Only delete when it contains known supervisor modules — a bare directory
  # of that name might belong to something else.
  if [[ -d "$scripts_dir/supervisor" && ! -L "$scripts_dir/supervisor" ]]; then
    if [[ -f "$scripts_dir/supervisor/pulse.sh" ]] ||
      [[ -f "$scripts_dir/supervisor/dispatch.sh" ]] ||
      [[ -f "$scripts_dir/supervisor/_common.sh" ]]; then
      rm -rf "$scripts_dir/supervisor"
      print_info "Removed orphaned supervisor/ module directory from deployed scripts"
      ((++cleaned))
    fi
  fi

  # 3. Remove archived dirs no longer shipped in repo
  if [[ -d "$scripts_dir/archived" ]]; then
    rm -rf "$scripts_dir/archived"
    print_info "Removed orphaned archived/ directory from deployed scripts"
    ((++cleaned))
  fi
  if [[ -d "$scripts_dir/supervisor-archived" ]]; then
    rm -rf "$scripts_dir/supervisor-archived"
    print_info "Removed orphaned supervisor-archived/ directory from deployed scripts"
    ((++cleaned))
  fi

  # 4. Migrate cron entries from supervisor-helper.sh to pulse-wrapper.sh
  # (was mislabeled "# 3." — duplicate step number)
  # Old pattern: */2 * * * * ... supervisor-helper.sh pulse ...
  # New pattern: already installed by setup.sh's pulse section
  # Strategy: remove old entries; setup.sh will install the new one if pulse is enabled
  local current_crontab
  current_crontab=$(crontab -l 2>/dev/null) || current_crontab=""
  if echo "$current_crontab" | grep -qF "supervisor-helper.sh"; then
    # Remove all cron lines referencing supervisor-helper.sh
    local new_crontab
    new_crontab=$(echo "$current_crontab" | grep -v "supervisor-helper.sh")
    if [[ -n "$new_crontab" ]]; then
      printf '%s\n' "$new_crontab" | crontab - || true
    else
      # All entries were supervisor-helper.sh — remove crontab entirely
      crontab -r || true
    fi
    print_info "Removed orphaned supervisor-helper.sh cron entries"
    print_info " pulse-wrapper.sh will be installed by setup.sh if supervisor pulse is enabled"
    ((++cleaned))
  fi

  # 5. Migrate launchd entries from old supervisor label (macOS only)
  # (was mislabeled "# 4.")
  # Old label: com.aidevops.supervisor-pulse (from cron.sh/launchd.sh)
  # New label: com.aidevops.aidevops-supervisor-pulse (from setup.sh)
  # setup.sh already handles the new label cleanup at line ~1000, but
  # the old label from cron.sh may also be present
  if [[ "$(uname -s)" == "Darwin" ]]; then
    local old_label="com.aidevops.supervisor-pulse"
    local old_plist="$HOME/Library/LaunchAgents/${old_label}.plist"
    if _launchd_has_agent "$old_label" || [[ -f "$old_plist" ]]; then
      # Use launchctl remove by label — works even when the plist file is
      # missing (orphaned agent loaded without a backing file on disk)
      launchctl remove "$old_label" || true
      rm -f "$old_plist"
      print_info "Removed orphaned supervisor-pulse LaunchAgent ($old_label)"
      ((++cleaned))
    fi
  fi

  if [[ $cleaned -gt 0 ]]; then
    print_success "Cleaned up $cleaned orphaned supervisor artifact(s) — pulse-wrapper.sh is the active system"
  fi

  return 0
}
1503
-
1504
# Backfill GitHub issue relationships from TODO.md metadata (t1889)
# One-time migration: reads blocked-by:/blocks: and subtask hierarchy from
# TODO.md in each pulse-enabled repo, and sets the corresponding GitHub
# issue relationships (blocked-by, sub-issues) via the GraphQL API.
#
# Uses marker file to ensure it runs only once per install.
# Safe to re-run — the GraphQL mutations are idempotent (duplicates are skipped).
#
# Globals:   HOME (read) — locations of marker file, repos.json, helper script
# Outputs:   progress/diagnostics via print_info/print_warning/print_success
# Returns:   always 0 — best-effort migration, must never block setup
backfill_issue_relationships() {
    local marker_file="$HOME/.aidevops/.migrations/t1889-relationships-backfill"
    local marker_dir
    marker_dir=$(dirname "$marker_file")

    # Skip if already done
    if [[ -f "$marker_file" ]]; then
        return 0
    fi

    # Require gh CLI and authentication
    if ! command -v gh >/dev/null 2>&1; then
        print_warning "gh CLI not installed — skipping issue relationships backfill"
        return 0
    fi
    if ! gh auth status >/dev/null 2>&1; then
        print_warning "gh CLI not authenticated — skipping issue relationships backfill"
        return 0
    fi

    # Require jq for repos.json parsing
    if ! command -v jq >/dev/null 2>&1; then
        print_warning "jq not installed — skipping issue relationships backfill"
        return 0
    fi

    local repos_file="$HOME/.config/aidevops/repos.json"
    if [[ ! -f "$repos_file" ]]; then
        print_info "No repos.json — skipping issue relationships backfill"
        # Nothing to migrate on this install — record completion (timestamped,
        # consistent with the end-of-function marker; only existence is checked).
        mkdir -p "$marker_dir"
        date -u +%Y-%m-%dT%H:%M:%SZ >"$marker_file"
        return 0
    fi

    local sync_script="$HOME/.aidevops/agents/scripts/issue-sync-helper.sh"
    if [[ ! -x "$sync_script" ]]; then
        # Deliberately no marker here: retry on the next run, once the
        # helper script has been installed.
        print_warning "issue-sync-helper.sh not found — skipping relationships backfill"
        return 0
    fi

    print_info "Backfilling GitHub issue relationships (blocked-by, sub-issues) from TODO.md..."

    local total_repos=0 failed_repos=0
    local repo_path repo_slug local_only

    while IFS=$'\t' read -r repo_path repo_slug local_only; do
        [[ -z "$repo_path" ]] && continue
        local expanded_path="${repo_path/#\~/$HOME}"

        # Skip local-only repos (no GitHub remote)
        [[ "$local_only" == "true" ]] && continue

        # Skip repos without TODO.md
        [[ ! -f "$expanded_path/TODO.md" ]] && continue

        # Skip repos with no ref:GH# entries
        if ! grep -qE 'ref:GH#[0-9]+' "$expanded_path/TODO.md" 2>/dev/null; then
            continue
        fi

        # Skip repos with no blocked-by:/blocks: or subtask entries
        local has_deps=false
        grep -qE 'blocked-by:|blocks:' "$expanded_path/TODO.md" 2>/dev/null && has_deps=true
        grep -qE '^\s+- \[.\] t[0-9]+\.[0-9]+.*ref:GH#' "$expanded_path/TODO.md" 2>/dev/null && has_deps=true
        [[ "$has_deps" == "false" ]] && continue

        total_repos=$((total_repos + 1))
        local repo_arg=""
        [[ -n "$repo_slug" ]] && repo_arg="--repo $repo_slug"

        print_info " $(basename "$expanded_path"): syncing relationships..."
        # repo_arg is intentionally unquoted: it must expand to two words
        # (--repo SLUG) or to nothing when empty (bash 3.2 compatible —
        # empty-array expansion under set -u is unsafe on old bash).
        # shellcheck disable=SC2086
        if ! (cd "$expanded_path" && bash "$sync_script" relationships $repo_arg --verbose 2>&1 | tail -3); then
            print_warning " $(basename "$expanded_path"): relationships sync had errors"
            failed_repos=$((failed_repos + 1))
        fi
    done < <(jq -r '.initialized_repos[] | select(.pulse == true) | [.path, .slug, (.local_only // false | tostring)] | @tsv' "$repos_file" 2>/dev/null)

    # Create marker directory and file (timestamp is informational only)
    mkdir -p "$marker_dir"
    date -u +%Y-%m-%dT%H:%M:%SZ >"$marker_file"

    if [[ $total_repos -eq 0 ]]; then
        print_info "No repos with relationship data to backfill"
    elif [[ $failed_repos -eq 0 ]]; then
        print_success "Issue relationships backfilled for $total_repos repo(s)"
    else
        print_warning "Backfilled $total_repos repo(s), $failed_repos had errors"
    fi

    return 0
}
1605
-
1606
# Migrate aidevops cron entries to systemd user timers (GH#17695 Finding D).
# On Linux hosts with a working systemd user session, any cron entry carrying
# an aidevops marker is removed once its matching systemd timer is enabled,
# preventing the same job from firing twice on installs that predate the
# systemd preference.
# No-op on macOS and on Linux without systemd.
# Idempotent: guarded by a versioned marker file so it runs once per version.
migrate_cron_to_systemd() {
    # macOS uses launchd — nothing to do there.
    if [[ "$(uname -s)" == "Darwin" ]]; then
        return 0
    fi
    # Bail out unless systemctl exists AND a user session responds.
    if ! command -v systemctl >/dev/null 2>&1 || ! systemctl --user status >/dev/null 2>&1; then
        return 0
    fi

    # Versioned migration marker — bump the version whenever entries are added
    # so already-migrated systems pick up the new ones (GH#17861 added
    # auto-update + repo-sync).
    local marker_dir="$HOME/.aidevops/cache/migrations"
    local marker_file="$marker_dir/cron-to-systemd-v2-done"
    if [[ -f "$marker_file" ]]; then
        return 0
    fi

    # Parallel indexed arrays: cron marker string -> systemd timer unit name.
    # Indexed arrays are bash 3.2 safe (only associative arrays are not).
    local -a cron_markers=(
        "aidevops: stats-wrapper"
        "aidevops: gh-failure-miner"
        "aidevops: process-guard"
        "aidevops: memory-pressure-monitor"
        "aidevops: screen-time-snapshot"
        "aidevops: contribution-watch"
        "aidevops: profile-readme-update"
        "aidevops: token-refresh"
        "aidevops-auto-update"
        "aidevops-repo-sync"
    )
    local -a systemd_timers=(
        "aidevops-stats-wrapper"
        "aidevops-gh-failure-miner"
        "aidevops-process-guard"
        "aidevops-memory-pressure-monitor"
        "aidevops-screen-time-snapshot"
        "aidevops-contribution-watch"
        "aidevops-profile-readme-update"
        "aidevops-token-refresh"
        "aidevops-auto-update"
        "aidevops-repo-sync"
    )

    local existing_cron
    existing_cron=$(crontab -l 2>/dev/null) || existing_cron=""
    if [[ -z "$existing_cron" ]]; then
        # Empty crontab: nothing can be duplicated — mark done and leave.
        mkdir -p "$marker_dir"
        date -u +%Y-%m-%dT%H:%M:%SZ >"$marker_file"
        return 0
    fi

    local migrated=0
    local remaining_cron="$existing_cron"
    local idx

    for idx in "${!cron_markers[@]}"; do
        local marker="${cron_markers[$idx]}"
        local timer_name="${systemd_timers[$idx]}"
        # Drop a cron entry ONLY when its systemd timer is enabled; otherwise
        # removing it would stop the job from running at all.
        if echo "$remaining_cron" | grep -qF "$marker" &&
            systemctl --user is-enabled "${timer_name}.timer" >/dev/null 2>&1; then
            remaining_cron=$(echo "$remaining_cron" | grep -vF "$marker")
            migrated=$((migrated + 1))
            print_info "Migrated $marker from cron to systemd (${timer_name}.timer)"
        fi
    done

    if [[ $migrated -gt 0 ]]; then
        echo "$remaining_cron" | crontab -
        print_success "Cron-to-systemd migration: $migrated scheduler(s) migrated"
    fi

    # Write versioned marker regardless of whether anything was migrated
    mkdir -p "$marker_dir"
    date -u +%Y-%m-%dT%H:%M:%SZ >"$marker_file"
    return 0
}