@adaptic/maestro 1.8.3 → 1.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. package/.claude/settings.json +11 -0
  2. package/agents/engineering-oversight/agent.md +44 -0
  3. package/agents/github-operator/agent.md +38 -0
  4. package/agents/inbox-processor/agent.md +39 -0
  5. package/bin/maestro.mjs +302 -4
  6. package/framework-features.json +107 -0
  7. package/lib/feature-init.mjs +297 -0
  8. package/package.json +5 -2
  9. package/scaffold/config/known-agents.json +57 -8
  10. package/scripts/cost/track-claude-usage.mjs +154 -0
  11. package/scripts/daemon/cadence-consumer.mjs +73 -2
  12. package/scripts/decisions/capture-decision.mjs +116 -0
  13. package/scripts/emergency-stop.sh +56 -19
  14. package/scripts/hooks/session-start-banner.sh +79 -0
  15. package/scripts/maintenance/backup-to-cloud.sh +124 -0
  16. package/scripts/rag/ingest.mjs +111 -0
  17. package/scripts/rag/search.mjs +119 -0
  18. package/scripts/resume-operations.sh +50 -13
  19. package/scripts/setup/init-backup.mjs +54 -0
  20. package/scripts/setup/init-cadence-bus.mjs +60 -0
  21. package/scripts/setup/init-cost-tracking.mjs +45 -0
  22. package/scripts/setup/init-decision-capture.mjs +66 -0
  23. package/scripts/setup/init-known-agents.mjs +57 -0
  24. package/scripts/setup/init-memory-executive.mjs +45 -0
  25. package/scripts/setup/init-rag.mjs +103 -0
  26. package/scripts/setup/init-session-router.mjs +38 -0
  27. package/workflows/continuous/backlog-executor.yaml +1 -1
  28. package/workflows/continuous/inbound-monitor.yaml +10 -10
  29. package/workflows/daily/applicant-triage.yaml +1 -1
  30. package/workflows/daily/comms-triage.yaml +2 -2
  31. package/workflows/daily/evening-wrap.yaml +1 -1
  32. package/workflows/daily/morning-brief.yaml +1 -1
  33. package/workflows/daily/slack-followup-sweep.yaml +2 -2
  34. package/workflows/event-driven/README.md +5 -5
  35. package/workflows/event-driven/agent-failure-investigation.yaml +1 -1
  36. package/workflows/event-driven/pr-review.yaml +6 -3
  37. package/workflows/monthly/board-readiness.yaml +1 -1
  38. package/workflows/quarterly/strategic-scenario-analysis.yaml +1 -1
  39. package/workflows/session-protocol.md +7 -7
  40. package/workflows/weekly/engineering-health.yaml +1 -1
  41. package/workflows/weekly/hiring-review.yaml +1 -1
  42. package/workflows/weekly/rollup-pipeline-review.yaml +1 -1
  43. package/workflows/weekly/strategic-memo.yaml +1 -1
@@ -0,0 +1,116 @@
1
+ #!/usr/bin/env node
2
+ /**
3
+ * scripts/decisions/capture-decision.mjs — Record a decision into the
4
+ * agent's decision log.
5
+ *
6
+ * Decisions land in knowledge/decisions/DEC-YYYY-MM-DD-NNN.yaml and the
7
+ * index in knowledge/decisions/index.yaml is updated. Idempotent on the
8
+ * same decision-id (overwrites the file, leaves the index intact).
9
+ *
10
+ * Usage:
11
+ * node scripts/decisions/capture-decision.mjs \
12
+ * --title "Adopt cadence bus" \
13
+ * --domain "infrastructure" \
14
+ * --decision "Use a local file-backed event bus consumed by a single persistent main session." \
15
+ * --rationale "Reduces Claude Code spawn cost; enables centralised throttling." \
16
+ * --context "Inbox/cadence ticks were spawning fresh sessions per launchd interval." \
17
+ * [--alternatives "Per-tick spawn (status quo); CronCreate-based scheduler"]
18
+ * [--stakeholders "ravi, mehran"]
19
+ * [--status active]
20
+ * [--decision-maker "Ravi Patel"]
21
+ *
22
+ * Reads agent.json for the default decision-maker.
23
+ */
24
+
25
+ import { existsSync, mkdirSync, readFileSync, readdirSync, writeFileSync } from "node:fs";
26
+ import { join, resolve, dirname } from "node:path";
27
+ import { fileURLToPath } from "node:url";
28
+
29
+ const __dirname = dirname(fileURLToPath(import.meta.url));
30
+ const AGENT_DIR = process.env.AGENT_ROOT || process.env.AGENT_DIR || resolve(__dirname, "..", "..");
31
+
32
+ function fail(msg) { process.stderr.write(`[capture-decision] ${msg}\n`); process.exit(1); }
33
+
34
+ const args = process.argv.slice(2);
35
+ const flags = {};
36
+ for (let i = 0; i < args.length; i++) {
37
+ const a = args[i];
38
+ if (!a.startsWith("--")) continue;
39
+ const key = a.slice(2);
40
+ const val = args[i + 1] && !args[i + 1].startsWith("--") ? args[++i] : "true";
41
+ flags[key] = val;
42
+ }
43
+
44
+ if (!flags.title || !flags.decision) fail("--title and --decision are required (see --help in source).");
45
+
46
+ const decisionMaker = flags["decision-maker"] || (() => {
47
+ try {
48
+ const a = JSON.parse(readFileSync(join(AGENT_DIR, "config/agent.json"), "utf-8"));
49
+ return a.fullName || a.firstName || "the agent";
50
+ } catch { return "the agent"; }
51
+ })();
52
+
53
+ const date = new Date().toISOString().slice(0, 10);
54
+ const decDir = join(AGENT_DIR, "knowledge/decisions");
55
+ mkdirSync(decDir, { recursive: true });
56
+
57
+ // Find next NNN for today.
58
+ const todayPrefix = `DEC-${date}-`;
59
+ let n = 1;
60
+ for (const name of readdirSync(decDir)) {
61
+ if (name.startsWith(todayPrefix)) {
62
+ const tail = name.replace(/^DEC-\d{4}-\d{2}-\d{2}-/, "").replace(/\.ya?ml$/, "");
63
+ const m = parseInt(tail, 10);
64
+ if (!Number.isNaN(m) && m >= n) n = m + 1;
65
+ }
66
+ }
67
+ const id = `${todayPrefix}${String(n).padStart(3, "0")}`;
68
+ const filename = `${id}.yaml`;
69
+
70
+ const yamlEsc = (s) => String(s ?? "").replace(/"/g, '\\"');
71
+ const lines = [
72
+ `id: ${id}`,
73
+ `date: ${date}`,
74
+ `title: "${yamlEsc(flags.title)}"`,
75
+ `domain: "${yamlEsc(flags.domain || "operational")}"`,
76
+ `decision_maker: "${yamlEsc(decisionMaker)}"`,
77
+ `decision_text: "${yamlEsc(flags.decision)}"`,
78
+ `context: "${yamlEsc(flags.context || "")}"`,
79
+ `rationale: "${yamlEsc(flags.rationale || "")}"`,
80
+ `status: ${yamlEsc(flags.status || "active")}`,
81
+ ];
82
+ if (flags.alternatives) {
83
+ lines.push("alternatives:");
84
+ for (const alt of String(flags.alternatives).split(";").map((s) => s.trim()).filter(Boolean)) {
85
+ lines.push(` - "${yamlEsc(alt)}"`);
86
+ }
87
+ }
88
+ if (flags.stakeholders) {
89
+ lines.push("stakeholders:");
90
+ for (const s of String(flags.stakeholders).split(",").map((s) => s.trim()).filter(Boolean)) {
91
+ lines.push(` - ${s}`);
92
+ }
93
+ }
94
+ if (flags["expires-at"]) {
95
+ lines.push(`expires_at: ${flags["expires-at"]}`);
96
+ }
97
+ lines.push("");
98
+
99
+ writeFileSync(join(decDir, filename), lines.join("\n"));
100
+
101
+ // Update index — naive append; reader can re-sort.
102
+ const indexPath = join(decDir, "index.yaml");
103
+ let indexBody = existsSync(indexPath) ? readFileSync(indexPath, "utf-8") : "# Decision Index — auto-maintained\ndecisions: []\n";
104
+ if (!indexBody.includes(id)) {
105
+ // Append a list entry before EOF.
106
+ if (/decisions:\s*\[\s*\]\s*$/.test(indexBody.trim())) {
107
+ indexBody = indexBody.replace(/decisions:\s*\[\s*\]/, `decisions:\n - id: ${id}\n date: ${date}\n title: "${yamlEsc(flags.title)}"\n status: ${yamlEsc(flags.status || "active")}`);
108
+ } else if (/decisions:\s*$/m.test(indexBody)) {
109
+ indexBody = indexBody.replace(/decisions:\s*$/m, `decisions:\n - id: ${id}\n date: ${date}\n title: "${yamlEsc(flags.title)}"\n status: ${yamlEsc(flags.status || "active")}`);
110
+ } else {
111
+ indexBody = indexBody.trimEnd() + `\n - id: ${id}\n date: ${date}\n title: "${yamlEsc(flags.title)}"\n status: ${yamlEsc(flags.status || "active")}\n`;
112
+ }
113
+ writeFileSync(indexPath, indexBody);
114
+ }
115
+
116
+ process.stdout.write(JSON.stringify({ ok: true, id, file: join(decDir, filename) }, null, 2) + "\n");
@@ -1,41 +1,78 @@
#!/bin/bash
# Emergency Stop — Immediately halts all Maestro agent operations.
# Usage: ./scripts/emergency-stop.sh
#
# This script is the kill switch for all autonomous operations:
#   1. Drops the .emergency-stop flag (every workflow / cadence consumer /
#      enqueue script honours this on the next tick).
#   2. Unloads every installed `ai.adaptic.<agent>-*` launchd job.
#   3. Kills running Claude Code subagent processes.
#
# Plist resolution: the agent's first-name slug is read from config/agent.json
# (SOT) so unload targets the correct labels; falls back to the directory
# basename if agent.json isn't readable.

set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
AGENT_DIR="$(dirname "$SCRIPT_DIR")"
LOG_FILE="$AGENT_DIR/logs/emergency-stop.log"
TIMESTAMP=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
mkdir -p "$(dirname "$LOG_FILE")" 2>/dev/null || true

# Resolve the agent first-name slug from SOT (config/agent.json) so the
# unload loop targets the right launchd labels. Falls back to the basename
# of the agent directory (stripping any -ai suffix).
AGENT_FIRST=""
if [ -f "$AGENT_DIR/config/agent.json" ]; then
  if command -v jq >/dev/null 2>&1; then
    AGENT_FIRST=$(jq -r '.firstName // empty' "$AGENT_DIR/config/agent.json" | tr '[:upper:]' '[:lower:]')
  else
    # No jq available: split the JSON on double-quotes with awk and take the
    # value field of the first "firstName" line.
    AGENT_FIRST=$(awk -F'"' '/"firstName"[[:space:]]*:/ { print tolower($4); exit }' "$AGENT_DIR/config/agent.json")
  fi
fi
[ -z "$AGENT_FIRST" ] && AGENT_FIRST=$(basename "$AGENT_DIR" | sed 's/-ai$//')

LAUNCH_AGENTS_DIR="$HOME/Library/LaunchAgents"
PLIST_GLOB="$LAUNCH_AGENTS_DIR/ai.adaptic.${AGENT_FIRST}-*.plist"

echo "[$TIMESTAMP] EMERGENCY STOP INITIATED (agent=$AGENT_FIRST)" | tee -a "$LOG_FILE"

# 1. Drop the stop flag FIRST so any in-flight work sees it on next tick.
echo "$TIMESTAMP" > "$AGENT_DIR/.emergency-stop"
echo "[$TIMESTAMP] Stop flag created" >> "$LOG_FILE"

# 2. Unload installed launchd jobs. $PLIST_GLOB is intentionally unquoted so
#    the shell expands it; when nothing matches, the literal pattern fails
#    the -f guard and the loop is a no-op.
echo "Unloading installed launchd jobs ($PLIST_GLOB)..."
unloaded=0
for plist in $PLIST_GLOB; do
  if [ -f "$plist" ]; then
    launchctl unload "$plist" 2>/dev/null || true
    echo "[$TIMESTAMP] Unloaded: $(basename "$plist")" >> "$LOG_FILE"
    unloaded=$((unloaded + 1))
  fi
done
echo "[$TIMESTAMP] Unloaded $unloaded launchd job(s)" >> "$LOG_FILE"

# 3. Kill running Claude Code subagent processes.
#    NOTE(review): `pgrep -f "claude"` matches ANY process whose command line
#    contains "claude" — deliberately broad for a kill switch, but it can
#    also hit unrelated claude sessions the operator has open. (There is no
#    cwd/AGENT_ROOT filtering here; pgrep cannot see a process's cwd.)
#    TERM first for a graceful shutdown, then KILL stragglers after a 3s
#    grace period.
echo "Stopping Claude Code agent processes..."
pids=$(pgrep -f "claude" 2>/dev/null | tr '\n' ' ' || true)
if [ -n "$pids" ]; then
  # shellcheck disable=SC2086 — word splitting of $pids is intentional.
  kill -TERM $pids 2>/dev/null || true
  sleep 3
  still=$(pgrep -f "claude" 2>/dev/null | tr '\n' ' ' || true)
  [ -n "$still" ] && kill -KILL $still 2>/dev/null || true
  echo "[$TIMESTAMP] Claude processes terminated ($pids)" >> "$LOG_FILE"
fi

# 4. Log completion.
echo "[$TIMESTAMP] EMERGENCY STOP COMPLETE — All operations halted" | tee -a "$LOG_FILE"
echo ""
echo "To resume operations:"
echo "  ./scripts/resume-operations.sh"
echo ""
echo "All actions have been logged to: $LOG_FILE"
@@ -0,0 +1,79 @@
#!/bin/bash
# session-start-banner.sh — Claude Code SessionStart hook.
#
# Reads .maestro/features.json and prints a short banner if the agent repo
# has pending framework init steps. Hooks into Claude Code via:
#
#   .claude/settings.json:
#     hooks:
#       SessionStart:
#         - command: ./scripts/hooks/session-start-banner.sh
#
# The banner is intentionally short — it nudges the operator without
# spamming context. If no pending items, this script exits silently with
# nothing on stdout, so it costs nothing in a healthy session.

set -e

# Discover the agent root from the script path.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
AGENT_DIR="$(cd "$SCRIPT_DIR/../.." && pwd)"
STATE_FILE="$AGENT_DIR/.maestro/features.json"

if [ ! -f "$STATE_FILE" ]; then
  # No state file means the agent hasn't been upgraded since the
  # feature-init system was introduced. Suggest running upgrade.
  cat <<'BANNER'

═══════════════════════════════════════════════════════════════════════════
Maestro framework features are not yet tracked in this repo.
Run:  npx @adaptic/maestro upgrade
to register the current framework version and pick up new features.
═══════════════════════════════════════════════════════════════════════════

BANNER
  exit 0
fi

# Try jq first, fall back to a small node one-liner. The state-file path is
# handed to node via the environment rather than interpolated into the -e
# source text, so a path containing quotes or backslashes can't break (or
# inject into) the inline script.
export STATE_FILE
PENDING_COUNT=0
PENDING_LINES=""
if command -v jq >/dev/null 2>&1; then
  PENDING_COUNT=$(jq -r '.pending | length // 0' "$STATE_FILE" 2>/dev/null || echo 0)
else
  PENDING_COUNT=$(node -e '
    try { const s = JSON.parse(require("fs").readFileSync(process.env.STATE_FILE, "utf-8")); process.stdout.write(String((s.pending || []).length)); }
    catch { process.stdout.write("0"); }
  ')
fi

# Normalise: any non-numeric result (jq/node failure modes) counts as zero,
# so the -gt test below can't abort the hook under set -e.
case "$PENDING_COUNT" in
  ''|*[!0-9]*) PENDING_COUNT=0 ;;
esac

if [ "$PENDING_COUNT" -eq 0 ]; then
  # No pending items — exit silently. A clean session start is the default
  # we want; the banner only fires when there's work to do.
  exit 0
fi

if command -v jq >/dev/null 2>&1; then
  PENDING_LINES=$(jq -r '.pending[] | "  • \(.title // .feature) — \(.description // .command)"' "$STATE_FILE" 2>/dev/null)
else
  PENDING_LINES=$(node -e '
    const s = JSON.parse(require("fs").readFileSync(process.env.STATE_FILE, "utf-8"));
    for (const p of s.pending) {
      process.stdout.write("  • " + (p.title || p.feature) + " — " + (p.description || p.command) + String.fromCharCode(10));
    }
  ')
fi

cat <<BANNER

═══════════════════════════════════════════════════════════════════════════
⚠ Maestro: $PENDING_COUNT framework feature(s) pending initialisation

$PENDING_LINES

Run:  npx @adaptic/maestro init     (interactive — picks up pending steps)
Or:   npx @adaptic/maestro doctor   (verify what's installed)
═══════════════════════════════════════════════════════════════════════════

BANNER
exit 0
@@ -0,0 +1,124 @@
#!/bin/bash
# backup-to-cloud.sh — Off-machine state backup driver.
#
# Reads .maestro/backup-config.yaml and pushes the configured paths to GCS
# or S3 (or rsyncs to a remote host). Designed to be invoked daily via a
# launchd plist. Idempotent; safe to re-run.
#
# If .maestro/backup-config.yaml is missing or `enabled: false`, exits 0
# silently. Errors during upload are logged to logs/maintenance/backup.log
# and surface in `maestro doctor`.

set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
AGENT_DIR="$(cd "$SCRIPT_DIR/../.." && pwd)"
CFG="$AGENT_DIR/.maestro/backup-config.yaml"
LOG_DIR="$AGENT_DIR/logs/maintenance"
LOG="$LOG_DIR/backup.log"
mkdir -p "$LOG_DIR"

log() { echo "[$(date -u +"%Y-%m-%dT%H:%M:%SZ")] $1" | tee -a "$LOG"; }

if [ ! -f "$CFG" ]; then
  log "backup-config.yaml not found — backup not configured. Run: maestro init backup-replication --apply"
  exit 0
fi

# Tiny YAML reader for top-level scalar fields only (the fields we need).
get() {
  awk -v k="$1" '
    $1 == k":" { sub(/^[^:]+:[[:space:]]*/, "", $0); gsub(/^"|"$/, "", $0); print; exit }
  ' "$CFG"
}

ENABLED=$(get enabled)
if [ "$ENABLED" != "true" ]; then
  log "Backup not enabled (.maestro/backup-config.yaml: enabled: false). Skipping."
  exit 0
fi

PROVIDER=$(get provider)
BUCKET=$(get bucket)
PREFIX=$(get prefix)
RETENTION=$(get retention_days)
[ -z "$RETENTION" ] && RETENTION=30

if [ -z "$BUCKET" ] || [ -z "$PROVIDER" ] || [ -z "$PREFIX" ]; then
  log "ERROR: backup-config.yaml missing required fields (provider / bucket / prefix)."
  exit 1
fi

DATE=$(date -u +"%Y-%m-%d")
log "Starting backup → $PROVIDER://$BUCKET/$PREFIX/$DATE/"

# Default include paths if the YAML doesn't override them.
INCLUDE_DEFAULT=(state knowledge outputs config/agent.json)
INCLUDE_PATHS=()
in_include=0
while IFS= read -r line; do
  if echo "$line" | grep -qE "^include:"; then in_include=1; continue; fi
  if [ "$in_include" = "1" ]; then
    # Any new top-level key ends the include list.
    if echo "$line" | grep -qE "^[a-z]"; then in_include=0; continue; fi
    path=$(echo "$line" | sed 's/^[[:space:]]*-[[:space:]]*//' | sed 's/[[:space:]]*$//')
    [ -n "$path" ] && INCLUDE_PATHS+=("$path")
  fi
done < "$CFG"
[ ${#INCLUDE_PATHS[@]} -eq 0 ] && INCLUDE_PATHS=("${INCLUDE_DEFAULT[@]}")

# Build the archive. Single quotes defer $TMP_DIR expansion until the trap
# fires, and the inner quoting keeps paths with spaces intact.
TMP_DIR=$(mktemp -d)
trap 'rm -rf "$TMP_DIR"' EXIT
TARBALL="$TMP_DIR/$PREFIX-$DATE.tar.gz"

log "Packaging: ${INCLUDE_PATHS[*]}"
tar --exclude="state/tmp" --exclude="state/rag/index" --exclude=".DS_Store" \
  -czf "$TARBALL" -C "$AGENT_DIR" "${INCLUDE_PATHS[@]}" 2>>"$LOG" || {
  log "ERROR: tar failed"
  exit 1
}
# BSD stat (macOS) uses -f%z; GNU stat (Linux) uses -c%s. Try both so the
# script still works off the primary launchd host.
SIZE=$(stat -f%z "$TARBALL" 2>/dev/null || stat -c%s "$TARBALL")
log "Packaged: $TARBALL ($SIZE bytes)"

# Upload — choose provider.
case "$PROVIDER" in
  gcs)
    if ! command -v gsutil >/dev/null 2>&1; then
      log "ERROR: gsutil not found in PATH. Install gcloud SDK or change provider."
      exit 1
    fi
    gsutil cp "$TARBALL" "gs://$BUCKET/$PREFIX/$DATE/$(basename "$TARBALL")" >>"$LOG" 2>&1
    ;;
  s3)
    if ! command -v aws >/dev/null 2>&1; then
      log "ERROR: aws CLI not found in PATH. Install awscli or change provider."
      exit 1
    fi
    aws s3 cp "$TARBALL" "s3://$BUCKET/$PREFIX/$DATE/$(basename "$TARBALL")" >>"$LOG" 2>&1
    ;;
  rsync)
    # bucket field carries the rsync target (user@host:/path).
    # NOTE(review): rsync does not create nested remote directories — the
    # remote $PREFIX path is assumed to exist; verify against init-backup.
    rsync -avz "$TARBALL" "$BUCKET/$PREFIX/$DATE/" >>"$LOG" 2>&1
    ;;
  *)
    log "ERROR: unknown provider '$PROVIDER'"
    exit 1
    ;;
esac

log "Upload complete: $PROVIDER://$BUCKET/$PREFIX/$DATE/"

# Update .maestro/last-backup.json so doctor can verify freshness.
mkdir -p "$AGENT_DIR/.maestro"
cat > "$AGENT_DIR/.maestro/last-backup.json" <<JSON
{
  "completed_at": "$(date -u +"%Y-%m-%dT%H:%M:%SZ")",
  "provider": "$PROVIDER",
  "bucket": "$BUCKET",
  "prefix": "$PREFIX",
  "size_bytes": $SIZE,
  "tarball": "$(basename "$TARBALL")"
}
JSON

log "Backup complete."
@@ -0,0 +1,111 @@
1
+ #!/usr/bin/env node
2
+ /**
3
+ * scripts/rag/ingest.mjs — Inventory step for the cross-repo RAG.
4
+ *
5
+ * Walks every repo flagged `index: true` in config/repo-registry.yaml,
6
+ * counts files / lines per language, and writes a summary to
7
+ * state/rag/inventory.json. The MVP search surface (scripts/rag/search.mjs)
8
+ * is grep-based so it doesn't need an actual embedding index yet, but the
9
+ * inventory lets `maestro doctor` confirm the configured repos are
10
+ * accessible and roughly the size the agent expects.
11
+ *
12
+ * Future: replace with embedding pipeline + sqlite-fts5 or vector store.
13
+ */
14
+
15
+ import { existsSync, mkdirSync, readFileSync, statSync, writeFileSync, readdirSync } from "node:fs";
16
+ import { join, resolve, dirname } from "node:path";
17
+ import { fileURLToPath } from "node:url";
18
+
19
+ const __dirname = dirname(fileURLToPath(import.meta.url));
20
+ const AGENT_DIR = process.env.AGENT_ROOT || process.env.AGENT_DIR || resolve(__dirname, "..", "..");
21
+
22
+ const ok = (m) => process.stdout.write(`[rag-ingest] ✓ ${m}\n`);
23
+ const warn = (m) => process.stdout.write(`[rag-ingest] ⚠ ${m}\n`);
24
+
25
+ const registryPath = join(AGENT_DIR, "config/repo-registry.yaml");
26
+ if (!existsSync(registryPath)) {
27
+ warn("config/repo-registry.yaml not found — run `maestro init rag-foundation --apply` first.");
28
+ process.exit(0);
29
+ }
30
+
31
+ const body = readFileSync(registryPath, "utf-8");
32
+ const repos = [];
33
+ let cur = null;
34
+ for (const raw of body.split("\n")) {
35
+ const line = raw.replace(/#.*$/, "").trimEnd();
36
+ if (line.match(/^\s*-\s+name:\s*(.+)$/)) {
37
+ if (cur) repos.push(cur);
38
+ cur = { name: line.replace(/^\s*-\s+name:\s*/, "").replace(/['"]/g, "").trim() };
39
+ } else if (cur && line.match(/^\s+path:\s*(.+)$/)) {
40
+ cur.path = line.replace(/^\s+path:\s*/, "").replace(/['"]/g, "").trim().replace(/^~/, process.env.HOME || "~");
41
+ } else if (cur && line.match(/^\s+index:\s*(true|false)/)) {
42
+ cur.index = /true/.test(line);
43
+ }
44
+ }
45
+ if (cur) repos.push(cur);
46
+
47
+ mkdirSync(join(AGENT_DIR, "state/rag"), { recursive: true });
48
+
49
+ const EXT_BUCKETS = {
50
+ ts: [".ts", ".tsx"],
51
+ js: [".js", ".mjs", ".cjs"],
52
+ py: [".py"],
53
+ go: [".go"],
54
+ rs: [".rs"],
55
+ java: [".java"],
56
+ yaml: [".yaml", ".yml"],
57
+ json: [".json"],
58
+ md: [".md", ".mdx"],
59
+ sh: [".sh", ".bash"],
60
+ };
61
+ const SKIP_DIRS = new Set(["node_modules", ".git", "dist", "build", "target", ".next", "__pycache__", ".venv", "venv"]);
62
+
63
+ function bucketFor(filename) {
64
+ for (const [bucket, exts] of Object.entries(EXT_BUCKETS)) {
65
+ for (const ext of exts) if (filename.endsWith(ext)) return bucket;
66
+ }
67
+ return null;
68
+ }
69
+
70
+ function walk(root) {
71
+ const counts = { files: 0, by_lang: {} };
72
+ function visit(dir) {
73
+ let entries;
74
+ try { entries = readdirSync(dir); } catch { return; }
75
+ for (const name of entries) {
76
+ if (SKIP_DIRS.has(name)) continue;
77
+ const full = join(dir, name);
78
+ let st;
79
+ try { st = statSync(full); } catch { continue; }
80
+ if (st.isDirectory()) visit(full);
81
+ else if (st.isFile()) {
82
+ const b = bucketFor(name);
83
+ if (!b) continue;
84
+ counts.files += 1;
85
+ counts.by_lang[b] = (counts.by_lang[b] || 0) + 1;
86
+ }
87
+ }
88
+ }
89
+ visit(root);
90
+ return counts;
91
+ }
92
+
93
+ const inventory = {
94
+ generated_at: new Date().toISOString(),
95
+ repos: [],
96
+ };
97
+
98
+ for (const repo of repos) {
99
+ if (!repo.path || repo.index === false) continue;
100
+ if (!existsSync(repo.path)) {
101
+ inventory.repos.push({ name: repo.name, path: repo.path, status: "missing" });
102
+ warn(`${repo.name}: path missing (${repo.path})`);
103
+ continue;
104
+ }
105
+ const counts = walk(repo.path);
106
+ inventory.repos.push({ name: repo.name, path: repo.path, status: "indexed", ...counts });
107
+ ok(`${repo.name}: ${counts.files} indexable files`);
108
+ }
109
+
110
+ writeFileSync(join(AGENT_DIR, "state/rag/inventory.json"), JSON.stringify(inventory, null, 2) + "\n");
111
+ ok(`wrote state/rag/inventory.json (${inventory.repos.length} repo(s))`);
@@ -0,0 +1,119 @@
1
+ #!/usr/bin/env node
2
+ /**
3
+ * scripts/rag/search.mjs — Cross-repo code search for the operating agent.
4
+ *
5
+ * Reads config/repo-registry.yaml, runs ripgrep across every repo flagged
6
+ * `index: true`, and returns a JSON list of matches. This is the MVP RAG
7
+ * surface — no embeddings yet, just fast literal/regex search across the
8
+ * agent's authorised codebase set. Embeddings + chunk-level retrieval
9
+ * land in a future feature-init step.
10
+ *
11
+ * Usage:
12
+ * node scripts/rag/search.mjs <query> [--regex] [--limit=N] [--lang=ts,py,...]
13
+ *
14
+ * Output: JSON object with { query, repos_searched, matches: [...] }.
15
+ * Designed to be called as a tool from a Claude session.
16
+ */
17
+
18
+ import { readFileSync, existsSync } from "node:fs";
19
+ import { join, resolve, dirname } from "node:path";
20
+ import { spawnSync } from "node:child_process";
21
+ import { fileURLToPath } from "node:url";
22
+
23
+ const __dirname = dirname(fileURLToPath(import.meta.url));
24
+ const AGENT_DIR = process.env.AGENT_ROOT || process.env.AGENT_DIR || resolve(__dirname, "..", "..");
25
+
26
+ function fail(msg) { process.stderr.write(`[rag-search] ${msg}\n`); process.exit(1); }
27
+
28
+ const args = process.argv.slice(2);
29
+ let query = null;
30
+ let useRegex = false;
31
+ let limit = 50;
32
+ let langs = null;
33
+ for (const a of args) {
34
+ if (a === "--regex") useRegex = true;
35
+ else if (a.startsWith("--limit=")) limit = parseInt(a.slice(8), 10) || 50;
36
+ else if (a.startsWith("--lang=")) langs = a.slice(7).split(",").map((s) => s.trim()).filter(Boolean);
37
+ else if (!a.startsWith("--")) { query = query ? `${query} ${a}` : a; }
38
+ }
39
+ if (!query) fail("usage: search.mjs <query> [--regex] [--limit=N] [--lang=ts,py,...]");
40
+
41
+ // Parse YAML registry (just the lines we need — no YAML lib).
42
+ const registryPath = join(AGENT_DIR, "config/repo-registry.yaml");
43
+ if (!existsSync(registryPath)) fail(`config/repo-registry.yaml not found — run \`maestro init rag-foundation --apply\``);
44
+
45
+ const body = readFileSync(registryPath, "utf-8");
46
+ const repos = [];
47
+ let cur = null;
48
+ for (const raw of body.split("\n")) {
49
+ const line = raw.replace(/#.*$/, "").trimEnd();
50
+ if (line.match(/^\s*-\s+name:\s*(.+)$/)) {
51
+ if (cur) repos.push(cur);
52
+ cur = { name: line.replace(/^\s*-\s+name:\s*/, "").replace(/['"]/g, "").trim() };
53
+ } else if (cur && line.match(/^\s+path:\s*(.+)$/)) {
54
+ cur.path = line.replace(/^\s+path:\s*/, "").replace(/['"]/g, "").trim().replace(/^~/, process.env.HOME || "~");
55
+ } else if (cur && line.match(/^\s+index:\s*(true|false)/)) {
56
+ cur.index = /true/.test(line);
57
+ }
58
+ }
59
+ if (cur) repos.push(cur);
60
+
61
+ const indexed = repos.filter((r) => r.path && (r.index !== false));
62
+ if (indexed.length === 0) fail("no repos marked index: true in repo-registry.yaml");
63
+
64
+ // Use ripgrep if available; fall back to grep -r.
65
+ const rg = spawnSync("which", ["rg"], { encoding: "utf-8" });
66
+ const hasRg = rg.status === 0;
67
+ const matches = [];
68
+ let scanned = 0;
69
+
70
+ for (const repo of indexed) {
71
+ if (!existsSync(repo.path)) continue;
72
+ scanned += 1;
73
+ const rgArgs = [
74
+ "--json",
75
+ "--max-count", String(Math.min(limit, 50)),
76
+ "--max-columns", "240",
77
+ "--no-heading",
78
+ ];
79
+ if (!useRegex) rgArgs.push("--fixed-strings");
80
+ if (langs) for (const l of langs) rgArgs.push("-t", l);
81
+ rgArgs.push(query, repo.path);
82
+
83
+ if (!hasRg) {
84
+ // Fallback to grep.
85
+ const r = spawnSync("grep", ["-rn", useRegex ? "-E" : "-F", "--", query, repo.path], {
86
+ encoding: "utf-8",
87
+ maxBuffer: 16 * 1024 * 1024,
88
+ });
89
+ for (const line of (r.stdout || "").split("\n")) {
90
+ if (!line) continue;
91
+ const m = line.match(/^([^:]+):(\d+):(.*)$/);
92
+ if (!m) continue;
93
+ matches.push({ repo: repo.name, path: m[1], line: parseInt(m[2], 10), excerpt: m[3].slice(0, 240) });
94
+ if (matches.length >= limit) break;
95
+ }
96
+ } else {
97
+ const r = spawnSync("rg", rgArgs, { encoding: "utf-8", maxBuffer: 16 * 1024 * 1024 });
98
+ for (const line of (r.stdout || "").split("\n")) {
99
+ if (!line) continue;
100
+ let entry;
101
+ try { entry = JSON.parse(line); } catch { continue; }
102
+ if (entry.type !== "match" || !entry.data) continue;
103
+ const path = entry.data.path?.text || "";
104
+ const lineNum = entry.data.line_number || 0;
105
+ const text = entry.data.lines?.text || "";
106
+ matches.push({ repo: repo.name, path, line: lineNum, excerpt: text.slice(0, 240) });
107
+ if (matches.length >= limit) break;
108
+ }
109
+ }
110
+ if (matches.length >= limit) break;
111
+ }
112
+
113
+ process.stdout.write(JSON.stringify({
114
+ query,
115
+ use_regex: useRegex,
116
+ repos_searched: scanned,
117
+ match_count: matches.length,
118
+ matches,
119
+ }, null, 2) + "\n");