@aiassesstech/mighty-mark 0.3.31 → 0.3.33
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +88 -0
- package/agent/AGENTS.md +28 -0
- package/dist/checks/backup-orchestrator.d.ts +33 -0
- package/dist/checks/backup-orchestrator.d.ts.map +1 -0
- package/dist/checks/backup-orchestrator.js +260 -0
- package/dist/checks/backup-orchestrator.js.map +1 -0
- package/dist/checks/check-runner.d.ts +7 -0
- package/dist/checks/check-runner.d.ts.map +1 -1
- package/dist/checks/check-runner.js +22 -2
- package/dist/checks/check-runner.js.map +1 -1
- package/dist/checks/data-integrity.d.ts +2 -1
- package/dist/checks/data-integrity.d.ts.map +1 -1
- package/dist/checks/data-integrity.js +10 -2
- package/dist/checks/data-integrity.js.map +1 -1
- package/dist/checks/gateway-health.d.ts.map +1 -1
- package/dist/checks/gateway-health.js +3 -0
- package/dist/checks/gateway-health.js.map +1 -1
- package/dist/checks/openclaw-version.d.ts +34 -0
- package/dist/checks/openclaw-version.d.ts.map +1 -0
- package/dist/checks/openclaw-version.js +165 -0
- package/dist/checks/openclaw-version.js.map +1 -0
- package/package.json +3 -1
- package/src/watchdog/fleet-backup/config.sh +74 -0
- package/src/watchdog/fleet-backup/fleet-backup.sh +363 -0
- package/src/watchdog/fleet-backup/fleet-restore.sh +437 -0
- package/src/watchdog/fleet-backup/test/test-backup.sh +395 -0
- package/src/watchdog/fleet-backup/test/test-restore.sh +302 -0
- package/src/watchdog/install.sh +61 -4
- package/src/watchdog/morning-check.sh +5 -13
|
@@ -0,0 +1,363 @@
|
|
|
1
|
+
#!/usr/bin/env bash
# ═══════════════════════════════════════════════════════════════════
# nole-fleet-backup — Tiered backup of entire OpenClaw fleet
# Agent: Mighty Mark (sentinel duty — fleet safety checkpoint)
#
# Two tiers:
#   full  — everything: extensions, memory, data stores, vectors,
#           sessions, fleet-bus, watchdog, configs.  Weekly (Sunday).
#   light — agent .md files, sanitized configs, systemd/cron only;
#           small enough to push to GitHub daily.  Mon–Sat.
#
# Usage:
#   fleet-backup.sh           # auto-detect tier from day of week
#   fleet-backup.sh --full    # force full backup
#   fleet-backup.sh --light   # force light backup
#
# Excludes node_modules and secrets (.env, credentials, API keys).
# Retention: 35 days (configurable).
# ═══════════════════════════════════════════════════════════════════

# Abort on any error, on unset variables, and on failures anywhere in
# a pipeline.
set -euo pipefail
|
|
22
|
+
|
|
23
|
+
# Resolve the directory this script lives in, then load configuration.
# A machine-local config.local.sh (operator overrides) wins over the
# stock config.sh shipped with the package.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

CONFIG_PATH="$SCRIPT_DIR/config.sh"
if [[ -f "$SCRIPT_DIR/config.local.sh" ]]; then
  CONFIG_PATH="$SCRIPT_DIR/config.local.sh"
fi
# shellcheck disable=SC1090
source "$CONFIG_PATH"
|
|
30
|
+
|
|
31
|
+
# Run identifiers: DATE/TIME name the archive; TIMESTAMP (UTC, ISO-8601)
# goes into the manifest.
DATE=$(date +%Y-%m-%d)
TIME=$(date +%H%M%S)
TIMESTAMP=$(date -u +%Y-%m-%dT%H:%M:%SZ)
# Private scratch area where the backup tree is assembled before tar;
# removed by the EXIT trap (cleanup).
STAGING_DIR=$(mktemp -d "/tmp/fleet-backup-staging-XXXXXX")
|
|
35
|
+
|
|
36
|
+
# ─── Determine backup tier ────────────────────────────────────────

# Default tier comes from the weekday: full on FULL_BACKUP_DAY
# (0 = Sunday unless config overrides it), light otherwise.
DOW=$(date +%w)  # 0=Sunday
TIER="light"
if [[ "$DOW" == "${FULL_BACKUP_DAY:-0}" ]]; then
  TIER="full"
fi

# An explicit flag overrides the weekday heuristic.
case "${1:-}" in
  --full) TIER="full" ;;
  --light) TIER="light" ;;
  --help|-h)
    echo "Usage: $0 [--full|--light]"
    echo "  --full    Force a full backup (all data)"
    echo "  --light   Force a light backup (agent .md + configs only)"
    # Default the expansion: a bare $FULL_BACKUP_DAY aborts under
    # `set -u` when config.sh does not define it, so --help would crash.
    echo "  (none)    Auto-detect: full on day ${FULL_BACKUP_DAY:-0}, light otherwise"
    exit 0
    ;;
  "") ;;  # no argument: keep the auto-detected tier
  *)
    # A mistyped flag (e.g. --ful) used to be silently ignored and fall
    # back to auto-detect; fail loudly instead.
    echo "Unknown option: $1 (try --help)" >&2
    exit 2
    ;;
esac

ARCHIVE_NAME="fleet-${TIER}-${DATE}-${TIME}.tar.gz"
BACKUP_FAILED=false
|
|
58
|
+
|
|
59
|
+
# Timestamped logger: always echoes to stdout and, when LOG_FILE is
# configured, appends the identical line to the log file (creating the
# log directory on first use).
log() {
  local stamp line
  stamp=$(date '+%Y-%m-%d %H:%M:%S')
  line="[$stamp] $1"
  echo "$line"
  if [[ -n "${LOG_FILE:-}" ]]; then
    mkdir -p "$(dirname "$LOG_FILE")"
    echo "$line" >> "$LOG_FILE"
  fi
}
|
|
67
|
+
|
|
68
|
+
# Load Mighty Mark's Telegram credentials if available.  `set -a`
# auto-exports every variable the env file defines so child processes
# (curl in notify_telegram) inherit them.
MARK_ENV="/opt/mighty-mark/.env"
if [[ -f "$MARK_ENV" ]]; then
  set -a
  # shellcheck disable=SC1090
  source "$MARK_ENV"
  set +a
fi
|
|
75
|
+
|
|
76
|
+
# Best-effort Telegram notification via Mighty Mark's bot.
# $1 - message text (callers pre-encode newlines as %0A).
# Silently a no-op when credentials are absent; curl failures are
# swallowed so a notification problem can never fail the backup itself.
notify_telegram() {
  local bot_token="${MIGHTY_MARK_TELEGRAM_TOKEN:-}"
  local chat_id="${MIGHTY_MARK_TELEGRAM_CHAT_ID:-}"

  # Without both credentials there is nowhere to send the message.
  if [[ -z "$bot_token" || -z "$chat_id" ]]; then
    return 0
  fi

  curl -s -X POST \
    "https://api.telegram.org/bot${bot_token}/sendMessage" \
    -d "chat_id=${chat_id}" \
    -d "text=$1" \
    -d "parse_mode=Markdown" > /dev/null 2>&1 || true
}
|
|
87
|
+
|
|
88
|
+
# Remove the staging directory.  Invoked from both the EXIT trap and
# on_error, so it must be safe to call more than once.
cleanup() {
  # ${STAGING_DIR:?} aborts instead of expanding empty -- guards the
  # recursive rm against ever running with a blank path.  `--` stops
  # option parsing in case the path ever starts with a dash.
  rm -rf -- "${STAGING_DIR:?}"
}
|
|
91
|
+
|
|
92
|
+
# ERR-trap handler: log the failure, page the operator via Telegram
# (%0A encodes newlines for the sendMessage API), scrub the staging
# area, and propagate the failing command's exit code.
on_error() {
  # Capture $? first -- any later command would overwrite it.
  local exit_code=$?
  log "ERROR: Backup failed with exit code $exit_code"
  BACKUP_FAILED=true
  notify_telegram "🔴 *MIGHTY MARK — Backup FAILED*%0A%0ATier: ${TIER}%0AExit code: ${exit_code}%0A%0ACheck logs: /opt/fleet-backup/logs/backup.log%0AServer: $(hostname)%0ATime: $(date)"
  cleanup
  exit $exit_code
}

# Failures page the operator; every exit path scrubs the staging dir.
trap on_error ERR
trap cleanup EXIT
|
|
103
|
+
|
|
104
|
+
# ─── Collect backup contents ──────────────────────────────────────

log "Starting $TIER backup for $DATE"
DEST="$STAGING_DIR/fleet-backup"
mkdir -p "$DEST"
|
|
109
|
+
|
|
110
|
+
# --- Items included in BOTH tiers ---

# Agent identity/prompt .md files from extensions
if [[ -d "$OPENCLAW_HOME/extensions" ]]; then
  log "Backing up agent extension docs..."
  mkdir -p "$DEST/extensions"
  for agent_dir in "$OPENCLAW_HOME/extensions"/*/; do
    # With an empty extensions/ the glob stays literal ('*/'); skip it
    # instead of creating a directory named '*' in the archive.
    [[ -d "$agent_dir" ]] || continue
    agent_name=$(basename "$agent_dir")
    mkdir -p "$DEST/extensions/$agent_name"
    if [[ -d "$agent_dir/agent" ]]; then
      cp -r "$agent_dir/agent" "$DEST/extensions/$agent_name/"
    fi
    [[ -f "$agent_dir/package.json" ]] && cp "$agent_dir/package.json" "$DEST/extensions/$agent_name/"
    if [[ "$TIER" == "full" ]]; then
      # Built extension code only in the full tier.
      [[ -d "$agent_dir/dist" ]] && cp -r "$agent_dir/dist" "$DEST/extensions/$agent_name/"
    fi
  done
fi
|
|
128
|
+
|
|
129
|
+
# Agent workspace .md files (memory markdown, prompts)
if [[ -d "$OPENCLAW_HOME/agents" ]]; then
  log "Backing up agent workspace docs..."
  mkdir -p "$DEST/agents"
  for agent_dir in "$OPENCLAW_HOME/agents"/*/; do
    # An empty agents/ leaves the glob unexpanded; skip the literal
    # '*/' match rather than archiving a bogus '*' directory.
    [[ -d "$agent_dir" ]] || continue
    agent_name=$(basename "$agent_dir")
    mkdir -p "$DEST/agents/$agent_name"

    # Markdown memory is small enough for both tiers.
    if [[ -d "$agent_dir/memory" ]]; then
      mkdir -p "$DEST/agents/$agent_name/memory"
      find "$agent_dir/memory" -name '*.md' -exec cp {} "$DEST/agents/$agent_name/memory/" \;
    fi
    if [[ -d "$agent_dir/agent" ]]; then
      cp -r "$agent_dir/agent" "$DEST/agents/$agent_name/"
    fi

    if [[ "$TIER" == "full" ]]; then
      [[ -d "$agent_dir/sessions" ]] && cp -r "$agent_dir/sessions" "$DEST/agents/$agent_name/"
      # Include non-.md memory files (JSON, JSONL) only in full
      if [[ -d "$agent_dir/memory" ]]; then
        find "$agent_dir/memory" \( -name '*.json' -o -name '*.jsonl' \) -exec cp {} "$DEST/agents/$agent_name/memory/" \;
      fi
    fi
  done
fi
|
|
154
|
+
|
|
155
|
+
# Sanitized openclaw.json (strip API keys)
# Default the expansion: a bare $SANITIZE_OPENCLAW_JSON would abort
# under `set -u` when config.sh omits it.
if [[ "${SANITIZE_OPENCLAW_JSON:-}" == "true" ]]; then
  for config_file in "$OPENCLAW_HOME/openclaw.json" "$CLAWDBOT_HOME/openclaw.json"; do
    if [[ -f "$config_file" ]]; then
      # Flatten the absolute path into a filename (/a/b.json -> _a_b.json).
      config_name=${config_file//\//_}
      log "Backing up sanitized config: $config_file"
      # [[:space:]] instead of \s: \s in an ERE is a GNU extension, and
      # this script otherwise supports BSD tools (cf. the stat -f
      # fallback) -- with \s the redaction silently no-ops on macOS and
      # API keys would land in the archive unredacted.
      sed -E 's/("apiKey"[[:space:]]*:[[:space:]]*)"[^"]+"/\1"REDACTED_FOR_BACKUP"/g' \
        "$config_file" > "$DEST/$config_name"
    fi
  done
fi
|
|
166
|
+
|
|
167
|
+
# Systemd, cron, logrotate configs — all flat files copied into one
# system/ folder, so a single loop over the three config lists suffices
# (order preserved: units, cron, logrotate).
mkdir -p "$DEST/system"
for sys_file in "${SYSTEMD_UNITS[@]}" "${CRON_FILES[@]}" "${LOGROTATE_FILES[@]}"; do
  [[ -f "$sys_file" ]] && cp "$sys_file" "$DEST/system/"
done
|
|
178
|
+
|
|
179
|
+
# --- Items included in FULL tier only ---

if [[ "$TIER" == "full" ]]; then

  # Vector indexes (sqlite-vec)
  if [[ -d "$OPENCLAW_HOME/memory" ]]; then
    log "Backing up vector indexes..."
    mkdir -p "$DEST/memory"
    find "$OPENCLAW_HOME/memory" -name '*.sqlite' -exec cp {} "$DEST/memory/" \;
  fi

  # Agent data stores (JSON assessment chains, temporal stores)
  for data_dir in \
    "$OPENCLAW_HOME/grillo-data" \
    "$OPENCLAW_HOME/.noah-data" \
    "$OPENCLAW_HOME/nole-data" \
    "$OPENCLAW_HOME/.mark-data" \
    "$OPENCLAW_HOME/.fleet-data" \
    "$OPENCLAW_HOME/.sam-artifacts" \
    "/root/.mark-data" \
    "/root/.jessie-data"; do
    if [[ -d "$data_dir" ]]; then
      dir_name=$(basename "$data_dir")
      log "Backing up data store: $dir_name"
      cp -r "$data_dir" "$DEST/$dir_name"
    fi
  done

  # Fleet-bus workspace (audit logs, agent cards, events)
  if [[ -d "$OPENCLAW_HOME/workspace" ]]; then
    log "Backing up workspace (fleet-bus, skills)..."
    cp -r "$OPENCLAW_HOME/workspace" "$DEST/workspace"
  fi

  # Clawd workspace (Jessie)
  if [[ -d "$CLAWD_WORKSPACE" ]]; then
    log "Backing up clawd workspace..."
    cp -r "$CLAWD_WORKSPACE" "$DEST/clawd"
  fi

  # Mighty Mark watchdog (scripts, state — excluding .env)
  if [[ -d "$MIGHTY_MARK_OPT" ]]; then
    log "Backing up Mighty Mark watchdog..."
    mkdir -p "$DEST/mighty-mark-watchdog"
    for item in "$MIGHTY_MARK_OPT"/*; do
      # An empty directory leaves the glob literal; without this guard
      # cp fails on the nonexistent '*' path and the ERR trap aborts
      # the entire backup.
      [[ -e "$item" ]] || continue
      item_name=$(basename "$item")
      # Never archive secrets, and don't recurse into our own output.
      [[ "$item_name" == ".env" ]] && continue
      [[ "$item_name" == "backups" ]] && continue
      [[ "$item_name" == "logs" ]] && continue
      cp -r "$item" "$DEST/mighty-mark-watchdog/"
    done
  fi

fi
|
|
233
|
+
|
|
234
|
+
# ─── Create archive ───────────────────────────────────────────────

log "Creating archive..."
mkdir -p "$BACKUP_DIR"
tar -czf "$BACKUP_DIR/$ARCHIVE_NAME" -C "$STAGING_DIR" fleet-backup

# Size in bytes: GNU stat (-c) first, BSD/macOS stat (-f) as fallback.
ARCHIVE_SIZE=$(stat -c%s "$BACKUP_DIR/$ARCHIVE_NAME" 2>/dev/null || stat -f%z "$BACKUP_DIR/$ARCHIVE_NAME" 2>/dev/null || echo 0)
# Pass the byte count via -v instead of splicing it into the awk
# program text -- interpolating a shell value into the program is
# fragile (an unexpected value becomes awk syntax).
ARCHIVE_SIZE_MB=$(awk -v bytes="$ARCHIVE_SIZE" 'BEGIN {printf "%.1f", bytes / 1048576}')

# Checksum: sha256sum (GNU) first; pipefail makes the first pipeline
# fail when the tool is missing, triggering the shasum (BSD) fallback.
CHECKSUM=$(sha256sum "$BACKUP_DIR/$ARCHIVE_NAME" 2>/dev/null | awk '{print $1}' || shasum -a 256 "$BACKUP_DIR/$ARCHIVE_NAME" | awk '{print $1}')

# Stats for the manifest and the GitHub commit message.
EXTENSION_COUNT=$(find "$DEST/extensions" -maxdepth 1 -mindepth 1 -type d 2>/dev/null | wc -l | tr -d ' ')
AGENT_COUNT=$(find "$DEST/agents" -maxdepth 1 -mindepth 1 -type d 2>/dev/null | wc -l | tr -d ' ')
MEMORY_FILE_COUNT=$(find "$DEST" \( -name '*.md' -o -name '*.json' -o -name '*.jsonl' -o -name '*.sqlite' \) 2>/dev/null | wc -l | tr -d ' ')
|
|
248
|
+
|
|
249
|
+
# ─── Write manifest ──────────────────────────────────────────────

# Machine-readable summary of this run (JSON); pushed to GitHub as
# latest-manifest.json and intended for the restore tooling.  Unquoted
# heredoc delimiter so the collected stats expand in place.
cat > "$MANIFEST_FILE" << MANIFEST_EOF
{
  "date": "$DATE",
  "timestamp": "$TIMESTAMP",
  "tier": "$TIER",
  "archive": "$ARCHIVE_NAME",
  "size_bytes": $ARCHIVE_SIZE,
  "size_mb": $ARCHIVE_SIZE_MB,
  "checksum": "sha256:$CHECKSUM",
  "extensions": $EXTENSION_COUNT,
  "agents": $AGENT_COUNT,
  "memory_files": $MEMORY_FILE_COUNT,
  "retention_days": $RETENTION_DAYS,
  "agent": "mighty-mark",
  "workflow": "nole-fleet-backup"
}
MANIFEST_EOF

# Human-readable summary to stdout / LOG_FILE.
log "Archive: $ARCHIVE_NAME ($ARCHIVE_SIZE_MB MB) [$TIER]"
log "  Extensions: $EXTENSION_COUNT"
log "  Agents: $AGENT_COUNT"
log "  Memory files: $MEMORY_FILE_COUNT"
log "  Checksum: sha256:$CHECKSUM"
|
|
274
|
+
|
|
275
|
+
# ─── Prune old backups ────────────────────────────────────────────

# -print -delete: find emits each doomed archive's path before removing
# it, and the loop logs them one by one.
log "Pruning backups older than $RETENTION_DAYS days..."
find "$BACKUP_DIR" -name 'fleet-*.tar.gz' -mtime +"$RETENTION_DAYS" -print -delete 2>/dev/null |
  while IFS= read -r pruned; do
    log "  Pruned: $(basename "$pruned")"
  done

REMAINING=$(find "$BACKUP_DIR" -name 'fleet-*.tar.gz' 2>/dev/null | wc -l | tr -d ' ')
log "Backup complete. $REMAINING archives retained."
log "Manifest: $MANIFEST_FILE"
|
|
285
|
+
|
|
286
|
+
# ─── Push to GitHub (off-site copy) ──────────────────────────────

if [[ "${PUSH_TO_GITHUB:-false}" == "true" ]] && [[ -d "${GITHUB_REPO_DIR:-}" ]]; then
  log "Pushing backup to GitHub (branch: $GITHUB_BACKUP_BRANCH)..."

  PUSH_WORK=$(mktemp -d "/tmp/fleet-backup-push-XXXXXX")

  push_cleanup() {
    rm -rf "$PUSH_WORK"
    git -C "$GITHUB_REPO_DIR" worktree prune 2>/dev/null || true
  }
  # Chain the handlers: a bare `trap push_cleanup EXIT` would REPLACE
  # the earlier `trap cleanup EXIT`, leaking the staging directory on
  # every exit from here on.
  trap 'push_cleanup; cleanup' EXIT

  # A push failure must be a soft warning, not a fatal error.  Under
  # `set -e` + the ERR trap, a failing `( ... )` would fire on_error
  # before PUSH_EXIT was ever checked, so both are suspended in the
  # parent while the subshell re-asserts errexit for its own commands.
  trap - ERR
  set +e
  (
    set -e
    cd "$GITHUB_REPO_DIR"

    REMOTE_URL=$(git remote get-url origin 2>/dev/null || echo "")
    if [[ -z "$REMOTE_URL" ]]; then
      log "  ERROR: No git remote found in $GITHUB_REPO_DIR"
      exit 1
    fi

    git fetch origin "$GITHUB_BACKUP_BRANCH" 2>/dev/null || true

    # Reuse the backup branch when it exists; otherwise start an
    # orphan branch so backups never share history with the code.
    if git rev-parse --verify "origin/$GITHUB_BACKUP_BRANCH" >/dev/null 2>&1; then
      git worktree add "$PUSH_WORK" "origin/$GITHUB_BACKUP_BRANCH" 2>/dev/null || true
      cd "$PUSH_WORK"
      git checkout -B "$GITHUB_BACKUP_BRANCH" 2>/dev/null || true
    else
      cd "$PUSH_WORK"
      git init
      git remote add origin "$REMOTE_URL"
      git checkout --orphan "$GITHUB_BACKUP_BRANCH"
    fi

    # Clean previous backup files from the working tree
    rm -f fleet-*.tar.gz fleet-*.tar.gz.part-* latest-manifest.json

    # Split large archives into chunks under GitHub's file size limit
    if [[ "$ARCHIVE_SIZE" -gt "${GITHUB_MAX_FILE_SIZE:-99614720}" ]]; then
      log "  Splitting archive into chunks (${ARCHIVE_SIZE_MB} MB > limit)..."
      split -b "${GITHUB_MAX_FILE_SIZE:-99614720}" -d -a 2 \
        "$BACKUP_DIR/$ARCHIVE_NAME" \
        "${ARCHIVE_NAME}.part-"
      CHUNK_COUNT=$(ls -1 "${ARCHIVE_NAME}".part-* 2>/dev/null | wc -l | tr -d ' ')
      log "  Split into $CHUNK_COUNT chunks"
    else
      cp "$BACKUP_DIR/$ARCHIVE_NAME" .
    fi

    cp "$MANIFEST_FILE" ./latest-manifest.json

    git add -A
    git config user.name "Mighty Mark (Automated)"
    git config user.email "mighty-mark@aiassesstech.com"

    if git diff --cached --quiet 2>/dev/null; then
      log "  No changes to push."
    else
      git commit -m "fleet $TIER backup: $DATE

Automated $TIER backup — $ARCHIVE_SIZE_MB MB
Extensions: $EXTENSION_COUNT | Agents: $AGENT_COUNT | Memory files: $MEMORY_FILE_COUNT
Checksum: sha256:${CHECKSUM:0:16}...
Agent: mighty-mark (sentinel duty — fleet safety checkpoint)"

      git push origin "$GITHUB_BACKUP_BRANCH" --force
      log "  Pushed to origin/$GITHUB_BACKUP_BRANCH"
    fi
  )
  PUSH_EXIT=$?
  set -e
  trap on_error ERR

  if [[ $PUSH_EXIT -ne 0 ]]; then
    log "  WARNING: GitHub push failed (backup is still saved locally)"
    notify_telegram "🟡 *MIGHTY MARK — GitHub Push Failed*%0A%0ATier: ${TIER} | Size: ${ARCHIVE_SIZE_MB} MB%0ABackup saved locally but GitHub push failed.%0A%0ACheck logs: /opt/fleet-backup/logs/backup.log%0AServer: $(hostname)%0ATime: $(date)"
  fi

  push_cleanup
fi
|