@aiassesstech/mighty-mark 0.3.31 → 0.3.32
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/checks/backup-orchestrator.d.ts +33 -0
- package/dist/checks/backup-orchestrator.d.ts.map +1 -0
- package/dist/checks/backup-orchestrator.js +260 -0
- package/dist/checks/backup-orchestrator.js.map +1 -0
- package/dist/checks/check-runner.d.ts +7 -0
- package/dist/checks/check-runner.d.ts.map +1 -1
- package/dist/checks/check-runner.js +22 -2
- package/dist/checks/check-runner.js.map +1 -1
- package/dist/checks/data-integrity.d.ts +2 -1
- package/dist/checks/data-integrity.d.ts.map +1 -1
- package/dist/checks/data-integrity.js +10 -2
- package/dist/checks/data-integrity.js.map +1 -1
- package/package.json +3 -1
- package/src/watchdog/fleet-backup/config.sh +74 -0
- package/src/watchdog/fleet-backup/fleet-backup.sh +363 -0
- package/src/watchdog/fleet-backup/fleet-restore.sh +437 -0
- package/src/watchdog/fleet-backup/test/test-backup.sh +395 -0
- package/src/watchdog/fleet-backup/test/test-restore.sh +302 -0
- package/src/watchdog/install.sh +61 -4
- package/src/watchdog/morning-check.sh +5 -13
|
@@ -0,0 +1,302 @@
|
|
|
1
|
+
#!/usr/bin/env bash
|
|
2
|
+
# ═══════════════════════════════════════════════════════════════════
|
|
3
|
+
# nole-fleet-backup — Restore Script Test Suite
|
|
4
|
+
#
|
|
5
|
+
# Tests fleet-restore.sh in a sandboxed environment.
|
|
6
|
+
# Creates real archives, then tests list, verify, inspect, extract.
|
|
7
|
+
# Interactive restore is tested non-interactively where possible.
|
|
8
|
+
#
|
|
9
|
+
# Usage:
|
|
10
|
+
# ./test/test-restore.sh
|
|
11
|
+
# ═══════════════════════════════════════════════════════════════════
|
|
12
|
+
|
|
13
|
+
set -euo pipefail
|
|
14
|
+
|
|
15
|
+
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
|
16
|
+
PROJECT_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
|
|
17
|
+
|
|
18
|
+
PASS_COUNT=0
|
|
19
|
+
FAIL_COUNT=0
|
|
20
|
+
SKIP_COUNT=0
|
|
21
|
+
|
|
22
|
+
RED='\033[0;31m'
|
|
23
|
+
GREEN='\033[0;32m'
|
|
24
|
+
YELLOW='\033[1;33m'
|
|
25
|
+
NC='\033[0m'
|
|
26
|
+
|
|
27
|
+
pass() { PASS_COUNT=$((PASS_COUNT + 1)); echo -e " ${GREEN}PASS${NC}: $1"; }
|
|
28
|
+
fail() { FAIL_COUNT=$((FAIL_COUNT + 1)); echo -e " ${RED}FAIL${NC}: $1"; echo -e " $2"; }
|
|
29
|
+
skip() { SKIP_COUNT=$((SKIP_COUNT + 1)); echo -e " ${YELLOW}SKIP${NC}: $1 — $2"; }
|
|
30
|
+
|
|
31
|
+
# ─── Sandbox ─────────────────────────────────────────────────────
|
|
32
|
+
|
|
33
|
+
SANDBOX=""
|
|
34
|
+
|
|
35
|
+
# create_test_archive NAME
#
# Build a small but real backup archive "$SANDBOX/backups/<NAME>.tar.gz"
# plus a matching latest-manifest.json, so fleet-restore.sh has genuine
# data to list/verify/inspect/extract.
#
# Globals:   SANDBOX (read) — sandbox root created by setup_sandbox.
# Arguments: $1 — archive base name. A YYYY-MM-DD substring (if present)
#            becomes the manifest date; "light" anywhere in the name
#            selects the light tier, otherwise full.
create_test_archive() {
  local name="$1"
  local content_dir="$SANDBOX/content"

  # Minimal fleet layout: one extension file, one agent memory file,
  # one system unit — enough for inspect/extract assertions.
  mkdir -p "$content_dir/fleet-backup/extensions/grillo/agent"
  mkdir -p "$content_dir/fleet-backup/agents/grillo/memory"
  mkdir -p "$content_dir/fleet-backup/system"
  echo "# Grillo SOUL" > "$content_dir/fleet-backup/extensions/grillo/agent/SOUL.md"
  echo "# Memory" > "$content_dir/fleet-backup/agents/grillo/memory/context.md"
  echo "config" > "$content_dir/fleet-backup/system/openclaw-gateway.service"

  tar -czf "$SANDBOX/backups/${name}.tar.gz" -C "$content_dir" fleet-backup
  rm -rf "$content_dir"

  # sha256sum on Linux; fall back to shasum -a 256 on macOS/BSD.
  local checksum
  checksum=$(sha256sum "$SANDBOX/backups/${name}.tar.gz" 2>/dev/null | awk '{print $1}' || \
             shasum -a 256 "$SANDBOX/backups/${name}.tar.gz" | awk '{print $1}')

  # Derive the manifest date from the archive name; fall back to today.
  # FIX: `|| true` guards the pipeline — under `set -euo pipefail`, a name
  # with no date made grep fail, pipefail propagated that into the
  # assignment, and the whole suite aborted before the fallback below
  # could ever run.
  local date_part
  date_part=$(echo "$name" | grep -oE '[0-9]{4}-[0-9]{2}-[0-9]{2}' | head -1 || true)
  [[ -z "$date_part" ]] && date_part=$(date +%Y-%m-%d)

  # Tier is inferred from the name; default is full.
  local tier="full"
  echo "$name" | grep -q "light" && tier="light"

  # Write the manifest the restore script reads for the "latest" backup.
  # stat -c%s is GNU; stat -f%z is the BSD/macOS spelling.
  cat > "$SANDBOX/latest-manifest.json" << MEOF
{
  "date": "$date_part",
  "timestamp": "${date_part}T09:00:00Z",
  "tier": "$tier",
  "archive": "${name}.tar.gz",
  "size_bytes": $(stat -c%s "$SANDBOX/backups/${name}.tar.gz" 2>/dev/null || stat -f%z "$SANDBOX/backups/${name}.tar.gz"),
  "size_mb": 0.1,
  "checksum": "sha256:$checksum",
  "extensions": 6,
  "agents": 6,
  "memory_files": 50,
  "retention_days": 35,
  "agent": "mighty-mark",
  "workflow": "nole-fleet-backup"
}
MEOF
}
|
|
77
|
+
|
|
78
|
+
# setup_sandbox — build a throwaway environment for one test.
#
# Creates a fresh temp tree with a backups directory, installs an
# executable copy of the project's fleet-restore.sh, and writes a
# sandbox-scoped config.sh plus a config.local.sh that sources it.
#
# Globals: SANDBOX (written), PROJECT_DIR (read).
setup_sandbox() {
  SANDBOX=$(mktemp -d "/tmp/fleet-restore-test-XXXXXX")
  mkdir -p "$SANDBOX/backups"

  # Install the script under test.
  cp "$PROJECT_DIR/fleet-restore.sh" "$SANDBOX/fleet-restore.sh"
  chmod +x "$SANDBOX/fleet-restore.sh"

  # Every path points inside the sandbox so the restore script can never
  # touch a real /opt installation. The $(( )) on the last line is
  # backslash-escaped so it lands in the file verbatim, unevaluated.
  cat > "$SANDBOX/config.sh" << SANDBOX_CFG
BACKUP_DIR="$SANDBOX/backups"
RETENTION_DAYS=35
LOG_FILE="$SANDBOX/logs/backup.log"
MANIFEST_FILE="$SANDBOX/latest-manifest.json"
OPENCLAW_HOME="$SANDBOX/openclaw"
CLAWDBOT_HOME="$SANDBOX/clawdbot"
CLAWD_WORKSPACE="$SANDBOX/clawd"
MIGHTY_MARK_OPT="$SANDBOX/mighty-mark"
NOLE_CREDS="$SANDBOX/nole"
FULL_BACKUP_DAY="0"
SYSTEMD_UNITS=()
CRON_FILES=()
LOGROTATE_FILES=()
EXCLUDE_PATTERNS=()
SANITIZE_OPENCLAW_JSON=true
PUSH_TO_GITHUB=false
GITHUB_REPO_DIR=""
GITHUB_SSH_HOST=""
GITHUB_BACKUP_BRANCH="backups"
GITHUB_MAX_FILE_SIZE=\$((95 * 1024 * 1024))
SANDBOX_CFG

  # config.local.sh mirrors a user-override file; here it simply
  # re-sources the generated config.
  cat > "$SANDBOX/config.local.sh" << SANDBOX_LOCAL
source "$SANDBOX/config.sh"
SANDBOX_LOCAL
}
|
|
112
|
+
|
|
113
|
+
# teardown_sandbox — remove the current sandbox plus any stray
# extraction directories left in /tmp by --extract tests.
# Globals: SANDBOX (read) — empty string means nothing to remove.
teardown_sandbox() {
  if [[ -n "$SANDBOX" ]]; then
    rm -rf "$SANDBOX"
  fi
  # Best-effort sweep; ignore errors when no extract dirs exist.
  rm -rf /tmp/fleet-restore-fleet-* 2>/dev/null || true
}
|
|
117
|
+
|
|
118
|
+
# run_restore [ARGS...] — invoke the sandboxed fleet-restore.sh.
#
# Runs from inside the sandbox with SCRIPT_DIR overridden so the restore
# script resolves its config relative to the sandbox rather than its
# install location. stdout and stderr are merged so callers can grep a
# single stream.
#
# FIX: the cd now happens in a subshell so the caller's working
# directory is untouched. Previously the bare `cd` leaked, leaving the
# suite sitting in a directory that teardown_sandbox then deleted.
#
# Globals: SANDBOX (read).
# Returns: the restore script's exit status.
run_restore() {
  (
    cd "$SANDBOX" || exit 1
    SCRIPT_DIR="$SANDBOX" bash "$SANDBOX/fleet-restore.sh" "$@" 2>&1
  )
}
|
|
122
|
+
|
|
123
|
+
# ─── Tests ────────────────────────────────────────────────────────
#
# Pattern for every test below: create a fresh sandbox (setup_sandbox),
# optionally seed it with archives (create_test_archive), invoke the
# restore script (run_restore), grep its merged stdout/stderr for an
# expected marker, record pass/fail, then teardown_sandbox so state
# never leaks between tests.
#
# NOTE(review): the suite runs under `set -euo pipefail`, so any
# `output=$(run_restore ...)` where the restore script exits non-zero
# aborts the whole suite rather than recording a failure. Tests that
# expect a non-zero exit use `|| true` or the `if run_restore` form.

echo "═══════════════════════════════════════════════════════"
echo " nole-fleet-backup — Restore Test Suite"
echo "═══════════════════════════════════════════════════════"
echo ""

# --- Test 1: --list with no backups ---
echo "▸ List (empty)"
setup_sandbox
output=$(run_restore --list)
if echo "$output" | grep -q "0 backup(s) found"; then
  pass "--list shows 0 backups when empty"
else
  fail "--list shows 0 backups when empty" "Output: $output"
fi
teardown_sandbox

# --- Test 2: --list with backups ---
echo "▸ List (with archives)"
setup_sandbox
create_test_archive "fleet-full-2026-03-01-090000"
create_test_archive "fleet-light-2026-03-02-090000"
output=$(run_restore --list)
if echo "$output" | grep -q "2 backup(s) found"; then
  pass "--list shows 2 backups"
else
  fail "--list shows 2 backups" "Output: $output"
fi

if echo "$output" | grep -q "FULL"; then
  pass "--list identifies full tier"
else
  fail "--list identifies full tier" "Output: $output"
fi

if echo "$output" | grep -q "light"; then
  pass "--list identifies light tier"
else
  fail "--list identifies light tier" "Output: $output"
fi
teardown_sandbox

# --- Test 3: --inspect shows contents ---
echo "▸ Inspect"
setup_sandbox
create_test_archive "fleet-full-2026-03-01-090000"
output=$(run_restore --inspect "fleet-full-2026-03-01-090000")
if echo "$output" | grep -q "SOUL.md"; then
  pass "--inspect shows archive contents"
else
  fail "--inspect shows archive contents" "Output: $output"
fi

if echo "$output" | grep -q "Total entries"; then
  pass "--inspect shows entry count"
else
  fail "--inspect shows entry count" "Output: $output"
fi
teardown_sandbox

# --- Test 4: --verify passes with matching checksum ---
# Today's date in the archive name keeps it aligned with the manifest
# written by create_test_archive.
echo "▸ Verify (valid)"
setup_sandbox
create_test_archive "fleet-full-$(date +%Y-%m-%d)-090000"
output=$(run_restore --verify "fleet-full-$(date +%Y-%m-%d)-090000")
if echo "$output" | grep -q "VERIFIED"; then
  pass "--verify passes with correct checksum"
else
  fail "--verify passes with correct checksum" "Output: $output"
fi
teardown_sandbox

# --- Test 5: --verify detects mismatch ---
# Bytes appended after the manifest checksum was recorded must flip the
# result to MISMATCH; `|| true` keeps set -e from aborting on the
# expected non-zero exit.
echo "▸ Verify (tampered)"
setup_sandbox
create_test_archive "fleet-full-$(date +%Y-%m-%d)-090000"
echo "tampered" >> "$SANDBOX/backups/fleet-full-$(date +%Y-%m-%d)-090000.tar.gz"
output=$(run_restore --verify "fleet-full-$(date +%Y-%m-%d)-090000" 2>&1 || true)
if echo "$output" | grep -q "MISMATCH"; then
  pass "--verify detects tampered archive"
else
  fail "--verify detects tampered archive" "Output: $output"
fi
teardown_sandbox

# --- Test 6: --extract creates temp dir ---
# The restore script extracts to a predictable /tmp path derived from
# the archive name; verify both the directory and its inner layout.
echo "▸ Extract"
setup_sandbox
create_test_archive "fleet-full-2026-03-01-090000"
output=$(run_restore --extract "fleet-full-2026-03-01-090000")
extract_dir="/tmp/fleet-restore-fleet-full-2026-03-01-090000"
if [[ -d "$extract_dir" ]]; then
  pass "--extract creates temp directory"
  if [[ -f "$extract_dir/fleet-backup/extensions/grillo/agent/SOUL.md" ]]; then
    pass "--extract preserves file structure"
  else
    fail "--extract preserves file structure" "SOUL.md not found in extracted archive"
  fi
  rm -rf "$extract_dir"
else
  fail "--extract creates temp directory" "Not found: $extract_dir"
fi
teardown_sandbox

# --- Test 7: --help exits 0 ---
echo "▸ Help"
setup_sandbox
if run_restore --help > /dev/null 2>&1; then
  pass "--help exits 0"
else
  fail "--help exits 0" "Non-zero exit"
fi
teardown_sandbox

# --- Test 8: Unknown command exits 1 ---
# Inverted branches: success here means the script wrongly accepted
# the bogus flag.
echo "▸ Unknown command"
setup_sandbox
if run_restore --bogus > /dev/null 2>&1; then
  fail "Unknown command exits 1" "Got exit 0"
else
  pass "Unknown command exits 1"
fi
teardown_sandbox

# --- Test 9: --inspect without archive name exits 1 ---
echo "▸ Missing archive name"
setup_sandbox
if run_restore --inspect 2>/dev/null; then
  fail "--inspect without name exits 1" "Got exit 0"
else
  pass "--inspect without name exits 1"
fi
teardown_sandbox

# --- Test 10: Non-existent archive exits 1 ---
echo "▸ Missing archive"
setup_sandbox
if run_restore --inspect "nonexistent-2026-01-01-090000" > /dev/null 2>&1; then
  fail "Non-existent archive exits 1" "Got exit 0"
else
  pass "Non-existent archive exits 1"
fi
teardown_sandbox

# --- Test 11: Partial name matching ---
echo "▸ Partial name match"
setup_sandbox
create_test_archive "fleet-full-2026-03-01-090000"
output=$(run_restore --inspect "2026-03-01" 2>&1)
# \| is BRE alternation: either marker proves the archive was resolved.
if echo "$output" | grep -q "SOUL.md\|Total entries"; then
  pass "Partial name match finds archive"
else
  fail "Partial name match finds archive" "Output: $output"
fi
teardown_sandbox

# --- Test 12: Default (no args) lists backups ---
echo "▸ Default action"
setup_sandbox
create_test_archive "fleet-full-2026-03-01-090000"
output=$(run_restore)
if echo "$output" | grep -q "1 backup(s) found"; then
  pass "Default action lists backups"
else
  fail "Default action lists backups" "Output: $output"
fi
teardown_sandbox

# ─── Summary ──────────────────────────────────────────────────────

echo ""
echo "═══════════════════════════════════════════════════════"
TOTAL=$((PASS_COUNT + FAIL_COUNT + SKIP_COUNT))
echo -e "  Results: ${GREEN}${PASS_COUNT} passed${NC}, ${RED}${FAIL_COUNT} failed${NC}, ${YELLOW}${SKIP_COUNT} skipped${NC} (${TOTAL} total)"
echo "═══════════════════════════════════════════════════════"

# Non-zero exit signals CI that at least one assertion failed.
if [[ "$FAIL_COUNT" -gt 0 ]]; then
  exit 1
fi
|
package/src/watchdog/install.sh
CHANGED
|
@@ -189,29 +189,79 @@ LREOF
|
|
|
189
189
|
fi
|
|
190
190
|
|
|
191
191
|
# ── Step 6: Install fleet backup scripts ──
|
|
192
|
-
echo "[6/8] Installing fleet backup
|
|
192
|
+
echo "[6/8] Installing fleet backup..."
|
|
193
193
|
|
|
194
194
|
FLEET_BACKUP_DIR="/opt/fleet-backup"
|
|
195
195
|
FLEET_REPO_DIR="/opt/nole-fleet-backup"
|
|
196
196
|
|
|
197
|
+
# Source priority:
|
|
198
|
+
# 1. Bundled in npm package (src/watchdog/fleet-backup/) — preferred
|
|
199
|
+
# 2. Standalone git clone (/opt/nole-fleet-backup) — fallback
|
|
200
|
+
FLEET_BUNDLED_DIR="${SOURCE_DIR}/fleet-backup"
|
|
201
|
+
|
|
197
202
|
if [ "$DRY_RUN" = true ]; then
|
|
198
203
|
echo " [DRY RUN] Would set up fleet backup at ${FLEET_BACKUP_DIR}"
|
|
204
|
+
elif [ -d "$FLEET_BUNDLED_DIR" ] && [ -f "$FLEET_BUNDLED_DIR/fleet-backup.sh" ]; then
|
|
205
|
+
echo " Source: bundled in mighty-mark npm package"
|
|
206
|
+
mkdir -p "${FLEET_BACKUP_DIR}/backups"
|
|
207
|
+
mkdir -p "${FLEET_BACKUP_DIR}/logs"
|
|
208
|
+
mkdir -p "${FLEET_BACKUP_DIR}/test"
|
|
209
|
+
cp "$FLEET_BUNDLED_DIR/fleet-backup.sh" "${FLEET_BACKUP_DIR}/"
|
|
210
|
+
cp "$FLEET_BUNDLED_DIR/fleet-restore.sh" "${FLEET_BACKUP_DIR}/"
|
|
211
|
+
cp "$FLEET_BUNDLED_DIR/config.sh" "${FLEET_BACKUP_DIR}/"
|
|
212
|
+
# Preserve config.local.sh if it already exists on disk (user customizations)
|
|
213
|
+
if [ ! -f "${FLEET_BACKUP_DIR}/config.local.sh" ] && [ -f "$FLEET_BUNDLED_DIR/config.local.sh" ]; then
|
|
214
|
+
cp "$FLEET_BUNDLED_DIR/config.local.sh" "${FLEET_BACKUP_DIR}/"
|
|
215
|
+
fi
|
|
216
|
+
# Install test scripts for daily verification
|
|
217
|
+
if [ -d "$FLEET_BUNDLED_DIR/test" ]; then
|
|
218
|
+
cp "$FLEET_BUNDLED_DIR/test/test-backup.sh" "${FLEET_BACKUP_DIR}/test/" 2>/dev/null || true
|
|
219
|
+
cp "$FLEET_BUNDLED_DIR/test/test-restore.sh" "${FLEET_BACKUP_DIR}/test/" 2>/dev/null || true
|
|
220
|
+
chmod +x "${FLEET_BACKUP_DIR}/test/"*.sh 2>/dev/null || true
|
|
221
|
+
echo " Tests: ${FLEET_BACKUP_DIR}/test/"
|
|
222
|
+
fi
|
|
223
|
+
chmod +x "${FLEET_BACKUP_DIR}/fleet-backup.sh"
|
|
224
|
+
chmod +x "${FLEET_BACKUP_DIR}/fleet-restore.sh"
|
|
225
|
+
echo " Installed: fleet-backup.sh, fleet-restore.sh, config.sh"
|
|
226
|
+
echo " Backups: ${FLEET_BACKUP_DIR}/backups/"
|
|
227
|
+
echo " Logs: ${FLEET_BACKUP_DIR}/logs/"
|
|
228
|
+
|
|
229
|
+
# Install fleet-backup logrotate
|
|
230
|
+
cat > /etc/logrotate.d/fleet-backup <<FBLREOF
|
|
231
|
+
${FLEET_BACKUP_DIR}/logs/*.log {
|
|
232
|
+
weekly
|
|
233
|
+
rotate 8
|
|
234
|
+
compress
|
|
235
|
+
delaycompress
|
|
236
|
+
missingok
|
|
237
|
+
notifempty
|
|
238
|
+
create 644 root root
|
|
239
|
+
}
|
|
240
|
+
FBLREOF
|
|
241
|
+
chmod 644 /etc/logrotate.d/fleet-backup
|
|
242
|
+
echo " Logrotate: /etc/logrotate.d/fleet-backup"
|
|
199
243
|
elif [ -d "$FLEET_REPO_DIR" ] && [ -f "$FLEET_REPO_DIR/fleet-backup.sh" ]; then
|
|
244
|
+
echo " Source: standalone git clone at ${FLEET_REPO_DIR}"
|
|
200
245
|
mkdir -p "${FLEET_BACKUP_DIR}/backups"
|
|
201
246
|
mkdir -p "${FLEET_BACKUP_DIR}/logs"
|
|
247
|
+
mkdir -p "${FLEET_BACKUP_DIR}/test"
|
|
202
248
|
cp "$FLEET_REPO_DIR/fleet-backup.sh" "${FLEET_BACKUP_DIR}/"
|
|
203
249
|
cp "$FLEET_REPO_DIR/fleet-restore.sh" "${FLEET_BACKUP_DIR}/"
|
|
204
250
|
cp "$FLEET_REPO_DIR/config.sh" "${FLEET_BACKUP_DIR}/"
|
|
205
251
|
if [ -f "$FLEET_REPO_DIR/config.local.sh" ]; then
|
|
206
252
|
cp "$FLEET_REPO_DIR/config.local.sh" "${FLEET_BACKUP_DIR}/"
|
|
207
253
|
fi
|
|
254
|
+
if [ -d "$FLEET_REPO_DIR/test" ]; then
|
|
255
|
+
cp "$FLEET_REPO_DIR/test/"*.sh "${FLEET_BACKUP_DIR}/test/" 2>/dev/null || true
|
|
256
|
+
chmod +x "${FLEET_BACKUP_DIR}/test/"*.sh 2>/dev/null || true
|
|
257
|
+
echo " Tests: ${FLEET_BACKUP_DIR}/test/"
|
|
258
|
+
fi
|
|
208
259
|
chmod +x "${FLEET_BACKUP_DIR}/fleet-backup.sh"
|
|
209
260
|
chmod +x "${FLEET_BACKUP_DIR}/fleet-restore.sh"
|
|
210
261
|
echo " Installed: fleet-backup.sh, fleet-restore.sh, config.sh"
|
|
211
262
|
echo " Backups: ${FLEET_BACKUP_DIR}/backups/"
|
|
212
263
|
echo " Logs: ${FLEET_BACKUP_DIR}/logs/"
|
|
213
264
|
|
|
214
|
-
# Install fleet-backup logrotate
|
|
215
265
|
cat > /etc/logrotate.d/fleet-backup <<FBLREOF
|
|
216
266
|
${FLEET_BACKUP_DIR}/logs/*.log {
|
|
217
267
|
weekly
|
|
@@ -226,8 +276,15 @@ FBLREOF
|
|
|
226
276
|
chmod 644 /etc/logrotate.d/fleet-backup
|
|
227
277
|
echo " Logrotate: /etc/logrotate.d/fleet-backup"
|
|
228
278
|
else
|
|
229
|
-
echo " Skipped —
|
|
230
|
-
echo "
|
|
279
|
+
echo " Skipped — fleet backup scripts not found"
|
|
280
|
+
echo " Expected in npm package at: ${FLEET_BUNDLED_DIR}/"
|
|
281
|
+
echo " Or standalone repo at: ${FLEET_REPO_DIR}/"
|
|
282
|
+
fi
|
|
283
|
+
|
|
284
|
+
# Remove standalone fleet-backup cron — now orchestrated by morning-check.sh via TypeScript engine
|
|
285
|
+
if [ -f /etc/cron.d/fleet-backup ]; then
|
|
286
|
+
rm -f /etc/cron.d/fleet-backup
|
|
287
|
+
echo " Removed: /etc/cron.d/fleet-backup (now orchestrated by Mighty Mark)"
|
|
231
288
|
fi
|
|
232
289
|
|
|
233
290
|
# ── Step 7: Initialize state ──
|
|
@@ -21,19 +21,11 @@ mkdir -p "${MARK_LOG_DIR}" "${MARK_STATE_DIR}"
|
|
|
21
21
|
|
|
22
22
|
log_morning "Morning check starting"
|
|
23
23
|
|
|
24
|
-
#
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
log_morning "Fleet backup completed successfully"
|
|
30
|
-
else
|
|
31
|
-
log_morning "WARN: Fleet backup failed (exit code $?)"
|
|
32
|
-
notify_telegram "🟡 *MIGHTY MARK — Backup Warning*%0A%0AFleet backup failed during morning check.%0AHealth check will proceed.%0A%0ACheck logs: /opt/fleet-backup/logs/backup.log%0AServer: $(hostname)%0ATime: $(date)"
|
|
33
|
-
fi
|
|
34
|
-
else
|
|
35
|
-
log_morning "Fleet backup script not found at $FLEET_BACKUP, skipping"
|
|
36
|
-
fi
|
|
24
|
+
# Fleet backup is now orchestrated by the TypeScript health engine
|
|
25
|
+
# (backup-orchestrator.ts). It runs backup + tests + manifest validation
|
|
26
|
+
# as part of the data integrity checks. No need to trigger it separately.
|
|
27
|
+
# If the TypeScript engine fails and we fall back to bash, the backup
|
|
28
|
+
# won't run — but it will catch up on the next successful morning check.
|
|
37
29
|
|
|
38
30
|
# ── Try Node.js health check first ──
|
|
39
31
|
if command -v npx &> /dev/null; then
|