@aiassesstech/mighty-mark 0.3.29 → 0.3.32
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/checks/backup-orchestrator.d.ts +33 -0
- package/dist/checks/backup-orchestrator.d.ts.map +1 -0
- package/dist/checks/backup-orchestrator.js +260 -0
- package/dist/checks/backup-orchestrator.js.map +1 -0
- package/dist/checks/check-runner.d.ts +7 -0
- package/dist/checks/check-runner.d.ts.map +1 -1
- package/dist/checks/check-runner.js +22 -2
- package/dist/checks/check-runner.js.map +1 -1
- package/dist/checks/data-integrity.d.ts +8 -1
- package/dist/checks/data-integrity.d.ts.map +1 -1
- package/dist/checks/data-integrity.js +99 -1
- package/dist/checks/data-integrity.js.map +1 -1
- package/package.json +3 -1
- package/src/watchdog/fleet-backup/config.sh +74 -0
- package/src/watchdog/fleet-backup/fleet-backup.sh +363 -0
- package/src/watchdog/fleet-backup/fleet-restore.sh +437 -0
- package/src/watchdog/fleet-backup/test/test-backup.sh +395 -0
- package/src/watchdog/fleet-backup/test/test-restore.sh +302 -0
- package/src/watchdog/install.sh +112 -4
- package/src/watchdog/morning-check.sh +6 -0
|
@@ -0,0 +1,302 @@
|
|
|
1
|
+
#!/usr/bin/env bash
# ═══════════════════════════════════════════════════════════════════
# nole-fleet-backup — Restore Script Test Suite
#
# Tests fleet-restore.sh in a sandboxed environment.
# Creates real archives, then tests list, verify, inspect, extract.
# Interactive restore is tested non-interactively where possible.
#
# Usage:
#   ./test/test-restore.sh
# ═══════════════════════════════════════════════════════════════════

set -euo pipefail

# Absolute path of this test directory and of the project root
# (fleet-restore.sh is expected one level up).
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"

# Aggregate result counters, reported in the summary at the bottom.
PASS_COUNT=0
FAIL_COUNT=0
SKIP_COUNT=0

# ANSI colors (NC = reset).
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'

# Result reporters. printf replaces `echo -e`: the color escapes still
# expand (they live in the format string), but the messages go through
# %s, so backslashes inside captured output (fail()'s $2 often embeds
# raw fleet-restore.sh output) are never re-interpreted as escapes.
pass() { PASS_COUNT=$((PASS_COUNT + 1)); printf " ${GREEN}PASS${NC}: %s\n" "$1"; }
fail() { FAIL_COUNT=$((FAIL_COUNT + 1)); printf " ${RED}FAIL${NC}: %s\n %s\n" "$1" "$2"; }
skip() { SKIP_COUNT=$((SKIP_COUNT + 1)); printf " ${YELLOW}SKIP${NC}: %s — %s\n" "$1" "$2"; }
|
|
30
|
+
|
|
31
|
+
# ─── Sandbox ─────────────────────────────────────────────────────

SANDBOX=""

# Fabricate a small but realistic backup archive plus a matching
# latest-manifest.json inside the sandbox, mimicking the artifacts
# fleet-backup.sh produces. Expects the global $SANDBOX to exist and
# contain a backups/ directory. $1 is the archive basename (no .tar.gz).
create_test_archive() {
  local archive_name="$1"
  local staging="$SANDBOX/content"

  # Miniature fleet tree: one extension SOUL, one agent memory file,
  # one systemd unit.
  mkdir -p "$staging/fleet-backup/extensions/grillo/agent" \
           "$staging/fleet-backup/agents/grillo/memory" \
           "$staging/fleet-backup/system"
  echo "# Grillo SOUL" > "$staging/fleet-backup/extensions/grillo/agent/SOUL.md"
  echo "# Memory" > "$staging/fleet-backup/agents/grillo/memory/context.md"
  echo "config" > "$staging/fleet-backup/system/openclaw-gateway.service"

  tar -czf "$SANDBOX/backups/${archive_name}.tar.gz" -C "$staging" fleet-backup
  rm -rf "$staging"

  # GNU sha256sum first, BSD/macOS shasum as fallback (the script's
  # global pipefail makes the first pipeline report sha256sum's absence).
  local digest
  digest=$(sha256sum "$SANDBOX/backups/${archive_name}.tar.gz" 2>/dev/null | awk '{print $1}' || \
    shasum -a 256 "$SANDBOX/backups/${archive_name}.tar.gz" | awk '{print $1}')

  # Date stamp comes from the archive name when present, else today.
  # NOTE(review): under set -euo pipefail a name with no date makes the
  # grep pipeline (and so this assignment) exit 1 — confirm callers
  # always embed a date.
  local stamp
  stamp=$(echo "$archive_name" | grep -oE '[0-9]{4}-[0-9]{2}-[0-9]{2}' | head -1)
  if [[ -z "$stamp" ]]; then
    stamp=$(date +%Y-%m-%d)
  fi

  # "light" anywhere in the name marks a light-tier backup.
  local backup_tier="full"
  if [[ "$archive_name" == *light* ]]; then
    backup_tier="light"
  fi

  cat > "$SANDBOX/latest-manifest.json" << MEOF
{
  "date": "$stamp",
  "timestamp": "${stamp}T09:00:00Z",
  "tier": "$backup_tier",
  "archive": "${archive_name}.tar.gz",
  "size_bytes": $(stat -c%s "$SANDBOX/backups/${archive_name}.tar.gz" 2>/dev/null || stat -f%z "$SANDBOX/backups/${archive_name}.tar.gz"),
  "size_mb": 0.1,
  "checksum": "sha256:$digest",
  "extensions": 6,
  "agents": 6,
  "memory_files": 50,
  "retention_days": 35,
  "agent": "mighty-mark",
  "workflow": "nole-fleet-backup"
}
MEOF
}
|
|
77
|
+
|
|
78
|
+
# Provision a disposable sandbox: temp dir with backups/, a generated
# config.sh pointing every path inside the sandbox, and a private copy
# of the fleet-restore.sh under test. Sets the global SANDBOX; callers
# must pair every setup_sandbox with teardown_sandbox.
setup_sandbox() {
  SANDBOX=$(mktemp -d "/tmp/fleet-restore-test-XXXXXX")
  mkdir -p "$SANDBOX/backups"

  # Unquoted delimiter: $SANDBOX expands now so the generated config is
  # self-contained. GITHUB_MAX_FILE_SIZE escapes its \$(( )) so the
  # arithmetic runs when config.sh is sourced, not here.
  cat > "$SANDBOX/config.sh" << CFGEOF
BACKUP_DIR="$SANDBOX/backups"
RETENTION_DAYS=35
LOG_FILE="$SANDBOX/logs/backup.log"
MANIFEST_FILE="$SANDBOX/latest-manifest.json"
OPENCLAW_HOME="$SANDBOX/openclaw"
CLAWDBOT_HOME="$SANDBOX/clawdbot"
CLAWD_WORKSPACE="$SANDBOX/clawd"
MIGHTY_MARK_OPT="$SANDBOX/mighty-mark"
NOLE_CREDS="$SANDBOX/nole"
FULL_BACKUP_DAY="0"
SYSTEMD_UNITS=()
CRON_FILES=()
LOGROTATE_FILES=()
EXCLUDE_PATTERNS=()
SANITIZE_OPENCLAW_JSON=true
PUSH_TO_GITHUB=false
GITHUB_REPO_DIR=""
GITHUB_SSH_HOST=""
GITHUB_BACKUP_BRANCH="backups"
GITHUB_MAX_FILE_SIZE=\$((95 * 1024 * 1024))
CFGEOF

  # Copy the script under review so it resolves config relative to the
  # sandbox, not the real installation.
  cp "$PROJECT_DIR/fleet-restore.sh" "$SANDBOX/fleet-restore.sh"
  chmod +x "$SANDBOX/fleet-restore.sh"

  # config.local.sh merely chains to the generated config.sh.
  cat > "$SANDBOX/config.local.sh" << LOCALEOF
source "$SANDBOX/config.sh"
LOCALEOF
}
|
|
112
|
+
|
|
113
|
+
# Remove the sandbox and any extraction dirs fleet-restore.sh left in
# /tmp. Safe to call when SANDBOX is empty: the guard is a real `if`
# because `[[ -n … ]] && rm …` returns 1 on an empty SANDBOX, which
# would abort the whole suite under `set -e`.
teardown_sandbox() {
  if [[ -n "$SANDBOX" ]]; then
    rm -rf -- "$SANDBOX"
  fi
  # Best-effort cleanup of --extract leftovers; glob may match nothing.
  rm -rf /tmp/fleet-restore-fleet-* 2>/dev/null || true
}
|
|
117
|
+
|
|
118
|
+
# Run the sandboxed fleet-restore.sh with the given arguments, stderr
# folded into stdout. Runs in a subshell so the `cd` cannot leak: the
# original changed the caller's working directory permanently, leaving
# the suite sitting in a directory that teardown_sandbox then deletes.
# The subshell's exit status (i.e. the script's) is preserved.
run_restore() {
  (
    cd "$SANDBOX" || exit 1
    SCRIPT_DIR="$SANDBOX" bash "$SANDBOX/fleet-restore.sh" "$@" 2>&1
  )
}
|
|
122
|
+
|
|
123
|
+
# ─── Tests ────────────────────────────────────────────────────────
# Each test provisions a fresh sandbox, drives fleet-restore.sh via
# run_restore, greps the captured output for an expected marker, and
# tears the sandbox down. NOTE(review): the suite runs under `set -e`,
# so a bare `output=$(run_restore …)` assignment (Tests 1–4, 6, 12)
# aborts the whole suite if fleet-restore.sh exits non-zero instead of
# recording a FAIL — confirm those invocations can never fail.

echo "═══════════════════════════════════════════════════════"
echo " nole-fleet-backup — Restore Test Suite"
echo "═══════════════════════════════════════════════════════"
echo ""

# --- Test 1: --list with no backups ---
# An empty backups/ dir must be reported as zero backups, not an error.
echo "▸ List (empty)"
setup_sandbox
output=$(run_restore --list)
if echo "$output" | grep -q "0 backup(s) found"; then
  pass "--list shows 0 backups when empty"
else
  fail "--list shows 0 backups when empty" "Output: $output"
fi
teardown_sandbox

# --- Test 2: --list with backups ---
# Two archives of different tiers: the count and both tier labels must
# appear in the listing.
echo "▸ List (with archives)"
setup_sandbox
create_test_archive "fleet-full-2026-03-01-090000"
create_test_archive "fleet-light-2026-03-02-090000"
output=$(run_restore --list)
if echo "$output" | grep -q "2 backup(s) found"; then
  pass "--list shows 2 backups"
else
  fail "--list shows 2 backups" "Output: $output"
fi

if echo "$output" | grep -q "FULL"; then
  pass "--list identifies full tier"
else
  fail "--list identifies full tier" "Output: $output"
fi

if echo "$output" | grep -q "light"; then
  pass "--list identifies light tier"
else
  fail "--list identifies light tier" "Output: $output"
fi
teardown_sandbox

# --- Test 3: --inspect shows contents ---
echo "▸ Inspect"
setup_sandbox
create_test_archive "fleet-full-2026-03-01-090000"
output=$(run_restore --inspect "fleet-full-2026-03-01-090000")
if echo "$output" | grep -q "SOUL.md"; then
  pass "--inspect shows archive contents"
else
  fail "--inspect shows archive contents" "Output: $output"
fi

if echo "$output" | grep -q "Total entries"; then
  pass "--inspect shows entry count"
else
  fail "--inspect shows entry count" "Output: $output"
fi
teardown_sandbox

# --- Test 4: --verify passes with matching checksum ---
# Uses today's date in the name so archive and manifest stay in step.
echo "▸ Verify (valid)"
setup_sandbox
create_test_archive "fleet-full-$(date +%Y-%m-%d)-090000"
output=$(run_restore --verify "fleet-full-$(date +%Y-%m-%d)-090000")
if echo "$output" | grep -q "VERIFIED"; then
  pass "--verify passes with correct checksum"
else
  fail "--verify passes with correct checksum" "Output: $output"
fi
teardown_sandbox

# --- Test 5: --verify detects mismatch ---
# Appending bytes after the manifest checksum was recorded must be
# flagged; `|| true` keeps set -e from killing the suite on the
# expected non-zero exit.
echo "▸ Verify (tampered)"
setup_sandbox
create_test_archive "fleet-full-$(date +%Y-%m-%d)-090000"
echo "tampered" >> "$SANDBOX/backups/fleet-full-$(date +%Y-%m-%d)-090000.tar.gz"
output=$(run_restore --verify "fleet-full-$(date +%Y-%m-%d)-090000" 2>&1 || true)
if echo "$output" | grep -q "MISMATCH"; then
  pass "--verify detects tampered archive"
else
  fail "--verify detects tampered archive" "Output: $output"
fi
teardown_sandbox

# --- Test 6: --extract creates temp dir ---
# NOTE(review): assumes fleet-restore.sh extracts to
# /tmp/fleet-restore-<archive-name> — confirm against fleet-restore.sh.
echo "▸ Extract"
setup_sandbox
create_test_archive "fleet-full-2026-03-01-090000"
output=$(run_restore --extract "fleet-full-2026-03-01-090000")
extract_dir="/tmp/fleet-restore-fleet-full-2026-03-01-090000"
if [[ -d "$extract_dir" ]]; then
  pass "--extract creates temp directory"
  if [[ -f "$extract_dir/fleet-backup/extensions/grillo/agent/SOUL.md" ]]; then
    pass "--extract preserves file structure"
  else
    fail "--extract preserves file structure" "SOUL.md not found in extracted archive"
  fi
  rm -rf "$extract_dir"
else
  fail "--extract creates temp directory" "Not found: $extract_dir"
fi
teardown_sandbox

# --- Test 7: --help exits 0 ---
echo "▸ Help"
setup_sandbox
if run_restore --help > /dev/null 2>&1; then
  pass "--help exits 0"
else
  fail "--help exits 0" "Non-zero exit"
fi
teardown_sandbox

# --- Test 8: Unknown command exits 1 ---
echo "▸ Unknown command"
setup_sandbox
if run_restore --bogus > /dev/null 2>&1; then
  fail "Unknown command exits 1" "Got exit 0"
else
  pass "Unknown command exits 1"
fi
teardown_sandbox

# --- Test 9: --inspect without archive name exits 1 ---
# NOTE(review): only stderr is discarded here, so any usage text the
# script prints to stdout leaks into the suite output — confirm
# whether that is intentional (Tests 8/10 silence both streams).
echo "▸ Missing archive name"
setup_sandbox
if run_restore --inspect 2>/dev/null; then
  fail "--inspect without name exits 1" "Got exit 0"
else
  pass "--inspect without name exits 1"
fi
teardown_sandbox

# --- Test 10: Non-existent archive exits 1 ---
echo "▸ Missing archive"
setup_sandbox
if run_restore --inspect "nonexistent-2026-01-01-090000" > /dev/null 2>&1; then
  fail "Non-existent archive exits 1" "Got exit 0"
else
  pass "Non-existent archive exits 1"
fi
teardown_sandbox

# --- Test 11: Partial name matching ---
# A bare date fragment should resolve to the single matching archive.
echo "▸ Partial name match"
setup_sandbox
create_test_archive "fleet-full-2026-03-01-090000"
output=$(run_restore --inspect "2026-03-01" 2>&1)
if echo "$output" | grep -q "SOUL.md\|Total entries"; then
  pass "Partial name match finds archive"
else
  fail "Partial name match finds archive" "Output: $output"
fi
teardown_sandbox

# --- Test 12: Default (no args) lists backups ---
echo "▸ Default action"
setup_sandbox
create_test_archive "fleet-full-2026-03-01-090000"
output=$(run_restore)
if echo "$output" | grep -q "1 backup(s) found"; then
  pass "Default action lists backups"
else
  fail "Default action lists backups" "Output: $output"
fi
teardown_sandbox

# ─── Summary ──────────────────────────────────────────────────────

echo ""
echo "═══════════════════════════════════════════════════════"
TOTAL=$((PASS_COUNT + FAIL_COUNT + SKIP_COUNT))
echo -e " Results: ${GREEN}${PASS_COUNT} passed${NC}, ${RED}${FAIL_COUNT} failed${NC}, ${YELLOW}${SKIP_COUNT} skipped${NC} (${TOTAL} total)"
echo "═══════════════════════════════════════════════════════"

# Non-zero exit for CI when anything failed.
if [[ "$FAIL_COUNT" -gt 0 ]]; then
  exit 1
fi
|
package/src/watchdog/install.sh
CHANGED
|
@@ -147,8 +147,14 @@ CRON_TZ=America/Chicago
|
|
|
147
147
|
*/5 * * * * root ${MARK_HOME}/watchdog.sh >> ${MARK_HOME}/logs/watchdog-cron.log 2>&1
|
|
148
148
|
|
|
149
149
|
# Morning Check: daily at 6:00 AM Chicago time (auto-adjusts for CDT/CST)
|
|
150
|
+
# Runs fleet backup first, then health check (including backup freshness validation)
|
|
150
151
|
0 6 * * * root ${MARK_HOME}/morning-check.sh >> ${MARK_HOME}/logs/morning-cron.log 2>&1
|
|
151
152
|
CRONEOF
|
|
153
|
+
# Remove standalone fleet-backup cron if it exists (now integrated into morning check)
|
|
154
|
+
if [ -f /etc/cron.d/fleet-backup ]; then
|
|
155
|
+
rm -f /etc/cron.d/fleet-backup
|
|
156
|
+
echo " Removed: /etc/cron.d/fleet-backup (now integrated into morning check)"
|
|
157
|
+
fi
|
|
152
158
|
chmod 644 /etc/cron.d/mighty-mark
|
|
153
159
|
echo " Installed: /etc/cron.d/mighty-mark"
|
|
154
160
|
echo " Watchdog: */5 * * * * (every 5 min)"
|
|
@@ -182,8 +188,107 @@ LREOF
|
|
|
182
188
|
echo " Installed: /etc/logrotate.d/mighty-mark (14-day rotation)"
|
|
183
189
|
fi
|
|
184
190
|
|
|
185
|
-
# ── Step 6:
|
|
186
|
-
echo "[6/
|
|
191
|
+
# ── Step 6: Install fleet backup scripts ──
|
|
192
|
+
echo "[6/8] Installing fleet backup..."
|
|
193
|
+
|
|
194
|
+
FLEET_BACKUP_DIR="/opt/fleet-backup"
|
|
195
|
+
FLEET_REPO_DIR="/opt/nole-fleet-backup"
|
|
196
|
+
|
|
197
|
+
# Source priority:
|
|
198
|
+
# 1. Bundled in npm package (src/watchdog/fleet-backup/) — preferred
|
|
199
|
+
# 2. Standalone git clone (/opt/nole-fleet-backup) — fallback
|
|
200
|
+
FLEET_BUNDLED_DIR="${SOURCE_DIR}/fleet-backup"
|
|
201
|
+
|
|
202
|
+
if [ "$DRY_RUN" = true ]; then
|
|
203
|
+
echo " [DRY RUN] Would set up fleet backup at ${FLEET_BACKUP_DIR}"
|
|
204
|
+
elif [ -d "$FLEET_BUNDLED_DIR" ] && [ -f "$FLEET_BUNDLED_DIR/fleet-backup.sh" ]; then
|
|
205
|
+
echo " Source: bundled in mighty-mark npm package"
|
|
206
|
+
mkdir -p "${FLEET_BACKUP_DIR}/backups"
|
|
207
|
+
mkdir -p "${FLEET_BACKUP_DIR}/logs"
|
|
208
|
+
mkdir -p "${FLEET_BACKUP_DIR}/test"
|
|
209
|
+
cp "$FLEET_BUNDLED_DIR/fleet-backup.sh" "${FLEET_BACKUP_DIR}/"
|
|
210
|
+
cp "$FLEET_BUNDLED_DIR/fleet-restore.sh" "${FLEET_BACKUP_DIR}/"
|
|
211
|
+
cp "$FLEET_BUNDLED_DIR/config.sh" "${FLEET_BACKUP_DIR}/"
|
|
212
|
+
# Preserve config.local.sh if it already exists on disk (user customizations)
|
|
213
|
+
if [ ! -f "${FLEET_BACKUP_DIR}/config.local.sh" ] && [ -f "$FLEET_BUNDLED_DIR/config.local.sh" ]; then
|
|
214
|
+
cp "$FLEET_BUNDLED_DIR/config.local.sh" "${FLEET_BACKUP_DIR}/"
|
|
215
|
+
fi
|
|
216
|
+
# Install test scripts for daily verification
|
|
217
|
+
if [ -d "$FLEET_BUNDLED_DIR/test" ]; then
|
|
218
|
+
cp "$FLEET_BUNDLED_DIR/test/test-backup.sh" "${FLEET_BACKUP_DIR}/test/" 2>/dev/null || true
|
|
219
|
+
cp "$FLEET_BUNDLED_DIR/test/test-restore.sh" "${FLEET_BACKUP_DIR}/test/" 2>/dev/null || true
|
|
220
|
+
chmod +x "${FLEET_BACKUP_DIR}/test/"*.sh 2>/dev/null || true
|
|
221
|
+
echo " Tests: ${FLEET_BACKUP_DIR}/test/"
|
|
222
|
+
fi
|
|
223
|
+
chmod +x "${FLEET_BACKUP_DIR}/fleet-backup.sh"
|
|
224
|
+
chmod +x "${FLEET_BACKUP_DIR}/fleet-restore.sh"
|
|
225
|
+
echo " Installed: fleet-backup.sh, fleet-restore.sh, config.sh"
|
|
226
|
+
echo " Backups: ${FLEET_BACKUP_DIR}/backups/"
|
|
227
|
+
echo " Logs: ${FLEET_BACKUP_DIR}/logs/"
|
|
228
|
+
|
|
229
|
+
# Install fleet-backup logrotate
|
|
230
|
+
cat > /etc/logrotate.d/fleet-backup <<FBLREOF
|
|
231
|
+
${FLEET_BACKUP_DIR}/logs/*.log {
|
|
232
|
+
weekly
|
|
233
|
+
rotate 8
|
|
234
|
+
compress
|
|
235
|
+
delaycompress
|
|
236
|
+
missingok
|
|
237
|
+
notifempty
|
|
238
|
+
create 644 root root
|
|
239
|
+
}
|
|
240
|
+
FBLREOF
|
|
241
|
+
chmod 644 /etc/logrotate.d/fleet-backup
|
|
242
|
+
echo " Logrotate: /etc/logrotate.d/fleet-backup"
|
|
243
|
+
elif [ -d "$FLEET_REPO_DIR" ] && [ -f "$FLEET_REPO_DIR/fleet-backup.sh" ]; then
|
|
244
|
+
echo " Source: standalone git clone at ${FLEET_REPO_DIR}"
|
|
245
|
+
mkdir -p "${FLEET_BACKUP_DIR}/backups"
|
|
246
|
+
mkdir -p "${FLEET_BACKUP_DIR}/logs"
|
|
247
|
+
mkdir -p "${FLEET_BACKUP_DIR}/test"
|
|
248
|
+
cp "$FLEET_REPO_DIR/fleet-backup.sh" "${FLEET_BACKUP_DIR}/"
|
|
249
|
+
cp "$FLEET_REPO_DIR/fleet-restore.sh" "${FLEET_BACKUP_DIR}/"
|
|
250
|
+
cp "$FLEET_REPO_DIR/config.sh" "${FLEET_BACKUP_DIR}/"
|
|
251
|
+
if [ -f "$FLEET_REPO_DIR/config.local.sh" ]; then
|
|
252
|
+
cp "$FLEET_REPO_DIR/config.local.sh" "${FLEET_BACKUP_DIR}/"
|
|
253
|
+
fi
|
|
254
|
+
if [ -d "$FLEET_REPO_DIR/test" ]; then
|
|
255
|
+
cp "$FLEET_REPO_DIR/test/"*.sh "${FLEET_BACKUP_DIR}/test/" 2>/dev/null || true
|
|
256
|
+
chmod +x "${FLEET_BACKUP_DIR}/test/"*.sh 2>/dev/null || true
|
|
257
|
+
echo " Tests: ${FLEET_BACKUP_DIR}/test/"
|
|
258
|
+
fi
|
|
259
|
+
chmod +x "${FLEET_BACKUP_DIR}/fleet-backup.sh"
|
|
260
|
+
chmod +x "${FLEET_BACKUP_DIR}/fleet-restore.sh"
|
|
261
|
+
echo " Installed: fleet-backup.sh, fleet-restore.sh, config.sh"
|
|
262
|
+
echo " Backups: ${FLEET_BACKUP_DIR}/backups/"
|
|
263
|
+
echo " Logs: ${FLEET_BACKUP_DIR}/logs/"
|
|
264
|
+
|
|
265
|
+
cat > /etc/logrotate.d/fleet-backup <<FBLREOF
|
|
266
|
+
${FLEET_BACKUP_DIR}/logs/*.log {
|
|
267
|
+
weekly
|
|
268
|
+
rotate 8
|
|
269
|
+
compress
|
|
270
|
+
delaycompress
|
|
271
|
+
missingok
|
|
272
|
+
notifempty
|
|
273
|
+
create 644 root root
|
|
274
|
+
}
|
|
275
|
+
FBLREOF
|
|
276
|
+
chmod 644 /etc/logrotate.d/fleet-backup
|
|
277
|
+
echo " Logrotate: /etc/logrotate.d/fleet-backup"
|
|
278
|
+
else
|
|
279
|
+
echo " Skipped — fleet backup scripts not found"
|
|
280
|
+
echo " Expected in npm package at: ${FLEET_BUNDLED_DIR}/"
|
|
281
|
+
echo " Or standalone repo at: ${FLEET_REPO_DIR}/"
|
|
282
|
+
fi
|
|
283
|
+
|
|
284
|
+
# Remove standalone fleet-backup cron — now orchestrated by morning-check.sh via TypeScript engine
|
|
285
|
+
if [ -f /etc/cron.d/fleet-backup ]; then
|
|
286
|
+
rm -f /etc/cron.d/fleet-backup
|
|
287
|
+
echo " Removed: /etc/cron.d/fleet-backup (now orchestrated by Mighty Mark)"
|
|
288
|
+
fi
|
|
289
|
+
|
|
290
|
+
# ── Step 7: Initialize state ──
|
|
291
|
+
echo "[7/8] Initializing state files..."
|
|
187
292
|
|
|
188
293
|
if [ "$DRY_RUN" = true ]; then
|
|
189
294
|
echo " [DRY RUN] Would initialize state files"
|
|
@@ -196,8 +301,8 @@ else
|
|
|
196
301
|
fi
|
|
197
302
|
fi
|
|
198
303
|
|
|
199
|
-
# ── Step
|
|
200
|
-
echo "[
|
|
304
|
+
# ── Step 8: Test Telegram connectivity ──
|
|
305
|
+
echo "[8/8] Testing Telegram connectivity..."
|
|
201
306
|
|
|
202
307
|
if [ "$DRY_RUN" = true ]; then
|
|
203
308
|
echo " [DRY RUN] Would test Telegram API"
|
|
@@ -240,5 +345,8 @@ if [ -z "${TELEGRAM_TOKEN}" ]; then
|
|
|
240
345
|
echo ""
|
|
241
346
|
fi
|
|
242
347
|
|
|
348
|
+
echo " Fleet backup: ${FLEET_BACKUP_DIR}/fleet-backup.sh"
|
|
349
|
+
echo " Restore: ${FLEET_BACKUP_DIR}/fleet-restore.sh --help"
|
|
350
|
+
echo ""
|
|
243
351
|
echo " Manual test: bash ${MARK_HOME}/watchdog.sh"
|
|
244
352
|
echo " View logs: tail -f ${MARK_HOME}/logs/watchdog.log"
|
|
@@ -21,6 +21,12 @@ mkdir -p "${MARK_LOG_DIR}" "${MARK_STATE_DIR}"
|
|
|
21
21
|
|
|
22
22
|
log_morning "Morning check starting"
|
|
23
23
|
|
|
24
|
+
# Fleet backup is now orchestrated by the TypeScript health engine
|
|
25
|
+
# (backup-orchestrator.ts). It runs backup + tests + manifest validation
|
|
26
|
+
# as part of the data integrity checks. No need to trigger it separately.
|
|
27
|
+
# If the TypeScript engine fails and we fall back to bash, the backup
|
|
28
|
+
# won't run — but it will catch up on the next successful morning check.
|
|
29
|
+
|
|
24
30
|
# ── Try Node.js health check first ──
|
|
25
31
|
if command -v npx &> /dev/null; then
|
|
26
32
|
log_morning "Running TypeScript health engine via npx"
|