cursor-guard 2.1.1 → 4.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +69 -11
- package/README.zh-CN.md +351 -293
- package/ROADMAP.md +1040 -0
- package/SKILL.md +631 -557
- package/package.json +14 -5
- package/references/config-reference.md +215 -175
- package/references/config-reference.zh-CN.md +215 -175
- package/references/cursor-guard.example.json +6 -6
- package/references/cursor-guard.schema.json +30 -0
- package/references/lib/auto-backup.js +315 -530
- package/references/lib/core/anomaly.js +217 -0
- package/references/lib/core/backups.js +357 -0
- package/references/lib/core/core.test.js +1459 -0
- package/references/lib/core/dashboard.js +208 -0
- package/references/lib/core/doctor-fix.js +237 -0
- package/references/lib/core/doctor.js +248 -0
- package/references/lib/core/restore.js +360 -0
- package/references/lib/core/snapshot.js +198 -0
- package/references/lib/core/status.js +163 -0
- package/references/lib/guard-doctor.js +46 -238
- package/references/lib/utils.js +438 -371
- package/references/mcp/mcp.test.js +374 -0
- package/references/mcp/server.js +252 -0
- package/references/quickstart.zh-CN.md +364 -0
|
@@ -0,0 +1,217 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const fs = require('fs');
|
|
4
|
+
const path = require('path');
|
|
5
|
+
const { isGitRepo, gitDir: getGitDir } = require('../utils');
|
|
6
|
+
|
|
7
|
+
// ── Alert file paths ────────────────────────────────────────────
|
|
8
|
+
|
|
9
|
+
/**
 * Resolve the on-disk location of the alert file for a project.
 * Inside a git repository the file lives in the git directory (keeps it
 * out of the working tree); otherwise it falls back to the shadow-backup
 * directory under the project root.
 *
 * @param {string} projectDir
 * @returns {string} Path to cursor-guard-alert.json
 */
function alertFilePath(projectDir) {
  const ALERT_BASENAME = 'cursor-guard-alert.json';
  if (isGitRepo(projectDir)) {
    const resolvedGitDir = getGitDir(projectDir);
    if (resolvedGitDir) {
      return path.join(resolvedGitDir, ALERT_BASENAME);
    }
  }
  return path.join(projectDir, '.cursor-guard-backup', ALERT_BASENAME);
}
|
|
16
|
+
|
|
17
|
+
// ── In-process change tracker ───────────────────────────────────
|
|
18
|
+
|
|
19
|
+
/**
 * Create a change tracker for in-process monitoring (used by auto-backup).
 *
 * @param {object} cfg - Loaded config (needs proactive_alert, alert_thresholds)
 * @returns {{ events: Array, alerts: Array, config: object }}
 */
function createChangeTracker(cfg) {
  const thresholds = cfg.alert_thresholds;
  // proactive_alert defaults to on; only an explicit `false` disables it.
  const config = {
    enabled: cfg.proactive_alert !== false,
    filesPerWindow: thresholds.files_per_window,
    windowSeconds: thresholds.window_seconds,
    cooldownSeconds: thresholds.cooldown_seconds,
    maxEvents: 1000,
    maxAlerts: 100,
  };
  return { events: [], alerts: [], config };
}
|
|
39
|
+
|
|
40
|
+
/**
 * Record a change event in the tracker.
 * No-op when proactive alerting is disabled. The event list is capped at
 * config.maxEvents; oldest entries are dropped first.
 *
 * @param {object} tracker
 * @param {number} fileCount - Number of files changed
 * @param {string[]} [files] - Changed file paths (optional, for diagnostics)
 */
function recordChange(tracker, fileCount, files) {
  const { enabled, maxEvents } = tracker.config;
  if (!enabled) return;

  const event = {
    timestamp: Date.now(),
    fileCount,
    files: files || [],
  };
  tracker.events.push(event);

  // Bound memory: keep only the newest maxEvents entries.
  if (tracker.events.length - maxEvents > 0) {
    tracker.events = tracker.events.slice(-maxEvents);
  }
}
|
|
60
|
+
|
|
61
|
+
/**
 * Analyze the tracker for anomalous change velocity.
 * Sums file counts over the configured sliding window; when the total
 * reaches the threshold, a new alert is created unless the most recent
 * alert is still within its cooldown (then it is returned as suppressed).
 *
 * @param {object} tracker
 * @returns {{ anomaly: boolean, alert?: object, suppressed?: boolean }}
 */
function checkAnomaly(tracker) {
  const { enabled, windowSeconds, filesPerWindow, cooldownSeconds, maxAlerts } = tracker.config;
  if (!enabled || tracker.events.length === 0) {
    return { anomaly: false };
  }

  const now = Date.now();
  const windowStart = now - windowSeconds * 1000;
  let totalFiles = 0;
  for (const evt of tracker.events) {
    if (evt.timestamp >= windowStart) totalFiles += evt.fileCount;
  }

  if (totalFiles < filesPerWindow) {
    return { anomaly: false };
  }

  // Within cooldown of the previous alert: report but don't re-fire.
  const previous = tracker.alerts[tracker.alerts.length - 1];
  if (previous && now - previous.detectedAt < cooldownSeconds * 1000) {
    return { anomaly: true, alert: previous, suppressed: true };
  }

  const alert = {
    type: 'high_change_velocity',
    detectedAt: now,
    timestamp: new Date(now).toISOString(),
    fileCount: totalFiles,
    windowSeconds,
    threshold: filesPerWindow,
    // Alerts self-expire after 5 minutes.
    expiresAt: new Date(now + 5 * 60 * 1000).toISOString(),
    recommendation: 'High volume of file changes detected. Consider reviewing recent modifications and creating a manual snapshot.',
  };

  tracker.alerts.push(alert);
  if (tracker.alerts.length > maxAlerts) {
    tracker.alerts = tracker.alerts.slice(-maxAlerts);
  }

  return { anomaly: true, alert };
}
|
|
105
|
+
|
|
106
|
+
/**
 * Get the current alert status summary from the tracker.
 * An alert counts as active until its expiresAt passes.
 *
 * @param {object} tracker
 * @returns {{ enabled: boolean, hasActiveAlert: boolean, latestAlert?: object, alertCount: number, recentActivity: object }}
 */
function getAlertStatus(tracker) {
  if (!tracker.config.enabled) {
    return { enabled: false, hasActiveAlert: false, alertCount: 0, recentActivity: { windowSeconds: 0, fileCount: 0 } };
  }

  const now = Date.now();
  const { windowSeconds } = tracker.config;
  const cutoff = now - windowSeconds * 1000;

  let fileCount = 0;
  for (const evt of tracker.events) {
    if (evt.timestamp >= cutoff) fileCount += evt.fileCount;
  }

  const newest = tracker.alerts[tracker.alerts.length - 1];
  const active = !!(newest && now < new Date(newest.expiresAt).getTime());

  return {
    enabled: true,
    hasActiveAlert: active,
    latestAlert: active ? newest : undefined,
    alertCount: tracker.alerts.length,
    recentActivity: {
      windowSeconds,
      fileCount,
    },
  };
}
|
|
136
|
+
|
|
137
|
+
// ── Alert file persistence (bridge between auto-backup and MCP) ─
|
|
138
|
+
|
|
139
|
+
/**
 * Save an alert to a file so the MCP server can read it.
 * Best-effort: any filesystem failure is silently ignored, since alert
 * persistence must never break the backup path.
 *
 * @param {string} projectDir
 * @param {object} alert
 */
function saveAlert(projectDir, alert) {
  const target = alertFilePath(projectDir);
  try {
    fs.mkdirSync(path.dirname(target), { recursive: true });
    const payload = JSON.stringify(alert, null, 2);
    fs.writeFileSync(target, payload);
  } catch {
    /* best-effort */
  }
}
|
|
152
|
+
|
|
153
|
+
/**
 * Load and return any active (non-expired) alert from file.
 * Pure read — does not delete expired files (use clearExpiredAlert for that).
 * Missing file, unreadable JSON, or a record without expiresAt all yield null.
 *
 * @param {string} projectDir
 * @returns {object|null} - Alert object or null
 */
function loadActiveAlert(projectDir) {
  const target = alertFilePath(projectDir);
  try {
    if (!fs.existsSync(target)) return null;
    const alert = JSON.parse(fs.readFileSync(target, 'utf-8'));
    if (!alert.expiresAt) return null;
    const expiryMs = new Date(alert.expiresAt).getTime();
    if (Date.now() >= expiryMs) return null;
    return alert;
  } catch {
    return null;
  }
}
|
|
170
|
+
|
|
171
|
+
/**
 * Remove the alert file if it exists and is expired. No-op if still active.
 * A file that cannot be parsed as JSON is treated as expired and removed.
 * Safe to call periodically from the watcher loop.
 *
 * @param {string} projectDir
 * @returns {boolean} true if an expired file was removed
 */
function clearExpiredAlert(projectDir) {
  const target = alertFilePath(projectDir);
  try {
    if (!fs.existsSync(target)) return false;

    let parsed;
    try {
      parsed = JSON.parse(fs.readFileSync(target, 'utf-8'));
    } catch {
      // Corrupt alert file — discard it rather than keep re-reading it.
      try { fs.unlinkSync(target); } catch { /* ignore */ }
      return true;
    }

    const expired = parsed.expiresAt && Date.now() >= new Date(parsed.expiresAt).getTime();
    if (!expired) return false;

    fs.unlinkSync(target);
    return true;
  } catch {
    return false;
  }
}
|
|
196
|
+
|
|
197
|
+
/**
 * Unconditionally remove the alert file (e.g. user acknowledged the alert).
 * Failures (including a missing file) are ignored.
 *
 * @param {string} projectDir
 */
function clearAlert(projectDir) {
  const target = alertFilePath(projectDir);
  try {
    fs.unlinkSync(target);
  } catch {
    /* ignore */
  }
}
|
|
206
|
+
|
|
207
|
+
// Public API: in-process change tracker primitives (create/record/check/status)
// plus the file-based alert persistence bridge shared with the MCP server.
module.exports = {
  createChangeTracker,
  recordChange,
  checkAnomaly,
  getAlertStatus,
  saveAlert,
  loadActiveAlert,
  clearExpiredAlert,
  clearAlert,
  alertFilePath,
};
|
|
@@ -0,0 +1,357 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const fs = require('fs');
|
|
4
|
+
const path = require('path');
|
|
5
|
+
const { execFileSync } = require('child_process');
|
|
6
|
+
const {
|
|
7
|
+
git, isGitRepo, gitDir: getGitDir, walkDir, diskFreeGB,
|
|
8
|
+
} = require('../utils');
|
|
9
|
+
|
|
10
|
+
// ── Helpers ──────────────────────────────────────────────────────
|
|
11
|
+
|
|
12
|
+
/**
 * Parse a shadow-copy directory name (YYYYMMDD_HHMMSS, optionally with a
 * _mmm millisecond suffix) into a Date.
 *
 * The reconstructed string carries no timezone suffix, so it is interpreted
 * in local time — consistent with how snapshot directories are named.
 *
 * @param {string} name - Directory name to parse
 * @returns {Date|null} Parsed Date, or null when the name does not match the
 *   pattern or encodes an impossible date/time (e.g. month 13 or hour 25).
 */
function parseShadowTimestamp(name) {
  const m = name.match(/^(\d{4})(\d{2})(\d{2})_(\d{2})(\d{2})(\d{2})(?:_(\d{3}))?$/);
  if (!m) return null;
  const ms = m[7] ? `.${m[7]}` : '';
  const d = new Date(`${m[1]}-${m[2]}-${m[3]}T${m[4]}:${m[5]}:${m[6]}${ms}`);
  // Names can match the regex shape but not the calendar; those used to
  // produce an Invalid Date (getTime() === NaN), which silently broke
  // time sorting and before-date filtering downstream. Reject them here.
  return Number.isNaN(d.getTime()) ? null : d;
}
|
|
18
|
+
|
|
19
|
+
/**
 * Parse a time-boundary expression into a Date.
 * Accepts anything Date.parse understands (e.g. ISO 8601 strings) as well
 * as relative phrases of the form "N <unit> ago" (case-insensitive).
 *
 * @param {string} before
 * @returns {Date|null} null for empty or unrecognized input
 */
function parseBeforeExpression(before) {
  if (!before) return null;

  // Absolute timestamps first (ISO strings, RFC 2822, etc.).
  const absoluteMs = Date.parse(before);
  if (!isNaN(absoluteMs)) return new Date(absoluteMs);

  // Relative phrase: "10 minutes ago", "2 Days ago", ...
  const relative = before.match(/^(\d+)\s*(second|minute|hour|day|week|month)s?\s*ago$/i);
  if (!relative) return null;

  const UNIT_MS = {
    second: 1000,
    minute: 60000,
    hour: 3600000,
    day: 86400000,
    week: 604800000,
    month: 2592000000, // 30-day approximation
  };
  const amount = parseInt(relative[1], 10);
  const unitMs = UNIT_MS[relative[2].toLowerCase()] || 0;
  return new Date(Date.now() - amount * unitMs);
}
|
|
32
|
+
|
|
33
|
+
/**
 * Best-effort conversion of a backup entry's timestamp to epoch milliseconds.
 * Tries standard Date parsing first, then the shadow-directory naming scheme
 * (with an optional "pre-restore-" prefix stripped). Unparseable values
 * collapse to 0 so they sort last in newest-first orderings.
 *
 * @param {{timestamp?: string}} entry
 * @returns {number} epoch milliseconds, or 0 when unknown
 */
function entryToMs(entry) {
  const ts = entry.timestamp;
  if (!ts) return 0;

  const parsed = Date.parse(ts);
  if (!isNaN(parsed)) return parsed;

  let dirName = ts;
  if (typeof ts === 'string' && ts.startsWith('pre-restore-')) {
    dirName = ts.slice('pre-restore-'.length);
  }
  const asDate = parseShadowTimestamp(dirName);
  return asDate ? asDate.getTime() : 0;
}
|
|
43
|
+
|
|
44
|
+
// ── List backups ────────────────────────────────────────────────
|
|
45
|
+
|
|
46
|
+
/**
 * List available backup/restore points from all sources.
 * Returns a globally time-sorted list (newest first), truncated to `limit`.
 *
 * Sources scanned: git auto-backup commits (refs/guard/auto-backup),
 * pre-restore snapshot refs (refs/guard/pre-restore/*), the agent snapshot
 * ref (refs/guard/snapshot), and shadow-copy directories under
 * .cursor-guard-backup/.
 *
 * @param {string} projectDir
 * @param {object} [opts]
 * @param {string} [opts.file] - Filter to commits touching this relative path
 * @param {string} [opts.before] - Time boundary (e.g. '10 minutes ago', ISO string)
 * @param {number} [opts.limit=20] - Max total results
 * @returns {{ sources: Array<{type: string, ref?: string, commitHash?: string, shortHash?: string, timestamp?: string, message?: string, path?: string}>, error?: string }}
 *   On an invalid opts.file, returns { sources: [], error } instead of throwing.
 */
function listBackups(projectDir, opts = {}) {
  // NOTE(review): `opts.limit || 20` treats an explicit limit of 0 as "use
  // the default" — presumably intentional, since 0 results is never useful.
  const limit = opts.limit || 20;
  const sources = [];

  // Reject path-traversal / absolute paths before handing opts.file to git
  // or joining it onto backup directories below.
  if (opts.file) {
    const normalized = path.normalize(opts.file).replace(/\\/g, '/');
    if (path.isAbsolute(normalized) || normalized.startsWith('..')) {
      return { sources: [], error: 'file path must be relative and within project directory' };
    }
  }

  const repo = isGitRepo(projectDir);
  const beforeDate = parseBeforeExpression(opts.before);

  // Git sources
  if (repo) {
    // Auto-backup commits (git --before handles native filtering)
    const autoRef = 'refs/guard/auto-backup';
    const autoExists = git(['rev-parse', '--verify', autoRef], { cwd: projectDir, allowFail: true });
    if (autoExists) {
      // %H = full hash, %aI = strict-ISO author date, %s = subject.
      const logArgs = ['log', autoRef, `--format=%H %aI %s`, `-${limit}`, '--grep=^guard:'];
      if (opts.before) logArgs.push(`--before=${opts.before}`);
      if (opts.file) logArgs.push('--', opts.file);
      const out = git(logArgs, { cwd: projectDir, allowFail: true });
      if (out) {
        for (const line of out.split('\n').filter(Boolean)) {
          // Split on the first two spaces only; the subject may contain spaces.
          const firstSpace = line.indexOf(' ');
          const secondSpace = line.indexOf(' ', firstSpace + 1);
          const hash = line.substring(0, firstSpace);
          const timestamp = line.substring(firstSpace + 1, secondSpace);
          const message = line.substring(secondSpace + 1);
          sources.push({
            type: 'git-auto-backup',
            ref: autoRef,
            commitHash: hash,
            shortHash: hash.substring(0, 7),
            timestamp,
            message,
          });
        }
      }
    }

    // Pre-restore snapshots
    const preRestoreRefs = git(
      ['for-each-ref', 'refs/guard/pre-restore/', '--format=%(refname) %(objectname) %(*objectname) %(creatordate:iso-strict)', '--sort=-creatordate'],
      { cwd: projectDir, allowFail: true }
    );
    if (preRestoreRefs) {
      for (const line of preRestoreRefs.split('\n').filter(Boolean)) {
        const parts = line.split(' ');
        const ref = parts[0];
        const hash = parts[1];
        // %(*objectname) is empty for non-tag refs, leaving an empty field at
        // parts[2]; the creator date then lands at parts[3]. Fall back to
        // parts[2] for the annotated-tag layout.
        const timestamp = parts[3] || parts[2];
        // for-each-ref has no --before, so filter in-process here.
        if (beforeDate && timestamp) {
          const ms = Date.parse(timestamp);
          if (!isNaN(ms) && ms > beforeDate.getTime()) continue;
        }
        sources.push({
          type: 'git-pre-restore',
          ref,
          commitHash: hash,
          shortHash: hash.substring(0, 7),
          timestamp,
        });
      }
    }

    // Agent snapshot ref
    const snapshotHash = git(['rev-parse', '--verify', 'refs/guard/snapshot'], { cwd: projectDir, allowFail: true });
    if (snapshotHash) {
      const ts = git(['log', '-1', '--format=%aI', 'refs/guard/snapshot'], { cwd: projectDir, allowFail: true });
      const include = !beforeDate || (ts && Date.parse(ts) <= beforeDate.getTime());
      if (include) {
        sources.push({
          type: 'git-snapshot',
          ref: 'refs/guard/snapshot',
          commitHash: snapshotHash,
          shortHash: snapshotHash.substring(0, 7),
          timestamp: ts || null,
        });
      }
    }
  }

  // Shadow copy directories
  const backupDir = path.join(projectDir, '.cursor-guard-backup');
  if (fs.existsSync(backupDir)) {
    try {
      // Timestamp-named directories sort lexicographically == chronologically,
      // so plain sort + reverse yields newest first.
      const dirs = fs.readdirSync(backupDir, { withFileTypes: true })
        .filter(d => d.isDirectory())
        .map(d => d.name)
        .sort()
        .reverse();

      for (const name of dirs) {
        const isPreRestore = name.startsWith('pre-restore-');
        const isTimestamp = /^\d{8}_\d{6}(_\d{3})?$/.test(name);
        if (!isTimestamp && !isPreRestore) continue;

        if (beforeDate) {
          const tsName = isPreRestore ? name.slice('pre-restore-'.length) : name;
          const snapDate = parseShadowTimestamp(tsName);
          // Unparseable names are kept rather than dropped.
          if (snapDate && snapDate.getTime() > beforeDate.getTime()) continue;
        }

        const dirPath = path.join(backupDir, name);

        // File filter for shadows: include only snapshots containing the file.
        if (opts.file && !fs.existsSync(path.join(dirPath, opts.file))) continue;

        sources.push({
          type: isPreRestore ? 'shadow-pre-restore' : 'shadow',
          timestamp: name,
          path: dirPath,
        });
      }
    } catch { /* ignore */ }
  }

  // Unified time sort (newest first) across all sources, then truncate
  sources.sort((a, b) => entryToMs(b) - entryToMs(a));

  return { sources: sources.slice(0, limit) };
}
|
|
181
|
+
|
|
182
|
+
// ── Shadow retention ────────────────────────────────────────────
|
|
183
|
+
|
|
184
|
+
/**
 * Clean old shadow copy snapshots based on retention config.
 * Supported modes: 'days' (drop snapshots older than N days), 'count'
 * (keep at most N newest), 'size' (drop oldest until under max_size_mb).
 * Always reports free disk space for the backup volume when available.
 *
 * @param {string} backupDir - Path to .cursor-guard-backup/
 * @param {object} cfg - Loaded config (reads cfg.retention)
 * @returns {{ removed: number, mode: string, diskFreeGB?: number, diskWarning?: string }}
 */
function cleanShadowRetention(backupDir, cfg) {
  const { mode, days, max_count, max_size_mb } = cfg.retention;

  // Total bytes of all files under `root`; unreadable entries are skipped.
  const bytesUnder = (root) => {
    let total = 0;
    try {
      for (const f of walkDir(root, root)) {
        try { total += fs.statSync(f.full).size; } catch { /* skip */ }
      }
    } catch { /* ignore */ }
    return total;
  };

  let snapshotNames;
  try {
    snapshotNames = fs.readdirSync(backupDir, { withFileTypes: true })
      .filter(d => d.isDirectory() && /^\d{8}_\d{6}(_\d{3})?$/.test(d.name))
      .map(d => d.name)
      .sort()
      .reverse(); // lexicographic == chronological; newest first
  } catch {
    return { removed: 0, mode };
  }
  if (!snapshotNames || snapshotNames.length === 0) return { removed: 0, mode };

  let removed = 0;

  if (mode === 'days') {
    const cutoff = Date.now() - days * 86400000;
    for (const name of snapshotNames) {
      const when = parseShadowTimestamp(name);
      if (when && when.getTime() < cutoff) {
        fs.rmSync(path.join(backupDir, name), { recursive: true, force: true });
        removed++;
      }
    }
  } else if (mode === 'count') {
    // Newest-first order means everything past index max_count is excess.
    for (const name of snapshotNames.slice(max_count)) {
      fs.rmSync(path.join(backupDir, name), { recursive: true, force: true });
      removed++;
    }
  } else if (mode === 'size') {
    // Size accounting covers the entire backup dir, but only timestamped
    // snapshot directories are eligible for removal, oldest first.
    let totalBytes = bytesUnder(backupDir);
    for (let i = snapshotNames.length - 1; i >= 0; i--) {
      if (totalBytes / (1024 * 1024) <= max_size_mb) break;
      const dirPath = path.join(backupDir, snapshotNames[i]);
      const dirSize = bytesUnder(dirPath);
      fs.rmSync(dirPath, { recursive: true, force: true });
      totalBytes -= dirSize;
      removed++;
    }
  }

  const result = { removed, mode };

  const freeGB = diskFreeGB(backupDir);
  if (freeGB !== null) {
    result.diskFreeGB = parseFloat(freeGB.toFixed(1));
    if (freeGB < 1) result.diskWarning = 'critically low';
    else if (freeGB < 5) result.diskWarning = 'low';
  }

  return result;
}
|
|
258
|
+
|
|
259
|
+
// ── Git retention ───────────────────────────────────────────────
|
|
260
|
+
|
|
261
|
+
/**
 * Clean old git auto-backup commits by rebuilding the branch as an orphan chain.
 *
 * Keeps the newest N guard commits (per mode) and rewrites the ref as a fresh
 * parent chain over their trees, so pruned commits become unreachable and can
 * be garbage-collected. Original author/committer dates are preserved via
 * GIT_AUTHOR_DATE / GIT_COMMITTER_DATE when re-creating commits.
 *
 * @param {string} branchRef - e.g. 'refs/guard/auto-backup'
 * @param {string} gitDirPath - NOTE(review): currently unused in this body;
 *   kept for signature compatibility — confirm against callers.
 * @param {object} cfg - Loaded config (reads cfg.git_retention)
 * @param {string} cwd - Project directory
 * @returns {{ kept: number, pruned: number, mode: string, rebuilt: boolean, skipped?: boolean, reason?: string }}
 */
function cleanGitRetention(branchRef, gitDirPath, cfg, cwd) {
  const { mode, days, max_count } = cfg.git_retention;
  if (!cfg.git_retention.enabled) {
    return { kept: 0, pruned: 0, mode, rebuilt: false, skipped: true, reason: 'retention disabled' };
  }

  // %H hash, %aI author date, %cI committer date, %s subject — newest first.
  const out = git(['log', branchRef, '--format=%H %aI %cI %s'], { cwd, allowFail: true });
  if (!out) {
    return { kept: 0, pruned: 0, mode, rebuilt: false, skipped: true, reason: 'no commits on ref' };
  }

  const lines = out.split('\n').filter(Boolean);
  const guardCommits = [];
  for (const line of lines) {
    // Manual split on the first three spaces: the subject may contain spaces.
    const firstSpace = line.indexOf(' ');
    const secondSpace = line.indexOf(' ', firstSpace + 1);
    const thirdSpace = line.indexOf(' ', secondSpace + 1);
    const hash = line.substring(0, firstSpace);
    const authorDate = line.substring(firstSpace + 1, secondSpace);
    const committerDate = line.substring(secondSpace + 1, thirdSpace);
    const subject = line.substring(thirdSpace + 1);
    if (subject.startsWith('guard: auto-backup') || subject.startsWith('guard: snapshot')) {
      guardCommits.push({ hash, authorDate, committerDate, subject });
    }
    // Non-guard commits are silently skipped; continue scanning older history
  }

  const total = guardCommits.length;
  if (total === 0) {
    return { kept: 0, pruned: 0, mode, rebuilt: false, skipped: true, reason: 'no guard commits found' };
  }

  // Decide how many of the newest guard commits survive.
  let keepCount = total;
  if (mode === 'count') {
    keepCount = Math.min(total, max_count);
  } else if (mode === 'days') {
    const cutoff = Date.now() - days * 86400000;
    keepCount = 0;
    // guardCommits is newest-first; stop at the first commit past the cutoff.
    for (const c of guardCommits) {
      if (new Date(c.authorDate).getTime() >= cutoff) keepCount++;
      else break;
    }
    // Safety floor: never prune below the 10 most recent backups in days mode.
    keepCount = Math.max(keepCount, 10);
  }

  if (keepCount >= total) {
    return { kept: total, pruned: 0, mode, rebuilt: false };
  }

  // Oldest-first order for chain rebuilding (root commit first).
  const toKeep = guardCommits.slice(0, keepCount).reverse();

  // Re-create a commit over a tree while pinning its original dates.
  function commitTreeWithDate(args, commit) {
    const env = {
      ...process.env,
      GIT_AUTHOR_DATE: commit.authorDate,
      GIT_COMMITTER_DATE: commit.committerDate,
    };
    try {
      return execFileSync('git', args, { cwd, env, stdio: 'pipe', encoding: 'utf-8' }).trim() || null;
    } catch { return null; }
  }

  // Root of the new chain: same tree as the oldest kept commit, no parent.
  const rootTree = git(['rev-parse', `${toKeep[0].hash}^{tree}`], { cwd, allowFail: true });
  if (!rootTree) {
    return { kept: total, pruned: 0, mode, rebuilt: false, reason: 'could not resolve root tree' };
  }
  let prevHash = commitTreeWithDate(['commit-tree', rootTree, '-m', toKeep[0].subject], toKeep[0]);
  if (!prevHash) {
    return { kept: total, pruned: 0, mode, rebuilt: false, reason: 'commit-tree failed for root' };
  }

  // Chain the remaining kept commits onto the new root, newest last.
  // Any failure aborts WITHOUT touching the ref, leaving history intact.
  for (let i = 1; i < toKeep.length; i++) {
    const tree = git(['rev-parse', `${toKeep[i].hash}^{tree}`], { cwd, allowFail: true });
    if (!tree) {
      return { kept: total, pruned: 0, mode, rebuilt: false, reason: `could not resolve tree for commit ${i}` };
    }
    prevHash = commitTreeWithDate(['commit-tree', tree, '-p', prevHash, '-m', toKeep[i].subject], toKeep[i]);
    if (!prevHash) {
      return { kept: total, pruned: 0, mode, rebuilt: false, reason: `commit-tree failed at index ${i}` };
    }
  }

  // Atomically repoint the ref at the rebuilt chain.
  git(['update-ref', branchRef, prevHash], { cwd, allowFail: true });

  return { kept: keepCount, pruned: total - keepCount, mode, rebuilt: true };
}
|
|
356
|
+
|
|
357
|
+
// Public API: backup listing plus shadow/git retention cleanup.
// parseShadowTimestamp is exported for reuse by restore/snapshot modules.
module.exports = { listBackups, cleanShadowRetention, cleanGitRetention, parseShadowTimestamp };
|