smart-claude-memory-mcp 2.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-plugin/plugin.json +38 -0
- package/CHANGELOG.md +52 -0
- package/LICENSE +21 -0
- package/README.md +790 -0
- package/dist/chunker.js +33 -0
- package/dist/chunker.js.map +1 -0
- package/dist/config.js +23 -0
- package/dist/config.js.map +1 -0
- package/dist/curriculum/daemon.js +190 -0
- package/dist/curriculum/daemon.js.map +1 -0
- package/dist/curriculum/scanner.js +237 -0
- package/dist/curriculum/scanner.js.map +1 -0
- package/dist/index.js +429 -0
- package/dist/index.js.map +1 -0
- package/dist/lib/migrations.js +128 -0
- package/dist/lib/migrations.js.map +1 -0
- package/dist/ollama.js +59 -0
- package/dist/ollama.js.map +1 -0
- package/dist/project-detect.js +102 -0
- package/dist/project-detect.js.map +1 -0
- package/dist/project.js +26 -0
- package/dist/project.js.map +1 -0
- package/dist/sleep/daemon.js +215 -0
- package/dist/sleep/daemon.js.map +1 -0
- package/dist/sleep/miner.js +285 -0
- package/dist/sleep/miner.js.map +1 -0
- package/dist/supabase.js +405 -0
- package/dist/supabase.js.map +1 -0
- package/dist/telemetry/emit.js +19 -0
- package/dist/telemetry/emit.js.map +1 -0
- package/dist/telemetry/pruner.js +141 -0
- package/dist/telemetry/pruner.js.map +1 -0
- package/dist/telemetry/types.js +2 -0
- package/dist/telemetry/types.js.map +1 -0
- package/dist/tools/backlog.js +599 -0
- package/dist/tools/backlog.js.map +1 -0
- package/dist/tools/batch-freeze-patterns.js +243 -0
- package/dist/tools/batch-freeze-patterns.js.map +1 -0
- package/dist/tools/bloat-audit.js +101 -0
- package/dist/tools/bloat-audit.js.map +1 -0
- package/dist/tools/checkpoint.js +259 -0
- package/dist/tools/checkpoint.js.map +1 -0
- package/dist/tools/compact.js +60 -0
- package/dist/tools/compact.js.map +1 -0
- package/dist/tools/conflict.js +102 -0
- package/dist/tools/conflict.js.map +1 -0
- package/dist/tools/curriculum.js +225 -0
- package/dist/tools/curriculum.js.map +1 -0
- package/dist/tools/frozen-cache.js +106 -0
- package/dist/tools/frozen-cache.js.map +1 -0
- package/dist/tools/health.js +368 -0
- package/dist/tools/health.js.map +1 -0
- package/dist/tools/hygiene.js +309 -0
- package/dist/tools/hygiene.js.map +1 -0
- package/dist/tools/image.js +107 -0
- package/dist/tools/image.js.map +1 -0
- package/dist/tools/list-global-patterns.js +101 -0
- package/dist/tools/list-global-patterns.js.map +1 -0
- package/dist/tools/orchestrator.js +113 -0
- package/dist/tools/orchestrator.js.map +1 -0
- package/dist/tools/policy.js +90 -0
- package/dist/tools/policy.js.map +1 -0
- package/dist/tools/refactor.js +220 -0
- package/dist/tools/refactor.js.map +1 -0
- package/dist/tools/save.js +42 -0
- package/dist/tools/save.js.map +1 -0
- package/dist/tools/search.js +189 -0
- package/dist/tools/search.js.map +1 -0
- package/dist/tools/setup.js +868 -0
- package/dist/tools/setup.js.map +1 -0
- package/dist/tools/shared-schemas.js +24 -0
- package/dist/tools/shared-schemas.js.map +1 -0
- package/dist/tools/skills.js +174 -0
- package/dist/tools/skills.js.map +1 -0
- package/dist/tools/sleep.js +239 -0
- package/dist/tools/sleep.js.map +1 -0
- package/dist/tools/sovereign-constitution.js +319 -0
- package/dist/tools/sovereign-constitution.js.map +1 -0
- package/dist/tools/summarize.js +55 -0
- package/dist/tools/summarize.js.map +1 -0
- package/dist/tools/sync.js +255 -0
- package/dist/tools/sync.js.map +1 -0
- package/dist/tools/system_dashboard.js +181 -0
- package/dist/tools/system_dashboard.js.map +1 -0
- package/dist/tools/update-rule.js +15 -0
- package/dist/tools/update-rule.js.map +1 -0
- package/dist/tools/verification.js +333 -0
- package/dist/tools/verification.js.map +1 -0
- package/dist/trajectory/daemon.js +270 -0
- package/dist/trajectory/daemon.js.map +1 -0
- package/dist/trajectory/stripper.js +124 -0
- package/dist/trajectory/stripper.js.map +1 -0
- package/dist/trajectory/summarizer.js +77 -0
- package/dist/trajectory/summarizer.js.map +1 -0
- package/dist/transactions/checkpoint.js +272 -0
- package/dist/transactions/checkpoint.js.map +1 -0
- package/dist/verification-gate.js +43 -0
- package/dist/verification-gate.js.map +1 -0
- package/dist/version.js +16 -0
- package/dist/version.js.map +1 -0
- package/hooks/README.md +54 -0
- package/hooks/md-policy.py +497 -0
- package/marketplace.json +13 -0
- package/package.json +66 -0
|
@@ -0,0 +1,868 @@
|
|
|
1
|
+
import { stat, readFile, mkdir, rename, copyFile, rm } from "node:fs/promises";
|
|
2
|
+
import { existsSync, readdirSync, readFileSync, } from "node:fs";
|
|
3
|
+
import { resolve, dirname, basename, relative, join, isAbsolute } from "node:path";
|
|
4
|
+
import { homedir } from "node:os";
|
|
5
|
+
import { fileURLToPath } from "node:url";
|
|
6
|
+
import { glob } from "glob";
|
|
7
|
+
import { Client } from "pg";
|
|
8
|
+
import { loadFrozenCache } from "./frozen-cache.js";
|
|
9
|
+
import { currentProjectId, slugify } from "../project.js";
|
|
10
|
+
import { GLOBAL_PROJECT_ID } from "./save.js";
|
|
11
|
+
import { applyPendingMigrations, listPendingMigrations, loadMigrationFiles, } from "../lib/migrations.js";
|
|
12
|
+
import { ensureSovereignConstitution, upgradeConstitutionBlock, } from "./sovereign-constitution.js";
|
|
13
|
+
import { auditBloat, } from "./bloat-audit.js";
|
|
14
|
+
// Package root = two levels above this file's directory (dist/tools/ → the
// installed package dir).
const packageRoot = resolve(dirname(fileURLToPath(import.meta.url)), "..", "..");
// Canonical forward-slashed path of the MCP server entry point; compared
// against Claude settings-file contents to detect registration.
const mcpEntryPoint = resolve(packageRoot, "dist", "index.js").replace(/\\/g, "/");
// Env vars that must be set; each missing key is reported as a `missing`
// check (which demotes overall readiness to `not_ready` in initProject).
const REQUIRED_ENV = [
    { key: "SUPABASE_URL", desc: "Supabase project URL" },
    { key: "SUPABASE_SECRET_KEY", desc: "Supabase service-role key" },
    { key: "OLLAMA_HOST", desc: "Ollama endpoint (default http://localhost:11434)" },
    { key: "OLLAMA_EMBED_MODEL", desc: "Embedding model name (default nomic-embed-text)" },
    { key: "MEMORY_ROOTS", desc: "Semicolon-separated folders to sync" },
];
// Optional-but-recommended env vars; absence yields a `warn` check only.
const RECOMMENDED_ENV = [
    { key: "SUPABASE_POOLER_URL", desc: "IPv4-reachable pooler URL (required for apply-schema)" },
    { key: "EMBED_DIM", desc: "Embedding vector dimension (defaults to 768)" },
];
|
|
27
|
+
/**
 * Report whether a filesystem entry exists at `p`.
 * Any stat failure (ENOENT, EACCES, …) is treated as "does not exist".
 */
async function fileExists(p) {
    let present = true;
    try {
        await stat(p);
    }
    catch {
        present = false;
    }
    return present;
}
|
|
36
|
+
/**
 * Locate the md-policy.py guard hook. Workspace-local locations are
 * preferred, then the user's ~/.claude, then the packaged copy.
 * Returns the first existing path, or null when none is found.
 */
async function findHookScript(workspace) {
    const searchOrder = [
        resolve(workspace, ".claude", "hooks", "md-policy.py"),
        resolve(workspace, "hooks", "md-policy.py"),
        resolve(homedir(), ".claude", "hooks", "md-policy.py"),
        resolve(packageRoot, "hooks", "md-policy.py"),
    ];
    for (const candidate of searchOrder) {
        if (await fileExists(candidate)) {
            return candidate;
        }
    }
    return null;
}
|
|
48
|
+
/**
 * Scan the known Claude configuration files for a registration of this
 * package's MCP entry point. Missing or unreadable files are skipped.
 *
 * @returns {{ registered: boolean, matches: string[] }} every config file
 *          whose content references `mcpEntryPoint`.
 */
async function settingsRegistration(workspace) {
    const configFiles = [
        resolve(workspace, ".mcp.json"),
        resolve(workspace, ".claude", "settings.json"),
        resolve(workspace, ".claude", "settings.local.json"),
        resolve(homedir(), ".claude.json"),
        resolve(homedir(), ".claude", "settings.json"),
    ];
    const matches = [];
    for (const configPath of configFiles) {
        let content;
        try {
            content = await readFile(configPath, "utf8");
        }
        catch {
            continue; // missing config file — skip silently
        }
        // mcpEntryPoint is already forward-slashed; normalize the file's
        // escaped (JSON "\\") and plain backslashes the same way so the
        // comparison is symmetric across platforms.
        const normalized = content.replace(/\\\\/g, "/").replace(/\\/g, "/");
        if (normalized.includes(mcpEntryPoint)) {
            matches.push(configPath);
        }
    }
    return { registered: matches.length > 0, matches };
}
|
|
72
|
+
/**
 * Canonicalize a rule-file path so cache `entry.source` values and
 * freshly-discovered rule file paths compare symmetrically.
 *
 * Rules:
 *   1. absolute paths become workspace-relative; relatives pass through,
 *   2. every backslash becomes a forward slash,
 *   3. a leading "./" is stripped,
 *   4. on Windows the result is lowercased for case-insensitive
 *      comparison; elsewhere case is preserved (case-sensitive FS).
 */
function normalizeSource(p, workspace) {
    const rel = isAbsolute(p) ? relative(workspace, p) : p;
    let normalized = rel.replace(/\\/g, "/");
    if (normalized.startsWith("./")) {
        normalized = normalized.slice(2);
    }
    return process.platform === "win32" ? normalized.toLowerCase() : normalized;
}
|
|
95
|
+
/**
 * Smart-scout for un-hydrated policy rule files.
 *
 * Scans `<workspace>/.claude/rules/*.md` (immediate children only) for files
 * containing a "## Frozen Patterns" header within their first 200 lines, then
 * filters out files whose normalized path already appears as a `source` in
 * the frozen cache's bucket for the current project. Returns either [] or a
 * single recommendation object directing the agent to run
 * `batch_freeze_patterns` (dry-run first) on the remaining candidates.
 *
 * Best-effort throughout: every filesystem or cache failure degrades to "no
 * suppression" or "no recommendations" rather than throwing.
 */
async function detectHydrateRecommendations(workspace) {
    // Step A — fast-path exit when the rules dir is missing.
    const rulesDir = join(workspace, ".claude", "rules");
    if (!existsSync(rulesDir))
        return [];
    // Step B — bounded scan of immediate-child .md files.
    // `encoding: "utf8"` pins the Dirent generic to `string`; the no-encoding
    // overload resolves to `Dirent<NonSharedBuffer>` under newer @types/node.
    let entries;
    try {
        entries = readdirSync(rulesDir, { withFileTypes: true, encoding: "utf8" });
    }
    catch {
        return [];
    }
    const sectionPositive = []; // absolute paths that have the header
    for (const entry of entries) {
        if (!entry.isFile())
            continue;
        if (!entry.name.toLowerCase().endsWith(".md"))
            continue;
        const abs = join(rulesDir, entry.name);
        let body;
        try {
            body = readFileSync(abs, "utf8");
        }
        catch {
            // Unreadable file — skip it rather than fail the whole scan.
            continue;
        }
        // Only the first 200 lines are inspected: bounds work on huge files.
        const head = body.split(/\r?\n/).slice(0, 200);
        const hit = head.some((line) => line.trimEnd() === "## Frozen Patterns");
        if (!hit)
            continue;
        sectionPositive.push(abs);
    }
    if (sectionPositive.length === 0)
        return [];
    // Step C — provenance suppression. Build a set of normalized cache
    // sources for the *current* project bucket and drop already-hydrated
    // candidates.
    const hydrated = new Set();
    try {
        const cache = await loadFrozenCache();
        const key = slugify(currentProjectId);
        const bucket = cache.projects[key] ?? [];
        for (const e of bucket) {
            // "legacy" and empty sources carry no provenance — ignore them.
            if (!e.source || e.source === "legacy")
                continue;
            hydrated.add(normalizeSource(e.source, workspace));
        }
    }
    catch {
        // Best-effort accessory: if we can't read the cache, fall through
        // and include every candidate. The user can re-run later.
    }
    const actionable = [];
    for (const abs of sectionPositive) {
        const norm = normalizeSource(abs, workspace);
        if (hydrated.has(norm))
            continue;
        actionable.push(norm);
    }
    if (actionable.length === 0)
        return [];
    // Determinism: sort alphabetically before picking the first call.
    actionable.sort((a, b) => a.localeCompare(b));
    return [
        {
            id: "hydrate_policies",
            tool: "batch_freeze_patterns",
            candidates: actionable.map((file) => ({ file, section_found: true })),
            suggested_first_call: {
                from_rule_file: actionable[0],
                dry_run: true,
            },
        },
    ];
}
|
|
173
|
+
// The three root-level docs the Architecture Guard keeps in lock-step.
const CORE3_NAMES = ["CLAUDE.md", "README.md", "ARCHITECTURE.md"];
// mtime drift heuristic: if one of the Core 3 was edited >30 days after the
// others, assume someone updated one without propagating to the rest.
const CORE3_MTIME_DRIFT_DAYS = 30;
/**
 * Read-only audit of the Core 3 docs at the workspace root.
 *
 * Returns `{ files, in_sync, reason, required_action, directive }`:
 *  - any file missing           → in_sync false, required_action "delegate_audit",
 *                                 directive tells the agent to STOP and delegate;
 *  - mtime spread > 30 days     → in_sync false, softer "audit recommended" directive;
 *  - otherwise                  → in_sync true, required_action "none".
 * Never throws: a failed stat simply marks that file as missing.
 */
async function auditCore3(workspace) {
    const files = [];
    for (const name of CORE3_NAMES) {
        const p = resolve(workspace, name);
        try {
            const s = await stat(p);
            files.push({
                name,
                exists: true,
                mtime_iso: s.mtime.toISOString(),
                size_bytes: s.size,
            });
        }
        catch {
            // stat failure is treated as "file absent".
            files.push({ name, exists: false, mtime_iso: null, size_bytes: 0 });
        }
    }
    const missing = files.filter((f) => !f.exists).map((f) => f.name);
    if (missing.length > 0) {
        return {
            files,
            in_sync: false,
            reason: `Missing Core 3 file(s): ${missing.join(", ")}`,
            required_action: "delegate_audit",
            directive: `STOP. The Architecture Guard requires CLAUDE.md, README.md, and ARCHITECTURE.md ` +
                `to all exist at the project root. Missing: ${missing.join(", ")}. ` +
                `Before any other work, call delegate_task with: ` +
                `"Audit and reconcile the Core 3 (CLAUDE.md, README.md, ARCHITECTURE.md). ` +
                `Read the existing files and the codebase, then propose content for the missing ` +
                `${missing.length === 1 ? "file" : "files"}: ${missing.join(", ")}. ` +
                `Return a 2-paragraph synthesis only — do not write the files yourself."`,
        };
    }
    // All three exist here, so every mtime_iso is a valid ISO string.
    const mtimes = files.map((f) => Date.parse(f.mtime_iso));
    const driftMs = Math.max(...mtimes) - Math.min(...mtimes);
    const driftDays = driftMs / (1000 * 60 * 60 * 24);
    if (driftDays > CORE3_MTIME_DRIFT_DAYS) {
        return {
            files,
            in_sync: false,
            reason: `Core 3 mtime spread = ${driftDays.toFixed(1)} days (>${CORE3_MTIME_DRIFT_DAYS}d threshold)`,
            required_action: "delegate_audit",
            directive: `Core 3 audit recommended. CLAUDE.md, README.md, and ARCHITECTURE.md have a ` +
                `${driftDays.toFixed(1)}-day mtime spread, which suggests one was updated without ` +
                `propagating the change to the others. Before any other work, call delegate_task with: ` +
                `"Audit the Core 3 (CLAUDE.md, README.md, ARCHITECTURE.md) for cross-file consistency. ` +
                `Flag any architectural claims, file paths, schema descriptions, or tool inventories ` +
                `that appear in one file but contradict or are absent from the others. Return a ` +
                `2-paragraph synthesis only."`,
        };
    }
    return {
        files,
        in_sync: true,
        reason: `All three present; mtime spread ${driftDays.toFixed(1)} days (≤${CORE3_MTIME_DRIFT_DAYS}d)`,
        required_action: "none",
        directive: "Core 3 in sync — proceed with normal work.",
    };
}
|
|
236
|
+
// Context-gathering playbook lines surfaced verbatim to the agent.
const CAPABILITIES_HINTS = [
    "On boot: search_memory({ query: 'Active Backlog' })",
    "Before non-trivial edits: search_memory({ query: '<topic>', metadata_filter: { type: 'PATTERN' } })",
    "After architectural choice: save_memory({ content, metadata: { type: 'DECISION' } })",
    "After bug fix: save_memory({ content, metadata: { type: 'ERROR', status: 'fixed' } })",
    "For universal patterns (MUST pass Sovereign Vetting + Cross-Project Test): save_memory({ content, metadata: { type: 'PATTERN', is_global: true, global_rationale: '<why this is a universal truth>' } })",
    "Browse GLOBAL: list_global_patterns({ metadata_filter: { type: 'PATTERN' }, limit: 10 })",
];
/**
 * Pure builder for the capabilities header returned by init_project.
 *
 * Kept free of Supabase, Ollama, and filesystem access so the shape
 * contract (protocol version, global_scope, taxonomy, hints, delegation
 * threshold) stays unit-testable in isolation. Used by runInitProject
 * (live boot path) and tests/capabilities.test.ts (shape contract).
 *
 * @param {string} projectIdSlug - slugified current project id
 */
export function buildCapabilities(projectIdSlug) {
    const globalScope = {
        available: true,
        project_id: GLOBAL_PROJECT_ID,
        browse_tool: "list_global_patterns",
        browse_args: ["metadata_filter", "limit", "offset", "include_content"],
    };
    return {
        protocol: "smart-claude-memory/v2.1.0",
        project_id: projectIdSlug,
        global_scope: globalScope,
        taxonomy: ["DECISION", "PATTERN", "ERROR", "LOG"],
        // fresh copy so callers can't mutate the module-level hint list
        context_gathering_hints: CAPABILITIES_HINTS.slice(),
        delegate_task_threshold: ">3 files OR >100 lines raw output",
    };
}
|
|
269
|
+
const REQUIRED_OLLAMA_MODELS = ["moondream", "nomic-embed-text"];
/**
 * Preflight: confirm the required Ollama models (`moondream`,
 * `nomic-embed-text`) are already pulled. Queries `${OLLAMA_HOST}/api/tags`
 * (default http://localhost:11434) and matches on the base model name with
 * any `:tag` suffix stripped.
 *
 * Outcomes:
 *   - all models present       → status "ok"
 *   - reachable, some missing  → status "partial" with an `ollama pull` hint
 *   - unreachable / HTTP error → status "not_ready"
 *
 * Exceptions never escape — init_project must not crash on this check.
 * A 5-second AbortController timeout bounds the fetch. No new dependencies.
 */
async function runOllamaModelsCheck() {
    const host = process.env.OLLAMA_HOST || "http://localhost:11434";
    const controller = new AbortController();
    const timeoutHandle = setTimeout(() => controller.abort(), 5000);
    try {
        const response = await fetch(`${host}/api/tags`, { signal: controller.signal });
        if (!response.ok) {
            return {
                name: "ollama_models",
                status: "not_ready",
                detail: `Ollama unreachable at ${host} (HTTP ${response.status})`,
            };
        }
        const payload = (await response.json());
        // Compare on base names only: "model:tag" → "model".
        const installed = new Set((payload.models ?? []).map((m) => m.name.split(":")[0]));
        const missing = REQUIRED_OLLAMA_MODELS.filter((model) => !installed.has(model));
        if (missing.length > 0) {
            return {
                name: "ollama_models",
                status: "partial",
                detail: `Missing Ollama models: ${missing.join(", ")}. Run: ollama pull ${missing.join(" ")}`,
            };
        }
        return {
            name: "ollama_models",
            status: "ok",
            detail: `required models present: ${REQUIRED_OLLAMA_MODELS.join(", ")}`,
        };
    }
    catch (err) {
        return {
            name: "ollama_models",
            status: "not_ready",
            detail: `Ollama unreachable at ${host}: ${err.message || String(err)}`,
        };
    }
    finally {
        clearTimeout(timeoutHandle);
    }
}
|
|
321
|
+
/**
 * BYO-Supabase bootstrap: connect a fresh pg.Client via SUPABASE_POOLER_URL
 * (falling back to SUPABASE_DB_URL) and idempotently apply any pending SQL
 * migrations.
 *
 * Every failure mode (missing connection string, unreachable DB, mid-apply
 * error) collapses to a single `{ status: "not_ready" }` check with
 * `block: null` — this function never throws, because the MCP server must
 * not crash on first-call DB issues. The client is always closed in
 * `finally`, with close errors swallowed.
 */
async function runMigrationsCheck() {
    // One place to stamp the check name on every outcome.
    const asCheck = (status, detail) => ({ name: "migrations", status, detail });
    const connectionString = process.env.SUPABASE_POOLER_URL || process.env.SUPABASE_DB_URL;
    if (!connectionString) {
        return {
            check: asCheck("not_ready", "SUPABASE_POOLER_URL (or SUPABASE_DB_URL) not set; cannot apply migrations"),
            block: null,
        };
    }
    // NOTE(review): rejectUnauthorized:false disables TLS cert validation —
    // presumably required for Supabase pooler endpoints; confirm upstream.
    const client = new Client({
        connectionString,
        ssl: { rejectUnauthorized: false },
    });
    const total = loadMigrationFiles().length;
    try {
        await client.connect();
        const pending = await listPendingMigrations(client);
        if (pending.length === 0) {
            return {
                check: asCheck("ok", "schema up to date (0 pending)"),
                block: { applied: 0, skipped: total, total },
            };
        }
        const result = await applyPendingMigrations(client);
        return {
            check: asCheck("ok", `applied ${result.applied} pending migration(s)`),
            block: { applied: result.applied, skipped: result.skipped, total: result.total },
        };
    }
    catch (err) {
        return {
            check: asCheck("not_ready", `migration apply failed: ${err.message}`),
            block: null,
        };
    }
    finally {
        try {
            await client.end();
        }
        catch {
            /* swallow — best-effort close */
        }
    }
}
|
|
388
|
+
/**
 * init_project orchestrator: run every readiness check, apply safe
 * auto-repairs, and return a single result envelope.
 *
 * Check sequence (each appended to `checks`):
 *   1–2. required / recommended env vars,
 *   3.  md-policy.py hook presence,
 *   4.  MCP registration in known Claude settings files,
 *   5.  compiled dist/index.js presence,
 *   6.  Sovereign Constitution binding (+6b deterministic version sync),
 *   7.  Core 3 audit (read-only; may emit a directive),
 *   8.  pending-migration auto-apply,
 *   9.  Ollama models preflight.
 *
 * `overall` = "not_ready" if any check is missing/not_ready, else "partial"
 * if any is warn/partial, else "ready". Post-check best-effort extras
 * (architecture doc, legacy sweep, hydrate recommendations, bloat audit)
 * never fail the call.
 *
 * @param {object} args - optional: workspace, arch (false skips doc
 *        generation), sweep_legacy ("dry" | "commit" | "off")
 * @returns result envelope; `recommendations` is present only when non-empty.
 */
export async function initProject(args = {}) {
    const ws = resolve(args.workspace ?? process.cwd());
    const checks = [];
    // 1. Required env vars
    for (const { key, desc } of REQUIRED_ENV) {
        const val = process.env[key];
        checks.push({
            name: `env:${key}`,
            status: val ? "ok" : "missing",
            detail: val ? `${desc} — set` : `${desc} — MISSING in .env`,
            fix: val ? undefined : `Add ${key}=... to ${resolve(packageRoot, ".env")} (see .env.example).`,
        });
    }
    // 2. Recommended env vars
    for (const { key, desc } of RECOMMENDED_ENV) {
        const val = process.env[key];
        checks.push({
            name: `env:${key}`,
            status: val ? "ok" : "warn",
            detail: val ? `${desc} — set` : `${desc} — not set (falls back to default)`,
            fix: val ? undefined : `Recommended: add ${key} to .env.`,
        });
    }
    // 3. md-policy.py hook presence
    const hook = await findHookScript(ws);
    checks.push({
        name: "hook:md-policy.py",
        status: hook ? "ok" : "warn",
        detail: hook
            ? `Hook script present at ${hook}`
            : "Hook script not located — Guardian rules (750-line, frozen features, Manual Test Gate) will be advisory only.",
        fix: hook
            ? undefined
            : `Copy ${resolve(packageRoot, "hooks", "md-policy.py")} into ${ws}/.claude/hooks/ and add a PreToolUse entry in .claude/settings.json (see hooks/README.md).`,
    });
    // 4. MCP server registration
    const reg = await settingsRegistration(ws);
    checks.push({
        name: "mcp:registration",
        status: reg.registered ? "ok" : "missing",
        detail: reg.registered
            ? `smart-claude-memory MCP server registered in: ${reg.matches.join(", ")}`
            : `smart-claude-memory MCP server is not registered in any known settings file. Expected the path ${mcpEntryPoint} to appear in one of them.`,
        fix: reg.registered
            ? undefined
            : `Add to ~/.claude.json under "mcpServers":\n "smart-claude-memory": { "type":"stdio", "command":"node", "args":["${mcpEntryPoint}"] }\nThen restart Claude Code.`,
    });
    // 5. Compiled dist present?
    const distOk = await fileExists(resolve(packageRoot, "dist", "index.js"));
    checks.push({
        name: "build:dist",
        status: distOk ? "ok" : "missing",
        detail: distOk ? `Compiled dist/ found` : `dist/index.js does not exist`,
        fix: distOk ? undefined : `Run: npm install && npm run build`,
    });
    // 6. Constitutional Enforcer — bind workspace to Sovereign Memory Protocol.
    // Runs BEFORE the Core 3 audit so a freshly-created CLAUDE.md is visible to
    // the audit step. Failures here demote `overall` to `partial` (not
    // `not_ready`) — the rest of the system can still function.
    const sovereignConstitution = await ensureSovereignConstitution(ws);
    if (sovereignConstitution.action === "error") {
        checks.push({
            name: "constitution:sovereign",
            status: "warn",
            detail: `Could not bind workspace to Sovereign Memory Protocol: ${sovereignConstitution.error}`,
        });
    }
    else {
        checks.push({
            name: "constitution:sovereign",
            status: "ok",
            detail: `${sovereignConstitution.action}: ${sovereignConstitution.path}`,
        });
    }
    // 6b. Deterministic constitution version sync. Probe in dry-run mode; auto-
    // apply ONLY when the existing block hash matches a previously-canonical
    // entry in KNOWN_CANONICAL_HASHES (no user customization). Drift with local
    // customizations surfaces as a directive — recommend, never overwrite
    // silently. Eliminates the LLM-edit hallucination path entirely.
    let constitutionUpgrade = null;
    if (sovereignConstitution.action !== "error") {
        constitutionUpgrade = await upgradeConstitutionBlock(ws, { dry_run: true });
        if (constitutionUpgrade.action === "synced" &&
            constitutionUpgrade.mode === "auto_safe") {
            // Safe to re-run for real: the probe proved the block is canonical.
            constitutionUpgrade = await upgradeConstitutionBlock(ws, { dry_run: false });
        }
        if (constitutionUpgrade.action === "synced") {
            checks.push({
                name: "constitution:upgrade",
                status: "ok",
                detail: `Auto-synced ${constitutionUpgrade.from_version} → ${constitutionUpgrade.to_version} (${constitutionUpgrade.mode}, dry_run=${constitutionUpgrade.dry_run})`,
            });
        }
        else if (constitutionUpgrade.action === "drift_detected") {
            checks.push({
                name: "constitution:upgrade",
                status: "warn",
                detail: `Drift ${constitutionUpgrade.from_version} → ${constitutionUpgrade.to_version}: ${constitutionUpgrade.reason}`,
                fix: `Run upgrade_constitution({ force: true }) to overwrite, or keep customizations and ignore.`,
            });
        }
        else if (constitutionUpgrade.action === "error") {
            checks.push({
                name: "constitution:upgrade",
                status: "warn",
                detail: `Upgrade probe failed: ${constitutionUpgrade.error}`,
            });
        }
    }
    // 7. Architecture Guard — Core 3 audit (CLAUDE.md, README.md, ARCHITECTURE.md).
    // The audit is read-only; the agent reacts to `core3.required_action` and to
    // the `directives` array on the result envelope.
    const core3 = await auditCore3(ws);
    const core3Status = core3.files.some((f) => !f.exists)
        ? "missing"
        : core3.in_sync
            ? "ok"
            : "warn";
    checks.push({
        name: "core3:audit",
        status: core3Status,
        detail: core3.reason,
        fix: core3.required_action === "delegate_audit" ? core3.directive : undefined,
    });
    // 8. Migrations — auto-apply pending SQL migrations on every init so a fresh
    // BYO-Supabase database bootstraps transparently on first call. All paths
    // (missing connection string, unreachable DB, mid-apply failure) collapse
    // to `{ status: "not_ready" }` — never throws.
    const migrationsResult = await runMigrationsCheck();
    checks.push(migrationsResult.check);
    // 9. Ollama models preflight — verify required models are pulled. Surfaces an
    // actionable `ollama pull <names>` command instead of a cryptic embedding
    // failure deeper in the call chain. Never throws; unreachable Ollama
    // collapses to `not_ready` with the host in the detail.
    const ollamaModelsCheck = await runOllamaModelsCheck();
    checks.push(ollamaModelsCheck);
    // Roll up: any missing/not_ready wins; else any warn/partial; else ready.
    const anyNotReady = checks.some((c) => c.status === "missing" || c.status === "not_ready");
    const anyWarn = checks.some((c) => c.status === "warn" || c.status === "partial");
    const overall = anyNotReady
        ? "not_ready"
        : anyWarn
            ? "partial"
            : "ready";
    // Auto-artefacts: generate the architecture doc on every init so new
    // projects get a diagram without having to run session_end first.
    let architectureSynced = null;
    if (args?.arch !== false) {
        try {
            architectureSynced = await writeProjectArchitectureOnInit(ws);
        }
        catch (e) {
            // Best-effort: failure is recorded as "not written", never rethrown.
            architectureSynced = { path: "", written: false };
        }
    }
    // Optional: first-init legacy sweep. 'dry' (default) previews only; 'commit'
    // moves HIGH-confidence matches; 'off' skips the scan entirely.
    let legacySweep = null;
    const sweepMode = args?.sweep_legacy ?? "dry";
    if (sweepMode !== "off") {
        legacySweep = await sweepLegacyBackups({
            workspace: ws,
            confirm: sweepMode === "commit",
        });
    }
    // v1.1.3: smart-scout for un-hydrated policy rule files. Best-effort —
    // any failure inside the detector is swallowed and we simply omit the
    // `recommendations` key (init_project's primary job is unaffected).
    let recommendations = [];
    try {
        recommendations = await detectHydrateRecommendations(ws);
    }
    catch {
        recommendations = [];
    }
    // Sovereign Purge auto-hygiene — token-count audit on CLAUDE.md and the
    // hidden Claude project-memory file. Best-effort: a failure here never
    // breaks init_project; we just emit a zero-value bloat_audit and skip
    // the recommendation.
    let bloatAudit = {
        threshold: 3000,
        claude_md: { path: null, tokens: 0, bloated: false },
        hidden_memory: { path: null, tokens: 0, bloated: false, found: false },
    };
    try {
        const audit = await auditBloat(ws);
        bloatAudit = audit.bloat_audit;
        if (audit.sovereign_purge_recommendation) {
            recommendations.push(audit.sovereign_purge_recommendation);
        }
    }
    catch {
        /* keep default bloatAudit */
    }
    // Top-level imperatives the agent MUST act on before doing anything else.
    // Today only the Core 3 audit emits one; future Architecture Guard checks
    // can append to this array.
    const directives = [];
    if (core3.required_action === "delegate_audit") {
        directives.push(core3.directive);
    }
    if (constitutionUpgrade && constitutionUpgrade.action === "drift_detected") {
        directives.push(`Constitution drift detected (${constitutionUpgrade.from_version} → ${constitutionUpgrade.to_version}). ${constitutionUpgrade.recommendation}`);
    }
    // v2.0.0-rc1 Capabilities Header — surfaces the protocol contract the agent should
    // adhere to during the session: dual-scope search, GLOBAL Knowledge Vault,
    // Sovereign Taxonomy, and the delegation threshold from CLAUDE.md.
    const capabilities = buildCapabilities(slugify(currentProjectId));
    const result = {
        action: "init_project",
        workspace: ws,
        expected_mcp_entry: mcpEntryPoint,
        overall,
        checks,
        architecture_synced: architectureSynced,
        legacy_sweep: legacySweep,
        core3,
        directives,
        capabilities,
        sovereign_constitution: sovereignConstitution,
        bloat_audit: bloatAudit,
        migrations: migrationsResult.block,
    };
    // `recommendations` key is omitted entirely when there are none.
    if (recommendations.length > 0) {
        result.recommendations = recommendations;
    }
    return result;
}
|
|
615
|
+
async function writeProjectArchitectureOnInit(workspace) {
    // Seed `project_file_architecture.md` on first init. The real Mermaid
    // diagram is owned by manage_backlog({ action: "session_end" }); here we
    // only drop a placeholder file so that pipeline knows where to write.
    // An already-existing file is always left untouched.
    const docPath = resolve(workspace, "project_file_architecture.md");
    let alreadyExists = true;
    try {
        await stat(docPath);
    }
    catch {
        alreadyExists = false;
    }
    if (alreadyExists) {
        return { path: docPath, written: false };
    }
    const placeholderLines = [
        `# Project File Architecture`,
        "",
        `> Auto-created by smart-claude-memory init_project. The Mermaid block is `,
        `> populated by manage_backlog({ action: "session_end" }).`,
        "",
        "## Tree",
        "",
        "```mermaid",
        "flowchart TD",
        ' n0["(run session_end to populate)"]',
        "```",
    ];
    try {
        const { writeFile } = await import("node:fs/promises");
        await writeFile(docPath, placeholderLines.join("\n"), "utf8");
        return { path: docPath, written: true };
    }
    catch {
        // Best-effort seed: a write failure must never break init_project.
        return { path: docPath, written: false };
    }
}
|
|
649
|
+
// Glob ignore patterns for the legacy-backup scan: skip dependency trees,
// VCS metadata, build output, framework caches, test coverage, and the
// sweep's own destination directory so already-swept files are never
// re-flagged on a later scan.
const LEGACY_IGNORE = [
    "**/node_modules/**",
    "**/.git/**",
    "**/dist/**",
    "**/build/**",
    "**/.next/**",
    "**/.nuxt/**",
    "**/.turbo/**",
    "**/.cache/**",
    "**/coverage/**",
    "**/backups/**", // the destination itself
];
|
|
661
|
+
/**
|
|
662
|
+
 * classifyLegacyBackup (defined below, after PRODUCTION_QUALIFIER): classifies
 * a filename as a legacy backup. Returns null if nothing matches.
|
|
663
|
+
*
|
|
664
|
+
* HIGH-confidence rules are conservative enough to auto-move:
|
|
665
|
+
* - *.bak / *.backup / *.old extensions
|
|
666
|
+
* - Explicit `_backup` / `-backup` / `.backup` separator suffixes
|
|
667
|
+
* - `old_backup_*` or `backup_*` prefix
|
|
668
|
+
* - Timestamped `backup[_.-]\d{4,}` patterns
|
|
669
|
+
*
|
|
670
|
+
* MEDIUM: filename contains "backup" but doesn't match strict patterns.
|
|
671
|
+
* Examples of things that END UP MEDIUM and therefore are NOT moved without
|
|
672
|
+
* --aggressive: backup-service.ts, backup_restore.py, my-backup-utils.js.
|
|
673
|
+
*/
|
|
674
|
+
/**
|
|
675
|
+
* Tokens that flag a file as probable production code even if its name
|
|
676
|
+
* contains "backup". A file called `backup-service.ts` is a service that
|
|
677
|
+
* DOES backups, not a backup OF a file. Files matching this pattern are
|
|
678
|
+
* downgraded from HIGH to MEDIUM so they never get auto-moved.
|
|
679
|
+
*/
|
|
680
|
+
/**
 * Vocabulary that marks a filename as probable production code even though it
 * contains the word "backup" (e.g. `backup-service.ts` is a service that
 * PERFORMS backups, not a stale copy of a file). Matching names are downgraded
 * from HIGH to MEDIUM confidence so the sweep never auto-moves them.
 */
const PRODUCTION_QUALIFIER = new RegExp(`\\b(${[
    "service", "services",
    "util", "utils", "helper", "helpers",
    "restore", "restorer",
    "manager", "handler", "controller",
    "tool", "tools",
    "provider", "factory", "gateway", "adapter", "client",
    "store", "registry", "router", "middleware",
    "config", "schema", "type", "types", "model", "models",
    "validator", "loader", "parser", "formatter", "serializer",
    // Operational-script vocabulary (caught scripts/backup-and-remove.ts in v0.9.1)
    "remove", "delete", "purge", "cleanup", "clean",
    "sync", "runner", "worker", "job",
    "init", "setup", "bootstrap", "install",
    "cli", "script", "entry", "main", "index",
    "archive", "archiver",
].join("|")})\\b`, "i");
/**
 * True when `base` contains any production-vocabulary token. Hyphens,
 * underscores and dots are normalized to spaces first so that
 * 'backup-service.ts' tokenizes as ['backup', 'service', 'ts'].
 */
function hasProductionQualifier(base) {
    return PRODUCTION_QUALIFIER.test(base.replace(/[_\-.]/g, " "));
}
|
|
705
|
+
export function classifyLegacyBackup(filename) {
    // Classify a filename as a probable legacy backup; null means "not a
    // backup at all". Strict rules are checked in order of signal strength
    // and yield HIGH confidence unless a production-vocabulary token
    // downgrades the hit to MEDIUM (so names like backup-service.ts are
    // never auto-moved).
    const base = basename(filename);
    const looksLikeProduction = hasProductionQualifier(base);
    const strictRules = [
        // Strongest signal: a dedicated backup extension.
        [/\.(bak|backup|old)$/i,
            "backup-file extension (.bak/.backup/.old)",
            "backup-file extension but production qualifier in name"],
        // `name_backup.ext` / `name-backup.ext` / `name.backup.ext`.
        [/[_.-]backup\.[a-z0-9]+$/i,
            "explicit _backup/-backup/.backup suffix before extension",
            "_backup suffix but production qualifier in name — likely production code"],
        // `backup_*` / `old_backup_*` prefixed filenames.
        [/^(old[_-])?backup[_-][^/]+\.[a-z0-9]+$/i,
            "backup-prefixed filename",
            "backup- prefix but production qualifier in name"],
    ];
    for (const [pattern, highReason, downgradedReason] of strictRules) {
        if (pattern.test(base)) {
            return looksLikeProduction
                ? { confidence: "medium", reason: downgradedReason }
                : { confidence: "high", reason: highReason };
        }
    }
    // Timestamped backups (e.g. backup_20240101) are always HIGH confidence.
    if (/backup[_.-]\d{4,}/i.test(base)) {
        return { confidence: "high", reason: "timestamped backup filename" };
    }
    // A loose mention of "backup" only ever reaches MEDIUM.
    if (base.toLowerCase().includes("backup")) {
        return { confidence: "medium", reason: "filename contains 'backup' but no strict pattern match" };
    }
    return null;
}
|
|
744
|
+
async function scanLegacyCandidates(workspace) {
    // Walk every non-hidden file under the workspace (minus LEGACY_IGNORE)
    // and keep only those that classifyLegacyBackup() recognizes.
    const files = await glob("**/*", {
        cwd: workspace,
        absolute: true,
        nodir: true,
        ignore: LEGACY_IGNORE,
        dot: false,
    });
    const candidates = [];
    for (const absPath of files) {
        const verdict = classifyLegacyBackup(absPath);
        if (!verdict) {
            continue;
        }
        candidates.push({
            path: absPath,
            relative_path: relative(workspace, absPath),
            confidence: verdict.confidence,
            reason: verdict.reason,
        });
    }
    // HIGH before MEDIUM (before LOW, should it ever appear), then
    // alphabetical within each tier.
    const tierRank = { high: 0, medium: 1, low: 2 };
    candidates.sort((a, b) => tierRank[a.confidence] - tierRank[b.confidence] ||
        a.relative_path.localeCompare(b.relative_path));
    return candidates;
}
|
|
770
|
+
/**
|
|
771
|
+
* Non-blocking, write-free summary of legacy backups in a workspace.
|
|
772
|
+
* Used by the server's startup probe and also exposed as a tool for
|
|
773
|
+
* ad-hoc inspection.
|
|
774
|
+
*/
|
|
775
|
+
export async function legacyBackupSummary(workspace) {
    // Read-only snapshot of legacy-backup candidates: tier counts plus up to
    // five example paths. Never writes anything — safe for startup probes.
    const candidates = await scanLegacyCandidates(workspace);
    let highCount = 0;
    let mediumCount = 0;
    for (const candidate of candidates) {
        if (candidate.confidence === "high") {
            highCount += 1;
        }
        else if (candidate.confidence === "medium") {
            mediumCount += 1;
        }
    }
    const topExamples = candidates
        .slice(0, 5)
        .map((candidate) => `[${candidate.confidence}] ${candidate.relative_path}`);
    return {
        workspace,
        total: candidates.length,
        high: highCount,
        medium: mediumCount,
        top_examples: topExamples,
    };
}
|
|
785
|
+
async function moveWithFallback(src, dest) {
    // Ensure the destination directory exists before attempting the move.
    await mkdir(dirname(dest), { recursive: true });
    try {
        await rename(src, dest);
        return;
    }
    catch (err) {
        // rename() cannot cross filesystem boundaries (EXDEV), and some
        // platforms surface EPERM instead; anything else is a real failure.
        if (err.code !== "EXDEV" && err.code !== "EPERM") {
            throw err;
        }
    }
    // Emulate the move: copy to the destination, then delete the original.
    await copyFile(src, dest);
    await rm(src, { force: true });
}
|
|
802
|
+
export async function sweepLegacyBackups(args = {}) {
    // Quarantine legacy backup files into backups/legacy-sweep-<timestamp>/.
    // Dry-run by default: without `confirm` the function only reports what it
    // WOULD move. HIGH-confidence candidates move on confirm; MEDIUM ones
    // additionally require `aggressive` because they may be production code.
    const workspace = resolve(args.workspace ?? process.cwd());
    // ISO timestamp with ':' and '.' replaced so it is filesystem-safe.
    const stamp = new Date().toISOString().replace(/[:.]/g, "-");
    const dest = resolve(args.dest ?? join(workspace, "backups", `legacy-sweep-${stamp}`));
    const candidates = await scanLegacyCandidates(workspace);
    // Annotate every candidate with its target path up front so dry-run
    // previews show exactly where each file would land.
    for (const candidate of candidates) {
        candidate.proposed_dest = join(dest, candidate.relative_path);
    }
    const wantTier = args.aggressive
        ? "high_and_medium"
        : "high_only";
    if (!args.confirm) {
        const highTotal = candidates.filter((c) => c.confidence === "high").length;
        const mediumTotal = candidates.filter((c) => c.confidence === "medium").length;
        return {
            action: "sweep_legacy_backups",
            workspace,
            dest,
            mode: "dry_run",
            tier_moved: wantTier,
            candidates,
            moved: 0,
            skipped: candidates.length,
            notes: [
                "DRY RUN — no files moved.",
                `Would move ${highTotal} HIGH-confidence file(s) with confirm:true.`,
                `Set aggressive:true to also move ${mediumTotal} MEDIUM-confidence file(s) (filenames that contain 'backup' without a strict pattern — may include production code like backup-service.ts).`,
                "Re-run with confirm:true (and optionally aggressive:true) to commit.",
            ],
        };
    }
    let movedCount = 0;
    let skippedCount = 0;
    for (const candidate of candidates) {
        const eligible = candidate.confidence === "high" ||
            (wantTier === "high_and_medium" && candidate.confidence === "medium");
        if (!eligible) {
            skippedCount++;
            continue;
        }
        try {
            await moveWithFallback(candidate.path, candidate.proposed_dest);
            candidate.moved = true;
            movedCount++;
        }
        catch (e) {
            // Record the failure on the candidate itself; the sweep keeps going.
            candidate.move_error = e.message;
            skippedCount++;
        }
    }
    const notes = [];
    if (movedCount > 0)
        notes.push(`Moved ${movedCount} file(s) into ${dest}.`);
    if (skippedCount > 0)
        notes.push(`Skipped ${skippedCount} candidate(s) — lower confidence or move error (see candidates[].move_error).`);
    notes.push("Originals are gone. If the sweep picked up production code by mistake, restore from git or the new backups/ subdir.");
    return {
        action: "sweep_legacy_backups",
        workspace,
        dest,
        mode: "committed",
        tier_moved: wantTier,
        candidates,
        moved: movedCount,
        skipped: skippedCount,
        notes,
    };
}
|
|
868
|
+
//# sourceMappingURL=setup.js.map
|