mover-os 4.7.4 → 4.7.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/install.js +705 -39
- package/package.json +1 -1
package/install.js
CHANGED
|
@@ -734,7 +734,15 @@ async function activateKey(key) {
|
|
|
734
734
|
if (!key) return;
|
|
735
735
|
try {
|
|
736
736
|
const https = require("https");
|
|
737
|
-
|
|
737
|
+
// v4.7.6: label = stable machine_id (hardware-derived), not os.hostname().
|
|
738
|
+
// Hostnames change (rename a Mac, switch wifi, container restart) and the
|
|
739
|
+
// server-side activation cap counts each hostname as a different machine.
|
|
740
|
+
// machine_id is stable across hostname changes and matches what the
|
|
741
|
+
// server also stores for /api/download X-Machine-Id correlation.
|
|
742
|
+
const moverDir = path.join(os.homedir(), ".mover");
|
|
743
|
+
let label = os.hostname();
|
|
744
|
+
try { label = getMachineId(moverDir) || label; } catch {}
|
|
745
|
+
const body = JSON.stringify({ key: key.trim(), organization_id: POLAR_ORG_ID, label });
|
|
738
746
|
await new Promise((resolve, reject) => {
|
|
739
747
|
const req = https.request({
|
|
740
748
|
hostname: "api.polar.sh",
|
|
@@ -822,11 +830,30 @@ async function downloadPayload(key) {
|
|
|
822
830
|
method: "GET",
|
|
823
831
|
timeout: 60000,
|
|
824
832
|
}, (res2) => {
|
|
825
|
-
|
|
826
|
-
|
|
833
|
+
// v4.7.6 (post-audit): stream-and-abort with running byte counter
|
|
834
|
+
// instead of buffering then checking size. A malicious upstream
|
|
835
|
+
// could send GBs of data and OOM Node before the post-download
|
|
836
|
+
// 100MB stat check fires. Now we destroy the connection the
|
|
837
|
+
// moment the running total exceeds MAX_TARBALL_BYTES.
|
|
838
|
+
const MAX_TARBALL_BYTES = 100 * 1024 * 1024;
|
|
839
|
+
let total = 0;
|
|
840
|
+
const ws = fs.createWriteStream(tarPath);
|
|
841
|
+
let aborted = false;
|
|
842
|
+
res2.on("data", (c) => {
|
|
843
|
+
total += c.length;
|
|
844
|
+
if (total > MAX_TARBALL_BYTES) {
|
|
845
|
+
aborted = true;
|
|
846
|
+
try { res2.destroy(); } catch {}
|
|
847
|
+
try { ws.destroy(); } catch {}
|
|
848
|
+
try { fs.unlinkSync(tarPath); } catch {}
|
|
849
|
+
reject(new Error(`Payload exceeded ${MAX_TARBALL_BYTES} bytes during download`));
|
|
850
|
+
return;
|
|
851
|
+
}
|
|
852
|
+
ws.write(c);
|
|
853
|
+
});
|
|
827
854
|
res2.on("end", () => {
|
|
828
|
-
|
|
829
|
-
resolve();
|
|
855
|
+
if (aborted) return;
|
|
856
|
+
ws.end(() => resolve());
|
|
830
857
|
});
|
|
831
858
|
});
|
|
832
859
|
req2.on("error", reject);
|
|
@@ -850,11 +877,26 @@ async function downloadPayload(key) {
|
|
|
850
877
|
reject(new Error(`Download failed (HTTP ${res.statusCode})`));
|
|
851
878
|
return;
|
|
852
879
|
}
|
|
853
|
-
|
|
854
|
-
|
|
880
|
+
// v4.7.6 (post-audit): same stream-and-abort behavior as the redirect path.
|
|
881
|
+
const MAX_TARBALL_BYTES = 100 * 1024 * 1024;
|
|
882
|
+
let total = 0;
|
|
883
|
+
const ws = fs.createWriteStream(tarPath);
|
|
884
|
+
let aborted = false;
|
|
885
|
+
res.on("data", (c) => {
|
|
886
|
+
total += c.length;
|
|
887
|
+
if (total > MAX_TARBALL_BYTES) {
|
|
888
|
+
aborted = true;
|
|
889
|
+
try { res.destroy(); } catch {}
|
|
890
|
+
try { ws.destroy(); } catch {}
|
|
891
|
+
try { fs.unlinkSync(tarPath); } catch {}
|
|
892
|
+
reject(new Error(`Payload exceeded ${MAX_TARBALL_BYTES} bytes during download`));
|
|
893
|
+
return;
|
|
894
|
+
}
|
|
895
|
+
ws.write(c);
|
|
896
|
+
});
|
|
855
897
|
res.on("end", () => {
|
|
856
|
-
|
|
857
|
-
resolve();
|
|
898
|
+
if (aborted) return;
|
|
899
|
+
ws.end(() => resolve());
|
|
858
900
|
});
|
|
859
901
|
});
|
|
860
902
|
req.on("error", reject);
|
|
@@ -862,10 +904,75 @@ async function downloadPayload(key) {
|
|
|
862
904
|
req.end();
|
|
863
905
|
});
|
|
864
906
|
|
|
865
|
-
// Validate tar contents before extraction
|
|
866
|
-
|
|
867
|
-
|
|
868
|
-
|
|
907
|
+
// ── Validate tar contents before extraction ────────────────────────────
|
|
908
|
+
//
|
|
909
|
+
// SECURITY (v4.7.5 → v4.7.6): zip-slip hardening (commit 2d5cdd2) only
|
|
910
|
+
// checked path strings (absolute paths, `..` traversal). It did NOT check
|
|
911
|
+
// entry TYPE. A crafted tarball could embed a symlink (e.g.,
|
|
912
|
+
// `mover-link -> /etc/passwd`) which `tar -xzf` would happily extract,
|
|
913
|
+
// creating a file outside the destination dir on first follow.
|
|
914
|
+
//
|
|
915
|
+
// Fix: switch to `tar -tvzf` which prints the entry type as the first
|
|
916
|
+
// character (- regular, d directory, l symlink, h hardlink, c char dev,
|
|
917
|
+
// b block dev). Reject anything that isn't a regular file, directory, or
|
|
918
|
+
// long-link metadata (which is followed by a regular entry).
|
|
919
|
+
//
|
|
920
|
+
// Also enforce a 100MB hard cap (compressed) post-download — if the file
|
|
921
|
+
// on disk is bigger than that, refuse before extraction. v4.7.5 had no
|
|
922
|
+
// size check, so a 2GB tarball would OOM Node before validation.
|
|
923
|
+
const MAX_TARBALL_BYTES = 100 * 1024 * 1024;
|
|
924
|
+
const tarStat = fs.statSync(tarPath);
|
|
925
|
+
if (tarStat.size > MAX_TARBALL_BYTES) {
|
|
926
|
+
fs.unlinkSync(tarPath);
|
|
927
|
+
throw new Error(`Payload too large: ${tarStat.size} bytes > ${MAX_TARBALL_BYTES}`);
|
|
928
|
+
}
|
|
929
|
+
|
|
930
|
+
// v4.7.6 (post-audit): use TWO tar listings.
|
|
931
|
+
//
|
|
932
|
+
// 1. `tar -tzf` → paths-only output (one path per line, exact). Used to
|
|
933
|
+
// validate against absolute paths and `..` traversal. v4.7.5's
|
|
934
|
+
// paths-only validation worked here, the regression was the missing
|
|
935
|
+
// type check, not the path parser.
|
|
936
|
+
//
|
|
937
|
+
// 2. `tar -tvzf` → verbose output. Used ONLY to extract the type
|
|
938
|
+
// character (first column). We intentionally do NOT parse the path
|
|
939
|
+
// field from verbose output because BSD tar's verbose format is
|
|
940
|
+
// fragile when paths contain spaces (e.g., "_Template Project/Chats &
|
|
941
|
+
// Resources/...") — last-whitespace-token parsing would silently
|
|
942
|
+
// drop the leading parts of a multi-word path.
|
|
943
|
+
//
|
|
944
|
+
// The ordering of -tvzf and -tzf output is identical (tar walks the
|
|
945
|
+
// archive in the same order), so we zip them by line index for the
|
|
946
|
+
// type/path correlation.
|
|
947
|
+
const pathsListing = execSync(`tar -tzf "${tarPath}"`, { encoding: 'utf8' });
|
|
948
|
+
const verboseListing = execSync(`tar -tvzf "${tarPath}"`, { encoding: 'utf8' });
|
|
949
|
+
const pathLines = pathsListing.split('\n').filter(Boolean);
|
|
950
|
+
const verboseLines = verboseListing.split('\n').filter(Boolean);
|
|
951
|
+
const badPaths = [];
|
|
952
|
+
const badTypes = [];
|
|
953
|
+
if (pathLines.length !== verboseLines.length) {
|
|
954
|
+
throw new Error(`Tar listing inconsistency: ${pathLines.length} path lines vs ${verboseLines.length} verbose lines`);
|
|
955
|
+
}
|
|
956
|
+
for (let i = 0; i < pathLines.length; i++) {
|
|
957
|
+
const entryPath = pathLines[i];
|
|
958
|
+
const typeChar = verboseLines[i].charAt(0);
|
|
959
|
+
if (!entryPath) continue;
|
|
960
|
+
// Reject by type: anything except regular file (-), directory (d).
|
|
961
|
+
// Long-link metadata uses 'L' or 'K'; tar emits these followed by
|
|
962
|
+
// another entry — we'd reject the link metadata here too which is fine
|
|
963
|
+
// because we don't ship long names.
|
|
964
|
+
if (typeChar !== '-' && typeChar !== 'd') {
|
|
965
|
+
badTypes.push(`${typeChar} ${entryPath}`);
|
|
966
|
+
continue;
|
|
967
|
+
}
|
|
968
|
+
if (entryPath.startsWith('/') || entryPath.includes('..')) {
|
|
969
|
+
badPaths.push(entryPath);
|
|
970
|
+
}
|
|
971
|
+
}
|
|
972
|
+
if (badTypes.length > 0) {
|
|
973
|
+
fs.unlinkSync(tarPath);
|
|
974
|
+
throw new Error('Payload contains non-regular entries (symlinks/hardlinks/devices): ' + badTypes.slice(0, 5).join(', '));
|
|
975
|
+
}
|
|
869
976
|
if (badPaths.length > 0) {
|
|
870
977
|
fs.unlinkSync(tarPath);
|
|
871
978
|
throw new Error('Payload contains unsafe paths: ' + badPaths.join(', '));
|
|
@@ -1605,14 +1712,25 @@ async function runUninstall(vaultPath) {
|
|
|
1605
1712
|
const rulesExist = rulesPaths.some(p => fs.existsSync(p.path));
|
|
1606
1713
|
if (rulesExist) categories.push({ id: "rules", name: "Rules", description: "Global rules files for all agents", items: rulesPaths });
|
|
1607
1714
|
|
|
1608
|
-
// Skills
|
|
1715
|
+
// Skills — v4.7.6: cover all agents Mover OS installs into. v4.7.5 only
|
|
1716
|
+
// listed 5 of ~10 paths, leaving orphaned skills behind on uninstall for
|
|
1717
|
+
// Gemini CLI, Cline, Roo Code, Aider, OpenCode, Continue, and the shared
|
|
1718
|
+
// ~/.agents/skills/ pool. Derived from AGENT_REGISTRY, but kept literal
|
|
1719
|
+
// here because uninstall runs after the registry may have changed in
|
|
1720
|
+
// newer bundles. Drift-detection lives in v4.8.0 which moves to a
|
|
1721
|
+
// collectInstallPaths(kind) helper.
|
|
1609
1722
|
const skillsPaths = [
|
|
1610
1723
|
{ label: "Claude Code skills", path: path.join(home, ".claude", "skills"), dir: true, keepBuiltins: true },
|
|
1611
1724
|
{ label: "Cursor skills", path: path.join(home, ".cursor", "skills"), dir: true },
|
|
1612
1725
|
{ label: "Codex skills", path: path.join(home, ".codex", "skills"), dir: true },
|
|
1613
|
-
{ label: "
|
|
1726
|
+
{ label: "Gemini CLI skills", path: path.join(home, ".gemini", "skills"), dir: true },
|
|
1614
1727
|
{ label: "Antigravity skills", path: path.join(home, ".gemini", "antigravity", "skills"), dir: true },
|
|
1615
|
-
|
|
1728
|
+
{ label: "Windsurf skills (legacy)", path: path.join(home, ".windsurf", "skills"), dir: true },
|
|
1729
|
+
{ label: "Windsurf skills", path: path.join(home, ".codeium", "windsurf", "skills"), dir: true },
|
|
1730
|
+
{ label: "Cline skills", path: path.join(home, ".cline", "skills"), dir: true },
|
|
1731
|
+
{ label: "Roo Code skills (vault-relative)", path: vaultPath && path.join(vaultPath, ".roo", "skills"), dir: true },
|
|
1732
|
+
{ label: "Cross-agent shared skills", path: path.join(home, ".agents", "skills"), dir: true },
|
|
1733
|
+
].filter(p => p.path);
|
|
1616
1734
|
const skillsExist = skillsPaths.some(p => fs.existsSync(p.path));
|
|
1617
1735
|
if (skillsExist) categories.push({ id: "skills", name: "Skills", description: "61 curated skill packs", items: skillsPaths });
|
|
1618
1736
|
|
|
@@ -1754,7 +1872,15 @@ async function runUninstall(vaultPath) {
|
|
|
1754
1872
|
barLn(dim("Deactivating license..."));
|
|
1755
1873
|
try {
|
|
1756
1874
|
const https = require("https");
|
|
1757
|
-
|
|
1875
|
+
// v4.7.6: match activateKey's label scheme (machine_id) so deactivate
|
|
1876
|
+
// actually frees the activation slot. v4.7.5 deactivated by hostname,
|
|
1877
|
+
// which only worked if the user hadn't renamed their machine since
|
|
1878
|
+
// install. Fallback: if machine_id read fails, try hostname (covers
|
|
1879
|
+
// pre-v4.7.3 activations that stored hostname).
|
|
1880
|
+
const moverDir = path.join(os.homedir(), ".mover");
|
|
1881
|
+
let label = os.hostname();
|
|
1882
|
+
try { label = getMachineId(moverDir) || label; } catch {}
|
|
1883
|
+
const body = JSON.stringify({ key: cfg.licenseKey, organization_id: POLAR_ORG_ID, label });
|
|
1758
1884
|
await new Promise((resolve, reject) => {
|
|
1759
1885
|
const req = https.request({
|
|
1760
1886
|
hostname: "api.polar.sh",
|
|
@@ -1923,13 +2049,13 @@ const AGENT_REGISTRY = {
|
|
|
1923
2049
|
// ── Enhanced Tier ──────────────────────────────────────────────────────────
|
|
1924
2050
|
"codex": {
|
|
1925
2051
|
name: "Codex",
|
|
1926
|
-
tier: "
|
|
1927
|
-
tierDesc: "AGENTS.md, skills (skills = commands)",
|
|
2052
|
+
tier: "full",
|
|
2053
|
+
tierDesc: "AGENTS.md, skills (skills = commands), 5 hooks",
|
|
1928
2054
|
detect: () => cmdExists("codex") || fs.existsSync(path.join(H, ".codex")),
|
|
1929
2055
|
rules: { type: "agents-md", dest: () => path.join(H, ".codex", "AGENTS.md") },
|
|
1930
2056
|
skills: { dest: () => path.join(H, ".codex", "skills") },
|
|
1931
2057
|
commands: null,
|
|
1932
|
-
hooks:
|
|
2058
|
+
hooks: { type: "codex-hooks-json", dest: () => path.join(H, ".codex", "hooks.json") },
|
|
1933
2059
|
},
|
|
1934
2060
|
"antigravity": {
|
|
1935
2061
|
name: "Antigravity",
|
|
@@ -2137,7 +2263,7 @@ const SKILL_CATEGORIES = {
|
|
|
2137
2263
|
"json-canvas": "obsidian",
|
|
2138
2264
|
// Tools (always installed — core utilities)
|
|
2139
2265
|
"defuddle": "tools",
|
|
2140
|
-
"skill-creator": "tools",
|
|
2266
|
+
"mover-skill-creator": "tools",
|
|
2141
2267
|
"find-skills": "tools",
|
|
2142
2268
|
};
|
|
2143
2269
|
|
|
@@ -2151,19 +2277,32 @@ const CATEGORY_META = [
|
|
|
2151
2277
|
{ id: "obsidian", name: "Obsidian", desc: "markdown, bases, canvas, CLI" },
|
|
2152
2278
|
];
|
|
2153
2279
|
|
|
2280
|
+
// Dev/eval artifacts that ship in src/skills/ but must NEVER be installed as
|
|
2281
|
+
// runtime skills. Matched as a directory name suffix so e.g.
|
|
2282
|
+
// `friction-enforcer-workspace/iteration-1/skill-snapshot/SKILL.md` (an evaluation
|
|
2283
|
+
// snapshot of friction-enforcer) does not get installed as a separate skill named
|
|
2284
|
+
// `skill-snapshot`. Caused 1+ duplicate skill in v4.7.5 installs which compounded
|
|
2285
|
+
// the skill-description budget pressure that drops descriptions at runtime.
|
|
2286
|
+
const SKILL_DEV_DIR_SUFFIXES = ["-workspace", "-benchmark", "-sandbox"];
|
|
2287
|
+
|
|
2288
|
+
function isDevSkillDir(name) {
|
|
2289
|
+
return SKILL_DEV_DIR_SUFFIXES.some((s) => name.endsWith(s));
|
|
2290
|
+
}
|
|
2291
|
+
|
|
2154
2292
|
function findSkills(bundleDir) {
|
|
2155
2293
|
const skillsDir = path.join(bundleDir, "src", "skills");
|
|
2156
2294
|
if (!fs.existsSync(skillsDir)) return [];
|
|
2157
2295
|
const skills = [];
|
|
2158
2296
|
const walk = (dir) => {
|
|
2159
2297
|
for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
|
|
2298
|
+
if (!entry.isDirectory()) continue;
|
|
2299
|
+
// Skip dev/eval artifact roots and anything inside them.
|
|
2300
|
+
if (isDevSkillDir(entry.name)) continue;
|
|
2160
2301
|
const full = path.join(dir, entry.name);
|
|
2161
|
-
if (
|
|
2162
|
-
|
|
2163
|
-
|
|
2164
|
-
|
|
2165
|
-
walk(full);
|
|
2166
|
-
}
|
|
2302
|
+
if (fs.existsSync(path.join(full, "SKILL.md"))) {
|
|
2303
|
+
skills.push({ name: entry.name, path: full, category: SKILL_CATEGORIES[entry.name] || "tools" });
|
|
2304
|
+
} else {
|
|
2305
|
+
walk(full);
|
|
2167
2306
|
}
|
|
2168
2307
|
}
|
|
2169
2308
|
};
|
|
@@ -2240,6 +2379,176 @@ Stuck: /debug-resistance
|
|
|
2240
2379
|
}
|
|
2241
2380
|
|
|
2242
2381
|
// ─── Claude Code hooks (settings.json) ──────────────────────────────────────
|
|
2382
|
+
// ─── Codex hook config generator ────────────────────────────────────────────
|
|
2383
|
+
// v4.7.5: Codex hooks invoked through mover-hook-adapter.js for schema
|
|
2384
|
+
// translation. MVP scope: session-start, engine-protection, git-safety,
|
|
2385
|
+
// plan-sync-reminder, dirty-tree-guard (no session-log-reminder under Codex
|
|
2386
|
+
// — that script is Claude-transcript-specific).
|
|
2387
|
+
//
|
|
2388
|
+
// IMPORTANT: Codex runs hook commands through cmd.exe on Windows, which does
|
|
2389
|
+
// NOT expand $HOME. We resolve absolute paths at install time so the same
|
|
2390
|
+
// hooks.json works on macOS/Linux/Windows.
|
|
2391
|
+
function generateCodexHooks() {
|
|
2392
|
+
const home = os.homedir();
|
|
2393
|
+
// Forward slashes work on all three OSes when invoking node directly.
|
|
2394
|
+
const fwd = (p) => p.split(path.sep).join("/");
|
|
2395
|
+
const hooksRoot = fwd(path.join(home, ".codex", "hooks"));
|
|
2396
|
+
const adapter = `"${hooksRoot}/mover-hook-adapter.js"`;
|
|
2397
|
+
const hookDir = `"${hooksRoot}`;
|
|
2398
|
+
return JSON.stringify(
|
|
2399
|
+
{
|
|
2400
|
+
hooks: {
|
|
2401
|
+
SessionStart: [
|
|
2402
|
+
{
|
|
2403
|
+
matcher: "startup|resume|clear",
|
|
2404
|
+
hooks: [
|
|
2405
|
+
{
|
|
2406
|
+
// v4.7.6: switched from "full" to "resume" mode. The full primer
|
|
2407
|
+
// is ~12K chars which mover-hook-adapter.js wraps as
|
|
2408
|
+
// additionalContext for Codex, eating the skill-description
|
|
2409
|
+
// budget. Codex sessions only need lightweight context refresh.
|
|
2410
|
+
// Timeout 15s (was 5s): session-start.sh:122 calls `npm view`
|
|
2411
|
+
// on cold cache; 5s killed cold-start under Codex.
|
|
2412
|
+
type: "command",
|
|
2413
|
+
command: `node ${adapter} codex SessionStart ${hookDir}/session-start.sh" resume`,
|
|
2414
|
+
timeout: 15,
|
|
2415
|
+
},
|
|
2416
|
+
],
|
|
2417
|
+
},
|
|
2418
|
+
],
|
|
2419
|
+
PreToolUse: [
|
|
2420
|
+
{
|
|
2421
|
+
matcher: "Bash",
|
|
2422
|
+
hooks: [
|
|
2423
|
+
{
|
|
2424
|
+
type: "command",
|
|
2425
|
+
command: `node ${adapter} codex PreToolUse ${hookDir}/git-safety.sh"`,
|
|
2426
|
+
timeout: 5,
|
|
2427
|
+
},
|
|
2428
|
+
],
|
|
2429
|
+
},
|
|
2430
|
+
{
|
|
2431
|
+
matcher: "Edit|Write|apply_patch",
|
|
2432
|
+
hooks: [
|
|
2433
|
+
{
|
|
2434
|
+
type: "command",
|
|
2435
|
+
command: `node ${adapter} codex PreToolUse ${hookDir}/engine-protection.sh"`,
|
|
2436
|
+
timeout: 5,
|
|
2437
|
+
},
|
|
2438
|
+
],
|
|
2439
|
+
},
|
|
2440
|
+
],
|
|
2441
|
+
PostToolUse: [
|
|
2442
|
+
{
|
|
2443
|
+
matcher: "Edit|Write|apply_patch",
|
|
2444
|
+
hooks: [
|
|
2445
|
+
{
|
|
2446
|
+
type: "command",
|
|
2447
|
+
command: `node ${adapter} codex PostToolUse ${hookDir}/plan-sync-reminder.sh"`,
|
|
2448
|
+
timeout: 5,
|
|
2449
|
+
},
|
|
2450
|
+
],
|
|
2451
|
+
},
|
|
2452
|
+
],
|
|
2453
|
+
Stop: [
|
|
2454
|
+
{
|
|
2455
|
+
hooks: [
|
|
2456
|
+
{
|
|
2457
|
+
type: "command",
|
|
2458
|
+
command: `node ${adapter} codex Stop ${hookDir}/dirty-tree-guard.sh"`,
|
|
2459
|
+
timeout: 10,
|
|
2460
|
+
},
|
|
2461
|
+
],
|
|
2462
|
+
},
|
|
2463
|
+
],
|
|
2464
|
+
},
|
|
2465
|
+
},
|
|
2466
|
+
null,
|
|
2467
|
+
2
|
|
2468
|
+
);
|
|
2469
|
+
}
|
|
2470
|
+
|
|
2471
|
+
// ─── Gemini hook config generator ───────────────────────────────────────────
|
|
2472
|
+
// v4.7.5: Gemini events differ from Claude/Codex — UserPromptSubmit→BeforeAgent,
|
|
2473
|
+
// PreToolUse→BeforeTool, PostToolUse→AfterTool, Stop→AfterAgent. Adapter
|
|
2474
|
+
// translates schema; we map event names here.
|
|
2475
|
+
// Timeouts are in milliseconds per Gemini hook spec (default 60000).
|
|
2476
|
+
// Absolute paths used so cmd.exe on Windows can resolve correctly.
|
|
2477
|
+
function generateGeminiHooks() {
|
|
2478
|
+
const home = os.homedir();
|
|
2479
|
+
const fwd = (p) => p.split(path.sep).join("/");
|
|
2480
|
+
const hooksRoot = fwd(path.join(home, ".gemini", "hooks"));
|
|
2481
|
+
const adapter = `"${hooksRoot}/mover-hook-adapter.js"`;
|
|
2482
|
+
const hookDir = `"${hooksRoot}`;
|
|
2483
|
+
return {
|
|
2484
|
+
SessionStart: [
|
|
2485
|
+
{
|
|
2486
|
+
matcher: "startup",
|
|
2487
|
+
hooks: [
|
|
2488
|
+
{
|
|
2489
|
+
// v4.7.6: same fix as the Codex hook — switched from "full" to
|
|
2490
|
+
// "resume" to keep mover-hook-adapter additionalContext within
|
|
2491
|
+
// budget. Timeout raised to 15s for cold-cache npm view fallback.
|
|
2492
|
+
name: "mover-session-start",
|
|
2493
|
+
type: "command",
|
|
2494
|
+
command: `node ${adapter} gemini SessionStart ${hookDir}/session-start.sh" resume`,
|
|
2495
|
+
timeout: 15000,
|
|
2496
|
+
},
|
|
2497
|
+
],
|
|
2498
|
+
},
|
|
2499
|
+
],
|
|
2500
|
+
BeforeTool: [
|
|
2501
|
+
{
|
|
2502
|
+
matcher: "write_file|replace",
|
|
2503
|
+
hooks: [
|
|
2504
|
+
{
|
|
2505
|
+
name: "mover-engine-protection",
|
|
2506
|
+
type: "command",
|
|
2507
|
+
command: `node ${adapter} gemini BeforeTool ${hookDir}/engine-protection.sh"`,
|
|
2508
|
+
timeout: 5000,
|
|
2509
|
+
},
|
|
2510
|
+
],
|
|
2511
|
+
},
|
|
2512
|
+
{
|
|
2513
|
+
matcher: "run_shell_command",
|
|
2514
|
+
hooks: [
|
|
2515
|
+
{
|
|
2516
|
+
name: "mover-git-safety",
|
|
2517
|
+
type: "command",
|
|
2518
|
+
command: `node ${adapter} gemini BeforeTool ${hookDir}/git-safety.sh"`,
|
|
2519
|
+
timeout: 5000,
|
|
2520
|
+
},
|
|
2521
|
+
],
|
|
2522
|
+
},
|
|
2523
|
+
],
|
|
2524
|
+
AfterTool: [
|
|
2525
|
+
{
|
|
2526
|
+
matcher: "write_file|replace",
|
|
2527
|
+
hooks: [
|
|
2528
|
+
{
|
|
2529
|
+
name: "mover-plan-sync",
|
|
2530
|
+
type: "command",
|
|
2531
|
+
command: `node ${adapter} gemini AfterTool ${hookDir}/plan-sync-reminder.sh"`,
|
|
2532
|
+
timeout: 5000,
|
|
2533
|
+
},
|
|
2534
|
+
],
|
|
2535
|
+
},
|
|
2536
|
+
],
|
|
2537
|
+
AfterAgent: [
|
|
2538
|
+
{
|
|
2539
|
+
hooks: [
|
|
2540
|
+
{
|
|
2541
|
+
name: "mover-dirty-tree-guard",
|
|
2542
|
+
type: "command",
|
|
2543
|
+
command: `node ${adapter} gemini AfterAgent ${hookDir}/dirty-tree-guard.sh"`,
|
|
2544
|
+
timeout: 10000,
|
|
2545
|
+
},
|
|
2546
|
+
],
|
|
2547
|
+
},
|
|
2548
|
+
],
|
|
2549
|
+
};
|
|
2550
|
+
}
|
|
2551
|
+
|
|
2243
2552
|
function generateClaudeSettings() {
|
|
2244
2553
|
return JSON.stringify(
|
|
2245
2554
|
{
|
|
@@ -2868,6 +3177,20 @@ function installSkillPacks(bundleDir, destDir, selectedCategories) {
|
|
|
2868
3177
|
// Skip unchanged skills
|
|
2869
3178
|
const sourceHash = computeSkillHash(skill.path);
|
|
2870
3179
|
if (manifest.skills[skill.name]?.hash === sourceHash && fs.existsSync(dest)) {
|
|
3180
|
+
// v4.7.6 (post-audit): even when skipping, ensure the .mover-installed
|
|
3181
|
+
// stamp exists. Pre-v4.7.6 installs lack the stamp; without this
|
|
3182
|
+
// backfill, the manifest+stamp logic in the orphan cleanup wouldn't
|
|
3183
|
+
// see them as Mover-owned on the first v4.7.6 update if the manifest
|
|
3184
|
+
// entry was lost (rare but possible).
|
|
3185
|
+
try {
|
|
3186
|
+
const stampPath = path.join(dest, ".mover-installed");
|
|
3187
|
+
if (!fs.existsSync(stampPath)) {
|
|
3188
|
+
fs.writeFileSync(
|
|
3189
|
+
stampPath,
|
|
3190
|
+
JSON.stringify({ name: skill.name, version: "v4.7.6", at: new Date().toISOString(), backfilled: true }, null, 2)
|
|
3191
|
+
);
|
|
3192
|
+
}
|
|
3193
|
+
} catch {}
|
|
2871
3194
|
installedNames.add(skill.name);
|
|
2872
3195
|
skipped++;
|
|
2873
3196
|
continue;
|
|
@@ -2875,23 +3198,40 @@ function installSkillPacks(bundleDir, destDir, selectedCategories) {
|
|
|
2875
3198
|
|
|
2876
3199
|
if (fs.existsSync(dest)) fs.rmSync(dest, { recursive: true, force: true });
|
|
2877
3200
|
copyDirRecursive(skill.path, dest);
|
|
3201
|
+
// v4.7.6: stamp file proves Mover OS owns this skill. Used by orphan
|
|
3202
|
+
// cleanup below — only stamped skills are deletable on update. User-
|
|
3203
|
+
// created skills (e.g., a skill the user wrote and dropped into the
|
|
3204
|
+
// skills dir) survive even if their SKILL.md happens to contain
|
|
3205
|
+
// "## Activation" or "## When to Use" headings.
|
|
3206
|
+
try {
|
|
3207
|
+
fs.writeFileSync(
|
|
3208
|
+
path.join(dest, ".mover-installed"),
|
|
3209
|
+
JSON.stringify({ name: skill.name, version: "v4.7.6", at: new Date().toISOString() }, null, 2)
|
|
3210
|
+
);
|
|
3211
|
+
} catch {}
|
|
2878
3212
|
manifest.skills[skill.name] = { hash: sourceHash, installedAt: new Date().toISOString() };
|
|
2879
3213
|
installedNames.add(skill.name);
|
|
2880
3214
|
count++;
|
|
2881
3215
|
}
|
|
2882
3216
|
|
|
2883
|
-
// Clean orphaned skills (renamed/removed in updates)
|
|
2884
|
-
//
|
|
3217
|
+
// Clean orphaned skills (renamed/removed in updates).
|
|
3218
|
+
// v4.7.6: ONLY remove skills that have a `.mover-installed` stamp from a
|
|
3219
|
+
// prior install OR appear in the manifest. User-created skills (no stamp,
|
|
3220
|
+
// not in manifest) are preserved regardless of their SKILL.md contents.
|
|
3221
|
+
// Pre-v4.7.6 installs lack the stamp; the manifest entry covers those
|
|
3222
|
+
// (manifest is written for every prior install). After one cycle of v4.7.6
|
|
3223
|
+
// install/update, every shipped skill has both stamp and manifest entry.
|
|
2885
3224
|
for (const dir of fs.readdirSync(destDir)) {
|
|
2886
3225
|
if (installedNames.has(dir)) continue;
|
|
2887
3226
|
const dirPath = path.join(destDir, dir);
|
|
2888
3227
|
try {
|
|
2889
|
-
if (fs.statSync(dirPath).isDirectory()
|
|
2890
|
-
|
|
2891
|
-
|
|
2892
|
-
|
|
2893
|
-
|
|
2894
|
-
}
|
|
3228
|
+
if (!fs.statSync(dirPath).isDirectory()) continue;
|
|
3229
|
+
if (!fs.existsSync(path.join(dirPath, "SKILL.md"))) continue;
|
|
3230
|
+
const hasStamp = fs.existsSync(path.join(dirPath, ".mover-installed"));
|
|
3231
|
+
const inManifest = Boolean(manifest.skills[dir]);
|
|
3232
|
+
if (hasStamp || inManifest) {
|
|
3233
|
+
fs.rmSync(dirPath, { recursive: true, force: true });
|
|
3234
|
+
ln(` ${dim("Removed orphan skill:")} ${dir}`);
|
|
2895
3235
|
}
|
|
2896
3236
|
} catch (e) { /* skip */ }
|
|
2897
3237
|
}
|
|
@@ -2944,10 +3284,15 @@ function installHooksForClaude(bundleDir, vaultPath) {
|
|
|
2944
3284
|
if (!existing.hooks[event]) {
|
|
2945
3285
|
existing.hooks[event] = entries;
|
|
2946
3286
|
} else {
|
|
2947
|
-
// Check if our hooks are already registered (by command substring)
|
|
3287
|
+
// Check if our hooks are already registered (by command substring).
|
|
3288
|
+
// v4.7.6: split on both / and \\ so the basename extraction works on
|
|
3289
|
+
// Windows where command paths use backslashes. The forward-slash-only
|
|
3290
|
+
// split returned the entire command string on Windows, which never
|
|
3291
|
+
// matched the substring check, leading to duplicate hooks accumulating
|
|
3292
|
+
// on every install.
|
|
2948
3293
|
const existingCmds = JSON.stringify(existing.hooks[event]);
|
|
2949
3294
|
const alreadyHas = entries[0].hooks.every(
|
|
2950
|
-
(h) => existingCmds.includes(h.command.split(
|
|
3295
|
+
(h) => existingCmds.includes(h.command.split(/[\\/]/).pop().replace('"', ""))
|
|
2951
3296
|
);
|
|
2952
3297
|
if (!alreadyHas) {
|
|
2953
3298
|
existing.hooks[event].push(...entries);
|
|
@@ -2969,6 +3314,252 @@ function installHooksForClaude(bundleDir, vaultPath) {
|
|
|
2969
3314
|
return count;
|
|
2970
3315
|
}
|
|
2971
3316
|
|
|
3317
|
+
// ─── Multi-agent hook installer (v4.7.5) ────────────────────────────────────
|
|
3318
|
+
// Copies the MVP hook set (5 enforcement hooks + adapter + shared lib) to the
|
|
3319
|
+
// agent's hook directory and writes its hook config. Used by Codex and Gemini.
|
|
3320
|
+
const MVP_HOOK_SCRIPTS = [
|
|
3321
|
+
"session-start.sh",
|
|
3322
|
+
"engine-protection.sh",
|
|
3323
|
+
"git-safety.sh",
|
|
3324
|
+
"plan-sync-reminder.sh",
|
|
3325
|
+
"dirty-tree-guard.sh",
|
|
3326
|
+
"mover-lib.sh", // sourced by the others
|
|
3327
|
+
];
|
|
3328
|
+
|
|
3329
|
+
// Section-aware TOML upsert. Sets [section] key = value, preserving comments,
|
|
3330
|
+
// existing keys, and other sections. If section exists with the key set to a
|
|
3331
|
+
// different value, the value is REPLACED (not duplicated). If the section
|
|
3332
|
+
// header has whitespace variants like `[ features ]`, treat as the same section.
|
|
3333
|
+
function upsertTomlKey(filePath, section, key, value) {
|
|
3334
|
+
let content = "";
|
|
3335
|
+
if (fs.existsSync(filePath)) {
|
|
3336
|
+
content = fs.readFileSync(filePath, "utf8");
|
|
3337
|
+
}
|
|
3338
|
+
|
|
3339
|
+
// Match table header in any whitespace variant: [section], [ section ], etc.
|
|
3340
|
+
// Also tolerate trailing comment after header: [section] # ...
|
|
3341
|
+
const headerRe = (name) =>
|
|
3342
|
+
new RegExp(
|
|
3343
|
+
`^\\s*\\[\\s*${name.replace(/[.*+?^${}()|[\\]\\\\]/g, "\\$&")}\\s*\\](?:[^\\n]*)$`,
|
|
3344
|
+
"m"
|
|
3345
|
+
);
|
|
3346
|
+
// Any section header (used to find section boundaries)
|
|
3347
|
+
const anyHeaderRe = /^\s*\[\s*[^\]]+\s*\](?:[^\n]*)$/m;
|
|
3348
|
+
|
|
3349
|
+
const lines = content.split("\n");
|
|
3350
|
+
let inSection = false;
|
|
3351
|
+
let sectionStart = -1;
|
|
3352
|
+
let sectionEnd = lines.length;
|
|
3353
|
+
for (let i = 0; i < lines.length; i++) {
|
|
3354
|
+
if (headerRe(section).test(lines[i])) {
|
|
3355
|
+
inSection = true;
|
|
3356
|
+
sectionStart = i;
|
|
3357
|
+
// Find next header (or EOF) — that's section end
|
|
3358
|
+
for (let j = i + 1; j < lines.length; j++) {
|
|
3359
|
+
if (anyHeaderRe.test(lines[j])) {
|
|
3360
|
+
sectionEnd = j;
|
|
3361
|
+
break;
|
|
3362
|
+
}
|
|
3363
|
+
}
|
|
3364
|
+
break;
|
|
3365
|
+
}
|
|
3366
|
+
}
|
|
3367
|
+
|
|
3368
|
+
const newKv = `${key} = ${value}`;
|
|
3369
|
+
// Match existing key in this section: tolerates whitespace + comment
|
|
3370
|
+
const keyRe = new RegExp(
|
|
3371
|
+
`^\\s*${key.replace(/[.*+?^${}()|[\\]\\\\]/g, "\\$&")}\\s*=.*$`
|
|
3372
|
+
);
|
|
3373
|
+
|
|
3374
|
+
if (inSection) {
|
|
3375
|
+
let replaced = false;
|
|
3376
|
+
for (let i = sectionStart + 1; i < sectionEnd; i++) {
|
|
3377
|
+
if (keyRe.test(lines[i])) {
|
|
3378
|
+
lines[i] = newKv;
|
|
3379
|
+
replaced = true;
|
|
3380
|
+
break;
|
|
3381
|
+
}
|
|
3382
|
+
}
|
|
3383
|
+
if (!replaced) {
|
|
3384
|
+
// Insert key right after section header
|
|
3385
|
+
lines.splice(sectionStart + 1, 0, newKv);
|
|
3386
|
+
}
|
|
3387
|
+
} else {
|
|
3388
|
+
// Section absent — append fresh section at EOF
|
|
3389
|
+
if (lines.length > 0 && lines[lines.length - 1].trim() !== "") {
|
|
3390
|
+
lines.push("");
|
|
3391
|
+
}
|
|
3392
|
+
lines.push(`[${section}]`);
|
|
3393
|
+
lines.push(newKv);
|
|
3394
|
+
}
|
|
3395
|
+
|
|
3396
|
+
fs.writeFileSync(filePath, lines.join("\n"), "utf8");
|
|
3397
|
+
}
|
|
3398
|
+
|
|
3399
|
+
function copyMvpHooks(bundleDir, destDir) {
|
|
3400
|
+
const hooksSrc = path.join(bundleDir, "src", "hooks");
|
|
3401
|
+
if (!fs.existsSync(hooksSrc)) return 0;
|
|
3402
|
+
fs.mkdirSync(destDir, { recursive: true });
|
|
3403
|
+
let count = 0;
|
|
3404
|
+
for (const file of MVP_HOOK_SCRIPTS) {
|
|
3405
|
+
const src = path.join(hooksSrc, file);
|
|
3406
|
+
if (!fs.existsSync(src)) continue;
|
|
3407
|
+
const dst = path.join(destDir, file);
|
|
3408
|
+
const content = fs
|
|
3409
|
+
.readFileSync(src, "utf8")
|
|
3410
|
+
.replace(/\r\n/g, "\n")
|
|
3411
|
+
.replace(/\r/g, "\n");
|
|
3412
|
+
fs.writeFileSync(dst, content, { mode: 0o755 });
|
|
3413
|
+
count++;
|
|
3414
|
+
}
|
|
3415
|
+
// Adapter (Node script — copy preserving binary mode)
|
|
3416
|
+
const adapterSrc = path.join(hooksSrc, "mover-hook-adapter.js");
|
|
3417
|
+
if (fs.existsSync(adapterSrc)) {
|
|
3418
|
+
const dst = path.join(destDir, "mover-hook-adapter.js");
|
|
3419
|
+
const content = fs
|
|
3420
|
+
.readFileSync(adapterSrc, "utf8")
|
|
3421
|
+
.replace(/\r\n/g, "\n")
|
|
3422
|
+
.replace(/\r/g, "\n");
|
|
3423
|
+
fs.writeFileSync(dst, content, { mode: 0o755 });
|
|
3424
|
+
count++;
|
|
3425
|
+
}
|
|
3426
|
+
return count;
|
|
3427
|
+
}
|
|
3428
|
+
|
|
3429
|
+
function installHooksForCodex(bundleDir) {
  // Register Mover's MVP hooks with a local Codex install: copy the hook
  // scripts into ~/.codex/hooks, merge our entries into ~/.codex/hooks.json,
  // and enable the codex_hooks feature flag in ~/.codex/config.toml.
  // Returns the number of hook files installed (0 when Codex is absent or
  // the bundle ships no hooks).
  // NOTE(review): this silently rewrites the user's agent configuration so
  // Mover scripts run on every Codex session — confirm that is disclosed.
  const codexDir = path.join(os.homedir(), ".codex");

  // Either an existing ~/.codex directory or a `codex` binary on PATH
  // counts as evidence of a Codex install; otherwise do nothing.
  const codexPresent = fs.existsSync(codexDir) || cmdExists("codex");
  if (!codexPresent) return 0;

  fs.mkdirSync(codexDir, { recursive: true });
  const installed = copyMvpHooks(bundleDir, path.join(codexDir, "hooks"));
  if (installed === 0) return 0;

  // hooks.json: deep-merge our entries into any existing config.
  // v4.7.5 fix: per-entry idempotency, not per-event. Previous coarse check
  // skipped sibling entries when one Mover hook was already registered.
  const hooksJsonPath = path.join(codexDir, "hooks.json");
  const freshConfig = JSON.parse(generateCodexHooks());
  const writeConfig = (cfg) =>
    fs.writeFileSync(hooksJsonPath, JSON.stringify(cfg, null, 2), "utf8");

  if (!fs.existsSync(hooksJsonPath)) {
    writeConfig(freshConfig);
  } else {
    try {
      const current = JSON.parse(fs.readFileSync(hooksJsonPath, "utf8"));
      mergeHooksConfig(current, freshConfig);
      writeConfig(current);
    } catch {
      // Unparseable existing file: keep a best-effort backup, start fresh.
      try {
        fs.copyFileSync(hooksJsonPath, hooksJsonPath + ".bak");
      } catch {}
      writeConfig(freshConfig);
    }
  }

  // Enable codex_hooks in config.toml via a section-aware upsert. Handles
  // edge cases a plain regex replacement missed:
  //   [features] # comment  — replacement misses the table header
  //   codex_hooks = false   — would create duplicate key
  //   [ features ]          — bare-line regex misses whitespace variant
  // Strategy: parse file into sections, locate or create [features],
  // upsert codex_hooks=true within that section, reassemble.
  upsertTomlKey(path.join(codexDir, "config.toml"), "features", "codex_hooks", "true");

  return installed;
}
|
|
3473
|
+
|
|
3474
|
+
function installHooksForGemini(bundleDir) {
  // Register Mover's MVP hooks with a local Gemini CLI install: copy the
  // hook scripts into ~/.gemini/hooks and deep-merge our hook entries into
  // ~/.gemini/settings.json. Returns the number of hook files installed
  // (0 when Gemini is absent or the bundle ships no hooks).
  // NOTE(review): this silently rewrites the user's agent configuration so
  // Mover scripts run on every Gemini session — confirm that is disclosed.
  const home = os.homedir();
  const geminiDir = path.join(home, ".gemini");
  const hooksDst = path.join(geminiDir, "hooks");

  // Detect Gemini install.
  // Review fix: the previous check also tested
  // fs.existsSync(path.join(geminiDir, "settings.json")), but that clause
  // was dead — it was only evaluated when geminiDir itself did not exist,
  // in which case a file inside it could not exist either.
  if (!fs.existsSync(geminiDir) && !cmdExists("gemini")) return 0;

  fs.mkdirSync(geminiDir, { recursive: true });
  const count = copyMvpHooks(bundleDir, hooksDst);
  if (count === 0) return 0;

  // Deep-merge our entries into settings.json, preserving user content.
  const settingsPath = path.join(geminiDir, "settings.json");
  const newHooks = generateGeminiHooks();
  let existing = {};
  if (fs.existsSync(settingsPath)) {
    try {
      existing = JSON.parse(fs.readFileSync(settingsPath, "utf8"));
    } catch {
      // Unparseable settings: keep a best-effort backup, start from scratch.
      try {
        fs.copyFileSync(settingsPath, settingsPath + ".bak");
      } catch {}
      existing = {};
    }
  }
  // mergeHooksConfig initializes existing.hooks itself, so the previous
  // `if (!existing.hooks) existing.hooks = {}` pre-init was redundant.
  mergeHooksConfig(existing, { hooks: newHooks });
  fs.writeFileSync(settingsPath, JSON.stringify(existing, null, 2), "utf8");

  return count;
}
|
|
3511
|
+
|
|
3512
|
+
// Per-entry hook merge: splice Mover's hook entries into an existing hooks
// config in place (mutates `existing`; returns undefined).
//
// v4.7.5 strategy was: skip if an entry with same matcher + same exact commands
// exists. That prevented duplicates on re-run BUT also meant existing v4.7.5
// installs with broken commands (e.g. `session-start.sh full` instead of the
// v4.7.6 `session-start.sh resume`) never got upgraded — the merge saw a Mover
// entry and skipped, leaving the old broken hook in place.
//
// v4.7.6 strategy: identify Mover-owned commands (they reference
// "mover-hook-adapter.js" or a "src/hooks/" path). Within the entry that
// shares the new entry's matcher, Mover-owned commands are replaced by the
// fresh versions while user-owned commands are left untouched. Entries at a
// matcher we don't already have are appended wholesale.
function mergeHooksConfig(existing, newConfig) {
  if (!existing.hooks) existing.hooks = {};

  // A command is Mover-owned when it points at our adapter or a bundled
  // src/hooks/ script. Review fix: hoisted out of the loops — the predicate
  // was re-created on every inner iteration despite being invariant.
  const isMoverCmd = (c) =>
    typeof c === "string" &&
    (c.includes("mover-hook-adapter.js") || c.includes("src/hooks/"));

  for (const [event, newEntries] of Object.entries(newConfig.hooks || {})) {
    // Review fix: also replace a truthy non-array value at this event
    // (corrupted or hand-edited config) instead of crashing in findIndex
    // below; a missing/falsy event behaves exactly as before.
    if (!Array.isArray(existing.hooks[event])) {
      existing.hooks[event] = newEntries;
      continue;
    }
    for (const newEntry of newEntries) {
      const matcher = newEntry.matcher; // may be undefined
      const matcherIdx = existing.hooks[event].findIndex(
        (e) => (e.matcher || "") === (matcher || "")
      );
      if (matcherIdx < 0) {
        // No entry at this matcher — append cleanly.
        existing.hooks[event].push(newEntry);
        continue;
      }
      const matcherEntry = existing.hooks[event][matcherIdx];
      if (!Array.isArray(matcherEntry.hooks)) matcherEntry.hooks = [];
      // Drop existing Mover-owned hooks (the new versions are re-added
      // below); keep every user-owned hook untouched.
      matcherEntry.hooks = matcherEntry.hooks.filter(
        (h) => !isMoverCmd(h && h.command)
      );
      // Append the fresh Mover hooks for this matcher.
      for (const h of newEntry.hooks || []) {
        if (isMoverCmd(h && h.command)) matcherEntry.hooks.push(h);
      }
    }
  }
}
|
|
3562
|
+
|
|
2972
3563
|
// ─── Per-agent install orchestrators ────────────────────────────────────────
|
|
2973
3564
|
function installClaudeCode(bundleDir, vaultPath, skillOpts) {
|
|
2974
3565
|
const home = os.homedir();
|
|
@@ -3122,6 +3713,12 @@ function installCodex(bundleDir, vaultPath, skillOpts) {
|
|
|
3122
3713
|
if (skCount > 0) steps.push(`${skCount} skills`);
|
|
3123
3714
|
}
|
|
3124
3715
|
|
|
3716
|
+
// v4.7.5: Native Codex hook support via mover-hook-adapter.js
|
|
3717
|
+
if (!skillOpts?.skipHooks) {
|
|
3718
|
+
const hkCount = installHooksForCodex(bundleDir);
|
|
3719
|
+
if (hkCount > 0) steps.push(`${hkCount} hooks`);
|
|
3720
|
+
}
|
|
3721
|
+
|
|
3125
3722
|
return steps;
|
|
3126
3723
|
}
|
|
3127
3724
|
|
|
@@ -3149,9 +3746,40 @@ function installWindsurf(bundleDir, vaultPath, skillOpts) {
|
|
|
3149
3746
|
}
|
|
3150
3747
|
|
|
3151
3748
|
if (skillOpts && skillOpts.install) {
|
|
3152
|
-
|
|
3749
|
+
// v4.7.6: standardize on the registry-canonical Windsurf path
|
|
3750
|
+
// (~/.codeium/windsurf/skills). v4.7.5 wrote to ~/.windsurf/skills which
|
|
3751
|
+
// conflicted with the registry entry and meant skills were duplicated
|
|
3752
|
+
// across both paths on machines that had been installed pre-v4.7.5.
|
|
3753
|
+
const skillsDir = AGENT_REGISTRY.windsurf.skills.dest();
|
|
3754
|
+
const legacySkillsDir = path.join(home, ".windsurf", "skills");
|
|
3153
3755
|
const skCount = installSkillPacks(bundleDir, skillsDir, skillOpts.categories);
|
|
3154
3756
|
if (skCount > 0) steps.push(`${skCount} skills`);
|
|
3757
|
+
|
|
3758
|
+
// Migrate from legacy ~/.windsurf/skills.
|
|
3759
|
+
// v4.7.6 (post-audit): only delete legacy entries that have a
|
|
3760
|
+
// .mover-installed stamp file (proving Mover OS owns them). Same-name
|
|
3761
|
+
// user-created skills in the legacy path that lack the stamp are
|
|
3762
|
+
// preserved. The prior "same-name = delete" logic risked nuking
|
|
3763
|
+
// divergent user skills.
|
|
3764
|
+
if (fs.existsSync(legacySkillsDir) && legacySkillsDir !== skillsDir) {
|
|
3765
|
+
try {
|
|
3766
|
+
for (const entry of fs.readdirSync(legacySkillsDir)) {
|
|
3767
|
+
const legacyEntry = path.join(legacySkillsDir, entry);
|
|
3768
|
+
const canonicalEntry = path.join(skillsDir, entry);
|
|
3769
|
+
if (!fs.statSync(legacyEntry).isDirectory()) continue;
|
|
3770
|
+
if (!fs.existsSync(canonicalEntry)) continue;
|
|
3771
|
+
// Require a .mover-installed stamp at the legacy path for delete.
|
|
3772
|
+
const legacyStamp = path.join(legacyEntry, ".mover-installed");
|
|
3773
|
+
if (fs.existsSync(legacyStamp)) {
|
|
3774
|
+
fs.rmSync(legacyEntry, { recursive: true, force: true });
|
|
3775
|
+
}
|
|
3776
|
+
}
|
|
3777
|
+
// Remove empty legacy parent dir.
|
|
3778
|
+
try {
|
|
3779
|
+
if (fs.readdirSync(legacySkillsDir).length === 0) fs.rmdirSync(legacySkillsDir);
|
|
3780
|
+
} catch {}
|
|
3781
|
+
} catch {}
|
|
3782
|
+
}
|
|
3155
3783
|
}
|
|
3156
3784
|
|
|
3157
3785
|
return steps;
|
|
@@ -3179,8 +3807,46 @@ function installGeminiCli(bundleDir, vaultPath, skillOpts, writtenFiles) {
|
|
|
3179
3807
|
if (wfCount > 0) steps.push(`${wfCount} commands`);
|
|
3180
3808
|
|
|
3181
3809
|
if (skillOpts && skillOpts.install) {
|
|
3182
|
-
const
|
|
3810
|
+
const geminiSkillsDir = path.join(geminiDir, "skills");
|
|
3811
|
+
const skCount = installSkillPacks(bundleDir, geminiSkillsDir, skillOpts.categories);
|
|
3183
3812
|
if (skCount > 0) steps.push(`${skCount} skills`);
|
|
3813
|
+
|
|
3814
|
+
// v4.7.6: clean up duplicate Mover skills in ~/.agents/skills/.
|
|
3815
|
+
//
|
|
3816
|
+
// Gemini CLI scans both ~/.gemini/skills/ (canonical) AND
|
|
3817
|
+
// ~/.agents/skills/ (cross-agent shared pool) at session start. If a Mover
|
|
3818
|
+
// install has populated both, Gemini emits a wall of "Skill conflict
|
|
3819
|
+
// detected: ... is overriding ..." warnings. Worse, every shipped skill
|
|
3820
|
+
// takes a budget slot twice in the agent's effective skill manifest.
|
|
3821
|
+
//
|
|
3822
|
+
// Fix: after writing to the canonical path, scan ~/.agents/skills/ for
|
|
3823
|
+
// entries that we just installed (same name as a directory we own at the
|
|
3824
|
+
// canonical path) and remove only those. User skills in ~/.agents/skills/
|
|
3825
|
+
// (Amp/etc.) stay untouched.
|
|
3826
|
+
// v4.7.6 (post-audit): require a `.mover-installed` stamp on the shared
|
|
3827
|
+
// entry before deleting. Prior "same-name = delete" risked nuking
|
|
3828
|
+
// user-created divergent skills with the same folder name.
|
|
3829
|
+
const sharedSkillsDir = path.join(home, ".agents", "skills");
|
|
3830
|
+
if (fs.existsSync(sharedSkillsDir) && sharedSkillsDir !== geminiSkillsDir) {
|
|
3831
|
+
try {
|
|
3832
|
+
for (const entry of fs.readdirSync(sharedSkillsDir)) {
|
|
3833
|
+
const sharedEntry = path.join(sharedSkillsDir, entry);
|
|
3834
|
+
const canonicalEntry = path.join(geminiSkillsDir, entry);
|
|
3835
|
+
if (!fs.statSync(sharedEntry).isDirectory()) continue;
|
|
3836
|
+
if (!fs.existsSync(canonicalEntry)) continue;
|
|
3837
|
+
const sharedStamp = path.join(sharedEntry, ".mover-installed");
|
|
3838
|
+
if (fs.existsSync(sharedStamp)) {
|
|
3839
|
+
fs.rmSync(sharedEntry, { recursive: true, force: true });
|
|
3840
|
+
}
|
|
3841
|
+
}
|
|
3842
|
+
} catch {}
|
|
3843
|
+
}
|
|
3844
|
+
}
|
|
3845
|
+
|
|
3846
|
+
// v4.7.5: Native Gemini hook support via mover-hook-adapter.js
|
|
3847
|
+
if (!skillOpts?.skipHooks) {
|
|
3848
|
+
const hkCount = installHooksForGemini(bundleDir);
|
|
3849
|
+
if (hkCount > 0) steps.push(`${hkCount} hooks`);
|
|
3184
3850
|
}
|
|
3185
3851
|
|
|
3186
3852
|
return steps;
|