@thecat69/cache-ctrl 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +558 -0
- package/cache_ctrl.ts +153 -0
- package/package.json +35 -0
- package/skills/cache-ctrl-caller/SKILL.md +154 -0
- package/skills/cache-ctrl-external/SKILL.md +130 -0
- package/skills/cache-ctrl-local/SKILL.md +213 -0
- package/src/cache/cacheManager.ts +241 -0
- package/src/cache/externalCache.ts +127 -0
- package/src/cache/localCache.ts +9 -0
- package/src/commands/checkFiles.ts +83 -0
- package/src/commands/checkFreshness.ts +123 -0
- package/src/commands/flush.ts +55 -0
- package/src/commands/inspect.ts +184 -0
- package/src/commands/install.ts +13 -0
- package/src/commands/invalidate.ts +53 -0
- package/src/commands/list.ts +83 -0
- package/src/commands/prune.ts +110 -0
- package/src/commands/search.ts +57 -0
- package/src/commands/touch.ts +47 -0
- package/src/commands/write.ts +170 -0
- package/src/files/changeDetector.ts +122 -0
- package/src/files/gitFiles.ts +41 -0
- package/src/files/openCodeInstaller.ts +66 -0
- package/src/http/freshnessChecker.ts +116 -0
- package/src/index.ts +557 -0
- package/src/search/keywordSearch.ts +59 -0
- package/src/types/cache.ts +91 -0
- package/src/types/commands.ts +192 -0
- package/src/types/result.ts +36 -0
- package/src/utils/fileStem.ts +7 -0
- package/src/utils/validate.ts +50 -0
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import { findRepoRoot, listCacheFiles, writeCache } from "../cache/cacheManager.js";
|
|
2
|
+
import { resolveTopExternalMatch } from "../cache/externalCache.js";
|
|
3
|
+
import { resolveLocalCachePath } from "../cache/localCache.js";
|
|
4
|
+
import { ErrorCode, type Result } from "../types/result.js";
|
|
5
|
+
import type { TouchArgs, TouchResult } from "../types/commands.js";
|
|
6
|
+
import { validateSubject } from "../utils/validate.js";
|
|
7
|
+
|
|
8
|
+
export async function touchCommand(args: TouchArgs): Promise<Result<TouchResult["value"]>> {
|
|
9
|
+
try {
|
|
10
|
+
const repoRoot = await findRepoRoot(process.cwd());
|
|
11
|
+
const newTimestamp = new Date().toISOString();
|
|
12
|
+
const touched: string[] = [];
|
|
13
|
+
|
|
14
|
+
if (args.agent === "external") {
|
|
15
|
+
let filesToTouch: string[];
|
|
16
|
+
|
|
17
|
+
if (args.subject) {
|
|
18
|
+
const subjectCheck = validateSubject(args.subject);
|
|
19
|
+
if (!subjectCheck.ok) return subjectCheck;
|
|
20
|
+
const matchResult = await resolveTopExternalMatch(repoRoot, args.subject);
|
|
21
|
+
if (!matchResult.ok) return matchResult;
|
|
22
|
+
filesToTouch = [matchResult.value];
|
|
23
|
+
} else {
|
|
24
|
+
const filesResult = await listCacheFiles("external", repoRoot);
|
|
25
|
+
if (!filesResult.ok) return filesResult;
|
|
26
|
+
filesToTouch = filesResult.value;
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
for (const filePath of filesToTouch) {
|
|
30
|
+
const writeResult = await writeCache(filePath, { fetched_at: newTimestamp });
|
|
31
|
+
if (!writeResult.ok) return writeResult;
|
|
32
|
+
touched.push(filePath);
|
|
33
|
+
}
|
|
34
|
+
} else {
|
|
35
|
+
// local
|
|
36
|
+
const localPath = resolveLocalCachePath(repoRoot);
|
|
37
|
+
const writeResult = await writeCache(localPath, { timestamp: newTimestamp });
|
|
38
|
+
if (!writeResult.ok) return writeResult;
|
|
39
|
+
touched.push(localPath);
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
return { ok: true, value: { touched, new_timestamp: newTimestamp } };
|
|
43
|
+
} catch (err) {
|
|
44
|
+
const error = err as Error;
|
|
45
|
+
return { ok: false, error: error.message, code: ErrorCode.UNKNOWN };
|
|
46
|
+
}
|
|
47
|
+
}
|
|
@@ -0,0 +1,170 @@
|
|
|
1
|
+
import { join } from "node:path";
|
|
2
|
+
import { ExternalCacheFileSchema, LocalCacheFileSchema } from "../types/cache.js";
|
|
3
|
+
import { ErrorCode, type Result } from "../types/result.js";
|
|
4
|
+
import type { WriteArgs, WriteResult } from "../types/commands.js";
|
|
5
|
+
import { writeCache, findRepoRoot, resolveCacheDir, readCache } from "../cache/cacheManager.js";
|
|
6
|
+
import { validateSubject, formatZodError } from "../utils/validate.js";
|
|
7
|
+
import { resolveTrackedFileStats, filterExistingFiles } from "../files/changeDetector.js";
|
|
8
|
+
import type { TrackedFile } from "../types/cache.js";
|
|
9
|
+
import { TrackedFileSchema } from "../types/cache.js";
|
|
10
|
+
|
|
11
|
+
function evictFactsForDeletedPaths(
|
|
12
|
+
facts: Record<string, string[]>,
|
|
13
|
+
survivingFiles: TrackedFile[],
|
|
14
|
+
): Record<string, string[]> {
|
|
15
|
+
const survivingPaths = new Set(survivingFiles.map((f) => f.path));
|
|
16
|
+
return Object.fromEntries(Object.entries(facts).filter(([path]) => survivingPaths.has(path)));
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
/**
 * Writes cache content for either agent.
 *
 * External agent: requires and validates `args.subject`, rejects a mismatching
 * `content.subject`, validates the merged content against
 * ExternalCacheFileSchema, and writes `<externalCacheDir>/<subject>.json`.
 *
 * Local agent: injects a server-side timestamp (the caller must not control
 * it), resolves real mtimes/hashes for submitted tracked_files, enforces that
 * submitted facts keys are a subset of submitted tracked_files paths, then
 * performs a per-path merge against the existing context.json (submitted
 * entries win, deleted files are evicted), validates the merged result
 * against LocalCacheFileSchema, and replaces the file.
 *
 * @param args agent selector, optional subject, and the content to write
 * @returns ok with the written file path, or an error Result; never throws —
 *          unexpected errors become ErrorCode.UNKNOWN results
 */
export async function writeCommand(args: WriteArgs): Promise<Result<WriteResult["value"]>> {
  try {
    const repoRoot = await findRepoRoot(process.cwd());

    if (args.agent === "external") {
      // subject is required
      if (!args.subject) {
        return { ok: false, error: "subject is required for external agent", code: ErrorCode.INVALID_ARGS };
      }

      const subjectValidation = validateSubject(args.subject);
      if (!subjectValidation.ok) return subjectValidation;

      // if content.subject is set but mismatches the subject param → error
      if (args.content["subject"] !== undefined && args.content["subject"] !== args.subject) {
        return {
          ok: false,
          error: `content.subject "${String(args.content["subject"])}" does not match subject argument "${args.subject}"`,
          code: ErrorCode.VALIDATION_ERROR,
        };
      }

      // inject subject into content if absent
      const contentWithSubject = { ...args.content, subject: args.subject };

      // validate against ExternalCacheFileSchema
      const parsed = ExternalCacheFileSchema.safeParse(contentWithSubject);
      if (!parsed.success) {
        const message = formatZodError(parsed.error);
        return { ok: false, error: `Validation failed: ${message}`, code: ErrorCode.VALIDATION_ERROR };
      }

      const cacheDir = resolveCacheDir("external", repoRoot);
      const filePath = join(cacheDir, `${args.subject}.json`);
      const writeResult = await writeCache(filePath, contentWithSubject);
      if (!writeResult.ok) return writeResult;
      return { ok: true, value: { file: filePath } };
    }

    // local — auto-inject server-side timestamp; agent must not control this field
    const contentWithTimestamp: Record<string, unknown> = {
      ...args.content,
      timestamp: new Date().toISOString(),
    };

    // Resolve real mtimes for submitted tracked_files if present
    const rawTrackedFiles = contentWithTimestamp["tracked_files"];
    let survivingSubmitted: TrackedFile[] = [];
    let guardedPaths = new Set<string>();

    if (Array.isArray(rawTrackedFiles)) {
      // Keep only well-formed { path: string } entries; everything else is silently dropped
      const validEntries = rawTrackedFiles
        .filter(
          (entry): entry is { path: string } =>
            entry !== null &&
            typeof entry === "object" &&
            typeof (entry as Record<string, unknown>)["path"] === "string",
        )
        .map((entry) => ({ path: entry.path }));

      guardedPaths = new Set(validEntries.map((e) => e.path));

      const resolved = await resolveTrackedFileStats(validEntries, repoRoot);
      // Evict submitted entries for files that are missing or path-traversal-rejected
      // (resolveTrackedFileStats signals those with the mtime: 0 sentinel)
      survivingSubmitted = resolved.filter((f) => f.mtime !== 0);
    }

    // Guard: submitted facts paths must be a strict subset of submitted tracked_files paths
    const rawSubmittedFacts = contentWithTimestamp["facts"];
    if (
      rawSubmittedFacts !== null &&
      rawSubmittedFacts !== undefined &&
      typeof rawSubmittedFacts === "object" &&
      !Array.isArray(rawSubmittedFacts)
    ) {
      const violatingPaths = Object.keys(rawSubmittedFacts as Record<string, string[]>).filter(
        (p) => !guardedPaths.has(p),
      );
      if (violatingPaths.length > 0) {
        return {
          ok: false,
          error: `facts contains paths not in submitted tracked_files: ${violatingPaths.join(", ")}`,
          code: ErrorCode.VALIDATION_ERROR,
        };
      }
    }

    // Read existing cache to perform per-path merge
    const localCacheDir = resolveCacheDir("local", repoRoot);
    const filePath = join(localCacheDir, "context.json");

    const readResult = await readCache(filePath);
    let existingContent: Record<string, unknown> = {};
    let existingTrackedFiles: TrackedFile[] = [];

    if (readResult.ok) {
      existingContent = readResult.value;
      // Validate the on-disk tracked_files against the schema — fall back to [] on corrupt/missing data
      const parseResult = TrackedFileSchema.array().safeParse(existingContent["tracked_files"]);
      existingTrackedFiles = parseResult.success ? parseResult.data : [];
    } else if (readResult.code !== ErrorCode.FILE_NOT_FOUND) {
      // FILE_NOT_FOUND is the expected first-write case; any other read error aborts
      return { ok: false, error: readResult.error, code: readResult.code };
    }

    // Keep existing entries whose paths are NOT being replaced by the submitted set
    const submittedPaths = new Set(survivingSubmitted.map((f) => f.path));
    const existingNotSubmitted = existingTrackedFiles.filter((f) => !submittedPaths.has(f.path));

    // Evict deleted files from the preserved existing entries
    const survivingExisting = await filterExistingFiles(existingNotSubmitted, repoRoot);

    const mergedTrackedFiles = [...survivingExisting, ...survivingSubmitted];

    // Per-path merge for facts (mirrors tracked_files merge)
    const existingFactsRaw = existingContent["facts"];
    const submittedFactsRaw = contentWithTimestamp["facts"];

    const existingFacts =
      typeof existingFactsRaw === "object" && existingFactsRaw !== null && !Array.isArray(existingFactsRaw)
        ? (existingFactsRaw as Record<string, string[]>)
        : {};
    const submittedFacts =
      typeof submittedFactsRaw === "object" && submittedFactsRaw !== null && !Array.isArray(submittedFactsRaw)
        ? (submittedFactsRaw as Record<string, string[]>)
        : {};

    // Spread order makes submitted facts win per path; then drop facts for evicted files
    const rawMergedFacts = { ...existingFacts, ...submittedFacts };
    const mergedFacts = evictFactsForDeletedPaths(rawMergedFacts, mergedTrackedFiles);

    // Merge top-level fields: existing base → then submitted content (submitted wins)
    const processedContent: Record<string, unknown> = {
      ...existingContent,
      ...contentWithTimestamp,
      tracked_files: mergedTrackedFiles,
      facts: mergedFacts,
    };

    const parsed = LocalCacheFileSchema.safeParse(processedContent);
    if (!parsed.success) {
      const message = formatZodError(parsed.error);
      return { ok: false, error: `Validation failed: ${message}`, code: ErrorCode.VALIDATION_ERROR };
    }

    // processedContent is used (not parsed.data) to preserve loose fields not known to the schema — intentional merge semantics
    const writeResult = await writeCache(filePath, processedContent, "replace");
    if (!writeResult.ok) return writeResult;
    return { ok: true, value: { file: filePath } };
  } catch (err) {
    const error = err as Error;
    return { ok: false, error: error.message, code: ErrorCode.UNKNOWN };
  }
}
|
|
@@ -0,0 +1,122 @@
|
|
|
1
|
+
import { createHash } from "node:crypto";
import { lstat, readFile } from "node:fs/promises";
import { isAbsolute, relative, resolve, sep } from "node:path";
import type { TrackedFile } from "../types/cache.js";
|
|
5
|
+
|
|
6
|
+
export interface FileComparisonResult {
|
|
7
|
+
path: string;
|
|
8
|
+
status: "changed" | "unchanged" | "missing";
|
|
9
|
+
reason?: "mtime" | "hash" | "missing";
|
|
10
|
+
}
|
|
11
|
+
|
|
12
|
+
export async function compareTrackedFile(file: TrackedFile, repoRoot: string): Promise<FileComparisonResult> {
|
|
13
|
+
const absolutePath = resolveTrackedFilePath(file.path, repoRoot);
|
|
14
|
+
|
|
15
|
+
if (absolutePath === null) {
|
|
16
|
+
// Path traversal attempt — treat as missing
|
|
17
|
+
return { path: file.path, status: "missing", reason: "missing" };
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
try {
|
|
21
|
+
// lstat: mtime reflects the symlink node, not the target; hash check covers content drift when hash is stored
|
|
22
|
+
const fileStat = await lstat(absolutePath);
|
|
23
|
+
const currentMtime = fileStat.mtimeMs;
|
|
24
|
+
|
|
25
|
+
if (currentMtime === file.mtime) {
|
|
26
|
+
return { path: file.path, status: "unchanged" };
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
// mtime differs
|
|
30
|
+
if (file.hash) {
|
|
31
|
+
const currentHash = await computeFileHash(absolutePath);
|
|
32
|
+
if (currentHash === file.hash) {
|
|
33
|
+
// Hash matches despite mtime change — just a touch
|
|
34
|
+
return { path: file.path, status: "unchanged" };
|
|
35
|
+
}
|
|
36
|
+
return { path: file.path, status: "changed", reason: "hash" };
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
// No hash stored — mtime change alone is sufficient
|
|
40
|
+
return { path: file.path, status: "changed", reason: "mtime" };
|
|
41
|
+
} catch (err) {
|
|
42
|
+
if (err instanceof Error && "code" in err && (err as NodeJS.ErrnoException).code === "ENOENT") {
|
|
43
|
+
return { path: file.path, status: "missing", reason: "missing" };
|
|
44
|
+
}
|
|
45
|
+
// Re-throw unexpected errors
|
|
46
|
+
throw err;
|
|
47
|
+
}
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
export async function computeFileHash(filePath: string): Promise<string> {
|
|
51
|
+
const content = await readFile(filePath);
|
|
52
|
+
return createHash("sha256").update(content).digest("hex");
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
/**
|
|
56
|
+
* Resolves a tracked file path against the repo root.
|
|
57
|
+
* Returns null if the resolved path escapes the repo root (path traversal guard).
|
|
58
|
+
*/
|
|
59
|
+
export function resolveTrackedFilePath(inputPath: string, repoRoot: string): string | null {
|
|
60
|
+
const resolved = isAbsolute(inputPath) ? resolve(inputPath) : resolve(repoRoot, inputPath);
|
|
61
|
+
// Normalize root to ensure trailing slash for prefix matching
|
|
62
|
+
const normalizedRoot = repoRoot.endsWith("/") ? repoRoot : repoRoot + "/";
|
|
63
|
+
if (!resolved.startsWith(normalizedRoot) && resolved !== repoRoot) {
|
|
64
|
+
return null; // path traversal rejected
|
|
65
|
+
}
|
|
66
|
+
return resolved;
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
/**
|
|
70
|
+
* Checks existence of already-tracked files via lstat(). Used during write to evict stale entries
|
|
71
|
+
* (deleted files). Does NOT recompute mtime or hash — only confirms the file is still present on disk.
|
|
72
|
+
*/
|
|
73
|
+
export async function filterExistingFiles(files: TrackedFile[], repoRoot: string): Promise<TrackedFile[]> {
|
|
74
|
+
const results = await Promise.all(
|
|
75
|
+
files.map(async (file): Promise<TrackedFile | null> => {
|
|
76
|
+
const absolutePath = resolveTrackedFilePath(file.path, repoRoot);
|
|
77
|
+
if (absolutePath === null) {
|
|
78
|
+
// Path traversal rejected — evict
|
|
79
|
+
return null;
|
|
80
|
+
}
|
|
81
|
+
try {
|
|
82
|
+
await lstat(absolutePath);
|
|
83
|
+
return file;
|
|
84
|
+
} catch (err) {
|
|
85
|
+
if (err instanceof Error && "code" in err && (err as NodeJS.ErrnoException).code === "ENOENT") {
|
|
86
|
+
return null;
|
|
87
|
+
}
|
|
88
|
+
throw err;
|
|
89
|
+
}
|
|
90
|
+
}),
|
|
91
|
+
);
|
|
92
|
+
return results.filter((entry): entry is TrackedFile => entry !== null);
|
|
93
|
+
}
|
|
94
|
+
|
|
95
|
+
/**
|
|
96
|
+
* Resolves filesystem stats (mtime and hash) for a list of path-only tracked file entries.
|
|
97
|
+
* For each entry: if the path is valid and the file exists, computes mtime via lstat().mtimeMs
|
|
98
|
+
* and hash via SHA-256 in parallel, returning { path, mtime, hash }.
|
|
99
|
+
* Falls back to { path, mtime: 0 } (no hash) on path traversal rejection or missing file.
|
|
100
|
+
* Never throws — always returns gracefully.
|
|
101
|
+
*/
|
|
102
|
+
export async function resolveTrackedFileStats(
|
|
103
|
+
files: Array<{ path: string }>,
|
|
104
|
+
repoRoot: string,
|
|
105
|
+
): Promise<TrackedFile[]> {
|
|
106
|
+
return Promise.all(
|
|
107
|
+
files.map(async (file) => {
|
|
108
|
+
const absolutePath = resolveTrackedFilePath(file.path, repoRoot);
|
|
109
|
+
if (absolutePath === null) {
|
|
110
|
+
return { path: file.path, mtime: 0 };
|
|
111
|
+
}
|
|
112
|
+
try {
|
|
113
|
+
// lstat: mtime reflects the symlink node; hash is computed from the target content via readFile
|
|
114
|
+
const [fileStat, hash] = await Promise.all([lstat(absolutePath), computeFileHash(absolutePath)]);
|
|
115
|
+
return { path: file.path, mtime: fileStat.mtimeMs, hash };
|
|
116
|
+
} catch {
|
|
117
|
+
// Always return gracefully per the "never throws" contract — do not propagate filesystem errors
|
|
118
|
+
return { path: file.path, mtime: 0 };
|
|
119
|
+
}
|
|
120
|
+
}),
|
|
121
|
+
);
|
|
122
|
+
}
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
import { execFile } from "node:child_process";
|
|
2
|
+
import { promisify } from "node:util";
|
|
3
|
+
|
|
4
|
+
const execFileAsync = promisify(execFile);
|
|
5
|
+
|
|
6
|
+
function parseGitOutput(stdout: string): string[] {
|
|
7
|
+
return stdout
|
|
8
|
+
.split("\n")
|
|
9
|
+
.map((l) => l.trim())
|
|
10
|
+
.filter((l) => l.length > 0);
|
|
11
|
+
}
|
|
12
|
+
|
|
13
|
+
export async function getGitTrackedFiles(repoRoot: string): Promise<string[]> {
|
|
14
|
+
try {
|
|
15
|
+
const result = await execFileAsync("git", ["ls-files"], { cwd: repoRoot, maxBuffer: 10 * 1024 * 1024 });
|
|
16
|
+
return parseGitOutput(result.stdout);
|
|
17
|
+
} catch {
|
|
18
|
+
return [];
|
|
19
|
+
}
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
export async function getGitDeletedFiles(repoRoot: string): Promise<string[]> {
|
|
23
|
+
try {
|
|
24
|
+
const result = await execFileAsync("git", ["ls-files", "--deleted"], { cwd: repoRoot, maxBuffer: 10 * 1024 * 1024 });
|
|
25
|
+
return parseGitOutput(result.stdout);
|
|
26
|
+
} catch {
|
|
27
|
+
return [];
|
|
28
|
+
}
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
export async function getUntrackedNonIgnoredFiles(repoRoot: string): Promise<string[]> {
|
|
32
|
+
try {
|
|
33
|
+
const result = await execFileAsync("git", ["ls-files", "--others", "--exclude-standard"], {
|
|
34
|
+
cwd: repoRoot,
|
|
35
|
+
maxBuffer: 10 * 1024 * 1024,
|
|
36
|
+
});
|
|
37
|
+
return parseGitOutput(result.stdout).filter((p) => !p.endsWith("/"));
|
|
38
|
+
} catch {
|
|
39
|
+
return [];
|
|
40
|
+
}
|
|
41
|
+
}
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
import { copyFile, mkdir, writeFile } from "node:fs/promises";
|
|
2
|
+
import os from "node:os";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
|
|
5
|
+
import type { InstallResult } from "../types/commands.js";
|
|
6
|
+
import { ErrorCode, type Result } from "../types/result.js";
|
|
7
|
+
|
|
8
|
+
const SKILL_NAMES = ["cache-ctrl-external", "cache-ctrl-local", "cache-ctrl-caller"] as const;
|
|
9
|
+
|
|
10
|
+
export function resolveOpenCodeConfigDir(overrideDir?: string): string {
|
|
11
|
+
if (overrideDir !== undefined) {
|
|
12
|
+
return overrideDir;
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
if (process.platform === "win32") {
|
|
16
|
+
const appData = process.env.APPDATA ?? path.join(os.homedir(), "AppData", "Roaming");
|
|
17
|
+
return path.join(appData, "opencode");
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
const xdgConfigHome = process.env.XDG_CONFIG_HOME ?? path.join(os.homedir(), ".config");
|
|
21
|
+
return path.join(xdgConfigHome, "opencode");
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
export function buildToolWrapperContent(packageRoot: string): string {
|
|
25
|
+
const normalizedPackageRoot = packageRoot.replace(/\\/g, "/");
|
|
26
|
+
|
|
27
|
+
return [
|
|
28
|
+
"// Auto-generated by cache-ctrl install — do not edit manually",
|
|
29
|
+
"// Re-run 'cache-ctrl install' after 'npm update -g @thecat69/cache-ctrl'",
|
|
30
|
+
`export * from \"${normalizedPackageRoot}/cache_ctrl.ts\";`,
|
|
31
|
+
].join("\n");
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
export async function installOpenCodeIntegration(configDir: string, packageRoot: string): Promise<Result<InstallResult>> {
|
|
35
|
+
try {
|
|
36
|
+
const toolDir = path.join(configDir, "tools");
|
|
37
|
+
const toolPath = path.join(toolDir, "cache_ctrl.ts");
|
|
38
|
+
|
|
39
|
+
await mkdir(toolDir, { recursive: true, mode: 0o755 });
|
|
40
|
+
await writeFile(toolPath, buildToolWrapperContent(packageRoot), { encoding: "utf-8", mode: 0o644 });
|
|
41
|
+
|
|
42
|
+
const skillPaths: string[] = [];
|
|
43
|
+
|
|
44
|
+
for (const skillName of SKILL_NAMES) {
|
|
45
|
+
const targetSkillDir = path.join(configDir, "skills", skillName);
|
|
46
|
+
const sourceSkillPath = path.join(packageRoot, "skills", skillName, "SKILL.md");
|
|
47
|
+
const targetSkillPath = path.join(targetSkillDir, "SKILL.md");
|
|
48
|
+
|
|
49
|
+
await mkdir(targetSkillDir, { recursive: true, mode: 0o755 });
|
|
50
|
+
await copyFile(sourceSkillPath, targetSkillPath);
|
|
51
|
+
skillPaths.push(targetSkillPath);
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
return {
|
|
55
|
+
ok: true,
|
|
56
|
+
value: {
|
|
57
|
+
toolPath,
|
|
58
|
+
skillPaths,
|
|
59
|
+
configDir,
|
|
60
|
+
},
|
|
61
|
+
};
|
|
62
|
+
} catch (err) {
|
|
63
|
+
const message = err instanceof Error ? err.message : String(err);
|
|
64
|
+
return { ok: false, error: message, code: ErrorCode.FILE_WRITE_ERROR };
|
|
65
|
+
}
|
|
66
|
+
}
|
|
@@ -0,0 +1,116 @@
|
|
|
1
|
+
/** Input for a single conditional-request freshness probe. */
export interface FreshnessCheckInput {
  /** Target URL — must pass isAllowedUrl (http/https, no private/loopback hosts). */
  url: string;
  /** Cached ETag; sent as If-None-Match when present. */
  etag?: string;
  /** Cached Last-Modified value; sent as If-Modified-Since when present. */
  last_modified?: string;
}

/** Outcome of a freshness probe performed by checkFreshness. */
export interface FreshnessCheckOutput {
  /** Echo of the probed URL. */
  url: string;
  /** "fresh" on HTTP 304, "stale" on HTTP 200, "error" for anything else (blocked URL, 4xx/5xx, network failure). */
  status: "fresh" | "stale" | "error";
  /** HTTP status code when a response was received. */
  http_status?: number;
  /** New ETag returned by the server on a 200 response, if any. */
  etag?: string;
  /** New Last-Modified returned by the server on a 200 response, if any. */
  last_modified?: string;
  /** Human-readable failure description when status is "error". */
  error?: string;
}
|
|
15
|
+
|
|
16
|
+
/**
|
|
17
|
+
* RFC-1918 / loopback / link-local / ULA / mapped-IPv6 IP pattern.
|
|
18
|
+
* Blocks raw IP literals only — does NOT do DNS resolution.
|
|
19
|
+
*
|
|
20
|
+
* Covers:
|
|
21
|
+
* - 127.x loopback IPv4
|
|
22
|
+
* - ::1 loopback IPv6 (URL.hostname returns "[::1]")
|
|
23
|
+
* - localhost loopback hostname
|
|
24
|
+
* - 10.x RFC-1918 class A
|
|
25
|
+
* - 169.254.x link-local IPv4
|
|
26
|
+
* - 172.16–31.x RFC-1918 class B
|
|
27
|
+
* - 192.168.x RFC-1918 class C
|
|
28
|
+
* - 0.0.0.0 unspecified IPv4
|
|
29
|
+
* - fc00::/7 RFC-4193 unique-local IPv6 (ULA — fc or fd prefix)
|
|
30
|
+
* - ::ffff: IPv4-mapped IPv6
|
|
31
|
+
*/
|
|
32
|
+
const PRIVATE_IP_PATTERN =
|
|
33
|
+
/^(127\.|localhost$|10\.|169\.254\.|172\.(1[6-9]|2\d|3[01])\.|192\.168\.|0\.0\.0\.0$|\[::1\]$|::1$|::ffff:|f[cd][0-9a-f]{0,2}:|\[f[cd][0-9a-f]{0,2}:)/i;
|
|
34
|
+
|
|
35
|
+
export function isAllowedUrl(url: string): { allowed: boolean; reason?: string } {
|
|
36
|
+
try {
|
|
37
|
+
const parsed = new URL(url);
|
|
38
|
+
if (parsed.protocol !== "http:" && parsed.protocol !== "https:") {
|
|
39
|
+
return { allowed: false, reason: `Disallowed URL scheme — only http and https are permitted: ${url}` };
|
|
40
|
+
}
|
|
41
|
+
if (PRIVATE_IP_PATTERN.test(parsed.hostname)) {
|
|
42
|
+
return { allowed: false, reason: `Requests to private/loopback addresses are not permitted: ${url}` };
|
|
43
|
+
}
|
|
44
|
+
return { allowed: true };
|
|
45
|
+
} catch {
|
|
46
|
+
return { allowed: false, reason: `Invalid URL: ${url}` };
|
|
47
|
+
}
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
/**
 * Probes a URL with a conditional HEAD request to decide whether cached
 * metadata is still current.
 *
 * - Sends If-None-Match / If-Modified-Since when cached etag / last_modified
 *   values are provided.
 * - HTTP 304 → "fresh"; HTTP 200 → "stale" (returning any new validators the
 *   server sent); other statuses and network failures → "error".
 * - URLs are pre-screened by isAllowedUrl (scheme + private-address block);
 *   blocked URLs return an "error" result without any network access.
 * - The request is aborted after 10 seconds.
 *
 * NOTE(review): fetch follows redirects by default, so a public URL that
 * redirects to a private address would bypass the isAllowedUrl screen —
 * confirm whether `redirect: "manual"` should be used here.
 */
export async function checkFreshness(input: FreshnessCheckInput): Promise<FreshnessCheckOutput> {
  const allowCheck = isAllowedUrl(input.url);
  if (!allowCheck.allowed) {
    const reason = allowCheck.reason ?? `Freshness check blocked for URL: ${input.url}`;
    return {
      url: input.url,
      status: "error",
      error: reason,
    };
  }

  // Hard 10-second timeout — aborts the fetch below
  const controller = new AbortController();
  const timeoutId = setTimeout(() => controller.abort(), 10_000);

  try {
    const headers: Record<string, string> = {};
    if (input.etag) {
      headers["If-None-Match"] = input.etag;
    }
    if (input.last_modified) {
      headers["If-Modified-Since"] = input.last_modified;
    }

    const response = await fetch(input.url, {
      method: "HEAD",
      headers,
      signal: controller.signal,
    });

    if (response.status === 304) {
      // Validators matched — the cached copy is still current
      return {
        url: input.url,
        status: "fresh",
        http_status: 304,
      };
    }

    if (response.status === 200) {
      // Full response — treat as stale and surface any new validators for re-caching
      const etag = response.headers.get("etag") ?? undefined;
      const lastModified = response.headers.get("last-modified") ?? undefined;
      return {
        url: input.url,
        status: "stale",
        http_status: 200,
        ...(etag !== undefined ? { etag } : {}),
        ...(lastModified !== undefined ? { last_modified: lastModified } : {}),
      };
    }

    // 4xx/5xx
    return {
      url: input.url,
      status: "error",
      http_status: response.status,
      error: `HTTP ${response.status}: ${response.statusText}`,
    };
  } catch (err) {
    // Network failure or timeout abort
    const error = err as Error;
    return {
      url: input.url,
      status: "error",
      error: error.message,
    };
  } finally {
    clearTimeout(timeoutId);
  }
}
|