@thecat69/cache-ctrl 1.1.1 → 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -5,16 +5,9 @@ description: How to use cache-ctrl to detect file changes and manage the local c
5
5
 
6
6
  # cache-ctrl — Local Cache Usage
7
7
 
8
- Manage `.ai/local-context-gatherer_cache/context.json` to avoid redundant full-repo scans.
9
- Two tiers of access — use the best one available.
10
-
11
- > Availability Detection: see `cache-ctrl-caller`.
12
-
13
- ---
14
-
15
8
  ## Fact-Writing Rules
16
- Per-file `facts` entries are no longer flat string arrays. Each path now maps to a
17
- **`FileFacts` object**:
9
+
10
+ Per-file entries use the `FileFacts` object shape:
18
11
 
19
12
  ```json
20
13
  {
@@ -25,135 +18,93 @@ Per-file `facts` entries are no longer flat string arrays. Each path now maps to
25
18
  }
26
19
  ```
27
20
 
28
- Required and recommended fields:
29
-
30
- - **`summary` is mandatory** when writing a file entry. Keep it to one sentence.
31
- - **`role` is mandatory** when writing a file entry. Must be one of:
32
- - `entry-point`
33
- - `interface`
34
- - `implementation`
35
- - `test`
36
- - `config`
37
- - **`importance` is optional but strongly recommended**:
38
- - `1` = core module
39
- - `2` = supporting module
40
- - `3` = peripheral/config module
41
- - **`facts` is optional** and capped at **10 items**, each **≤ 300 chars**.
21
+ Fields:
22
+ - **`summary`** — mandatory. One sentence.
23
+ - **`role`** — mandatory. One of: `entry-point`, `interface`, `implementation`, `test`, `config`.
24
+ - **`importance`** — strongly recommended. `1` = core, `2` = supporting, `3` = peripheral.
25
+ - **`facts`** — optional. Max 10 items, each ≤ 300 chars.
42
26
 
43
27
  Content quality rules:
44
-
45
- - **Never write** raw import lines, function bodies, code snippets, or verbatim text from the file.
28
+ - **Never write** raw import lines, code snippets, or verbatim file content.
46
29
  - **Do write** concise architectural observations: purpose, key exports, constraints, dependencies, notable patterns.
30
+ - Write facts as **enumerable observations** — one entry per distinct property, up to the 10-item limit.
47
31
 
48
- **Good `facts[]` item** ✅:
32
+ Good example ✅:
49
33
  > `"Delegates local writes to writeLocalCommand and preserves unrelated paths through per-path merge"`
50
34
 
51
- **Bad `facts[]` item** ❌:
52
- > `"import { ExternalCacheFileSchema, LocalCacheFileSchema } from '../types/cache.js'; import { ErrorCode, Result } from '../types/result.js'"` ← raw file content
35
+ Bad example ❌:
36
+ > `"import { ExternalCacheFileSchema } from '../types/cache.js'"` ← raw file content
53
37
 
54
- **Global facts** are for cross-cutting structural observations only (e.g. CLI entry pattern, installation steps). Max 20, each ≤ 300 chars. Only update global_facts when you re-read a structural file (AGENTS.md, install.sh, package.json, *.toml, opencode.json).
38
+ **Global facts** — cross-cutting structural observations only (CLI entry pattern, installation steps, etc.). Max 20, each ≤ 300 chars. Only update `global_facts` when re-reading a structural file: `AGENTS.md`, `install.sh`, `opencode.json`, `package.json`, `*.toml`.
55
39
 
56
- ---
40
+ ## Scan Workflow
57
41
 
58
- ## Mandatory: Write Before Return
59
- **Every invocation that reads any file MUST call `cache_ctrl_write_local` before returning — no exceptions, no edge cases.**
42
+ 1. Call `cache_ctrl_check_files` to identify changed and new files.
43
+ 2. Read only the changed/new files (skip unchanged ones).
44
+ 3. Extract `FileFacts` per file (follow Fact-Writing Rules above).
45
+ 4. Call `cache_ctrl_write_local` — **mandatory** (see Write-Before-Return Rule below for the skip exception).
46
+ 5. Return your summary.
60
47
 
61
- Sequential checklist (do not skip any step):
48
+ > **⚠ Cache is non-exhaustive:** `status: "unchanged"` only confirms previously-tracked files are stable — it does not mean the file set is complete. Always check `new_files` and `deleted_git_files` in the response.
62
49
 
63
- 1. Call `cache_ctrl_check_files` — identify changed/new files
64
- 2. Read only the changed/new files (skip unchanged ones)
65
- 3. Extract concise facts per file (follow Fact-Writing Rules above)
66
- 4. **Call `cache_ctrl_write_local` — MANDATORY. NO EXCEPTIONS.** (even if only 1 file changed, even if only global_facts changed, even if you believe the facts are identical to what is cached)
67
- 5. Return your summary
50
+ ## Write-Before-Return Rule
68
51
 
69
- > **⛔ Write-or-fail rule**: If you read any file in steps 2–3, you MUST call `cache_ctrl_write_local` in step 4. Returning without writing after reading files is a critical failure — the cache will be stale and the orchestrator will detect the missing write and re-invoke you. Even if zero files were read, you must still consult the decision table below before deciding to skip the write.
52
+ **Every invocation that reads any file MUST call `cache_ctrl_write_local` before returning.**
70
53
 
71
- **The only time you may skip `cache_ctrl_write_local` is when ALL of the following are true simultaneously:**
54
+ The only time you may skip the write is when ALL of the following are true:
72
55
 
73
56
  | Condition | Required value |
74
57
  |---|---|
75
- | `changed_files` from `cache_ctrl_check_files` | empty `[]` |
76
- | `new_files` from `cache_ctrl_check_files` | empty `[]` |
77
- | No files were force-requested by the caller | true |
58
+ | `changed_files` from `check_files` | `[]` |
59
+ | `new_files` from `check_files` | `[]` |
60
+ | No files were force-requested by caller | true |
78
61
  | Cache already exists and is non-empty | true |
79
62
  | This invocation was NOT triggered by a cache invalidation | true |
80
63
 
81
- If any one of these conditions is not met, you **must** write.
82
-
83
- ---
84
-
85
- ## Startup Workflow
86
- ### 1. Check if tracked files changed
87
-
88
- **Tier 1:** Call `cache_ctrl_check_files` (no parameters).
89
- **Tier 2:** `cache-ctrl check-files`
90
-
91
- Result interpretation (Tier 1 & 2):
92
- - `status: "unchanged"` → tracked files are content-stable; skip re-scan and return cached context.
93
- - `status: "changed"` → at least one tracked file changed; proceed to **delta scan**.
94
- - `status: "unchanged"` with empty `tracked_files` → cold start, proceed to scan.
95
-
96
- The response also reports:
97
- - `new_files` — untracked non-ignored files absent from cache, plus git-tracked files absent from cache when the cache is non-empty (blank-slate caches skip git-tracked files to avoid false positives on cold start)
98
- - `deleted_git_files` — git-tracked files deleted from the working tree (reported by `git ls-files --deleted`)
99
-
100
- > **⚠ Cache is non-exhaustive**: `status: "unchanged"` only confirms that previously-tracked files are content-stable — it does not mean the file set is complete. Always check `new_files` and `deleted_git_files` in the response; if either is non-empty, include those paths in the next write to keep the cache up to date.
64
+ If any condition is not met, you **must** write.
101
65
 
102
- ### 2. Invalidate before writing (optional)
103
- > Do this only if cache is really outdated and a full rescan is needed. Otherwise just proceed with next step (writing).
66
+ > **⛔ Write-or-fail:** Returning without writing after reading files is a critical failure — the cache will be stale. Even if you believe facts are unchanged, if you read a file, you write.
104
67
 
105
- **Tier 1:** Call `cache_ctrl_invalidate` with `agent: "local"`.
106
- **Tier 2:** `cache-ctrl invalidate local`
68
+ ## `cache_ctrl_write_local` Reference
107
69
 
108
- ### 3. Write cache after scanning
109
- **Always use the write tool/command — never write the file directly.** Direct writes bypass schema validation and can silently corrupt the cache format.
70
+ Always use `cache_ctrl_write_local` — never write cache files directly.
110
71
 
111
- > **Write is per-path merge**: Submitted `tracked_files` entries replace existing entries for the same paths. Paths not in the submission are preserved. Entries for files deleted from disk are evicted automatically (no agent action needed).
112
-
113
- #### Input fields (top-level args)
72
+ #### Input fields
114
73
 
115
74
  | Field | Type | Required | Notes |
116
75
  |---|---|---|---|
117
76
  | `topic` | `string` | ✅ | Human description of what was scanned |
118
77
  | `description` | `string` | ✅ | One-liner for keyword search |
119
- | `tracked_files` | `Array<{ path: string }>` | ✅ | Paths to track; `mtime` and `hash` are auto-computed by the tool |
120
- | `global_facts` | `string[]` | optional | Repo-level facts; last-write-wins; see trigger rule below |
121
- | `facts` | `Record<string, FileFacts>` | optional | Per-file structured facts keyed by path; per-path merge |
122
- | `cache_miss_reason` | `string` | optional | Why the previous cache was discarded |
123
-
124
- > **Cold start vs incremental**: On first run (no existing cache), submit all relevant files. On subsequent runs, submit only new and changed files — the tool merges them in.
78
+ | `tracked_files` | `Array<{ path: string }>` | ✅ | `mtime` and `hash` are auto-computed |
79
+ | `facts` | `Record<string, FileFacts>` | optional | Per-file structured facts; per-path merge |
80
+ | `global_facts` | `string[]` | optional | Last-write-wins; see trigger rule above |
81
+ | `cache_miss_reason` | `string` | optional | Why prior cache was discarded |
125
82
 
126
- > **Auto-set by the tool — do not include**: `timestamp` (current UTC), `mtime` (filesystem `lstat()`), and `hash` (SHA-256) per `tracked_files` entry.
83
+ > **Auto-set by the tool — do not include:** `timestamp`, `mtime`, `hash`.
84
+ > **Write is per-path merge:** Submitted paths replace existing entries for those paths. Other paths are preserved. Deleted-file entries are evicted automatically.
127
85
 
128
- ### Scope rule for `facts`
129
- Submit `facts` ONLY for files you actually read in this session (i.e., files present in
130
- your submitted `tracked_files`). Never reconstruct or re-submit facts for unchanged files —
131
- the tool preserves them automatically via per-path merge.
86
+ #### Scope rule for `facts`
132
87
 
133
- Submitting a facts key for a path absent from submitted `tracked_files` is a
134
- VALIDATION_ERROR and the entire write is rejected.
88
+ Submit `facts` ONLY for files you actually read in this session (files present in `tracked_files`). Never reconstruct or re-submit facts for unchanged files — the tool preserves them automatically.
135
89
 
136
- ### Fact completeness
137
- When a file appears in `changed_files` or `new_files`, read the **whole file** before writing
138
- facts — not just the diff. A 2-line change does not support a complete re-description of the
139
- file, and submitting partial facts for a re-read path **permanently replaces** whatever was
140
- cached before.
90
+ Submitting a `facts` key for a path absent from `tracked_files` is a `VALIDATION_ERROR` and the entire write is rejected.
141
91
 
142
- Write facts as **enumerable observations** — one entry per notable characteristic (purpose,
143
- structure, key dependencies, patterns, constraints, entry points). Do not bundle multiple
144
- distinct properties into a single string. A file should have as many fact entries as it has
145
- distinct notable properties, up to the 10-item limit.
92
+ #### Fact completeness
146
93
 
147
- Each per-file `facts` entry MUST include `summary` + `role`, should include `importance`,
148
- and may include an optional `facts[]` list.
94
+ When a file appears in `changed_files` or `new_files`, read the **whole file** before writing facts — not just the diff. Submitting partial facts for a re-read path **permanently replaces** whatever was cached.
149
95
 
150
- #### `cache_ctrl_write_local` facts shape example (`FileFacts`)
96
+ #### Example
151
97
 
152
98
  ```json
153
99
  {
100
+ "topic": "src/commands scan",
101
+ "description": "Scan of src/commands after write refactor",
102
+ "tracked_files": [
103
+ { "path": "src/commands/writeLocal.ts" }
104
+ ],
154
105
  "facts": {
155
106
  "src/commands/writeLocal.ts": {
156
- "summary": "Thin router dispatching write calls to writeLocal or writeExternal based on agent type.",
107
+ "summary": "Thin router dispatching write calls based on agent type.",
157
108
  "role": "implementation",
158
109
  "importance": 2,
159
110
  "facts": [
@@ -165,58 +116,16 @@ and may include an optional `facts[]` list.
165
116
  }
166
117
  ```
167
118
 
168
- ### When to submit `global_facts`
169
- Submit `global_facts` only when you re-read at least one structural file in this session:
170
- AGENTS.md, install.sh, opencode.json, package.json, *.toml config files.
171
-
172
- If none of those are in `changed_files` or `new_files`, omit `global_facts` from the write.
173
- The existing value is preserved automatically.
174
-
175
- ### Eviction
176
- Facts for files deleted from disk are evicted automatically on the next write — no agent
177
- action needed. `global_facts` is never evicted.
178
-
179
- #### Tier 1 — `cache_ctrl_write_local`
180
-
181
- ```json
182
- {
183
- "topic": "neovim plugin configuration scan",
184
- "description": "Full scan of lua/plugins tree for neovim lazy.nvim setup",
185
- "tracked_files": [
186
- { "path": "lua/plugins/ui/bufferline.lua" },
187
- { "path": "lua/plugins/lsp/nvim-lspconfig.lua" }
188
- ]
189
- }
190
- ```
191
-
192
- #### Tier 2 — CLI
119
+ ## Eviction
193
120
 
194
- `cache-ctrl write-local --data '<json>'` pass the same top-level fields as the JSON value.
121
+ Facts for files deleted from disk are evicted automatically on the next write — no agent action needed. `global_facts` is never evicted.
195
122
 
196
- ### 4. Confirm cache (optional)
197
- **Tier 1:** Call `cache_ctrl_list` with `agent: "local"` to confirm the entry was written.
198
- **Tier 2:** `cache-ctrl list --agent local`
123
+ ## Tool Reference
199
124
 
200
- Note: local entries show `is_stale: true` only when `cache_ctrl_check_files` detects actual changes.
201
-
202
- ---
203
-
204
- ## Tool / Command Reference
205
- | Operation | Tier 1 (built-in) | Tier 2 (CLI) |
206
- |---|---|---|
207
- | Detect file changes | `cache_ctrl_check_files` | `cache-ctrl check-files` |
208
- | Invalidate cache | `cache_ctrl_invalidate` | `cache-ctrl invalidate local` |
209
- | Confirm written | `cache_ctrl_list` | `cache-ctrl list --agent local` |
210
- | Read facts (filtered) | `cache_ctrl_inspect` with `filter`, `folder`, or `searchFacts` | `cache-ctrl inspect local context --filter <kw>[,<kw>...]` / `--folder <path>` / `--search-facts <kw>[,<kw>...]` |
211
- | Read all facts (rare) | `cache_ctrl_inspect` (no filter) | `cache-ctrl inspect local context` |
212
- | Write cache | `cache_ctrl_write_local` | `cache-ctrl write-local --data '<json>'` |
213
-
214
- > For `inspect` filter targeting options, see `cache-ctrl-caller`.
215
-
216
- > All `cache_ctrl_*` tools return `server_time`; see `cache-ctrl-caller` for freshness-decision usage.
217
-
218
- ## Cache Location
219
-
220
- `.ai/local-context-gatherer_cache/context.json` — single file, no per-subject splitting.
221
-
222
- No time-based TTL for Tier 1/2. Freshness determined by `cache_ctrl_check_files`.
125
+ | Operation | Tool |
126
+ |---|---|
127
+ | Detect file changes | `cache_ctrl_check_files` |
128
+ | Invalidate cache | `cache_ctrl_invalidate` (agent: "local") |
129
+ | Write cache | `cache_ctrl_write_local` |
130
+ | Read facts (filtered) | `cache_ctrl_inspect` (agent: "local", filter / folder / search_facts) |
131
+ | Confirm written | `cache_ctrl_list` (agent: "local") |
@@ -90,11 +90,8 @@ function buildInLinks(graph: DependencyGraph, nodes: string[]): Map<string, stri
90
90
 
91
91
  for (const [sourceNode, graphNode] of graph.entries()) {
92
92
  for (const targetNode of graphNode.deps) {
93
- if (!inLinks.has(targetNode)) {
94
- continue;
95
- }
96
93
  const targetInLinks = inLinks.get(targetNode);
97
- if (targetInLinks !== undefined) {
94
+ if (targetInLinks) {
98
95
  targetInLinks.push(sourceNode);
99
96
  }
100
97
  }
@@ -1,5 +1,4 @@
1
- import { readFile, writeFile, rename, stat, unlink, readdir, mkdir } from "node:fs/promises";
2
- import { open } from "node:fs/promises";
1
+ import { readFile, writeFile, rename, stat, unlink, readdir, mkdir, open } from "node:fs/promises";
3
2
  import { join, dirname } from "node:path";
4
3
  import { randomBytes } from "node:crypto";
5
4
  import type { AgentType, CacheEntry, ExternalCacheFile, LocalCacheFile } from "../types/cache.js";
@@ -1,7 +1,8 @@
1
- import type { ExternalCacheFile, HeaderMeta } from "../types/cache.js";
1
+ import type { ExternalCacheFile } from "../types/cache.js";
2
2
  import { ErrorCode, type Result } from "../types/result.js";
3
- import { loadExternalCacheEntries } from "./cacheManager.js";
3
+ import { listCacheFiles, loadExternalCacheEntries, writeCache } from "./cacheManager.js";
4
4
  import { scoreEntry } from "../search/keywordSearch.js";
5
+ import { validateSubject } from "../utils/validate.js";
5
6
 
6
7
  const DEFAULT_MAX_AGE_MS = 24 * 60 * 60 * 1000;
7
8
 
@@ -30,26 +31,6 @@ export function isExternalStale(entry: ExternalCacheFile, maxAgeMs?: number): bo
30
31
  return isFetchedAtStale(entry.fetched_at ?? "", maxAgeMs);
31
32
  }
32
33
 
33
- /**
34
- * Merges newly fetched header metadata into an external cache entry.
35
- *
36
- * @param existing - Existing external cache entry.
37
- * @param updates - Per-URL header metadata updates.
38
- * @returns New entry with merged `header_metadata`.
39
- */
40
- export function mergeHeaderMetadata(
41
- existing: ExternalCacheFile,
42
- updates: Record<string, HeaderMeta>,
43
- ): ExternalCacheFile {
44
- return {
45
- ...existing,
46
- header_metadata: {
47
- ...existing.header_metadata,
48
- ...updates,
49
- },
50
- };
51
- }
52
-
53
34
  /**
54
35
  * Formats human-readable age text from an external `fetched_at` timestamp.
55
36
  *
@@ -102,3 +83,40 @@ export async function resolveTopExternalMatch(repoRoot: string, subject: string)
102
83
 
103
84
  return { ok: true, value: scored[0]!.entry.file };
104
85
  }
86
+
87
+ /**
88
+ * Updates `fetched_at` for one external entry (best subject match) or all entries.
89
+ *
90
+ * @param repoRoot - Repository root.
91
+ * @param subject - Optional subject keyword; when provided, only top match is updated.
92
+ * @param fetchedAt - New ISO timestamp value (or empty string to invalidate).
93
+ * @returns Updated file paths.
94
+ */
95
+ export async function updateExternalFetchedAt(
96
+ repoRoot: string,
97
+ subject: string | undefined,
98
+ fetchedAt: string,
99
+ ): Promise<Result<string[]>> {
100
+ let filesToUpdate: string[];
101
+
102
+ if (subject) {
103
+ const subjectCheck = validateSubject(subject);
104
+ if (!subjectCheck.ok) return subjectCheck;
105
+ const matchResult = await resolveTopExternalMatch(repoRoot, subject);
106
+ if (!matchResult.ok) return matchResult;
107
+ filesToUpdate = [matchResult.value];
108
+ } else {
109
+ const filesResult = await listCacheFiles("external", repoRoot);
110
+ if (!filesResult.ok) return filesResult;
111
+ filesToUpdate = filesResult.value;
112
+ }
113
+
114
+ const updated: string[] = [];
115
+ for (const filePath of filesToUpdate) {
116
+ const writeResult = await writeCache(filePath, { fetched_at: fetchedAt });
117
+ if (!writeResult.ok) return writeResult;
118
+ updated.push(filePath);
119
+ }
120
+
121
+ return { ok: true, value: updated };
122
+ }
@@ -30,20 +30,20 @@ export async function checkFilesCommand(): Promise<Result<CheckFilesResult["valu
30
30
 
31
31
  const changedFiles: Array<{ path: string; reason: "mtime" | "hash" | "missing" }> = [];
32
32
  const unchangedFiles: string[] = [];
33
- const missingFiles: string[] = [];
34
33
 
35
34
  for (const trackedFile of trackedFiles) {
36
35
  const result = await compareTrackedFile(trackedFile, repoRoot);
37
36
  if (result.status === "unchanged") {
38
37
  unchangedFiles.push(trackedFile.path);
39
38
  } else if (result.status === "missing") {
40
- missingFiles.push(trackedFile.path);
41
39
  changedFiles.push({ path: trackedFile.path, reason: "missing" });
42
40
  } else {
43
41
  changedFiles.push({ path: trackedFile.path, reason: result.reason ?? "mtime" });
44
42
  }
45
43
  }
46
44
 
45
+ const missingFiles = changedFiles.filter((file) => file.reason === "missing").map((file) => file.path);
46
+
47
47
  const [gitTrackedFiles, deletedGitFiles, untrackedNonIgnoredFiles] = await Promise.all([
48
48
  getGitTrackedFiles(repoRoot),
49
49
  getGitDeletedFiles(repoRoot),
@@ -1,11 +1,10 @@
1
- import { findRepoRoot, listCacheFiles, writeCache, readCache } from "../cache/cacheManager.js";
2
- import { resolveTopExternalMatch } from "../cache/externalCache.js";
1
+ import { findRepoRoot, writeCache, readCache } from "../cache/cacheManager.js";
2
+ import { updateExternalFetchedAt } from "../cache/externalCache.js";
3
3
  import { resolveGraphCachePath } from "../cache/graphCache.js";
4
4
  import { resolveLocalCachePath } from "../cache/localCache.js";
5
5
  import { ErrorCode, type Result } from "../types/result.js";
6
6
  import type { InvalidateArgs, InvalidateResult } from "../types/commands.js";
7
7
  import { toUnknownResult } from "../utils/errors.js";
8
- import { validateSubject } from "../utils/validate.js";
9
8
 
10
9
  /**
11
10
  * Marks cache entries stale by zeroing their freshness timestamps.
@@ -20,25 +19,9 @@ export async function invalidateCommand(args: InvalidateArgs): Promise<Result<In
20
19
  const invalidated: string[] = [];
21
20
 
22
21
  if (args.agent === "external") {
23
- let filesToInvalidate: string[];
24
-
25
- if (args.subject) {
26
- const subjectCheck = validateSubject(args.subject);
27
- if (!subjectCheck.ok) return subjectCheck;
28
- const matchResult = await resolveTopExternalMatch(repoRoot, args.subject);
29
- if (!matchResult.ok) return matchResult;
30
- filesToInvalidate = [matchResult.value];
31
- } else {
32
- const filesResult = await listCacheFiles("external", repoRoot);
33
- if (!filesResult.ok) return filesResult;
34
- filesToInvalidate = filesResult.value;
35
- }
36
-
37
- for (const filePath of filesToInvalidate) {
38
- const writeResult = await writeCache(filePath, { fetched_at: "" });
39
- if (!writeResult.ok) return writeResult;
40
- invalidated.push(filePath);
41
- }
22
+ const updateResult = await updateExternalFetchedAt(repoRoot, args.subject, "");
23
+ if (!updateResult.ok) return updateResult;
24
+ invalidated.push(...updateResult.value);
42
25
  } else {
43
26
  // local — only invalidate if the file already exists
44
27
  const localPath = resolveLocalCachePath(repoRoot);
@@ -1,10 +1,9 @@
1
- import { findRepoRoot, listCacheFiles, writeCache } from "../cache/cacheManager.js";
2
- import { resolveTopExternalMatch } from "../cache/externalCache.js";
1
+ import { findRepoRoot, writeCache } from "../cache/cacheManager.js";
2
+ import { updateExternalFetchedAt } from "../cache/externalCache.js";
3
3
  import { resolveLocalCachePath } from "../cache/localCache.js";
4
- import { ErrorCode, type Result } from "../types/result.js";
4
+ import { type Result } from "../types/result.js";
5
5
  import type { TouchArgs, TouchResult } from "../types/commands.js";
6
6
  import { toUnknownResult } from "../utils/errors.js";
7
- import { validateSubject } from "../utils/validate.js";
8
7
 
9
8
  /**
10
9
  * Marks cache entries fresh by setting timestamps to current UTC time.
@@ -20,25 +19,9 @@ export async function touchCommand(args: TouchArgs): Promise<Result<TouchResult[
20
19
  const touched: string[] = [];
21
20
 
22
21
  if (args.agent === "external") {
23
- let filesToTouch: string[];
24
-
25
- if (args.subject) {
26
- const subjectCheck = validateSubject(args.subject);
27
- if (!subjectCheck.ok) return subjectCheck;
28
- const matchResult = await resolveTopExternalMatch(repoRoot, args.subject);
29
- if (!matchResult.ok) return matchResult;
30
- filesToTouch = [matchResult.value];
31
- } else {
32
- const filesResult = await listCacheFiles("external", repoRoot);
33
- if (!filesResult.ok) return filesResult;
34
- filesToTouch = filesResult.value;
35
- }
36
-
37
- for (const filePath of filesToTouch) {
38
- const writeResult = await writeCache(filePath, { fetched_at: newTimestamp });
39
- if (!writeResult.ok) return writeResult;
40
- touched.push(filePath);
41
- }
22
+ const updateResult = await updateExternalFetchedAt(repoRoot, args.subject, newTimestamp);
23
+ if (!updateResult.ok) return updateResult;
24
+ touched.push(...updateResult.value);
42
25
  } else {
43
26
  // local
44
27
  const localPath = resolveLocalCachePath(repoRoot);
@@ -0,0 +1,103 @@
1
+ import { readdir, rm, unlink } from "node:fs/promises";
2
+ import os from "node:os";
3
+ import path from "node:path";
4
+
5
+ import { resolveOpenCodeConfigDir } from "../files/openCodeInstaller.js";
6
+ import type { UninstallArgs, UninstallResult } from "../types/commands.js";
7
+ import { ErrorCode, type Result } from "../types/result.js";
8
+
9
+ const textDecoder = new TextDecoder();
10
+ const CACHE_CTRL_SKILL_DIR_PATTERN = /^cache-ctrl-/;
11
+
12
+ /**
13
+ * Removes cache-ctrl OpenCode integration files and uninstalls the global npm package.
14
+ */
15
+ export async function uninstallCommand(args: UninstallArgs): Promise<Result<UninstallResult>> {
16
+ try {
17
+ if (args.configDir !== undefined) {
18
+ const absConfigDir = path.isAbsolute(args.configDir)
19
+ ? path.resolve(args.configDir)
20
+ : path.resolve(process.cwd(), args.configDir);
21
+ const home = os.homedir();
22
+ if (!absConfigDir.startsWith(home + path.sep) && absConfigDir !== home) {
23
+ return {
24
+ ok: false,
25
+ error: `--config-dir must be within the user home directory, got: ${args.configDir}`,
26
+ code: ErrorCode.INVALID_ARGS,
27
+ };
28
+ }
29
+ }
30
+
31
+ const removed: string[] = [];
32
+ const warnings: string[] = [];
33
+ let packageUninstalled = true;
34
+
35
+ const configDir = resolveOpenCodeConfigDir(args.configDir);
36
+ const toolFilePath = path.join(configDir, "tools", "cache_ctrl.ts");
37
+ const skillsDirPath = path.join(configDir, "skills");
38
+ const localBinaryPath = path.join(os.homedir(), ".local", "bin", "cache-ctrl");
39
+
40
+ try {
41
+ await unlink(toolFilePath);
42
+ removed.push(toolFilePath);
43
+ } catch (err) {
44
+ if (err instanceof Error && "code" in err && err.code === "ENOENT") {
45
+ warnings.push(`Tool file not found: ${toolFilePath}`);
46
+ } else {
47
+ throw err;
48
+ }
49
+ }
50
+
51
+ try {
52
+ const skillEntries = await readdir(skillsDirPath, { withFileTypes: true });
53
+ for (const skillEntry of skillEntries) {
54
+ if (!skillEntry.isDirectory() || !CACHE_CTRL_SKILL_DIR_PATTERN.test(skillEntry.name)) {
55
+ continue;
56
+ }
57
+ const skillPath = path.join(skillsDirPath, skillEntry.name);
58
+ await rm(skillPath, { recursive: true });
59
+ removed.push(skillPath);
60
+ }
61
+ } catch (err) {
62
+ if (err instanceof Error && "code" in err && err.code === "ENOENT") {
63
+ warnings.push(`Skills directory not found: ${skillsDirPath}`);
64
+ } else {
65
+ throw err;
66
+ }
67
+ }
68
+
69
+ try {
70
+ await unlink(localBinaryPath);
71
+ removed.push(localBinaryPath);
72
+ } catch (err) {
73
+ if (err instanceof Error && "code" in err && err.code === "ENOENT") {
74
+ warnings.push(`Local binary not found: ${localBinaryPath}`);
75
+ } else {
76
+ throw err;
77
+ }
78
+ }
79
+
80
+ const uninstallProcess = Bun.spawnSync(["npm", "uninstall", "-g", "@thecat69/cache-ctrl"]);
81
+ if (uninstallProcess.exitCode !== 0) {
82
+ packageUninstalled = false;
83
+ const npmError = textDecoder.decode(uninstallProcess.stderr);
84
+ warnings.push(npmError.length > 0 ? npmError : "npm uninstall -g @thecat69/cache-ctrl failed");
85
+ }
86
+
87
+ return {
88
+ ok: true,
89
+ value: {
90
+ removed,
91
+ packageUninstalled,
92
+ warnings,
93
+ },
94
+ };
95
+ } catch (err) {
96
+ const message = err instanceof Error ? err.message : String(err);
97
+ return {
98
+ ok: false,
99
+ error: message,
100
+ code: ErrorCode.UNKNOWN,
101
+ };
102
+ }
103
+ }
@@ -0,0 +1,65 @@
1
+ import os from "node:os";
2
+ import path from "node:path";
3
+
4
+ import type { UpdateArgs, UpdateResult } from "../types/commands.js";
5
+ import { ErrorCode, type Result } from "../types/result.js";
6
+
7
+ import { installCommand } from "./install.js";
8
+
9
+ const textDecoder = new TextDecoder();
10
+
11
+ /**
12
+ * Updates the globally installed npm package and refreshes OpenCode integration files.
13
+ */
14
+ export async function updateCommand(args: UpdateArgs): Promise<Result<UpdateResult>> {
15
+ try {
16
+ if (args.configDir !== undefined) {
17
+ const absConfigDir = path.isAbsolute(args.configDir)
18
+ ? path.resolve(args.configDir)
19
+ : path.resolve(process.cwd(), args.configDir);
20
+ const home = os.homedir();
21
+ if (!absConfigDir.startsWith(home + path.sep) && absConfigDir !== home) {
22
+ return {
23
+ ok: false,
24
+ error: `--config-dir must be within the user home directory, got: ${args.configDir}`,
25
+ code: ErrorCode.INVALID_ARGS,
26
+ };
27
+ }
28
+ }
29
+
30
+ const warnings: string[] = [];
31
+ let packageUpdated = true;
32
+
33
+ const installProcess = Bun.spawnSync(["npm", "install", "-g", "@thecat69/cache-ctrl@latest"]);
34
+ if (installProcess.exitCode !== 0) {
35
+ packageUpdated = false;
36
+ const npmError = textDecoder.decode(installProcess.stderr);
37
+ warnings.push(npmError.length > 0 ? npmError : "npm install -g @thecat69/cache-ctrl@latest failed");
38
+ }
39
+
40
+ const installResult = await installCommand({ ...(args.configDir !== undefined ? { configDir: args.configDir } : {}) });
41
+ if (!installResult.ok) {
42
+ return {
43
+ ok: false,
44
+ error: installResult.error,
45
+ code: installResult.code,
46
+ };
47
+ }
48
+
49
+ return {
50
+ ok: true,
51
+ value: {
52
+ packageUpdated,
53
+ installedPaths: [installResult.value.toolPath, ...installResult.value.skillPaths],
54
+ warnings,
55
+ },
56
+ };
57
+ } catch (err) {
58
+ const message = err instanceof Error ? err.message : String(err);
59
+ return {
60
+ ok: false,
61
+ error: message,
62
+ code: ErrorCode.UNKNOWN,
63
+ };
64
+ }
65
+ }