@thecat69/cache-ctrl 1.0.0 → 1.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43) hide show
  1. package/README.md +202 -28
  2. package/cache_ctrl.ts +125 -13
  3. package/package.json +2 -1
  4. package/skills/cache-ctrl-caller/SKILL.md +45 -31
  5. package/skills/cache-ctrl-external/SKILL.md +20 -45
  6. package/skills/cache-ctrl-local/SKILL.md +95 -86
  7. package/src/analysis/graphBuilder.ts +85 -0
  8. package/src/analysis/pageRank.ts +167 -0
  9. package/src/analysis/symbolExtractor.ts +240 -0
  10. package/src/cache/cacheManager.ts +52 -2
  11. package/src/cache/externalCache.ts +41 -64
  12. package/src/cache/graphCache.ts +12 -0
  13. package/src/cache/localCache.ts +2 -0
  14. package/src/commands/checkFiles.ts +7 -4
  15. package/src/commands/checkFreshness.ts +19 -19
  16. package/src/commands/flush.ts +9 -2
  17. package/src/commands/graph.ts +131 -0
  18. package/src/commands/inspect.ts +13 -181
  19. package/src/commands/inspectExternal.ts +79 -0
  20. package/src/commands/inspectLocal.ts +134 -0
  21. package/src/commands/install.ts +6 -0
  22. package/src/commands/invalidate.ts +19 -2
  23. package/src/commands/list.ts +11 -11
  24. package/src/commands/map.ts +87 -0
  25. package/src/commands/prune.ts +20 -8
  26. package/src/commands/search.ts +9 -2
  27. package/src/commands/touch.ts +9 -2
  28. package/src/commands/version.ts +14 -0
  29. package/src/commands/watch.ts +253 -0
  30. package/src/commands/writeExternal.ts +51 -0
  31. package/src/commands/writeLocal.ts +123 -0
  32. package/src/files/changeDetector.ts +15 -0
  33. package/src/files/gitFiles.ts +15 -0
  34. package/src/files/openCodeInstaller.ts +21 -2
  35. package/src/http/freshnessChecker.ts +23 -1
  36. package/src/index.ts +253 -28
  37. package/src/search/keywordSearch.ts +24 -0
  38. package/src/types/cache.ts +42 -18
  39. package/src/types/commands.ts +99 -1
  40. package/src/types/result.ts +27 -7
  41. package/src/utils/errors.ts +14 -0
  42. package/src/utils/traversal.ts +42 -0
  43. package/src/commands/write.ts +0 -170
@@ -6,16 +6,9 @@ description: How to use cache-ctrl to check staleness, search, and manage the ex
6
6
  # cache-ctrl — External Cache Usage
7
7
 
8
8
  Manage `.ai/external-context-gatherer_cache/` to avoid redundant HTTP fetches.
9
- Three tiers of access — use the best one available.
9
+ Two tiers of access — use the best one available.
10
10
 
11
- ## Availability Detection (run once at startup)
12
-
13
- 1. Call `cache_ctrl_list` (built-in tool).
14
- - Success → **use Tier 1** for all operations below.
15
- - Failure (tool not found / permission denied) → continue to step 2.
16
- 2. Run `bash: "which cache-ctrl"`.
17
- - Exit 0 → **use Tier 2** for all operations below.
18
- - Not found → **use Tier 3** for all operations below.
11
+ > Availability Detection: see `cache-ctrl-caller`.
19
12
 
20
13
  ---
21
14
 
@@ -25,7 +18,6 @@ Three tiers of access — use the best one available.
25
18
 
26
19
  **Tier 1:** Call `cache_ctrl_list` with `agent: "external"`.
27
20
  **Tier 2:** `cache-ctrl list --agent external`
28
- **Tier 3:** `glob` `.ai/external-context-gatherer_cache/*.json` → for each match, `read` the file and check `fetched_at`. Stale if `fetched_at` is empty or older than 24 hours.
29
21
 
30
22
  - Entry for target subject is fresh → **skip fetching, return cached content**.
31
23
  - Entry is stale or absent → proceed to step 2.
@@ -34,10 +26,9 @@ For borderline cases (entry recently turned stale):
34
26
 
35
27
  **Tier 1:** Call `cache_ctrl_check_freshness` with the subject keyword.
36
28
  **Tier 2:** `cache-ctrl check-freshness <subject-keyword>`
37
- **Tier 3:** Re-read the file and compare `fetched_at` with current time. If within the last hour, treat as fresh.
38
29
 
39
- - `overall: "fresh"` (Tier 1/2) or fresh by timestamp (Tier 3) → skip fetch.
40
- - `overall: "stale"` / `"error"` or stale by timestamp → proceed to fetch.
30
+ - `overall: "fresh"` (Tier 1/2) → skip fetch.
31
+ - `overall: "stale"` / `"error"` → proceed to fetch.
41
32
 
42
33
  ### 2. Search before creating a new subject
43
34
 
@@ -45,30 +36,23 @@ Before fetching a brand-new subject, check whether related info is already cache
45
36
 
46
37
  **Tier 1:** Call `cache_ctrl_search` with relevant keywords.
47
38
  **Tier 2:** `cache-ctrl search <keyword> [<keyword>...]`
48
- **Tier 3:** `glob` `.ai/external-context-gatherer_cache/*.json` → `read` each file, scan the `subject` and `description` fields for keyword matches.
49
39
 
50
40
  ### 3. Write cache after fetching
51
41
 
52
- **Always use the write tool/command — never write cache files directly via `edit`.** Direct writes bypass schema validation and can silently corrupt the cache format.
42
+ **Always use the write tool/command — never write cache files directly.** Direct writes bypass schema validation and can silently corrupt the cache format.
53
43
 
54
- **Tier 1:** Call `cache_ctrl_write` with:
44
+ **Tier 1:** Call `cache_ctrl_write_external` with:
55
45
  ```json
56
46
  {
57
- "agent": "external",
58
47
  "subject": "<subject>",
59
- "content": {
60
- "subject": "<subject>",
61
- "description": "<one-line summary>",
62
- "fetched_at": "<ISO 8601 now>",
63
- "sources": [{ "type": "<type>", "url": "<canonical-url>" }],
64
- "header_metadata": {}
65
- }
48
+ "description": "<one-line summary>",
49
+ "fetched_at": "<ISO 8601 now>",
50
+ "sources": [{ "type": "<type>", "url": "<canonical-url>" }],
51
+ "header_metadata": {}
66
52
  }
67
53
  ```
68
54
 
69
- **Tier 2:** `cache-ctrl write external <subject> --data '<json>'`
70
-
71
- **Tier 3:** Same as Tier 2 — there is no direct-file fallback for writes. If neither Tier 1 nor Tier 2 is available, request access to one of them.
55
+ **Tier 2:** `cache-ctrl write-external <subject> --data '<json>'`
72
56
 
73
57
  #### ExternalCacheFile schema
74
58
 
@@ -98,20 +82,19 @@ All fields are validated on write. Unknown extra fields are allowed and preserve
98
82
 
99
83
  **Tier 1:** Call `cache_ctrl_invalidate` with `agent: "external"` and the subject keyword.
100
84
  **Tier 2:** `cache-ctrl invalidate external <subject-keyword>`
101
- **Tier 3:** `read` the file, set `fetched_at` to `""`, `edit` it back.
102
85
 
103
86
  ---
104
87
 
105
88
  ## Tool / Command Reference
106
89
 
107
- | Operation | Tier 1 (built-in) | Tier 2 (CLI) | Tier 3 (manual) |
108
- |---|---|---|---|
109
- | List entries | `cache_ctrl_list` | `cache-ctrl list --agent external` | `glob` + `read` each JSON |
110
- | HTTP freshness check | `cache_ctrl_check_freshness` | `cache-ctrl check-freshness <subject>` | compare `fetched_at` with now |
111
- | Search entries | `cache_ctrl_search` | `cache-ctrl search <kw>...` | `glob` + scan `subject`/`description` |
112
- | View full entry | `cache_ctrl_inspect` | `cache-ctrl inspect external <subject>` | `read` file directly |
113
- | Invalidate entry | `cache_ctrl_invalidate` | `cache-ctrl invalidate external <subject>` | set `fetched_at` to `""` via `edit` |
114
- | Write entry | `cache_ctrl_write` | `cache-ctrl write external <subject> --data '<json>'` | ❌ not available |
90
+ | Operation | Tier 1 (built-in) | Tier 2 (CLI) |
91
+ |---|---|---|
92
+ | List entries | `cache_ctrl_list` | `cache-ctrl list --agent external` |
93
+ | HTTP freshness check | `cache_ctrl_check_freshness` | `cache-ctrl check-freshness <subject>` |
94
+ | Search entries | `cache_ctrl_search` | `cache-ctrl search <kw>...` |
95
+ | View full entry | `cache_ctrl_inspect` | `cache-ctrl inspect external <subject>` |
96
+ | Invalidate entry | `cache_ctrl_invalidate` | `cache-ctrl invalidate external <subject>` |
97
+ | Write entry | `cache_ctrl_write_external` | `cache-ctrl write-external <subject> --data '<json>'` |
115
98
 
116
99
  ## Cache Location
117
100
 
@@ -119,12 +102,4 @@ All fields are validated on write. Unknown extra fields are allowed and preserve
119
102
 
120
103
  Staleness threshold: `fetched_at` is empty **or** older than 24 hours.
121
104
 
122
- ## server_time in Responses
123
-
124
- Every `cache_ctrl_*` tool call returns a `server_time` field at the outer JSON level:
125
-
126
- ```json
127
- { "ok": true, "value": { ... }, "server_time": "2026-04-05T12:34:56.789Z" }
128
- ```
129
-
130
- Use this to assess how stale `fetched_at` timestamps are — you do not need `bash` or system access to know the current time.
105
+ > All `cache_ctrl_*` tools return `server_time`; see `cache-ctrl-caller` for freshness-decision usage.
@@ -6,67 +6,91 @@ description: How to use cache-ctrl to detect file changes and manage the local c
6
6
  # cache-ctrl — Local Cache Usage
7
7
 
8
8
  Manage `.ai/local-context-gatherer_cache/context.json` to avoid redundant full-repo scans.
9
- Three tiers of access — use the best one available.
9
+ Two tiers of access — use the best one available.
10
10
 
11
- ## Availability Detection (run once at startup)
12
-
13
- 1. Call `cache_ctrl_check_files` (built-in tool).
14
- - Success → **use Tier 1** for all operations below.
15
- - Failure (tool not found / permission denied) → continue to step 2.
16
- 2. Run `bash: "which cache-ctrl"`.
17
- - Exit 0 → **use Tier 2** for all operations below.
18
- - Not found → **use Tier 3** for all operations below.
11
+ > Availability Detection: see `cache-ctrl-caller`.
19
12
 
20
13
  ---
21
14
 
22
15
  ## Fact-Writing Rules
16
+ Per-file `facts` entries are no longer flat string arrays. Each path now maps to a
17
+ **`FileFacts` object**:
18
+
19
+ ```json
20
+ {
21
+ "summary": "One-sentence description of what this file does",
22
+ "role": "implementation",
23
+ "importance": 2,
24
+ "facts": ["Concise observation 1", "Concise observation 2"]
25
+ }
26
+ ```
27
+
28
+ Required and recommended fields:
29
+
30
+ - **`summary` is mandatory** when writing a file entry. Keep it to one sentence.
31
+ - **`role` is mandatory** when writing a file entry. Must be one of:
32
+ - `entry-point`
33
+ - `interface`
34
+ - `implementation`
35
+ - `test`
36
+ - `config`
37
+ - **`importance` is optional but strongly recommended**:
38
+ - `1` = core module
39
+ - `2` = supporting module
40
+ - `3` = peripheral/config module
41
+ - **`facts` is optional** and capped at **10 items**, each **≤ 300 chars**.
23
42
 
24
- Facts must be **concise observations** about a file — not reproductions of its content.
43
+ Content quality rules:
25
44
 
26
- - **Each fact string must be ≤ 300 characters** (schema hard limit: 800). If an observation needs more, split it into two facts or summarize.
27
- - **Max 30 facts per file.** Choose only the most architecturally meaningful observations.
28
- - **Never write**: raw import lines, function bodies, code snippets, or verbatim text from the file.
29
- - **Do write**: what the file exports, what pattern it uses, what dependencies it has, what its responsibility is.
45
+ - **Never write** raw import lines, function bodies, code snippets, or verbatim text from the file.
46
+ - **Do write** concise architectural observations: purpose, key exports, constraints, dependencies, notable patterns.
30
47
 
31
- **Good fact** ✅:
32
- > `"Exports writeCommand validates subject, merges per-path facts atomically, returns Result<WriteResult>"`
48
+ **Good `facts[]` item** ✅:
49
+ > `"Delegates local writes to writeLocalCommand and preserves unrelated paths through per-path merge"`
33
50
 
34
- **Bad fact** ❌:
35
- > `"import { ExternalCacheFileSchema, LocalCacheFileSchema } from '../types/cache.js'; import { ErrorCode, Result } from '../types/result.js'; import { WriteArgs, WriteResult } from '../types/commands.js'"` ← this is raw file content
51
+ **Bad `facts[]` item** ❌:
52
+ > `"import { ExternalCacheFileSchema, LocalCacheFileSchema } from '../types/cache.js'; import { ErrorCode, Result } from '../types/result.js'"` ← raw file content
36
53
 
37
54
  **Global facts** are for cross-cutting structural observations only (e.g. CLI entry pattern, installation steps). Max 20, each ≤ 300 chars. Only update global_facts when you re-read a structural file (AGENTS.md, install.sh, package.json, *.toml, opencode.json).
38
55
 
39
56
  ---
40
57
 
41
58
  ## Mandatory: Write Before Return
42
-
43
- **Every invocation must call `cache_ctrl_write` before returning.** Returning without writing is a failure — the orchestrator will detect the missing write and re-invoke you.
59
+ **Every invocation that reads any file MUST call `cache_ctrl_write_local` before returning — no exceptions, no edge cases.**
44
60
 
45
61
  Sequential checklist (do not skip any step):
46
62
 
47
63
  1. Call `cache_ctrl_check_files` — identify changed/new files
48
64
  2. Read only the changed/new files (skip unchanged ones)
49
65
  3. Extract concise facts per file (follow Fact-Writing Rules above)
50
- 4. **Call `cache_ctrl_write` — MANDATORY** (even if only 1 file changed, even if only global_facts changed)
66
+ 4. **Call `cache_ctrl_write_local` — MANDATORY. NO EXCEPTIONS.** (even if only 1 file changed, even if only global_facts changed, even if you believe the facts are identical to what is cached)
51
67
  5. Return your summary
52
68
 
53
- If there are no changed files, the cache already exists and is non-empty, **and you were not invoked after a cache invalidation**, you may skip the write but only in this case.
69
+ > **⛔ Write-or-fail rule**: If you read any file in steps 2–3, you MUST call `cache_ctrl_write_local` in step 4. Returning without writing after reading files is a critical failure — the cache will be stale and the orchestrator will detect the missing write and re-invoke you. Even if zero files were read, you must still consult the decision table below before deciding to skip the write.
70
+
71
+ **The only time you may skip `cache_ctrl_write_local` is when ALL of the following are true simultaneously:**
72
+
73
+ | Condition | Required value |
74
+ |---|---|
75
+ | `changed_files` from `cache_ctrl_check_files` | empty `[]` |
76
+ | `new_files` from `cache_ctrl_check_files` | empty `[]` |
77
+ | No files were force-requested by the caller | true |
78
+ | Cache already exists and is non-empty | true |
79
+ | This invocation was NOT triggered by a cache invalidation | true |
80
+
81
+ If any one of these conditions is not met, you **must** write.
54
82
 
55
83
  ---
56
84
 
57
85
  ## Startup Workflow
58
-
59
86
  ### 1. Check if tracked files changed
60
87
 
61
88
  **Tier 1:** Call `cache_ctrl_check_files` (no parameters).
62
89
  **Tier 2:** `cache-ctrl check-files`
63
- **Tier 3:** `read` `.ai/local-context-gatherer_cache/context.json`.
64
- - File absent → cold start, proceed to scan.
65
- - File present → check `timestamp`. If older than 1 hour, treat as stale and re-scan. Otherwise treat as fresh.
66
90
 
67
91
  Result interpretation (Tier 1 & 2):
68
92
  - `status: "unchanged"` → tracked files are content-stable; skip re-scan and return cached context.
69
- - `status: "changed"` → at least one tracked file changed; proceed to **delta scan** (read content of `changed_files` + `new_files` only — do not re-read unchanged files).
93
+ - `status: "changed"` → at least one tracked file changed; proceed to **delta scan**.
70
94
  - `status: "unchanged"` with empty `tracked_files` → cold start, proceed to scan.
71
95
 
72
96
  The response also reports:
@@ -76,20 +100,17 @@ The response also reports:
76
100
  > **⚠ Cache is non-exhaustive**: `status: "unchanged"` only confirms that previously-tracked files are content-stable — it does not mean the file set is complete. Always check `new_files` and `deleted_git_files` in the response; if either is non-empty, include those paths in the next write to keep the cache up to date.
77
101
 
78
102
  ### 2. Invalidate before writing (optional)
79
-
80
103
  > Do this only if cache is really outdated and a full rescan is needed. Otherwise just proceed with next step (writing).
81
104
 
82
105
  **Tier 1:** Call `cache_ctrl_invalidate` with `agent: "local"`.
83
106
  **Tier 2:** `cache-ctrl invalidate local`
84
- **Tier 3:** Skip — overwriting the file in step 3 is sufficient.
85
107
 
86
108
  ### 3. Write cache after scanning
87
-
88
- **Always use the write tool/command — never edit the file directly.** Direct writes bypass schema validation and can silently corrupt the cache format.
109
+ **Always use the write tool/command — never write the file directly.** Direct writes bypass schema validation and can silently corrupt the cache format.
89
110
 
90
111
  > **Write is per-path merge**: Submitted `tracked_files` entries replace existing entries for the same paths. Paths not in the submission are preserved. Entries for files deleted from disk are evicted automatically (no agent action needed).
91
112
 
92
- #### Input fields (`content` object)
113
+ #### Input fields (top-level args)
93
114
 
94
115
  | Field | Type | Required | Notes |
95
116
  |---|---|---|---|
@@ -97,7 +118,7 @@ The response also reports:
97
118
  | `description` | `string` | ✅ | One-liner for keyword search |
98
119
  | `tracked_files` | `Array<{ path: string }>` | ✅ | Paths to track; `mtime` and `hash` are auto-computed by the tool |
99
120
  | `global_facts` | `string[]` | optional | Repo-level facts; last-write-wins; see trigger rule below |
100
- | `facts` | `Record<string, string[]>` | optional | Per-file facts keyed by path; per-path merge |
121
+ | `facts` | `Record<string, FileFacts>` | optional | Per-file structured facts keyed by path; per-path merge |
101
122
  | `cache_miss_reason` | `string` | optional | Why the previous cache was discarded |
102
123
 
103
124
  > **Cold start vs incremental**: On first run (no existing cache), submit all relevant files. On subsequent runs, submit only new and changed files — the tool merges them in.
@@ -105,7 +126,6 @@ The response also reports:
105
126
  > **Auto-set by the tool — do not include**: `timestamp` (current UTC), `mtime` (filesystem `lstat()`), and `hash` (SHA-256) per `tracked_files` entry.
106
127
 
107
128
  ### Scope rule for `facts`
108
-
109
129
  Submit `facts` ONLY for files you actually read in this session (i.e., files present in
110
130
  your submitted `tracked_files`). Never reconstruct or re-submit facts for unchanged files —
111
131
  the tool preserves them automatically via per-path merge.
@@ -114,7 +134,6 @@ Submitting a facts key for a path absent from submitted `tracked_files` is a
114
134
  VALIDATION_ERROR and the entire write is rejected.
115
135
 
116
136
  ### Fact completeness
117
-
118
137
  When a file appears in `changed_files` or `new_files`, read the **whole file** before writing
119
138
  facts — not just the diff. A 2-line change does not support a complete re-description of the
120
139
  file, and submitting partial facts for a re-read path **permanently replaces** whatever was
@@ -123,10 +142,30 @@ cached before.
123
142
  Write facts as **enumerable observations** — one entry per notable characteristic (purpose,
124
143
  structure, key dependencies, patterns, constraints, entry points). Do not bundle multiple
125
144
  distinct properties into a single string. A file should have as many fact entries as it has
126
- distinct notable properties, not a prose summary compressed into one or two lines.
145
+ distinct notable properties, up to the 10-item limit.
127
146
 
128
- ### When to submit `global_facts`
147
+ Each per-file `facts` entry MUST include `summary` + `role`, should include `importance`,
148
+ and may include an optional `facts[]` list.
129
149
 
150
+ #### `cache_ctrl_write_local` facts shape example (`FileFacts`)
151
+
152
+ ```json
153
+ {
154
+ "facts": {
155
+ "src/commands/writeLocal.ts": {
156
+ "summary": "Thin router dispatching write calls to writeLocal or writeExternal based on agent type.",
157
+ "role": "implementation",
158
+ "importance": 2,
159
+ "facts": [
160
+ "Delegates to writeLocalCommand for agent=local",
161
+ "Delegates to writeExternalCommand for all other agents"
162
+ ]
163
+ }
164
+ }
165
+ }
166
+ ```
167
+
168
+ ### When to submit `global_facts`
130
169
  Submit `global_facts` only when you re-read at least one structural file in this session:
131
170
  AGENTS.md, install.sh, opencode.json, package.json, *.toml config files.
132
171
 
@@ -134,80 +173,50 @@ If none of those are in `changed_files` or `new_files`, omit `global_facts` from
134
173
  The existing value is preserved automatically.
135
174
 
136
175
  ### Eviction
137
-
138
176
  Facts for files deleted from disk are evicted automatically on the next write — no agent
139
177
  action needed. `global_facts` is never evicted.
140
178
 
141
- #### Tier 1 — `cache_ctrl_write`
179
+ #### Tier 1 — `cache_ctrl_write_local`
142
180
 
143
181
  ```json
144
182
  {
145
- "agent": "local",
146
- "content": {
147
- "topic": "neovim plugin configuration scan",
148
- "description": "Full scan of lua/plugins tree for neovim lazy.nvim setup",
149
- "tracked_files": [
150
- { "path": "lua/plugins/ui/bufferline.lua" },
151
- { "path": "lua/plugins/lsp/nvim-lspconfig.lua" }
152
- ]
153
- }
183
+ "topic": "neovim plugin configuration scan",
184
+ "description": "Full scan of lua/plugins tree for neovim lazy.nvim setup",
185
+ "tracked_files": [
186
+ { "path": "lua/plugins/ui/bufferline.lua" },
187
+ { "path": "lua/plugins/lsp/nvim-lspconfig.lua" }
188
+ ]
154
189
  }
155
190
  ```
156
191
 
157
192
  #### Tier 2 — CLI
158
193
 
159
- `cache-ctrl write local --data '<json>'` — pass the same `content` object as JSON string.
160
-
161
- #### Tier 3
162
-
163
- Not available — there is no direct-file fallback for writes. If neither Tier 1 nor Tier 2 is accessible, request access to one of them.
194
+ `cache-ctrl write-local --data '<json>'` — pass the same top-level fields as a JSON string.
164
195
 
165
196
  ### 4. Confirm cache (optional)
166
-
167
197
  **Tier 1:** Call `cache_ctrl_list` with `agent: "local"` to confirm the entry was written.
168
198
  **Tier 2:** `cache-ctrl list --agent local`
169
- **Tier 3:** `read` `.ai/local-context-gatherer_cache/context.json` and verify `timestamp` is current.
170
199
 
171
- Note: local entries show `is_stale: true` only when `cache_ctrl_check_files` detects actual changes (changed files, new non-ignored files, or deleted files). A freshly-written cache with no subsequent file changes will show `is_stale: false`.
200
+ Note: local entries show `is_stale: true` only when `cache_ctrl_check_files` detects actual changes.
172
201
 
173
202
  ---
174
203
 
175
204
  ## Tool / Command Reference
205
+ | Operation | Tier 1 (built-in) | Tier 2 (CLI) |
206
+ |---|---|---|
207
+ | Detect file changes | `cache_ctrl_check_files` | `cache-ctrl check-files` |
208
+ | Invalidate cache | `cache_ctrl_invalidate` | `cache-ctrl invalidate local` |
209
+ | Confirm written | `cache_ctrl_list` | `cache-ctrl list --agent local` |
210
+ | Read facts (filtered) | `cache_ctrl_inspect` with `filter`, `folder`, or `searchFacts` | `cache-ctrl inspect local context --filter <kw>[,<kw>...]` / `--folder <path>` / `--search-facts <kw>[,<kw>...]` |
211
+ | Read all facts (rare) | `cache_ctrl_inspect` (no filter) | `cache-ctrl inspect local context` |
212
+ | Write cache | `cache_ctrl_write_local` | `cache-ctrl write-local --data '<json>'` |
176
213
 
177
- | Operation | Tier 1 (built-in) | Tier 2 (CLI) | Tier 3 (manual) |
178
- |---|---|---|---|
179
- | Detect file changes | `cache_ctrl_check_files` | `cache-ctrl check-files` | read `context.json`, check `timestamp` |
180
- | Invalidate cache | `cache_ctrl_invalidate` | `cache-ctrl invalidate local` | overwrite file in next step |
181
- | Confirm written | `cache_ctrl_list` | `cache-ctrl list --agent local` | `read` file, check `timestamp` |
182
- | Read facts (filtered) | `cache_ctrl_inspect` with `filter`, `folder`, or `searchFacts` | `cache-ctrl inspect local context --filter <kw>[,<kw>...]` / `--folder <path>` / `--search-facts <kw>[,<kw>...]` | `read` file, extract `facts`/`global_facts` |
183
- | Read all facts (rare) | `cache_ctrl_inspect` (no filter) | `cache-ctrl inspect local context` | `read` file directly |
184
- | Write cache | `cache_ctrl_write` | `cache-ctrl write local --data '<json>'` | ❌ not available |
185
-
186
- > **⚠ Always use at least one filter when reading facts for a specific task.** Three targeting options are available — use the most specific one that fits your task:
187
- >
188
- > | Flag | What it matches | Best for |
189
- > |---|---|---|
190
- > | `--filter <kw>` | File path contains keyword | When you know which files by name/path segment |
191
- > | `--folder <path>` | File path starts with folder prefix (recursive) | When you need all files in a directory subtree |
192
- > | `--search-facts <kw>` | Any fact string contains keyword | When you need files related to a concept, pattern, or API |
193
- >
194
- > The flags are AND-ed when combined. Omit all filters only when you genuinely need facts for the entire repository (rare — e.g. building a full index; only appropriate for ≤ ~20 tracked files). An unfiltered `inspect` on a large repo can return thousands of fact strings.
195
-
196
- > **`tracked_files` is never returned by `inspect` for the local agent.** It is internal operational metadata consumed by `check-files`. It will not appear in any inspect response.
197
-
198
- ## server_time in Responses
199
-
200
- Every `cache_ctrl_*` tool call returns a `server_time` field at the outer JSON level:
201
-
202
- ```json
203
- { "ok": true, "value": { ... }, "server_time": "2026-04-05T12:34:56.789Z" }
204
- ```
214
+ > For `inspect` filter targeting options, see `cache-ctrl-caller`.
205
215
 
206
- Use this to assess how stale stored timestamps are — you do not need `bash` or system access to know the current time.
216
+ > All `cache_ctrl_*` tools return `server_time`; see `cache-ctrl-caller` for freshness-decision usage.
207
217
 
208
218
  ## Cache Location
209
219
 
210
220
  `.ai/local-context-gatherer_cache/context.json` — single file, no per-subject splitting.
211
221
 
212
222
  No time-based TTL for Tier 1/2. Freshness determined by `cache_ctrl_check_files`.
213
- Tier 3 uses a 1-hour `timestamp` TTL as a rough proxy.
@@ -0,0 +1,85 @@
1
+ import path from "node:path";
2
+
3
+ import { extractSymbols } from "./symbolExtractor.js";
4
+
5
+ /** Dependency metadata tracked for one source file node in the graph. */
6
+ export interface GraphNode {
7
+ deps: string[];
8
+ defs: string[];
9
+ }
10
+
11
+ /** Directed dependency graph keyed by absolute source file path. */
12
+ export type DependencyGraph = Map<string, GraphNode>;
13
+
14
+ const RESOLUTION_EXTENSIONS = ["", ".ts", ".tsx", ".js", ".jsx"];
15
+
16
+ function resolveDependencyToKnownFile(depPath: string, knownFiles: Set<string>): string | null {
17
+ for (const extension of RESOLUTION_EXTENSIONS) {
18
+ const candidatePath = `${depPath}${extension}`;
19
+ if (knownFiles.has(candidatePath)) {
20
+ return candidatePath;
21
+ }
22
+ }
23
+
24
+ const basename = path.basename(depPath);
25
+ if (basename.endsWith(".js")) {
26
+ const withoutJs = depPath.slice(0, -3);
27
+ for (const extension of [".ts", ".tsx"]) {
28
+ const candidatePath = `${withoutJs}${extension}`;
29
+ if (knownFiles.has(candidatePath)) {
30
+ return candidatePath;
31
+ }
32
+ }
33
+ }
34
+
35
+ if (basename.endsWith(".jsx")) {
36
+ const withoutJsx = depPath.slice(0, -4);
37
+ const candidatePath = `${withoutJsx}.tsx`;
38
+ if (knownFiles.has(candidatePath)) {
39
+ return candidatePath;
40
+ }
41
+ }
42
+
43
+ return null;
44
+ }
45
+
46
+ /**
47
+ * Build a dependency graph for all source files under repoRoot.
48
+ *
49
+ * @param filePaths - Source file paths to include as graph nodes.
50
+ * @param repoRoot - Repository root for symbol extraction and import resolution.
51
+ * @returns Dependency graph keyed by resolved absolute file paths.
52
+ *
53
+ * Files not in the provided list are filtered from deps.
54
+ */
55
+ export async function buildGraph(filePaths: string[], repoRoot: string): Promise<DependencyGraph> {
56
+ const absoluteFilePaths = filePaths.map((filePath) => path.resolve(filePath));
57
+ const knownFileSet = new Set(absoluteFilePaths);
58
+
59
+ const extractedSymbols = await Promise.all(
60
+ absoluteFilePaths.map(async (filePath) => ({
61
+ filePath,
62
+ symbols: await extractSymbols(filePath, repoRoot),
63
+ })),
64
+ );
65
+
66
+ const graph: DependencyGraph = new Map();
67
+
68
+ for (const { filePath, symbols } of extractedSymbols) {
69
+ const resolvedDependencies = new Set<string>();
70
+
71
+ for (const dependency of symbols.deps) {
72
+ const resolvedDependency = resolveDependencyToKnownFile(dependency, knownFileSet);
73
+ if (resolvedDependency !== null) {
74
+ resolvedDependencies.add(resolvedDependency);
75
+ }
76
+ }
77
+
78
+ graph.set(filePath, {
79
+ deps: [...resolvedDependencies],
80
+ defs: symbols.defs,
81
+ });
82
+ }
83
+
84
+ return graph;
85
+ }
@@ -0,0 +1,167 @@
1
+ import type { DependencyGraph } from "./graphBuilder.js";
2
+
3
/** Tuning options for dependency-graph PageRank computation. */
export interface PageRankOptions {
  /** Damping factor: weight of link-following vs. teleporting (default 0.85). */
  dampingFactor?: number;
  /** Maximum number of power-iteration rounds before stopping (default 100). */
  maxIterations?: number;
  /** Convergence threshold on the summed per-node rank delta per iteration (default 1e-6). */
  tolerance?: number;
  /** Files to use as personalization seeds (boosts their rank and neighbors). */
  seedFiles?: string[];
}
14
+
15
+ /**
16
+ * Compute Personalized PageRank over a dependency graph.
17
+ * Returns a map of file path → rank score (normalized, sums to 1.0).
18
+ * Higher rank = more central / more relevant to seed files.
19
+ */
20
+ export function computePageRank(
21
+ graph: DependencyGraph,
22
+ options?: PageRankOptions,
23
+ ): Map<string, number> {
24
+ const nodes = [...graph.keys()];
25
+ const nodeCount = nodes.length;
26
+
27
+ if (nodeCount === 0) {
28
+ return new Map();
29
+ }
30
+
31
+ const dampingFactor = options?.dampingFactor ?? 0.85;
32
+ const maxIterations = options?.maxIterations ?? 100;
33
+ const tolerance = options?.tolerance ?? 1e-6;
34
+
35
+ const personalization = buildPersonalizationVector(nodes, options?.seedFiles);
36
+ const inLinks = buildInLinks(graph, nodes);
37
+
38
+ let ranks = new Map<string, number>();
39
+ const initialRank = 1 / nodeCount;
40
+ for (const node of nodes) {
41
+ ranks.set(node, initialRank);
42
+ }
43
+
44
+ for (let iteration = 0; iteration < maxIterations; iteration += 1) {
45
+ const danglingRank = computeDanglingRank(graph, ranks);
46
+ const danglingContribution = dampingFactor * (danglingRank / nodeCount);
47
+
48
+ const nextRanks = new Map<string, number>();
49
+ let totalDelta = 0;
50
+
51
+ for (const node of nodes) {
52
+ const incomingNodes = inLinks.get(node) ?? [];
53
+ let incomingContribution = 0;
54
+
55
+ for (const sourceNode of incomingNodes) {
56
+ const sourceRank = ranks.get(sourceNode);
57
+ if (sourceRank === undefined) {
58
+ continue;
59
+ }
60
+
61
+ const outDegree = graph.get(sourceNode)?.deps.length ?? 0;
62
+ if (outDegree > 0) {
63
+ incomingContribution += sourceRank / outDegree;
64
+ }
65
+ }
66
+
67
+ const personalWeight = personalization.get(node) ?? 0;
68
+ const rank = (1 - dampingFactor) * personalWeight + dampingFactor * incomingContribution + danglingContribution;
69
+ nextRanks.set(node, rank);
70
+
71
+ const previousRank = ranks.get(node) ?? 0;
72
+ totalDelta += Math.abs(rank - previousRank);
73
+ }
74
+
75
+ ranks = nextRanks;
76
+
77
+ if (totalDelta < tolerance) {
78
+ break;
79
+ }
80
+ }
81
+
82
+ return normalizeRanks(ranks);
83
+ }
84
+
85
+ function buildInLinks(graph: DependencyGraph, nodes: string[]): Map<string, string[]> {
86
+ const inLinks = new Map<string, string[]>();
87
+ for (const node of nodes) {
88
+ inLinks.set(node, []);
89
+ }
90
+
91
+ for (const [sourceNode, graphNode] of graph.entries()) {
92
+ for (const targetNode of graphNode.deps) {
93
+ if (!inLinks.has(targetNode)) {
94
+ continue;
95
+ }
96
+ const targetInLinks = inLinks.get(targetNode);
97
+ if (targetInLinks !== undefined) {
98
+ targetInLinks.push(sourceNode);
99
+ }
100
+ }
101
+ }
102
+
103
+ return inLinks;
104
+ }
105
+
106
+ function buildPersonalizationVector(nodes: string[], seedFiles: string[] | undefined): Map<string, number> {
107
+ const vector = new Map<string, number>();
108
+
109
+ const seedSet = new Set(seedFiles ?? []);
110
+ const validSeeds = nodes.filter((node) => seedSet.has(node));
111
+
112
+ if (validSeeds.length > 0) {
113
+ const seedWeight = 1 / validSeeds.length;
114
+ for (const node of nodes) {
115
+ vector.set(node, 0);
116
+ }
117
+ for (const seed of validSeeds) {
118
+ vector.set(seed, seedWeight);
119
+ }
120
+ return vector;
121
+ }
122
+
123
+ const uniformWeight = 1 / nodes.length;
124
+ for (const node of nodes) {
125
+ vector.set(node, uniformWeight);
126
+ }
127
+ return vector;
128
+ }
129
+
130
+ function computeDanglingRank(graph: DependencyGraph, ranks: Map<string, number>): number {
131
+ let danglingRank = 0;
132
+
133
+ for (const [node, graphNode] of graph.entries()) {
134
+ if (graphNode.deps.length > 0) {
135
+ continue;
136
+ }
137
+ danglingRank += ranks.get(node) ?? 0;
138
+ }
139
+
140
+ return danglingRank;
141
+ }
142
+
143
+ function normalizeRanks(ranks: Map<string, number>): Map<string, number> {
144
+ let totalRank = 0;
145
+ for (const value of ranks.values()) {
146
+ totalRank += value;
147
+ }
148
+
149
+ if (totalRank <= 0) {
150
+ const normalized = new Map<string, number>();
151
+ const size = ranks.size;
152
+ if (size === 0) {
153
+ return normalized;
154
+ }
155
+ const uniformRank = 1 / size;
156
+ for (const node of ranks.keys()) {
157
+ normalized.set(node, uniformRank);
158
+ }
159
+ return normalized;
160
+ }
161
+
162
+ const normalized = new Map<string, number>();
163
+ for (const [node, value] of ranks.entries()) {
164
+ normalized.set(node, value / totalRank);
165
+ }
166
+ return normalized;
167
+ }