grepmax 0.14.0 → 0.14.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/dist/lib/daemon/daemon.js +19 -2
- package/dist/lib/index/batch-processor.js +19 -3
- package/dist/lib/index/syncer.js +13 -4
- package/dist/lib/store/vector-db.js +16 -0
- package/package.json +1 -1
- package/plugins/grepmax/.claude-plugin/plugin.json +1 -1
- package/plugins/grepmax/hooks/start.js +1 -1
- package/plugins/grepmax/hooks/subagent-start.js +1 -1
- package/plugins/grepmax/skills/grepmax/SKILL.md +1 -1
package/README.md
CHANGED
|
@@ -118,8 +118,7 @@ Plugins auto-update when you run `npm install -g grepmax@latest` — no need to
|
|
|
118
118
|
|
|
119
119
|
| Tool | Description |
|
|
120
120
|
| --- | --- |
|
|
121
|
-
| `semantic_search` | Search by meaning. 16+ params: query, limit, role, language, scope (project/all), etc. |
|
|
122
|
-
| `search_all` | Cross-project search. Same params + project filtering. |
|
|
121
|
+
| `semantic_search` | Search by meaning. 16+ params: query, limit, role, language, scope (project/all), project filtering, etc. |
|
|
123
122
|
| `code_skeleton` | File structure with bodies collapsed (~4x fewer tokens). |
|
|
124
123
|
| `trace_calls` | Call graph: importers, callers (multi-hop), callees with file:line. |
|
|
125
124
|
| `extract_symbol` | Complete function/class body by symbol name. |
|
|
@@ -287,6 +286,7 @@ fixtures/
|
|
|
287
286
|
gmax doctor # Check health
|
|
288
287
|
gmax doctor --fix # Auto-repair (compact, prune, fix locks)
|
|
289
288
|
gmax doctor --agent # Machine-readable health output
|
|
289
|
+
gmax index # Reindex (auto-detects and repairs cache/vector mismatches)
|
|
290
290
|
gmax index --reset # Full reindex from scratch
|
|
291
291
|
gmax watch stop && gmax watch --daemon -b # Restart daemon
|
|
292
292
|
```
|
|
package/dist/lib/daemon/daemon.js
CHANGED
|
@@ -294,6 +294,9 @@ class Daemon {
|
|
|
294
294
|
const { walk } = yield Promise.resolve().then(() => __importStar(require("../index/walker")));
|
|
295
295
|
const { INDEXABLE_EXTENSIONS } = yield Promise.resolve().then(() => __importStar(require("../../config")));
|
|
296
296
|
const { isFileCached } = yield Promise.resolve().then(() => __importStar(require("../utils/cache-check")));
|
|
297
|
+
const rootPrefix = root.endsWith("/") ? root : `${root}/`;
|
|
298
|
+
const cachedPaths = yield this.metaCache.getKeysWithPrefix(rootPrefix);
|
|
299
|
+
const seenPaths = new Set();
|
|
297
300
|
let queued = 0;
|
|
298
301
|
try {
|
|
299
302
|
for (var _d = true, _e = __asyncValues(walk(root, {
|
|
@@ -307,6 +310,7 @@ class Daemon {
|
|
|
307
310
|
const bn = path.basename(absPath).toLowerCase();
|
|
308
311
|
if (!INDEXABLE_EXTENSIONS.has(ext) && !INDEXABLE_EXTENSIONS.has(bn))
|
|
309
312
|
continue;
|
|
313
|
+
seenPaths.add(absPath);
|
|
310
314
|
try {
|
|
311
315
|
const stats = yield fs.promises.stat(absPath);
|
|
312
316
|
const cached = this.metaCache.get(absPath);
|
|
@@ -325,8 +329,21 @@ class Daemon {
|
|
|
325
329
|
}
|
|
326
330
|
finally { if (e_1) throw e_1.error; }
|
|
327
331
|
}
|
|
328
|
-
|
|
329
|
-
|
|
332
|
+
// Purge files deleted while daemon was offline
|
|
333
|
+
let purged = 0;
|
|
334
|
+
for (const cachedPath of cachedPaths) {
|
|
335
|
+
if (!seenPaths.has(cachedPath)) {
|
|
336
|
+
processor.handleFileEvent("unlink", cachedPath);
|
|
337
|
+
purged++;
|
|
338
|
+
}
|
|
339
|
+
}
|
|
340
|
+
if (queued > 0 || purged > 0) {
|
|
341
|
+
const parts = [];
|
|
342
|
+
if (queued > 0)
|
|
343
|
+
parts.push(`${queued} changed`);
|
|
344
|
+
if (purged > 0)
|
|
345
|
+
parts.push(`${purged} deleted`);
|
|
346
|
+
console.log(`[daemon:${path.basename(root)}] Catchup: ${parts.join(", ")} file(s) while offline`);
|
|
330
347
|
}
|
|
331
348
|
});
|
|
332
349
|
}
|
|
package/dist/lib/index/batch-processor.js
CHANGED
|
@@ -53,6 +53,7 @@ const pool_1 = require("../workers/pool");
|
|
|
53
53
|
const watcher_batch_1 = require("./watcher-batch");
|
|
54
54
|
const DEBOUNCE_MS = 2000;
|
|
55
55
|
const MAX_RETRIES = 5;
|
|
56
|
+
const MAX_BATCH_SIZE = 50;
|
|
56
57
|
class ProjectBatchProcessor {
|
|
57
58
|
constructor(opts) {
|
|
58
59
|
this.pending = new Map();
|
|
@@ -112,12 +113,22 @@ class ProjectBatchProcessor {
|
|
|
112
113
|
(0, logger_1.log)(this.wtag, `Batch timed out after ${this.batchTimeoutMs}ms, aborting`);
|
|
113
114
|
batchAc.abort();
|
|
114
115
|
}, this.batchTimeoutMs);
|
|
115
|
-
const batch = new Map(
|
|
116
|
-
|
|
116
|
+
const batch = new Map();
|
|
117
|
+
let taken = 0;
|
|
118
|
+
for (const [absPath, event] of this.pending) {
|
|
119
|
+
batch.set(absPath, event);
|
|
120
|
+
taken++;
|
|
121
|
+
if (taken >= MAX_BATCH_SIZE)
|
|
122
|
+
break;
|
|
123
|
+
}
|
|
124
|
+
for (const key of batch.keys()) {
|
|
125
|
+
this.pending.delete(key);
|
|
126
|
+
}
|
|
117
127
|
const filenames = [...batch.keys()].map((p) => path.basename(p));
|
|
118
128
|
(0, logger_1.log)(this.wtag, `Processing ${batch.size} changed files: ${filenames.join(", ")}`);
|
|
119
129
|
const start = Date.now();
|
|
120
130
|
let reindexed = 0;
|
|
131
|
+
let processed = 0;
|
|
121
132
|
try {
|
|
122
133
|
// No lock needed — daemon is the single writer to LanceDB/MetaCache
|
|
123
134
|
const pool = (0, pool_1.getWorkerPool)();
|
|
@@ -130,6 +141,10 @@ class ProjectBatchProcessor {
|
|
|
130
141
|
if (batchAc.signal.aborted)
|
|
131
142
|
break;
|
|
132
143
|
attempted.add(absPath);
|
|
144
|
+
processed++;
|
|
145
|
+
if (batch.size > 10 && (processed % 10 === 0 || processed === batch.size)) {
|
|
146
|
+
(0, logger_1.log)(this.wtag, `Progress: ${processed}/${batch.size} (${reindexed} reindexed)`);
|
|
147
|
+
}
|
|
133
148
|
if (event === "unlink") {
|
|
134
149
|
deletes.push(absPath);
|
|
135
150
|
metaDeletes.push(absPath);
|
|
@@ -219,7 +234,8 @@ class ProjectBatchProcessor {
|
|
|
219
234
|
if (reindexed > 0) {
|
|
220
235
|
(_a = this.onReindex) === null || _a === void 0 ? void 0 : _a.call(this, reindexed, duration);
|
|
221
236
|
}
|
|
222
|
-
|
|
237
|
+
const remaining = this.pending.size;
|
|
238
|
+
(0, logger_1.log)(this.wtag, `Batch complete: ${batch.size} files, ${reindexed} reindexed (${(duration / 1000).toFixed(1)}s)${remaining > 0 ? ` — ${remaining} remaining` : ""}`);
|
|
223
239
|
for (const absPath of batch.keys()) {
|
|
224
240
|
this.retryCount.delete(absPath);
|
|
225
241
|
}
|
package/dist/lib/index/syncer.js
CHANGED
|
@@ -234,16 +234,25 @@ function initialSync(options) {
|
|
|
234
234
|
// Scope checks to this project's paths only
|
|
235
235
|
const projectKeys = yield mc.getKeysWithPrefix(rootPrefix);
|
|
236
236
|
(0, logger_1.log)("index", `Cached files: ${projectKeys.size}`);
|
|
237
|
-
// Coherence check: if LMDB has entries
|
|
238
|
-
//
|
|
239
|
-
//
|
|
240
|
-
|
|
237
|
+
// Coherence check: if LMDB has substantially more entries than LanceDB
|
|
238
|
+
// has distinct files, the vector store is out of sync (e.g. batch
|
|
239
|
+
// timeouts wrote MetaCache but not vectors, compaction failure, etc.).
|
|
240
|
+
// Clear the stale cache entries so those files get re-embedded.
|
|
241
|
+
const vectorFileCount = yield vectorDb.countDistinctFilesForPath(rootPrefix);
|
|
242
|
+
if (projectKeys.size > 0 && vectorFileCount === 0) {
|
|
241
243
|
(0, logger_1.log)("index", `Stale cache detected: ${projectKeys.size} cached files but no vectors — clearing cache`);
|
|
242
244
|
for (const key of projectKeys) {
|
|
243
245
|
mc.delete(key);
|
|
244
246
|
}
|
|
245
247
|
projectKeys.clear();
|
|
246
248
|
}
|
|
249
|
+
else if (projectKeys.size > 0 && vectorFileCount < projectKeys.size * 0.8) {
|
|
250
|
+
(0, logger_1.log)("index", `Partial cache detected: ${vectorFileCount} files in vectors vs ${projectKeys.size} in cache — clearing cache to re-embed missing files`);
|
|
251
|
+
for (const key of projectKeys) {
|
|
252
|
+
mc.delete(key);
|
|
253
|
+
}
|
|
254
|
+
projectKeys.clear();
|
|
255
|
+
}
|
|
247
256
|
const modelChanged = (0, index_config_1.checkModelMismatch)(paths.configPath);
|
|
248
257
|
if (reset || modelChanged) {
|
|
249
258
|
if (modelChanged) {
|
|
package/dist/lib/store/vector-db.js
CHANGED
|
@@ -442,6 +442,22 @@ class VectorDB {
|
|
|
442
442
|
return rows.length > 0;
|
|
443
443
|
});
|
|
444
444
|
}
|
|
445
|
+
countDistinctFilesForPath(pathPrefix) {
|
|
446
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
447
|
+
const table = yield this.ensureTable();
|
|
448
|
+
const prefix = pathPrefix.endsWith("/") ? pathPrefix : `${pathPrefix}/`;
|
|
449
|
+
const rows = yield table
|
|
450
|
+
.query()
|
|
451
|
+
.select(["path"])
|
|
452
|
+
.where(`path LIKE '${(0, filter_builder_1.escapeSqlString)(prefix)}%'`)
|
|
453
|
+
.toArray();
|
|
454
|
+
const unique = new Set();
|
|
455
|
+
for (const r of rows) {
|
|
456
|
+
unique.add(String(r.path));
|
|
457
|
+
}
|
|
458
|
+
return unique.size;
|
|
459
|
+
});
|
|
460
|
+
}
|
|
445
461
|
getStats() {
|
|
446
462
|
return __awaiter(this, void 0, void 0, function* () {
|
|
447
463
|
const table = yield this.ensureTable();
|
package/package.json
CHANGED
package/plugins/grepmax/hooks/start.js
CHANGED
|
@@ -126,7 +126,7 @@ async function main() {
|
|
|
126
126
|
hookSpecificOutput: {
|
|
127
127
|
hookEventName: "SessionStart",
|
|
128
128
|
additionalContext:
|
|
129
|
-
'gmax ready. Use Bash(gmax "query" --agent) for search (one line per result, 89% fewer tokens). Bash(gmax extract <symbol>) for full function body. Bash(gmax peek <symbol>) for quick overview (sig+callers+callees). Bash(gmax trace <symbol>) for call graphs. Bash(gmax skeleton <path>) for structure. Bash(gmax diff [ref]) for git changes. Bash(gmax test <symbol>) for test coverage. Bash(gmax impact <symbol>) for blast radius. Bash(gmax similar <symbol>) for similar code. Bash(gmax context "topic" --budget 4000) for topic summary. Bash(gmax status) to check indexed projects. --agent flag works on search, trace, symbols, related, recent, status, project, extract, peek, diff, test, impact, similar. If search says "not added yet", run Bash(gmax add).',
|
|
129
|
+
'gmax ready. Use Bash(gmax "query" --agent) for search (one line per result, 89% fewer tokens). Bash(gmax extract <symbol>) for full function body. Bash(gmax peek <symbol>) for quick overview (sig+callers+callees). Bash(gmax trace <symbol>) for call graphs. Bash(gmax skeleton <path>) for structure. Bash(gmax diff [ref]) for git changes. Bash(gmax test <symbol>) for test coverage. Bash(gmax impact <symbol>) for blast radius. Bash(gmax similar <symbol>) for similar code. Bash(gmax context "topic" --budget 4000) for topic summary. Bash(gmax status) to check indexed projects. --agent flag works on search, trace, symbols, related, recent, status, project, extract, peek, diff, test, impact, similar. If search says "not added yet", run Bash(gmax add). If results look stale, run Bash(gmax index) to repair.',
|
|
130
130
|
},
|
|
131
131
|
};
|
|
132
132
|
process.stdout.write(JSON.stringify(response));
|
|
package/plugins/grepmax/hooks/subagent-start.js
CHANGED
|
@@ -52,7 +52,7 @@ async function main() {
|
|
|
52
52
|
hookSpecificOutput: {
|
|
53
53
|
hookEventName: "SubagentStart",
|
|
54
54
|
additionalContext:
|
|
55
|
-
'gmax semantic search is available. Use Bash(gmax "query" --agent) for concept search, Bash(gmax peek <symbol>) for overview, Bash(gmax extract <symbol>) for full body, Bash(gmax trace <symbol>) for call graph.',
|
|
55
|
+
'gmax semantic search is available. Use Bash(gmax "query" --agent) for concept search, Bash(gmax peek <symbol>) for overview, Bash(gmax extract <symbol>) for full body, Bash(gmax trace <symbol>) for call graph. If results look stale, run Bash(gmax index) to repair.',
|
|
56
56
|
},
|
|
57
57
|
};
|
|
58
58
|
process.stdout.write(JSON.stringify(response));
|
|
package/plugins/grepmax/skills/grepmax/SKILL.md
CHANGED
|
@@ -229,4 +229,4 @@ gmax llm on/off/start/stop/status # manage local LLM server
|
|
|
229
229
|
|
|
230
230
|
1. Check if the project is added: `Bash(gmax status)`
|
|
231
231
|
2. If not added: `Bash(gmax add)`
|
|
232
|
-
3. If stale: `Bash(gmax index)` to
|
|
232
|
+
3. If stale: `Bash(gmax index)` to re-index (auto-detects and repairs cache/vector mismatches)
|