nano-brain 2026.8.2 → 2026.8.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +16 -4
- package/package.json +1 -1
- package/src/bench/runner.ts +19 -2
- package/src/cli/utils.ts +2 -0
- package/src/jobs/watcher.ts +1 -1
package/README.md
CHANGED
|
@@ -737,13 +737,25 @@ nano-brain cache stats # Show cache statistics
|
|
|
737
737
|
|
|
738
738
|
### Benchmarking
|
|
739
739
|
|
|
740
|
+
Measures search quality (P@5, R@10, MRR) and latency across FTS, vector, and hybrid modes.
|
|
741
|
+
|
|
740
742
|
```bash
|
|
741
|
-
nano-brain bench
|
|
742
|
-
nano-brain bench --
|
|
743
|
-
nano-brain bench
|
|
744
|
-
|
|
743
|
+
nano-brain bench run # Run benchmark suite (scale-100)
|
|
744
|
+
nano-brain bench run --scale 500 # Larger corpus
|
|
745
|
+
nano-brain bench compare new.json baseline.json # Regression check
|
|
746
|
+
```
|
|
747
|
+
|
|
748
|
+
Current results on v2026.8.2 (100 docs, Ollama local):
|
|
749
|
+
|
|
750
|
+
```
|
|
751
|
+
Mode P@5 R@10 MRR Latency (p50)
|
|
752
|
+
FTS 0.975 0.985 1.000 1ms
|
|
753
|
+
Vector 0.875 0.925 1.000 29ms
|
|
754
|
+
Hybrid 0.835 0.970 1.000 34ms
|
|
745
755
|
```
|
|
746
756
|
|
|
757
|
+
See [`benchmarks/README.md`](benchmarks/README.md) for full explanation of metrics, use cases, and regression detection.
|
|
758
|
+
|
|
747
759
|
### Logging
|
|
748
760
|
|
|
749
761
|
```bash
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "nano-brain",
|
|
3
|
-
"version": "2026.8.2",
|
|
3
|
+
"version": "2026.8.3",
|
|
4
4
|
"description": "Persistent memory and code intelligence for AI coding agents. Local MCP server with self-learning hybrid search (BM25 + vector + knowledge graph + LLM reranking), automatic session ingestion, codebase indexing, and 22 tools. Learns your preferences over time. Works with OpenCode, Claude, Cursor, Windsurf, and any MCP client.",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"bin": {
|
package/src/bench/runner.ts
CHANGED
|
@@ -212,9 +212,21 @@ async function measureQuality(
|
|
|
212
212
|
|
|
213
213
|
async function insertDocs(
|
|
214
214
|
dbPath: string,
|
|
215
|
-
fixturesDir: string
|
|
215
|
+
fixturesDir: string,
|
|
216
|
+
ollamaUrl: string | null
|
|
216
217
|
): Promise<LatencyStats> {
|
|
217
218
|
const store = createStore(dbPath);
|
|
219
|
+
store.ensureVecTable(768);
|
|
220
|
+
|
|
221
|
+
let embedder: { embed(text: string): Promise<{ embedding: number[] }>; dispose(): void } | null = null;
|
|
222
|
+
if (ollamaUrl) {
|
|
223
|
+
try {
|
|
224
|
+
embedder = await createEmbeddingProvider({ embeddingConfig: { url: ollamaUrl } });
|
|
225
|
+
} catch {
|
|
226
|
+
embedder = null;
|
|
227
|
+
}
|
|
228
|
+
}
|
|
229
|
+
|
|
218
230
|
const docsDir = path.join(fixturesDir, 'docs');
|
|
219
231
|
const docFiles = fs.readdirSync(docsDir).filter(f => f.endsWith('.md'));
|
|
220
232
|
const insertTimes: number[] = [];
|
|
@@ -240,9 +252,14 @@ async function insertDocs(
|
|
|
240
252
|
active: true,
|
|
241
253
|
projectHash,
|
|
242
254
|
});
|
|
255
|
+
if (embedder) {
|
|
256
|
+
const { embedding } = await embedder.embed(content);
|
|
257
|
+
store.insertEmbedding(hash, 0, 0, embedding, 'nomic-embed-text');
|
|
258
|
+
}
|
|
243
259
|
insertTimes.push(Date.now() - t0);
|
|
244
260
|
}
|
|
245
261
|
} finally {
|
|
262
|
+
embedder?.dispose();
|
|
246
263
|
store.close();
|
|
247
264
|
}
|
|
248
265
|
|
|
@@ -427,7 +444,7 @@ export async function runBenchmarkSuite(opts: RunOptions): Promise<BenchResult>
|
|
|
427
444
|
}
|
|
428
445
|
|
|
429
446
|
console.log(' Inserting docs...');
|
|
430
|
-
const insertLatency = await insertDocs(testDbPath, fixturesDir);
|
|
447
|
+
const insertLatency = await insertDocs(testDbPath, fixturesDir, ollamaUrl);
|
|
431
448
|
|
|
432
449
|
console.log(' Running quality metrics...');
|
|
433
450
|
const { quality, latency: queryLatency } = await measureQuality(testDbPath, groundTruth, ollamaUrl);
|
package/src/cli/utils.ts
CHANGED
|
@@ -54,6 +54,8 @@ export function getHttpPort(): number {
|
|
|
54
54
|
}
|
|
55
55
|
|
|
56
56
|
export function resolveOpenCodeStorageDir(): string {
|
|
57
|
+
// Explicit override (useful in Docker where homedir != host homedir)
|
|
58
|
+
if (process.env.OPENCODE_STORAGE_DIR) return process.env.OPENCODE_STORAGE_DIR;
|
|
57
59
|
// XDG path (Linux): ~/.local/share/opencode/storage
|
|
58
60
|
const xdgData = process.env.XDG_DATA_HOME || path.join(os.homedir(), '.local', 'share');
|
|
59
61
|
const xdgPath = path.join(xdgData, 'opencode', 'storage');
|
package/src/jobs/watcher.ts
CHANGED
|
@@ -129,7 +129,7 @@ export function startWatcher(options: WatcherOptions): Watcher {
|
|
|
129
129
|
pollIntervalMs = 300000,
|
|
130
130
|
sessionPollMs = 120000,
|
|
131
131
|
embedIntervalMs = 60000,
|
|
132
|
-
sessionStorageDir = path.join(os.homedir(), '.local/share/opencode/storage'),
|
|
132
|
+
sessionStorageDir = process.env.OPENCODE_STORAGE_DIR ?? path.join(os.homedir(), '.local/share/opencode/storage'),
|
|
133
133
|
outputDir = path.join(os.homedir(), '.nano-brain/sessions'),
|
|
134
134
|
storageConfig,
|
|
135
135
|
dbPath,
|