wicked-brain 0.8.1 → 0.8.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "wicked-brain",
3
- "version": "0.8.1",
3
+ "version": "0.8.2",
4
4
  "type": "module",
5
5
  "description": "Digital brain as skills for AI coding CLIs — no vector DB, no embeddings, no infrastructure",
6
6
  "keywords": [
@@ -24,6 +24,44 @@ function escapeFtsQuery(query) {
24
24
  /** Weight factor for backlink count in search ranking (PageRank-lite). */
25
25
  const BACKLINK_WEIGHT = 0.5;
26
26
 
27
+ /**
28
+ * Additive boost applied to FTS5 BM25 score when a query term appears as a
29
+ * substring of the document's path. BM25 scores in SQLite FTS5 are negative
30
+ * (more negative = more relevant), so we SUBTRACT this value to push path
31
+ * matches ahead. Addresses the case where a query term matches a module/file
32
+ * name but the chunk body has only sparse mentions: a dense body chunk in an
33
+ * unrelated file can have a very negative BM25, so a multiplicative boost on
34
+ * the sparse-but-path-matching chunk's weaker score is insufficient. A flat
35
+ * additive bonus larger than the typical BM25 magnitude reliably promotes it.
36
+ */
37
+ const PATH_MATCH_BOOST = 20;
38
+
39
+ /**
40
+ * Overfetch multiplier for path-name boost re-ranking. We pull this many times
41
+ * the requested limit from FTS so that boosted rows below the BM25 cutoff can
42
+ * still be promoted into the top N.
43
+ */
44
+ const PATH_BOOST_OVERFETCH = 5;
45
+
46
+ /** Tokenize a free-text query the same way we want to match against paths:
47
+ * lowercase, split on non-word (underscores preserved). */
48
+ function tokenizeQueryForPath(query) {
49
+ return query
50
+ .toLowerCase()
51
+ .split(/[^\w]+/)
52
+ .filter(Boolean);
53
+ }
54
+
55
+ /** Returns true if any query term appears as a substring of the lowercased path. */
56
+ function pathMatchesQuery(path, terms) {
57
+ if (!path || terms.length === 0) return false;
58
+ const lowered = path.toLowerCase();
59
+ for (const term of terms) {
60
+ if (lowered.includes(term)) return true;
61
+ }
62
+ return false;
63
+ }
64
+
27
65
  /** Weight factor for average backlink confidence in search ranking. */
28
66
  const CONFIDENCE_WEIGHT = 0.3;
29
67
 
@@ -267,7 +305,11 @@ export class SqliteSearch {
267
305
  const sinceClause = since ? `AND d.indexed_at >= ?` : "";
268
306
  const sinceParams = since ? [new Date(since).getTime()] : [];
269
307
 
270
- const rows = this.#db
308
+ // Overfetch so the path-name boost can promote rows that sit below the
309
+ // raw BM25 cutoff. We re-rank in JS, then slice to the requested limit.
310
+ const fetchLimit = (limit + offset) * PATH_BOOST_OVERFETCH;
311
+
312
+ const rawRows = this.#db
271
313
  .prepare(`
272
314
  SELECT
273
315
  d.id,
@@ -277,7 +319,8 @@ export class SqliteSearch {
277
319
  SUBSTR(d.content, 1, 1000) AS raw_content,
278
320
  COALESCE(link_count.cnt, 0) AS backlink_count,
279
321
  COALESCE(ac.cnt, 0) AS access_count,
280
- COALESCE(link_conf.avg_conf, 0.5) AS avg_backlink_confidence
322
+ COALESCE(link_conf.avg_conf, 0.5) AS avg_backlink_confidence,
323
+ (f.rank - (COALESCE(link_count.cnt, 0) * ${BACKLINK_WEIGHT}) - (COALESCE(ac.cnt, 0) * ${SEARCH_ACCESS_WEIGHT}) - (COALESCE(link_conf.avg_conf, 0.5) * ${CONFIDENCE_WEIGHT})) AS composite_score
281
324
  FROM documents_fts f
282
325
  JOIN documents d ON d.id = f.id
283
326
  LEFT JOIN (
@@ -297,15 +340,28 @@ export class SqliteSearch {
297
340
  ) ac ON d.id = ac.doc_id
298
341
  WHERE documents_fts MATCH ?
299
342
  ${sinceClause}
300
- ORDER BY (f.rank - (COALESCE(link_count.cnt, 0) * ${BACKLINK_WEIGHT}) - (COALESCE(ac.cnt, 0) * ${SEARCH_ACCESS_WEIGHT}) - (COALESCE(link_conf.avg_conf, 0.5) * ${CONFIDENCE_WEIGHT}))
301
- LIMIT ? OFFSET ?
343
+ ORDER BY composite_score
344
+ LIMIT ?
302
345
  `)
303
- .all(escaped, ...sinceParams, limit, offset)
304
- .map((row) => {
305
- const body_excerpt = extractBodyExcerpt(row.raw_content ?? "");
306
- delete row.raw_content;
307
- return { ...row, body_excerpt };
308
- });
346
+ .all(escaped, ...sinceParams, fetchLimit);
347
+
348
+ // Path-name boost: if any query term appears in the path, subtract
349
+ // PATH_MATCH_BOOST from the (negative) composite score so it sorts higher.
350
+ const queryTerms = tokenizeQueryForPath(query);
351
+ for (const row of rawRows) {
352
+ row.boosted_score = pathMatchesQuery(row.path, queryTerms)
353
+ ? row.composite_score - PATH_MATCH_BOOST
354
+ : row.composite_score;
355
+ }
356
+ rawRows.sort((a, b) => a.boosted_score - b.boosted_score);
357
+
358
+ const rows = rawRows.slice(offset, offset + limit).map((row) => {
359
+ const body_excerpt = extractBodyExcerpt(row.raw_content ?? "");
360
+ delete row.raw_content;
361
+ delete row.composite_score;
362
+ delete row.boosted_score;
363
+ return { ...row, body_excerpt };
364
+ });
309
365
 
310
366
  const countRow = this.#db
311
367
  .prepare(
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "wicked-brain-server",
3
- "version": "0.8.1",
3
+ "version": "0.8.2",
4
4
  "type": "module",
5
5
  "description": "SQLite FTS5 search server for wicked-brain digital knowledge bases",
6
6
  "keywords": [
@@ -323,12 +323,14 @@ async function ingestFile(filePath) {
323
323
  // Note: These keywords are for FTS indexing. The LLM-based ingest
324
324
  // generates richer synonym-expanded tags in the contains: field.
325
325
  // This batch script extracts basic keywords only.
326
+ // Replace non-word chars with space (not empty) so adjacent tokens don't glue.
327
+ // Preserve underscores so snake_case identifiers survive. Floor at 4 chars so
328
+ // short domain terms like 'task', 'hook', 'crew' aren't dropped.
329
+ const cleaned = chunks[i].toLowerCase().replace(/[^a-z0-9_\s-]/g, " ");
330
+ const tokens = cleaned.split(/\s+/).filter(Boolean);
326
331
  const keywords = [...new Set(
327
- chunks[i].toLowerCase()
328
- .replace(/[^a-z0-9\s-]/g, "")
329
- .split(/\s+/)
330
- .filter(w => w.length > 5 && !STOP.has(w))
331
- )].slice(0, 10);
332
+ tokens.filter(w => w.length >= 4 && !STOP.has(w))
333
+ )].slice(0, 12);
332
334
 
333
335
  const frontmatter = [
334
336
  "---",