@tai-io/codesearch 2026.313.1655 → 2026.313.2014

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,3 +1,3 @@
1
- export declare const BUILD_VERSION = "2026.313.1655";
2
- export declare const BUILD_TIMESTAMP = "2026-03-13T16:55:38.141Z";
1
+ export declare const BUILD_VERSION = "2026.313.2014";
2
+ export declare const BUILD_TIMESTAMP = "2026-03-13T20:14:47.248Z";
3
3
  //# sourceMappingURL=build-info.d.ts.map
@@ -1,4 +1,4 @@
1
1
  // Auto-generated by scripts/generate-build-info.ts — do not edit
2
- export const BUILD_VERSION = '2026.313.1655';
3
- export const BUILD_TIMESTAMP = '2026-03-13T16:55:38.141Z';
2
+ export const BUILD_VERSION = '2026.313.2014';
3
+ export const BUILD_TIMESTAMP = '2026-03-13T20:14:47.248Z';
4
4
  //# sourceMappingURL=build-info.js.map
@@ -12,7 +12,7 @@ export async function searchCode(rootPath, query, embedding, vectordb, options =
12
12
  }
13
13
  const limit = Math.min(Math.max(1, options.limit ?? DEFAULT_LIMIT), MAX_LIMIT);
14
14
  const queryVector = await embedding.embed(query);
15
- const overFetchLimit = Math.min(limit * 5, MAX_LIMIT);
15
+ const overFetchLimit = Math.min(limit * 3, MAX_LIMIT);
16
16
  const results = await vectordb.search(collectionName, {
17
17
  queryVector,
18
18
  queryText: query,
@@ -64,13 +64,12 @@ export function formatCompactResults(results, query, rootPath) {
64
64
  }
65
65
  const lines = [
66
66
  `Found ${results.length} result(s) for "${query}" in ${rootPath}:\n`,
67
- '| # | File | Lines | Score | ~Tokens |',
68
- '|---|------|-------|-------|---------|',
67
+ '| # | File | Lines | Score |',
68
+ '|---|------|-------|-------|',
69
69
  ];
70
70
  for (let i = 0; i < results.length; i++) {
71
71
  const r = results[i];
72
- const tokens = Math.ceil(r.content.length / 4);
73
- lines.push(`| ${i + 1} | \`${r.relativePath}\` | ${r.startLine}-${r.endLine} | ${r.score.toFixed(2)} | ~${tokens} |`);
72
+ lines.push(`| ${i + 1} | \`${r.relativePath}\` | ${r.startLine}-${r.endLine} | ${r.score.toFixed(2)} |`);
74
73
  }
75
74
  lines.push('');
76
75
  lines.push('Use the Read tool to view full code for specific results.');
@@ -81,14 +80,22 @@ export function formatSearchResults(results, query, rootPath) {
81
80
  return `No results found for "${query}" in ${rootPath}.`;
82
81
  }
83
82
  const lines = [`Found ${results.length} result(s) for "${query}" in ${rootPath}:\n`];
83
+ const MAX_CONTENT_LINES = 20;
84
84
  for (let i = 0; i < results.length; i++) {
85
85
  const r = results[i];
86
86
  lines.push(`### Result ${i + 1} of ${results.length}`);
87
- lines.push(`**File:** \`${r.relativePath}\` (lines ${r.startLine}-${r.endLine})`);
88
- lines.push(`**Language:** ${r.language} | **Score:** ${r.score.toFixed(4)}`);
87
+ lines.push(`**File:** \`${r.relativePath}\` (lines ${r.startLine}-${r.endLine}) | Score: ${r.score.toFixed(2)}`);
89
88
  const safeLang = r.language.replace(/[^a-zA-Z0-9_+-]/g, '');
89
+ const contentLines = r.content.split('\n');
90
+ const truncated = contentLines.length > MAX_CONTENT_LINES;
91
+ const displayContent = truncated
92
+ ? contentLines.slice(0, MAX_CONTENT_LINES).join('\n')
93
+ : r.content;
90
94
  lines.push('```' + safeLang);
91
- lines.push(r.content);
95
+ lines.push(displayContent);
96
+ if (truncated) {
97
+ lines.push(`// ... ${contentLines.length - MAX_CONTENT_LINES} more lines`);
98
+ }
92
99
  lines.push('```');
93
100
  lines.push('');
94
101
  }
@@ -268,7 +268,6 @@ export class SqliteVectorDB {
268
268
  async search(name, params) {
269
269
  const safe = sanitizeName(name);
270
270
  try {
271
- const fetchLimit = params.limit * 2;
272
271
  // Extension filter clause
273
272
  let extClause = '';
274
273
  let extParams = [];
@@ -277,26 +276,35 @@ export class SqliteVectorDB {
277
276
  extClause = `AND c.file_extension IN (${placeholders})`;
278
277
  extParams = params.extensionFilter;
279
278
  }
280
- // Dense vector search
279
+ // Dense vector search — JOIN chunks to get metadata in one pass
281
280
  const denseRows = this.db
282
- .prepare(`SELECT v.id, v.distance
281
+ .prepare(`SELECT v.id, v.distance,
282
+ c.content, c.relative_path, c.start_line, c.end_line,
283
+ c.file_extension, c.language, c.file_category
283
284
  FROM "${safe}_vec" v
284
285
  JOIN chunks c ON c.id = v.id AND c.collection = ?
285
286
  WHERE v.vector MATCH ?
286
287
  AND k = ?
287
288
  ${extClause}
288
289
  ORDER BY v.distance ASC`)
289
- .all(safe, vecToBuffer(params.queryVector), fetchLimit, ...extParams);
290
- // Convert distance to similarity score (cosine distance → similarity)
290
+ .all(safe, vecToBuffer(params.queryVector), params.limit, ...extParams);
291
+ // Convert distance to similarity score and attach payload
291
292
  const denseResults = denseRows.map((row) => ({
292
293
  id: row.id,
293
294
  score: 1 - row.distance,
294
- payload: null,
295
+ payload: {
296
+ content: row.content,
297
+ relativePath: row.relative_path,
298
+ startLine: row.start_line,
299
+ endLine: row.end_line,
300
+ fileExtension: row.file_extension,
301
+ language: row.language,
302
+ fileCategory: row.file_category ?? '',
303
+ },
295
304
  }));
296
- // FTS search
305
+ // FTS search — JOIN chunks to get metadata in one pass
297
306
  let textResults = [];
298
307
  if (params.queryText) {
299
- // Tokenize query for FTS5 — escape special chars, join with OR
300
308
  const ftsQuery = params.queryText
301
309
  .split(/\s+/)
302
310
  .filter((t) => t.length > 0)
@@ -304,89 +312,34 @@ export class SqliteVectorDB {
304
312
  .join(' OR ');
305
313
  if (ftsQuery) {
306
314
  const ftsRows = this.db
307
- .prepare(`SELECT c.id, c.content
315
+ .prepare(`SELECT c.id, c.content, c.relative_path, c.start_line, c.end_line,
316
+ c.file_extension, c.language, c.file_category
308
317
  FROM "${safe}_fts" f
309
318
  JOIN chunks c ON c.rowid = f.rowid AND c.collection = ?
310
319
  WHERE "${safe}_fts" MATCH ?
311
320
  ${extClause}
312
321
  LIMIT ?`)
313
- .all(safe, ftsQuery, ...extParams, fetchLimit);
322
+ .all(safe, ftsQuery, ...extParams, params.limit);
314
323
  const pointsForRank = ftsRows.map((r) => ({
315
324
  id: r.id,
316
- payload: { content: r.content },
325
+ payload: {
326
+ content: r.content,
327
+ relativePath: r.relative_path,
328
+ startLine: r.start_line,
329
+ endLine: r.end_line,
330
+ fileExtension: r.file_extension,
331
+ language: r.language,
332
+ fileCategory: r.file_category ?? '',
333
+ },
317
334
  }));
318
335
  textResults = rankByTermFrequency(pointsForRank, params.queryText);
319
336
  }
320
337
  }
321
- // If we have both dense and text results, fuse them
322
338
  if (denseResults.length === 0 && textResults.length === 0) {
323
339
  return [];
324
340
  }
325
- const fused = reciprocalRankFusion(denseResults, textResults, params.limit * 2);
326
- // Fetch full metadata for the fused result IDs
327
- const resultIds = fused.map((r) => r.relativePath || '').length
328
- ? fused
329
- : [];
330
- if (fused.length === 0)
331
- return [];
332
- // Get IDs from fused results — they're in the SearchResult format from RRF
333
- // but we need to re-fetch from chunks for full metadata
334
- // RRF returns SearchResult[] which has relativePath etc from payload extraction
335
- // We need the chunk IDs — let's collect from both dense and text results
336
- const allIds = new Set();
337
- for (const d of denseResults)
338
- allIds.add(String(d.id));
339
- for (const t of textResults)
340
- allIds.add(String(t.id));
341
- if (allIds.size === 0)
342
- return [];
343
- const idList = [...allIds];
344
- const placeholders = idList.map(() => '?').join(', ');
345
- const metadataRows = this.db
346
- .prepare(`SELECT id, content, relative_path, start_line, end_line,
347
- file_extension, language, file_category, indexed_at
348
- FROM chunks
349
- WHERE id IN (${placeholders}) AND collection = ?`)
350
- .all(...idList, safe);
351
- const metaMap = new Map(metadataRows.map((r) => [r.id, r]));
352
- // Now rebuild dense and text results with full payloads for RRF
353
- const denseWithPayload = denseResults
354
- .filter((d) => metaMap.has(String(d.id)))
355
- .map((d) => {
356
- const m = metaMap.get(String(d.id));
357
- return {
358
- id: d.id,
359
- score: d.score,
360
- payload: {
361
- content: m.content,
362
- relativePath: m.relative_path,
363
- startLine: m.start_line,
364
- endLine: m.end_line,
365
- fileExtension: m.file_extension,
366
- language: m.language,
367
- fileCategory: m.file_category ?? '',
368
- },
369
- };
370
- });
371
- const textWithPayload = textResults
372
- .filter((t) => metaMap.has(String(t.id)))
373
- .map((t) => {
374
- const m = metaMap.get(String(t.id));
375
- return {
376
- id: t.id,
377
- rawScore: t.rawScore,
378
- payload: {
379
- content: m.content,
380
- relativePath: m.relative_path,
381
- startLine: m.start_line,
382
- endLine: m.end_line,
383
- fileExtension: m.file_extension,
384
- language: m.language,
385
- fileCategory: m.file_category ?? '',
386
- },
387
- };
388
- });
389
- return reciprocalRankFusion(denseWithPayload, textWithPayload, params.limit);
341
+ // Single RRF pass — metadata already attached
342
+ return reciprocalRankFusion(denseResults, textResults, params.limit);
390
343
  }
391
344
  catch (err) {
392
345
  throw new VectorDBError(`Search failed in collection "${name}"`, err);
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@tai-io/codesearch",
3
- "version": "2026.313.1655",
3
+ "version": "2026.313.2014",
4
4
  "description": "Semantic code search MCP server for Claude Code",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",