codebasesearch 0.1.8 → 0.1.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/mcp.js CHANGED
@@ -22,7 +22,8 @@ import { Server } from '@modelcontextprotocol/sdk/server/index.js';
22
22
  import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
23
23
  import { CallToolRequestSchema, ListToolsRequestSchema } from '@modelcontextprotocol/sdk/types.js';
24
24
  import { cwd } from 'process';
25
- import { join, existsSync, readFileSync, appendFileSync, writeFileSync } from 'fs';
25
+ import { join } from 'path';
26
+ import { existsSync, readFileSync, appendFileSync, writeFileSync } from 'fs';
26
27
  import { supervisor } from './src/supervisor.js';
27
28
 
28
29
  async function ensureIgnoreEntry(rootPath) {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "codebasesearch",
3
- "version": "0.1.8",
3
+ "version": "0.1.10",
4
4
  "description": "Ultra-simple code search tool with Jina embeddings, LanceDB, and MCP protocol support",
5
5
  "type": "module",
6
6
  "bin": {
@@ -20,8 +20,7 @@
20
20
  "node": ">=18.0.0"
21
21
  },
22
22
  "scripts": {
23
- "start": "node ./bin/code-search.js",
24
- "postinstall": "node scripts/patch-transformers.js"
23
+ "start": "node ./bin/code-search.js"
25
24
  },
26
25
  "dependencies": {
27
26
  "@huggingface/transformers": "^3.8.1",
@@ -3,52 +3,57 @@ import { resolve } from 'path';
3
3
  import { existsSync } from 'fs';
4
4
  import { loadIgnorePatterns } from './ignore-parser.js';
5
5
  import { scanRepository } from './scanner.js';
6
- import { generateEmbeddings } from './embeddings.js';
7
- import { initStore, upsertChunks, closeStore } from './store.js';
8
- import { executeSearch } from './search.js';
6
+ import { buildTextIndex, searchText } from './text-search.js';
9
7
 
10
- async function performSearch(repositoryPath, query) {
8
+ let indexCache = new Map();
9
+
10
+ async function initializeIndex(repositoryPath) {
11
11
  const absolutePath = resolve(repositoryPath);
12
+ const cacheKey = absolutePath;
12
13
 
13
- if (!existsSync(absolutePath)) {
14
- return { error: 'Repository path not found', results: [] };
14
+ if (indexCache.has(cacheKey)) {
15
+ return indexCache.get(cacheKey);
15
16
  }
16
17
 
17
18
  try {
18
19
  const ignorePatterns = loadIgnorePatterns(absolutePath);
19
- const dbPath = resolve(absolutePath, '.code-search');
20
-
21
- await initStore(dbPath);
22
-
23
20
  const chunks = scanRepository(absolutePath, ignorePatterns);
21
+
24
22
  if (chunks.length === 0) {
25
- await closeStore();
26
- return { query, results: [], message: 'No code chunks found' };
23
+ return { error: 'No code chunks found', chunks: [], index: null };
27
24
  }
28
25
 
29
- const batchSize = 32;
30
- const allEmbeddings = [];
26
+ const index = buildTextIndex(chunks);
27
+ const indexData = { chunks, index };
28
+ indexCache.set(cacheKey, indexData);
31
29
 
32
- for (let i = 0; i < chunks.length; i += batchSize) {
33
- const batchTexts = chunks.slice(i, i + batchSize).map(c => c.content);
34
- const batchEmbeddings = await generateEmbeddings(batchTexts);
35
- allEmbeddings.push(...batchEmbeddings);
36
- }
30
+ return indexData;
31
+ } catch (error) {
32
+ return { error: error.message, chunks: [], index: null };
33
+ }
34
+ }
37
35
 
38
- const chunksWithEmbeddings = chunks.map((chunk, idx) => ({
39
- ...chunk,
40
- vector: allEmbeddings[idx],
41
- }));
36
+ async function performSearch(repositoryPath, query) {
37
+ const absolutePath = resolve(repositoryPath);
38
+
39
+ if (!existsSync(absolutePath)) {
40
+ return { error: 'Repository path not found', results: [] };
41
+ }
42
+
43
+ try {
44
+ const indexData = await initializeIndex(absolutePath);
45
+
46
+ if (indexData.error) {
47
+ return { error: indexData.error, results: [] };
48
+ }
42
49
 
43
- await upsertChunks(chunksWithEmbeddings);
44
- const results = await executeSearch(query);
45
- await closeStore();
50
+ const results = searchText(query, indexData.chunks, indexData.index);
46
51
 
47
52
  return {
48
53
  query,
49
54
  repository: absolutePath,
50
55
  resultsCount: results.length,
51
- results: results.map((result, idx) => ({
56
+ results: results.slice(0, 10).map((result, idx) => ({
52
57
  rank: idx + 1,
53
58
  file: result.file_path,
54
59
  lines: `${result.line_start}-${result.line_end}`,
@@ -57,7 +62,6 @@ async function performSearch(repositoryPath, query) {
57
62
  })),
58
63
  };
59
64
  } catch (error) {
60
- await closeStore().catch(() => {});
61
65
  return { error: error.message, results: [] };
62
66
  }
63
67
  }
@@ -0,0 +1,48 @@
1
/**
 * Build an inverted index over code chunks: token -> list of chunk indices.
 *
 * Postings may contain the same chunk index more than once when a token
 * repeats inside that chunk; searchText counts each occurrence, so the
 * duplicates carry the term-frequency signal.
 *
 * @param {Array<{content: string}>} chunks - scanned code chunks
 * @returns {Map<string, number[]>} token -> chunk-index postings list
 */
export function buildTextIndex(chunks) {
  const index = new Map();

  for (let position = 0; position < chunks.length; position += 1) {
    // Same tokenization as tokenize(): lowercase word characters.
    const words = chunks[position].content.toLowerCase().match(/\b\w+\b/g) || [];

    for (const word of words) {
      const postings = index.get(word);
      if (postings === undefined) {
        index.set(word, [position]);
      } else {
        postings.push(position);
      }
    }
  }

  return index;
}
16
+
17
/**
 * Score chunks against a query using the inverted index.
 *
 * Each occurrence of a query token in a chunk (one postings entry) adds 1
 * to that chunk's raw count; the reported score normalizes the count by the
 * number of query tokens. Results are sorted by descending score; ties keep
 * index insertion order (Array.prototype.sort is stable).
 *
 * Fix vs. previous version: the `.filter(r => r.score > 0)` step was dead
 * code — an entry only exists in chunkScores when at least one token
 * matched, so every score is >= 1 / queryTokens.length > 0 — and has been
 * removed.
 *
 * @param {string} query - free-text search query
 * @param {Array<object>} chunks - chunk records (spread into each result)
 * @param {Map<string, number[]>} index - token -> chunk-index postings
 * @returns {Array<object>} chunks annotated with `score` and `matchCount`,
 *   best match first; empty when no token matches
 */
export function searchText(query, chunks, index) {
  // Tokenize the query exactly as chunks were tokenized at index time.
  const queryTokens = query.toLowerCase().match(/\b\w+\b/g) || [];
  const chunkScores = new Map();

  for (const token of queryTokens) {
    const postings = index.get(token);
    if (postings === undefined) {
      continue;
    }
    for (const chunkIdx of postings) {
      chunkScores.set(chunkIdx, (chunkScores.get(chunkIdx) ?? 0) + 1);
    }
  }

  return [...chunkScores.entries()]
    .map(([idx, count]) => ({
      ...chunks[idx],
      score: count / queryTokens.length,
      matchCount: count,
    }))
    .sort((a, b) => b.score - a.score);
}
43
+
44
/**
 * Split text into lowercase word tokens (runs of \w characters, which
 * includes digits and underscores). Returns [] when nothing matches.
 *
 * @param {string} text
 * @returns {string[]}
 */
function tokenize(text) {
  const words = text.toLowerCase().match(/\b\w+\b/g);
  return words === null ? [] : words;
}