codebasesearch 0.1.9 → 0.1.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/search-worker.js +32 -28
- package/src/text-search.js +129 -0
package/package.json
CHANGED
package/src/search-worker.js
CHANGED
|
@@ -3,52 +3,57 @@ import { resolve } from 'path';
|
|
|
3
3
|
import { existsSync } from 'fs';
|
|
4
4
|
import { loadIgnorePatterns } from './ignore-parser.js';
|
|
5
5
|
import { scanRepository } from './scanner.js';
|
|
6
|
-
import {
|
|
7
|
-
import { initStore, upsertChunks, closeStore } from './store.js';
|
|
8
|
-
import { executeSearch } from './search.js';
|
|
6
|
+
import { buildTextIndex, searchText } from './text-search.js';
|
|
9
7
|
|
|
10
|
-
|
|
8
|
+
// Module-level cache of built indexes, keyed by absolute repository path.
let indexCache = new Map();

/**
 * Build (or fetch from cache) the searchable text index for a repository.
 *
 * @param {string} repositoryPath - Path to the repository root (relative or absolute).
 * @returns {Promise<object>} `{ chunks, indexData }` on success, or
 *   `{ error, chunks: [], indexData: null }` when scanning finds nothing or throws.
 *   Error results are intentionally NOT cached, so a failed scan can be retried.
 */
async function initializeIndex(repositoryPath) {
  const absolutePath = resolve(repositoryPath);

  const cached = indexCache.get(absolutePath);
  if (cached !== undefined) {
    return cached;
  }

  try {
    const ignorePatterns = loadIgnorePatterns(absolutePath);
    const chunks = scanRepository(absolutePath, ignorePatterns);

    if (chunks.length === 0) {
      return { error: 'No code chunks found', chunks: [], indexData: null };
    }

    const entry = { chunks, indexData: buildTextIndex(chunks) };
    indexCache.set(absolutePath, entry);
    return entry;
  } catch (error) {
    return { error: error.message, chunks: [], indexData: null };
  }
}
|
|
37
35
|
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
36
|
+
async function performSearch(repositoryPath, query) {
|
|
37
|
+
const absolutePath = resolve(repositoryPath);
|
|
38
|
+
|
|
39
|
+
if (!existsSync(absolutePath)) {
|
|
40
|
+
return { error: 'Repository path not found', results: [] };
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
try {
|
|
44
|
+
const indexData = await initializeIndex(absolutePath);
|
|
45
|
+
|
|
46
|
+
if (indexData.error) {
|
|
47
|
+
return { error: indexData.error, results: [] };
|
|
48
|
+
}
|
|
42
49
|
|
|
43
|
-
|
|
44
|
-
const results = await executeSearch(query);
|
|
45
|
-
await closeStore();
|
|
50
|
+
const results = searchText(query, indexData.chunks, indexData.indexData);
|
|
46
51
|
|
|
47
52
|
return {
|
|
48
53
|
query,
|
|
49
54
|
repository: absolutePath,
|
|
50
55
|
resultsCount: results.length,
|
|
51
|
-
results: results.map((result, idx) => ({
|
|
56
|
+
results: results.slice(0, 10).map((result, idx) => ({
|
|
52
57
|
rank: idx + 1,
|
|
53
58
|
file: result.file_path,
|
|
54
59
|
lines: `${result.line_start}-${result.line_end}`,
|
|
@@ -57,7 +62,6 @@ async function performSearch(repositoryPath, query) {
|
|
|
57
62
|
})),
|
|
58
63
|
};
|
|
59
64
|
} catch (error) {
|
|
60
|
-
await closeStore().catch(() => {});
|
|
61
65
|
return { error: error.message, results: [] };
|
|
62
66
|
}
|
|
63
67
|
}
|
|
@@ -0,0 +1,129 @@
|
|
|
1
|
+
/**
 * Build an inverted text index over code chunks.
 *
 * @param {Array} chunks - Chunk objects; each must expose `content` (string)
 *   and `file_path` (string).
 * @returns {{index: Map<string, number[]>, chunkMetadata: Array}} `index` maps
 *   each token to the list of chunk positions containing it; `chunkMetadata[i]`
 *   holds that chunk's tokens, declared symbols, per-token frequency map, and
 *   an `isCode` flag derived from the file extension.
 */
export function buildTextIndex(chunks) {
  const index = new Map();
  const chunkMetadata = [];

  chunks.forEach((chunk, position) => {
    const tokens = tokenize(chunk.content);
    const symbols = extractSymbols(chunk.content);
    // NOTE(review): tokenize returns de-duplicated tokens, so every count
    // below ends up as 1 and the frequency boost in searchText is effectively
    // flat — confirm whether per-occurrence counts were intended.
    const frequency = new Map();

    for (const token of tokens) {
      frequency.set(token, (frequency.get(token) || 0) + 1);
      const postings = index.get(token);
      if (postings) {
        postings.push(position);
      } else {
        index.set(token, [position]);
      }
    }

    chunkMetadata[position] = {
      tokens,
      symbols,
      frequency,
      isCode: isCodeFile(chunk.file_path),
    };
  });

  return { index, chunkMetadata };
}
|
|
28
|
+
|
|
29
|
+
/**
 * Score and rank chunks against a free-text query.
 *
 * Scoring per chunk: each query-token hit adds `frequency * boost` (boost is
 * 1.5 for tokens longer than 4 chars, else 1); each declared-symbol match
 * adds 5; a verbatim substring match of the whole query adds 10; code files
 * get a final 1.2x multiplier. Raw scores are normalized to [0, 1] via /100.
 *
 * @param {string} query - Free-text search query.
 * @param {Array} chunks - The chunk objects the index was built from.
 * @param {{index: Map, chunkMetadata: Array}} indexData - Output of buildTextIndex.
 * @returns {Array} Matching chunks (spread) with `score` and `_rawScore`, best first.
 */
export function searchText(query, chunks, indexData) {
  const { index, chunkMetadata } = indexData;
  const queryTokens = tokenize(query);
  const querySymbols = extractSymbols(query);

  // Convert each query token's posting list to a Set ONCE up front:
  // Set.has is O(1), whereas the previous Array.includes inside the chunk
  // loop made scoring accidentally O(chunks * postings) per token.
  const postingSets = new Map();
  queryTokens.forEach(token => {
    const postings = index.get(token);
    if (postings) {
      postingSets.set(token, new Set(postings));
    }
  });

  const chunkScores = new Map();

  chunks.forEach((chunk, idx) => {
    let score = 0;

    queryTokens.forEach(token => {
      const postings = postingSets.get(token);
      if (postings && postings.has(idx)) {
        const freq = chunkMetadata[idx].frequency.get(token) || 1;
        const boost = token.length > 4 ? 1.5 : 1;
        score += boost * freq;
      }
    });

    querySymbols.forEach(symbol => {
      if (chunkMetadata[idx].symbols.includes(symbol)) {
        score += 5;
      }
    });

    // Verbatim query string appearing in the chunk is the strongest signal.
    if (chunk.content.includes(query)) {
      score += 10;
    }

    if (chunkMetadata[idx].isCode) {
      score *= 1.2;
    }

    if (score > 0) {
      chunkScores.set(idx, score);
    }
  });

  // Only strictly positive scores ever enter chunkScores, so the old
  // `.filter(r => r.score > 0)` step was redundant and has been dropped.
  return Array.from(chunkScores.entries())
    .map(([idx, score]) => ({
      ...chunks[idx],
      score: Math.min(score / 100, 1),
      _rawScore: score,
    }))
    .sort((a, b) => b._rawScore - a._rawScore);
}
|
|
79
|
+
|
|
80
|
+
/**
 * Tokenize text into a de-duplicated list of lowercase search tokens.
 *
 * Produces, per whitespace-separated word: the whole word stripped of
 * non-word characters, its camelCase parts, and its snake/kebab-case parts.
 *
 * BUG FIX: the previous version lowercased the whole text BEFORE splitting,
 * so the `[A-Z][a-z]*` alternative of the camelCase regex could never match
 * and camelCase words were never split. We now split the original-case text
 * and lowercase each token individually.
 *
 * @param {string} text - Arbitrary source text or query string.
 * @returns {string[]} Unique lowercase tokens, each at least 2 chars long.
 */
function tokenize(text) {
  const tokens = new Set();

  text.split(/\s+/).forEach(word => {
    if (word.length === 0) return;

    // Whole word, punctuation stripped (underscores survive: \w keeps them).
    tokens.add(word.replace(/[^\w]/g, '').toLowerCase());

    // camelCase / PascalCase / digit-run parts — on the ORIGINAL case.
    const camelCaseTokens = word.match(/[a-z]+|[A-Z][a-z]*|[0-9]+/g) || [];
    camelCaseTokens.forEach(t => {
      if (t.length > 1) tokens.add(t.toLowerCase());
    });

    // snake_case / kebab-case parts.
    const snakeCaseTokens = word.split(/[-_]/).filter(t => t.length > 0);
    snakeCaseTokens.forEach(t => {
      if (t.length > 1) tokens.add(t.toLowerCase());
    });
  });

  return Array.from(tokens).filter(t => t.length > 1);
}
|
|
101
|
+
|
|
102
|
+
/**
 * Extract declared JS symbol names (functions, arrow-function consts,
 * classes, exported functions/classes) from source text, lowercased.
 *
 * BUG FIX: the previous version re-matched `/\w+(?=\s*[=\(])/` against each
 * matched substring to pull out the name, but for `function foo` matches
 * nothing follows the name inside the substring, so plain function
 * declarations were never captured. We now read the capture groups directly
 * via `exec` on global regexes.
 *
 * @param {string} text - Source text (or a query string) to scan.
 * @returns {string[]} Unique lowercase symbol names.
 */
function extractSymbols(text) {
  const symbols = new Set();
  let m;

  // `function name(...)` (optionally async) or `const name = (...)` / `const name = async (...)`.
  const declRe = /(?:async\s+)?function\s+(\w+)|const\s+(\w+)\s*=\s*(?:async\s*)?\(/g;
  while ((m = declRe.exec(text)) !== null) {
    const name = m[1] || m[2];
    if (name) symbols.add(name.toLowerCase());
  }

  const classRe = /class\s+(\w+)/g;
  while ((m = classRe.exec(text)) !== null) {
    symbols.add(m[1].toLowerCase());
  }

  const exportRe = /export\s+(?:async\s+)?(?:function|class)\s+(\w+)/g;
  while ((m = exportRe.exec(text)) !== null) {
    symbols.add(m[1].toLowerCase());
  }

  return Array.from(symbols);
}
|
|
125
|
+
|
|
126
|
+
/**
 * Heuristic: treat a path as source code when its extension is in a fixed
 * allow-list (used by the scorer to boost code files over prose/config).
 *
 * @param {string} filePath - File path to classify.
 * @returns {boolean} True when the path ends with a known code extension.
 */
function isCodeFile(filePath) {
  const CODE_EXTENSIONS = ['.js', '.ts', '.jsx', '.tsx', '.py', '.java', '.go', '.rs', '.rb'];
  for (const ext of CODE_EXTENSIONS) {
    if (filePath.endsWith(ext)) {
      return true;
    }
  }
  return false;
}
|