codecritique 1.0.0 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +82 -114
- package/package.json +10 -9
- package/src/content-retrieval.test.js +775 -0
- package/src/custom-documents.test.js +440 -0
- package/src/feedback-loader.test.js +529 -0
- package/src/llm.test.js +256 -0
- package/src/project-analyzer.test.js +747 -0
- package/src/rag-analyzer.js +12 -0
- package/src/rag-analyzer.test.js +1109 -0
- package/src/rag-review.test.js +317 -0
- package/src/setupTests.js +131 -0
- package/src/zero-shot-classifier-open.test.js +278 -0
- package/src/embeddings/cache-manager.js +0 -364
- package/src/embeddings/constants.js +0 -40
- package/src/embeddings/database.js +0 -921
- package/src/embeddings/errors.js +0 -208
- package/src/embeddings/factory.js +0 -447
- package/src/embeddings/file-processor.js +0 -851
- package/src/embeddings/model-manager.js +0 -337
- package/src/embeddings/similarity-calculator.js +0 -97
- package/src/embeddings/types.js +0 -113
- package/src/pr-history/analyzer.js +0 -579
- package/src/pr-history/bot-detector.js +0 -123
- package/src/pr-history/cli-utils.js +0 -204
- package/src/pr-history/comment-processor.js +0 -549
- package/src/pr-history/database.js +0 -819
- package/src/pr-history/github-client.js +0 -629
- package/src/technology-keywords.json +0 -753
- package/src/utils/command.js +0 -48
- package/src/utils/constants.js +0 -263
- package/src/utils/context-inference.js +0 -364
- package/src/utils/document-detection.js +0 -105
- package/src/utils/file-validation.js +0 -271
- package/src/utils/git.js +0 -232
- package/src/utils/language-detection.js +0 -170
- package/src/utils/logging.js +0 -24
- package/src/utils/markdown.js +0 -132
- package/src/utils/mobilebert-tokenizer.js +0 -141
- package/src/utils/pr-chunking.js +0 -276
- package/src/utils/string-utils.js +0 -28
|
@@ -0,0 +1,747 @@
|
|
|
1
|
+
// Unit tests for ProjectAnalyzer (Vitest).
// `vi`/`describe`/`it`/`expect` are not imported — presumably provided as
// globals by the Vitest config (see setupTests.js); TODO confirm.
import crypto from 'node:crypto';
import fs from 'node:fs';
import path from 'node:path';
import { getDefaultEmbeddingsSystem } from './embeddings/factory.js';
import * as llm from './llm.js';
import { ProjectAnalyzer } from './project-analyzer.js';
import { isDocumentationFile, isTestFile } from './utils/file-validation.js';

// Mock only the fs methods the analyzer uses, on the default export
// (the code under test imports `fs from 'node:fs'`).
vi.mock('node:fs', () => ({
  default: {
    existsSync: vi.fn(),
    statSync: vi.fn(),
    readFileSync: vi.fn(),
  },
}));

// Mock crypto so hash values are deterministic in tests.
vi.mock('node:crypto', () => ({
  default: {
    createHash: vi.fn(),
  },
}));

// Replace the embeddings factory so no real database or model is loaded.
vi.mock('./embeddings/factory.js', () => ({
  getDefaultEmbeddingsSystem: vi.fn(),
}));

// Stub the LLM client so tests make no network calls.
vi.mock('./llm.js', () => ({
  sendPromptToClaude: vi.fn(),
}));

// Stub file-classification helpers; individual tests arm their return values.
vi.mock('./utils/file-validation.js', () => ({
  isDocumentationFile: vi.fn(),
  isTestFile: vi.fn(),
}));
|
|
36
|
+
describe('ProjectAnalyzer', () => {
  let analyzer;
  let mockEmbeddingsSystem;
  let mockDbConnection;
  let mockTable;
  let mockHash;

  const mockProjectPath = '/mock/project';
  // Two representative key files: a package manifest and an entry point.
  const mockKeyFiles = [
    {
      relativePath: 'package.json',
      fullPath: '/mock/project/package.json',
      category: 'package',
      size: 1024,
      lastModified: new Date('2024-01-01'),
    },
    {
      relativePath: 'src/index.js',
      fullPath: '/mock/project/src/index.js',
      category: 'entry',
      size: 512,
      lastModified: new Date('2024-01-02'),
    },
  ];
+
beforeEach(() => {
  analyzer = new ProjectAnalyzer();

  // Setup mock hash — always digests to a fixed value so the
  // "hash matches" tests are deterministic.
  mockHash = {
    update: vi.fn().mockReturnThis(),
    digest: vi.fn().mockReturnValue('mock-hash-value'),
  };
  crypto.createHash.mockReturnValue(mockHash);

  // Setup mock table. NOTE: query() returns a single shared object, so
  // tests can re-arm `mockTable.query().toArray` and the analyzer sees it.
  mockTable = {
    query: vi.fn().mockReturnValue({
      select: vi.fn().mockReturnThis(),
      where: vi.fn().mockReturnThis(),
      limit: vi.fn().mockReturnThis(),
      toArray: vi.fn().mockResolvedValue([]),
    }),
    optimize: vi.fn().mockResolvedValue(undefined),
  };

  // Setup mock database connection
  mockDbConnection = {
    openTable: vi.fn().mockResolvedValue(mockTable),
  };

  // Setup mock embeddings system
  mockEmbeddingsSystem = {
    initialize: vi.fn().mockResolvedValue(undefined),
    getProjectSummary: vi.fn().mockResolvedValue(null),
    storeProjectSummary: vi.fn().mockResolvedValue(undefined),
    databaseManager: {
      getDB: vi.fn().mockResolvedValue(mockDbConnection),
      fileEmbeddingsTable: 'file_embeddings',
    },
  };
  getDefaultEmbeddingsSystem.mockReturnValue(mockEmbeddingsSystem);

  // Setup mock fs — by default every file exists, is 1 KiB, and contains
  // a minimal package.json payload.
  fs.existsSync.mockReturnValue(true);
  fs.statSync.mockReturnValue({
    size: 1024,
    mtime: new Date('2024-01-01'),
  });
  fs.readFileSync.mockReturnValue('{"name": "test-project", "dependencies": {}}');

  // Setup mock LLM
  llm.sendPromptToClaude.mockResolvedValue({
    content: '{"selectedFiles": ["package.json"]}',
    json: { selectedFiles: ['package.json'] },
  });

  // Setup mock file validation functions
  isDocumentationFile.mockReturnValue(false);
  isTestFile.mockReturnValue(false);

  // Silence console output; several tests assert on these spies.
  vi.spyOn(console, 'log').mockImplementation(() => {});
  vi.spyOn(console, 'error').mockImplementation(() => {});
  vi.spyOn(console, 'warn').mockImplementation(() => {});
});

afterEach(() => {
  vi.restoreAllMocks();
});
|
|
125
|
+
|
|
126
|
+
// Constructor should leave all analysis state unset until analyzeProject runs.
describe('constructor', () => {
  it('should initialize with null/empty default values', () => {
    expect(analyzer.llm).toBeNull();
    expect(analyzer.projectSummary).toBeNull();
    expect(analyzer.keyFiles).toEqual([]);
    expect(analyzer.lastAnalysisHash).toBeNull();
  });
});
|
|
134
|
+
|
|
135
|
+
// End-to-end behavior of analyzeProject: cache hit, cache invalidation,
// forced re-analysis, first-time analysis, and error fallback.
describe('analyzeProject', () => {
  beforeEach(() => {
    // Setup default LLM response for project summary
    llm.sendPromptToClaude.mockResolvedValue({
      content: JSON.stringify({
        projectName: 'test-project',
        projectType: 'Node.js CLI',
        mainFrameworks: ['Node.js'],
        technologies: ['JavaScript', 'Node.js'],
        architecture: { pattern: 'Module-based', description: 'desc', layers: [] },
        keyComponents: [],
        customImplementations: [],
      }),
      json: {
        projectName: 'test-project',
        projectType: 'Node.js CLI',
        mainFrameworks: ['Node.js'],
        technologies: ['JavaScript', 'Node.js'],
        architecture: { pattern: 'Module-based', description: 'desc', layers: [] },
        keyComponents: [],
        customImplementations: [],
      },
    });
  });

  it('should return existing summary if up-to-date (hash matches)', async () => {
    // Stored hash equals the mocked digest, so the cache should be used.
    const existingSummary = {
      projectName: 'test-project',
      keyFiles: mockKeyFiles,
      keyFilesHash: 'mock-hash-value',
    };
    mockEmbeddingsSystem.getProjectSummary.mockResolvedValue(existingSummary);

    const result = await analyzer.analyzeProject(mockProjectPath, { verbose: true });

    expect(result).toBeDefined();
    expect(mockEmbeddingsSystem.getProjectSummary).toHaveBeenCalledWith(mockProjectPath);
  });

  it('should regenerate analysis if key files hash changed', async () => {
    const existingSummary = {
      projectName: 'test-project',
      keyFiles: mockKeyFiles,
      keyFilesHash: 'old-hash-value', // Different from mock-hash-value
    };
    mockEmbeddingsSystem.getProjectSummary.mockResolvedValue(existingSummary);

    // Mock table query to return some files for discovery
    mockTable
      .query()
      .toArray.mockResolvedValue([{ path: 'package.json', name: 'package.json', content: '{}', type: 'json', language: 'json' }]);

    await analyzer.analyzeProject(mockProjectPath, { verbose: true });

    // Re-analysis implies the LLM was consulted again.
    expect(llm.sendPromptToClaude).toHaveBeenCalled();
  });

  it('should force analysis when forceAnalysis option is true', async () => {
    // Even with a matching hash, forceAnalysis must bypass the cache.
    const existingSummary = {
      projectName: 'test-project',
      keyFiles: mockKeyFiles,
      keyFilesHash: 'mock-hash-value',
    };
    mockEmbeddingsSystem.getProjectSummary.mockResolvedValue(existingSummary);

    // Mock table query to return some files for discovery
    mockTable
      .query()
      .toArray.mockResolvedValue([{ path: 'package.json', name: 'package.json', content: '{}', type: 'json', language: 'json' }]);

    await analyzer.analyzeProject(mockProjectPath, { forceAnalysis: true });

    expect(llm.sendPromptToClaude).toHaveBeenCalled();
  });

  it('should perform first-time analysis when no existing summary', async () => {
    mockEmbeddingsSystem.getProjectSummary.mockResolvedValue(null);

    // Mock table query to return some files for discovery
    mockTable
      .query()
      .toArray.mockResolvedValue([{ path: 'package.json', name: 'package.json', content: '{}', type: 'json', language: 'json' }]);

    const result = await analyzer.analyzeProject(mockProjectPath, { verbose: true });

    expect(result).toBeDefined();
    // First-time analysis is persisted for future runs.
    expect(mockEmbeddingsSystem.storeProjectSummary).toHaveBeenCalled();
  });

  it('should return fallback summary on error', async () => {
    // Both the cache lookup and the LLM fail → analyzer must degrade gracefully.
    mockEmbeddingsSystem.getProjectSummary.mockRejectedValue(new Error('DB Error'));
    llm.sendPromptToClaude.mockRejectedValue(new Error('LLM Error'));

    const result = await analyzer.analyzeProject(mockProjectPath);

    expect(result.fallback).toBe(true);
    expect(result.projectName).toBeDefined();
  });
});
|
|
234
|
+
|
|
235
|
+
// loadExistingAnalysis: rehydrates a stored summary, rebuilding absolute
// paths for key files; returns null when absent or on failure.
describe('loadExistingAnalysis', () => {
  it('should load and transform existing analysis from database', async () => {
    const storedSummary = {
      projectName: 'test-project',
      keyFiles: [{ path: 'package.json', category: 'package', lastModified: '2024-01-01T00:00:00.000Z' }],
    };
    mockEmbeddingsSystem.getProjectSummary.mockResolvedValue(storedSummary);

    const result = await analyzer.loadExistingAnalysis(mockProjectPath);

    expect(result).toBeDefined();
    // Stored `path` is mapped to relativePath/fullPath on load.
    expect(result.keyFiles[0].relativePath).toBe('package.json');
    expect(result.keyFiles[0].fullPath).toBe(path.join(mockProjectPath, 'package.json'));
  });

  it('should return null if no existing analysis', async () => {
    mockEmbeddingsSystem.getProjectSummary.mockResolvedValue(null);

    const result = await analyzer.loadExistingAnalysis(mockProjectPath);

    expect(result).toBeNull();
  });

  it('should return null on error', async () => {
    mockEmbeddingsSystem.getProjectSummary.mockRejectedValue(new Error('DB Error'));

    const result = await analyzer.loadExistingAnalysis(mockProjectPath);

    expect(result).toBeNull();
    // The failure is logged rather than thrown.
    expect(console.error).toHaveBeenCalled();
  });
});
|
|
267
|
+
|
|
268
|
+
// storeAnalysis: persists a summary via the embeddings system; storage
// failures are logged, never propagated to the caller.
describe('storeAnalysis', () => {
  it('should store analysis to embeddings system', async () => {
    const summary = { projectName: 'test-project' };

    await analyzer.storeAnalysis(mockProjectPath, summary);

    expect(mockEmbeddingsSystem.storeProjectSummary).toHaveBeenCalledWith(mockProjectPath, summary);
    expect(console.log).toHaveBeenCalledWith(expect.stringContaining('Project analysis stored'));
  });

  it('should handle storage errors gracefully', async () => {
    mockEmbeddingsSystem.storeProjectSummary.mockRejectedValue(new Error('Storage failed'));
    const summary = { projectName: 'test-project' };

    await analyzer.storeAnalysis(mockProjectPath, summary);

    expect(console.error).toHaveBeenCalled();
  });
});
|
|
287
|
+
|
|
288
|
+
// validateAndUpdateKeyFiles: re-checks stored key files on disk. If at
// least ~70% still exist the missing ones are dropped; below that
// threshold a fresh LLM-backed discovery is triggered.
describe('validateAndUpdateKeyFiles', () => {
  it('should validate and keep existing key files', async () => {
    const existingFiles = [
      { relativePath: 'package.json', category: 'package' },
      { relativePath: 'src/index.js', category: 'entry' },
    ];

    const result = await analyzer.validateAndUpdateKeyFiles(existingFiles, mockProjectPath);

    expect(result.length).toBe(2);
    expect(result[0].relativePath).toBe('package.json');
    expect(result[0].fullPath).toBe(path.join(mockProjectPath, 'package.json'));
  });

  it('should trigger fresh discovery when too few files exist (<70%)', async () => {
    const existingFiles = [
      { relativePath: 'package.json', category: 'package' },
      { relativePath: 'missing.js', category: 'entry' },
      { relativePath: 'another.js', category: 'entry' },
    ];

    // Reset and set up the mock to only find package.json (1 of 3 = 33% < 70%)
    fs.existsSync.mockReset();
    fs.existsSync.mockImplementation((filePath) => {
      return filePath.endsWith('package.json');
    });
    fs.statSync.mockReturnValue({
      size: 1024,
      mtime: new Date('2024-01-01'),
    });

    await analyzer.validateAndUpdateKeyFiles(existingFiles, mockProjectPath);

    // With 1 of 3 files found (33%), it should trigger fresh discovery
    expect(console.log).toHaveBeenCalledWith(expect.stringContaining('Many key files missing'));
  });

  it('should filter out missing files and keep existing ones', async () => {
    const existingFiles = [
      { relativePath: 'package.json', category: 'package' },
      { relativePath: 'src/index.js', category: 'entry' },
      { relativePath: 'src/app.js', category: 'entry' },
      { relativePath: 'missing.js', category: 'utility' },
    ];

    // Reset and set up the mock - 3 of 4 files exist (75% > 70% threshold)
    fs.existsSync.mockReset();
    fs.existsSync.mockImplementation((filePath) => {
      return !filePath.endsWith('missing.js');
    });
    fs.statSync.mockReturnValue({
      size: 1024,
      mtime: new Date('2024-01-01'),
    });

    const result = await analyzer.validateAndUpdateKeyFiles(existingFiles, mockProjectPath);

    expect(result.length).toBe(3);
    expect(result.map((f) => f.relativePath)).toContain('package.json');
    expect(result.map((f) => f.relativePath)).toContain('src/index.js');
    expect(result.map((f) => f.relativePath)).not.toContain('missing.js');
  });

  it('should trigger fresh discovery if too many files are missing (>30%)', async () => {
    fs.existsSync.mockReturnValue(false); // All files missing

    const existingFiles = [
      { relativePath: 'file1.js', category: 'entry' },
      { relativePath: 'file2.js', category: 'entry' },
      { relativePath: 'file3.js', category: 'entry' },
    ];

    // Mock for fresh discovery
    mockTable
      .query()
      .toArray.mockResolvedValue([{ path: 'package.json', name: 'package.json', content: '{}', type: 'json', language: 'json' }]);
    // Note: this mockImplementation overrides the mockReturnValue(false)
    // above for all subsequent calls (only package.json "exists").
    fs.existsSync.mockImplementation((p) => p.includes('package.json'));

    await analyzer.validateAndUpdateKeyFiles(existingFiles, mockProjectPath);

    // Should trigger discoverKeyFilesWithLLM
    expect(console.log).toHaveBeenCalledWith(expect.stringContaining('Many key files missing'));
  });
});
|
|
372
|
+
|
|
373
|
+
// discoverKeyFilesWithLLM: mines candidates from the embeddings DB, then
// lets the LLM pick the final set.
describe('discoverKeyFilesWithLLM', () => {
  it('should discover key files using embeddings database', async () => {
    mockTable.query().toArray.mockResolvedValue([
      { path: 'package.json', name: 'package.json', content: '{"name": "test"}', type: 'json', language: 'json' },
      { path: 'src/index.js', name: 'index.js', content: 'export default {};', type: 'js', language: 'javascript' },
    ]);

    llm.sendPromptToClaude.mockResolvedValue({
      content: '["package.json", "src/index.js"]',
      json: { selectedFiles: ['package.json', 'src/index.js'] },
    });

    const result = await analyzer.discoverKeyFilesWithLLM(mockProjectPath);

    // The embeddings system must be initialized before querying.
    expect(mockEmbeddingsSystem.initialize).toHaveBeenCalled();
    expect(result).toBeDefined();
  });
});
|
|
391
|
+
|
|
392
|
+
// mineKeyFilesFromEmbeddings: reads candidate files out of the embeddings
// table; tolerates optimize() failures and query failures (returns []).
describe('mineKeyFilesFromEmbeddings', () => {
  it('should query embeddings database for key files', async () => {
    mockTable
      .query()
      .toArray.mockResolvedValue([
        { path: 'webpack.config.js', name: 'webpack.config.js', content: 'module.exports = {}', type: 'js', language: 'javascript' },
      ]);

    await analyzer.mineKeyFilesFromEmbeddings(mockProjectPath);

    expect(mockDbConnection.openTable).toHaveBeenCalled();
    expect(mockTable.query).toHaveBeenCalled();
  });

  it('should handle table optimization errors gracefully', async () => {
    // e.g. tables created with an older storage format cannot be optimized.
    mockTable.optimize.mockRejectedValue(new Error('legacy format'));
    mockTable.query().toArray.mockResolvedValue([]);

    const result = await analyzer.mineKeyFilesFromEmbeddings(mockProjectPath);

    expect(result).toEqual([]);
    expect(console.log).toHaveBeenCalledWith(expect.stringContaining('Skipping optimization'));
  });

  it('should return empty array on query error', async () => {
    // Mock the table.query to throw an error inside the queryFiles function
    mockTable.query.mockReturnValue({
      select: vi.fn().mockReturnThis(),
      where: vi.fn().mockImplementation(() => {
        throw new Error('Query failed');
      }),
      limit: vi.fn().mockReturnThis(),
      toArray: vi.fn().mockRejectedValue(new Error('Query failed')),
    });

    const result = await analyzer.mineKeyFilesFromEmbeddings(mockProjectPath);

    // Should return empty array since queries fail but error is caught
    expect(result).toEqual([]);
  });
});
|
|
433
|
+
|
|
434
|
+
// matchesFileType(path, name, type): pattern-matches config/entry files
// itself and delegates docs/tests classification to the shared helpers.
describe('matchesFileType', () => {
  it('should match config files by regex', () => {
    expect(analyzer.matchesFileType('webpack.config.js', 'webpack.config.js', 'config')).toBe(true);
    expect(analyzer.matchesFileType('tsconfig.json', 'tsconfig.json', 'config')).toBe(true);
  });

  it('should match entry files by regex', () => {
    expect(analyzer.matchesFileType('src/index.js', 'index.js', 'entry')).toBe(true);
    expect(analyzer.matchesFileType('src/main.ts', 'main.ts', 'entry')).toBe(true);
  });

  it('should delegate to isDocumentationFile for docs type', () => {
    isDocumentationFile.mockReturnValue(true);
    expect(analyzer.matchesFileType('docs/README.md', 'README.md', 'docs')).toBe(true);
    expect(isDocumentationFile).toHaveBeenCalledWith('docs/README.md');
  });

  it('should delegate to isTestFile for tests type', () => {
    isTestFile.mockReturnValue(true);
    expect(analyzer.matchesFileType('src/utils.test.js', 'utils.test.js', 'tests')).toBe(true);
    expect(isTestFile).toHaveBeenCalledWith('src/utils.test.js');
  });

  it('should return false for unknown file types', () => {
    expect(analyzer.matchesFileType('random.txt', 'random.txt', 'unknown-type')).toBe(false);
  });
});
|
|
461
|
+
|
|
462
|
+
// selectFinalKeyFiles: asks the LLM to pick from mined candidates, falling
// back to automatic selection when the LLM errors or answers unusably.
describe('selectFinalKeyFiles', () => {
  it('should use LLM to select final key files', async () => {
    const candidates = [
      { path: 'package.json', category: 'package', content: '{"name": "test"}', source: 'package-search' },
      { path: 'src/index.js', category: 'entry', content: 'export default {};', source: 'entry-search' },
    ];

    // Ensure analyzer.llm is set to the mocked llm module
    analyzer.llm = llm;

    llm.sendPromptToClaude.mockResolvedValue({
      content: '["package.json"]',
      json: { selectedFiles: ['package.json'] },
    });

    const result = await analyzer.selectFinalKeyFiles(candidates, mockProjectPath);

    expect(llm.sendPromptToClaude).toHaveBeenCalled();
    expect(result.length).toBe(1);
    expect(result[0].relativePath).toBe('package.json');
  });

  it('should return empty array if no candidates', async () => {
    const result = await analyzer.selectFinalKeyFiles([], mockProjectPath);

    expect(result).toEqual([]);
    // No candidates → the LLM is never consulted.
    expect(llm.sendPromptToClaude).not.toHaveBeenCalled();
  });

  it('should fallback to automatic selection on LLM error', async () => {
    const candidates = [{ path: 'package.json', category: 'package', content: '{}' }];

    llm.sendPromptToClaude.mockRejectedValue(new Error('LLM Error'));

    await analyzer.selectFinalKeyFiles(candidates, mockProjectPath);

    expect(console.error).toHaveBeenCalled();
    expect(console.log).toHaveBeenCalledWith(expect.stringContaining('Falling back to automatic selection'));
  });

  it('should fallback if LLM returns invalid response', async () => {
    const candidates = [{ path: 'package.json', category: 'package', content: '{}' }];

    // Response resolves, but selectedFiles is null → treated as invalid.
    llm.sendPromptToClaude.mockResolvedValue({
      content: 'invalid response',
      json: { selectedFiles: null },
    });

    await analyzer.selectFinalKeyFiles(candidates, mockProjectPath);

    expect(console.error).toHaveBeenCalled();
  });
});
|
|
515
|
+
|
|
516
|
+
// fallbackFileSelection: non-LLM selection — caps results per category
// (15 overall) and drops candidates that no longer exist on disk.
describe('fallbackFileSelection', () => {
  it('should select files based on category limits', () => {
    const candidates = [
      { path: 'package.json', category: 'package', content: '{}' },
      { path: 'package-lock.json', category: 'package', content: '{}' },
      { path: 'webpack.config.js', category: 'config', content: '{}' },
      { path: 'tsconfig.json', category: 'config', content: '{}' },
    ];

    const result = analyzer.fallbackFileSelection(candidates, mockProjectPath);

    expect(result.length).toBeLessThanOrEqual(15);
    expect(result.some((f) => f.relativePath === 'package.json')).toBe(true);
  });

  it('should filter out non-existent files', () => {
    fs.existsSync.mockImplementation((p) => !p.includes('nonexistent'));

    const candidates = [
      { path: 'package.json', category: 'package', content: '{}' },
      { path: 'nonexistent.js', category: 'entry', content: '{}' },
    ];

    const result = analyzer.fallbackFileSelection(candidates, mockProjectPath);

    expect(result.length).toBe(1);
    expect(result[0].relativePath).toBe('package.json');
  });
});
|
|
545
|
+
|
|
546
|
+
// calculateKeyFilesHash: SHA-256 over file paths + content; missing files
// are tolerated and files over 50 KB are hashed without reading content.
describe('calculateKeyFilesHash', () => {
  it('should calculate hash based on file paths and content', async () => {
    const keyFiles = [{ relativePath: 'package.json', fullPath: '/mock/project/package.json', size: 100, lastModified: new Date() }];

    const result = await analyzer.calculateKeyFilesHash(keyFiles);

    expect(crypto.createHash).toHaveBeenCalledWith('sha256');
    expect(mockHash.update).toHaveBeenCalled();
    expect(mockHash.digest).toHaveBeenCalledWith('hex');
    expect(result).toBe('mock-hash-value');
  });

  it('should handle missing files gracefully', async () => {
    fs.existsSync.mockReturnValue(false);
    const keyFiles = [{ relativePath: 'missing.js', fullPath: '/mock/project/missing.js', size: 100 }];

    const result = await analyzer.calculateKeyFilesHash(keyFiles);

    expect(result).toBe('mock-hash-value');
  });

  it('should skip content for large files (>50KB)', async () => {
    const keyFiles = [{ relativePath: 'large.js', fullPath: '/mock/project/large.js', size: 60 * 1024, lastModified: new Date() }];

    await analyzer.calculateKeyFilesHash(keyFiles);

    // Large files contribute metadata only — content is never read.
    expect(fs.readFileSync).not.toHaveBeenCalled();
  });
});
|
|
575
|
+
|
|
576
|
+
// generateProjectSummary: builds a structured summary from the LLM
// response, stamping analysisDate/projectPath; any LLM or parse failure
// degrades to a fallback summary rather than throwing.
describe('generateProjectSummary', () => {
  it('should generate project summary using LLM', async () => {
    const mockSummary = {
      projectName: 'test-project',
      projectType: 'Node.js CLI',
      mainFrameworks: ['Node.js'],
      technologies: ['JavaScript'],
      architecture: { pattern: 'Module-based', description: 'desc', layers: [] },
      keyComponents: [],
      customImplementations: [],
    };

    llm.sendPromptToClaude.mockResolvedValue({
      content: JSON.stringify(mockSummary),
      json: mockSummary,
    });

    const result = await analyzer.generateProjectSummary(mockKeyFiles, mockProjectPath);

    expect(result.projectName).toBe('test-project');
    // The analyzer enriches the LLM payload with metadata.
    expect(result.analysisDate).toBeDefined();
    expect(result.projectPath).toBe(mockProjectPath);
  });

  it('should return fallback summary on LLM error', async () => {
    llm.sendPromptToClaude.mockRejectedValue(new Error('LLM Error'));

    const result = await analyzer.generateProjectSummary(mockKeyFiles, mockProjectPath);

    expect(result.fallback).toBe(true);
    expect(console.error).toHaveBeenCalled();
  });

  // Renamed from "should throw error if LLM returns invalid JSON": the
  // assertions expect a fallback summary, not a thrown error.
  it('should return fallback summary if LLM returns invalid JSON', async () => {
    llm.sendPromptToClaude.mockResolvedValue({
      content: 'not valid json',
      json: null,
    });

    const result = await analyzer.generateProjectSummary(mockKeyFiles, mockProjectPath);

    expect(result.fallback).toBe(true);
  });
});
|
|
620
|
+
|
|
621
|
+
// extractFileContents: concatenates key-file contents for the LLM prompt,
// bounded to 25 files / ~100KB total; unreadable files get a placeholder.
describe('extractFileContents', () => {
  it('should extract and format file contents', async () => {
    fs.readFileSync.mockReturnValue('file content here');

    const result = await analyzer.extractFileContents(mockKeyFiles);

    expect(result).toContain('package.json');
    expect(result).toContain('file content here');
  });

  it('should limit total content size to 100KB', async () => {
    const largeContent = 'x'.repeat(150 * 1024); // 150KB
    fs.readFileSync.mockReturnValue(largeContent);

    const result = await analyzer.extractFileContents(mockKeyFiles);

    expect(result.length).toBeLessThan(150 * 1024);
  });

  it('should handle file read errors gracefully', async () => {
    fs.readFileSync.mockImplementation(() => {
      throw new Error('Cannot read file');
    });

    const result = await analyzer.extractFileContents(mockKeyFiles);

    expect(result).toContain('Could not read file');
  });

  it('should limit to 25 files maximum', async () => {
    const manyFiles = Array(30)
      .fill(0)
      .map((_, i) => ({
        relativePath: `file${i}.js`,
        fullPath: `/mock/project/file${i}.js`,
        category: 'entry',
      }));

    fs.readFileSync.mockReturnValue('content');
    await analyzer.extractFileContents(manyFiles);

    // 30 files offered, tiny content each — exactly the first 25 are read
    // (the 100KB size cap is not reached with this content).
    expect(fs.readFileSync).toHaveBeenCalledTimes(25);
  });
});
|
|
666
|
+
|
|
667
|
+
// validateProjectSummary: normalizes a summary, defaulting missing arrays
// to [] and stateManagement.approach to 'Unknown'; present fields survive.
describe('validateProjectSummary', () => {
  it('should validate and fill in missing fields', () => {
    const partialSummary = {
      projectName: 'test',
      projectType: 'CLI',
    };

    const result = analyzer.validateProjectSummary(partialSummary);

    expect(result.projectName).toBe('test');
    expect(result.mainFrameworks).toEqual([]);
    expect(result.technologies).toEqual([]);
    expect(result.customImplementations).toEqual([]);
    expect(result.stateManagement.approach).toBe('Unknown');
  });

  it('should preserve existing fields', () => {
    const summary = {
      projectName: 'test',
      projectType: 'CLI',
      mainFrameworks: ['React'],
      technologies: ['TypeScript'],
      customImplementations: [{ name: 'hook', description: 'desc', extendsStandard: 'React' }],
      stateManagement: { approach: 'Redux', patterns: ['feature-based'] },
    };

    const result = analyzer.validateProjectSummary(summary);

    expect(result.mainFrameworks).toEqual(['React']);
    expect(result.stateManagement.approach).toBe('Redux');
  });
});
|
|
699
|
+
|
|
700
|
+
// createFallbackSummary: minimal summary built without the LLM — name and
// technologies come from package.json when readable, else the directory
// name; always flagged with fallback: true.
describe('createFallbackSummary', () => {
  it('should create fallback summary from package.json', () => {
    fs.existsSync.mockReturnValue(true);
    fs.readFileSync.mockReturnValue(
      JSON.stringify({
        name: 'my-project',
        dependencies: { react: '18.0.0', lodash: '4.0.0' },
        devDependencies: { jest: '29.0.0' },
      })
    );

    const result = analyzer.createFallbackSummary(mockProjectPath, mockKeyFiles);

    expect(result.projectName).toBe('my-project');
    expect(result.technologies).toContain('react');
    expect(result.fallback).toBe(true);
    expect(result.keyFilesCount).toBe(mockKeyFiles.length);
  });

  it('should use directory name if package.json is not available', () => {
    fs.existsSync.mockReturnValue(false);

    const result = analyzer.createFallbackSummary(mockProjectPath);

    // Basename of '/mock/project' is 'project'.
    expect(result.projectName).toBe('project');
    expect(result.fallback).toBe(true);
  });

  it('should handle package.json parse errors', () => {
    fs.existsSync.mockReturnValue(true);
    fs.readFileSync.mockReturnValue('invalid json');

    const result = analyzer.createFallbackSummary(mockProjectPath);

    expect(result.projectName).toBe('project');
    expect(result.technologies).toEqual([]);
  });

  it('should include default review guidelines', () => {
    fs.existsSync.mockReturnValue(false);

    const result = analyzer.createFallbackSummary(mockProjectPath);

    expect(result.reviewGuidelines.length).toBeGreaterThan(0);
    expect(result.reviewGuidelines[0]).toContain('Follow established patterns');
  });
});
});