@iservu-inc/adf-cli 0.3.6 → 0.4.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.project/chats/{current → complete}/2025-10-03_AI-PROVIDER-INTEGRATION.md +34 -35
- package/.project/chats/complete/2025-10-04_CONFIG-COMMAND.md +503 -0
- package/.project/chats/current/2025-10-04_PHASE-4-1-SMART-FILTERING.md +381 -0
- package/.project/chats/current/SESSION-STATUS.md +68 -27
- package/.project/docs/PHASE-4-2-LEARNING-SYSTEM.md +881 -0
- package/.project/docs/SMART-FILTERING-SYSTEM.md +385 -0
- package/.project/docs/goals/PROJECT-VISION.md +32 -10
- package/CHANGELOG.md +109 -1
- package/README.md +476 -381
- package/lib/analyzers/project-analyzer.js +380 -0
- package/lib/commands/config.js +68 -1
- package/lib/filters/question-filter.js +480 -0
- package/lib/frameworks/interviewer.js +184 -3
- package/lib/learning/learning-manager.js +447 -0
- package/lib/learning/pattern-detector.js +376 -0
- package/lib/learning/rule-generator.js +304 -0
- package/lib/learning/skip-tracker.js +260 -0
- package/lib/learning/storage.js +296 -0
- package/package.json +70 -69
- package/tests/learning-storage.test.js +184 -0
- package/tests/pattern-detector.test.js +297 -0
- package/tests/project-analyzer.test.js +221 -0
- package/tests/question-filter.test.js +297 -0
- package/tests/skip-tracker.test.js +198 -0
- /package/.project/chats/{current → complete}/2025-10-03_FRAMEWORK-UPDATE-SYSTEM.md +0 -0
|
@@ -0,0 +1,184 @@
|
|
|
1
|
+
const fs = require('fs-extra');
const path = require('path');
const storage = require('../lib/learning/storage');

// Unit tests for lib/learning/storage.js: directory bootstrap, JSON
// read/write round-trips, history appends, config/stat accessors, and
// cleanup. Each test runs against a throwaway directory so no state
// leaks between tests or into the real project.
describe('Learning Storage', () => {
  const tempDir = path.join(__dirname, 'temp-learning-test');

  beforeEach(async () => {
    await fs.ensureDir(tempDir);
  });

  afterEach(async () => {
    await fs.remove(tempDir);
  });

  describe('ensureLearningDirectory', () => {
    test('should create learning directory if it does not exist', async () => {
      await storage.ensureLearningDirectory(tempDir);

      // Storage keeps its data under <project>/.adf/learning.
      const learningPath = path.join(tempDir, '.adf', 'learning');
      const exists = await fs.pathExists(learningPath);

      expect(exists).toBe(true);
    });
  });

  describe('writeLearningData and readLearningData', () => {
    test('should write and read learning data', async () => {
      const data = {
        version: '1.0',
        sessions: [
          { sessionId: 'test-1', skips: [] }
        ]
      };

      await storage.writeLearningData(tempDir, 'test-data.json', data);
      const retrieved = await storage.readLearningData(tempDir, 'test-data.json');

      // Round-trip must preserve the full structure, not just top-level keys.
      expect(retrieved).toEqual(data);
    });

    test('should return null for non-existent file', async () => {
      const retrieved = await storage.readLearningData(tempDir, 'non-existent.json');
      expect(retrieved).toBeNull();
    });

    test('should handle corrupted JSON gracefully', async () => {
      // Write syntactically invalid JSON directly, bypassing writeLearningData.
      const learningPath = path.join(tempDir, '.adf', 'learning');
      await fs.ensureDir(learningPath);
      await fs.writeFile(path.join(learningPath, 'corrupted.json'), 'invalid json {]');

      // A parse failure is reported as null, the same as a missing file.
      const retrieved = await storage.readLearningData(tempDir, 'corrupted.json');
      expect(retrieved).toBeNull();
    });
  });

  describe('appendToLearningHistory', () => {
    test('should append to existing history', async () => {
      const initial = {
        version: '1.0',
        sessions: [{ sessionId: 'session-1' }]
      };

      await storage.writeLearningData(tempDir, 'history.json', initial);
      await storage.appendToLearningHistory(tempDir, 'history.json', { sessionId: 'session-2' }, 'sessions');

      const retrieved = await storage.readLearningData(tempDir, 'history.json');
      expect(retrieved.sessions.length).toBe(2);
      expect(retrieved.sessions[1].sessionId).toBe('session-2');
    });

    test('should create new file if it does not exist', async () => {
      await storage.appendToLearningHistory(tempDir, 'new-history.json', { sessionId: 'session-1' }, 'sessions');

      const retrieved = await storage.readLearningData(tempDir, 'new-history.json');
      expect(retrieved.sessions.length).toBe(1);
      expect(retrieved.sessions[0].sessionId).toBe('session-1');
    });
  });

  describe('getSkipHistory', () => {
    test('should return empty history if file does not exist', async () => {
      // Missing history yields a well-formed empty document, never null.
      const history = await storage.getSkipHistory(tempDir);
      expect(history).toEqual({ version: '1.0', sessions: [] });
    });

    test('should return existing skip history', async () => {
      const data = {
        version: '1.0',
        sessions: [
          { sessionId: 'test', skips: [{ questionId: 'q1' }] }
        ]
      };

      await storage.writeLearningData(tempDir, 'skip-history.json', data);
      const history = await storage.getSkipHistory(tempDir);

      expect(history.sessions.length).toBe(1);
      expect(history.sessions[0].skips.length).toBe(1);
    });
  });

  describe('getLearningConfig', () => {
    test('should return default config if file does not exist', async () => {
      const config = await storage.getLearningConfig(tempDir);

      // Pins the documented defaults; a change here is a breaking change
      // for users relying on out-of-the-box behavior.
      expect(config.enabled).toBe(true);
      expect(config.trackSkips).toBe(true);
      expect(config.minSessionsForPattern).toBe(3);
      expect(config.minConfidenceForAutoFilter).toBe(75);
    });

    test('should return existing config', async () => {
      const customConfig = {
        version: '1.0',
        enabled: false,
        minSessionsForPattern: 5
      };

      await storage.writeLearningData(tempDir, 'config.json', customConfig);
      const config = await storage.getLearningConfig(tempDir);

      expect(config.enabled).toBe(false);
      expect(config.minSessionsForPattern).toBe(5);
    });
  });

  describe('clearLearningData', () => {
    test('should remove all JSON files from learning directory', async () => {
      await storage.writeLearningData(tempDir, 'file1.json', { test: 1 });
      await storage.writeLearningData(tempDir, 'file2.json', { test: 2 });

      await storage.clearLearningData(tempDir);

      const file1 = await storage.readLearningData(tempDir, 'file1.json');
      const file2 = await storage.readLearningData(tempDir, 'file2.json');

      expect(file1).toBeNull();
      expect(file2).toBeNull();
    });

    test('should not fail if learning directory does not exist', async () => {
      // Awaiting directly is the reliable "does not throw" check for async
      // code: a rejection fails the test. The previous
      // `expect(...).resolves.not.toThrow()` form is unreliable because
      // `.toThrow` expects the received value to be a function.
      await storage.clearLearningData(tempDir);
    });
  });

  describe('getLearningDataSize', () => {
    test('should return 0 if no learning data exists', async () => {
      const size = await storage.getLearningDataSize(tempDir);
      expect(size).toBe(0);
    });

    test('should return total size of all JSON files', async () => {
      await storage.writeLearningData(tempDir, 'file1.json', { test: 'data1' });
      await storage.writeLearningData(tempDir, 'file2.json', { test: 'data2' });

      // Exact byte count depends on serialization, so only assert positivity.
      const size = await storage.getLearningDataSize(tempDir);
      expect(size).toBeGreaterThan(0);
    });
  });

  describe('updateLearningStats', () => {
    test('should update statistics', async () => {
      await storage.updateLearningStats(tempDir, {
        totalSessions: 5,
        totalSkips: 20
      });

      const stats = await storage.getLearningStats(tempDir);
      expect(stats.totalSessions).toBe(5);
      expect(stats.totalSkips).toBe(20);
      // lastUpdated is a timestamp set by the implementation; only check presence.
      expect(stats.lastUpdated).toBeTruthy();
    });

    test('should preserve existing stats when updating', async () => {
      // Partial updates must merge, not replace, the stats document.
      await storage.updateLearningStats(tempDir, { totalSessions: 5 });
      await storage.updateLearningStats(tempDir, { totalSkips: 20 });

      const stats = await storage.getLearningStats(tempDir);
      expect(stats.totalSessions).toBe(5);
      expect(stats.totalSkips).toBe(20);
    });
  });
});
|
|
@@ -0,0 +1,297 @@
|
|
|
1
|
+
const fs = require('fs-extra');
const path = require('path');
const { PatternDetector, detectPatterns, getPatternSummary } = require('../lib/learning/pattern-detector');

// Unit tests for lib/learning/pattern-detector.js: skip/category/framework
// pattern detection, user-preference detection, the file-backed
// detectPatterns() entry point, and pattern summarization.
describe('Pattern Detector', () => {
  const tempDir = path.join(__dirname, 'temp-pattern-test');

  // Build one session record. `frameworks` is only attached (as a fresh
  // array) when supplied, so the fixture shape matches sessions recorded
  // without framework info.
  const makeSession = (sessionId, skips, frameworks) => ({
    sessionId,
    ...(frameworks ? { frameworks: [...frameworks] } : {}),
    skips
  });

  // Build `count` sessions named s1..sN, each with its own copy of the
  // skips produced by `buildSkips`.
  const repeatSessions = (count, buildSkips, frameworks) =>
    Array.from({ length: count }, (_, i) => makeSession(`s${i + 1}`, buildSkips(), frameworks));

  // Build answer records q1..qN for a single category from a word-count list.
  const answersIn = (category, wordCounts) =>
    wordCounts.map((wordCount, i) => ({ questionId: `q${i + 1}`, category, wordCount }));

  beforeEach(async () => {
    await fs.ensureDir(tempDir);
  });

  afterEach(async () => {
    await fs.remove(tempDir);
  });

  describe('PatternDetector', () => {
    test('should detect consistent skip patterns', () => {
      const skipHistory = {
        sessions: repeatSessions(3, () => [
          { questionId: 'q1', text: 'Deploy where?', category: 'deployment', reason: 'manual' }
        ])
      };

      const detector = new PatternDetector(skipHistory, {}, { minSessionsForPattern: 3 });
      const patterns = detector.detectConsistentSkips();

      expect(patterns.length).toBeGreaterThan(0);
      expect(patterns[0].questionId).toBe('q1');
      expect(patterns[0].confidence).toBe(100); // 3/3 sessions
      expect(patterns[0].type).toBe('consistent_skip');
    });

    test('should not detect patterns with insufficient sessions', () => {
      // Only two sessions against a three-session threshold.
      const skipHistory = {
        sessions: repeatSessions(2, () => [
          { questionId: 'q1', text: 'Q1', reason: 'manual' }
        ])
      };

      const detector = new PatternDetector(skipHistory, {}, { minSessionsForPattern: 3 });
      const patterns = detector.detectConsistentSkips();

      expect(patterns.length).toBe(0); // Not enough sessions
    });

    test('should detect category skip patterns', () => {
      // A single session that skipped every deployment question (q1..q5).
      const skipHistory = {
        sessions: [
          makeSession('s1', Array.from({ length: 5 }, (_, i) => ({
            questionId: `q${i + 1}`,
            category: 'deployment',
            reason: 'manual'
          })))
        ]
      };

      const detector = new PatternDetector(skipHistory, {});
      const patterns = detector.detectCategoryPatterns();

      expect(patterns.length).toBeGreaterThan(0);
      expect(patterns[0].category).toBe('deployment');
      expect(patterns[0].type).toBe('category_skip');
      expect(patterns[0].confidence).toBe(100); // 5/5 questions skipped
    });

    test('should detect framework-specific skip patterns', () => {
      const skipHistory = {
        sessions: repeatSessions(3, () => [
          { questionId: 'q_routing', text: 'Routing setup?', category: 'frontend', reason: 'manual' }
        ], ['Next.js'])
      };

      const detector = new PatternDetector(skipHistory, {}, { minSessionsForPattern: 3 });
      const patterns = detector.detectFrameworkPatterns();

      expect(patterns.length).toBeGreaterThan(0);
      expect(patterns[0].framework).toBe('Next.js');
      expect(patterns[0].questionId).toBe('q_routing');
      expect(patterns[0].confidence).toBe(100); // 3/3 Next.js sessions
    });

    test('should detect user preference patterns', () => {
      const emptySkips = { sessions: [] };
      const answerHistory = {
        sessions: [
          {
            sessionId: 's1',
            answers: answersIn('testing', [15, 18, 20])
          }
        ]
      };

      const detector = new PatternDetector(emptySkips, answerHistory);
      const patterns = detector.detectUserPreferences();

      expect(patterns.length).toBeGreaterThan(0);
      expect(patterns[0].category).toBe('testing');
      expect(patterns[0].preference).toBe('brief_answers');
      expect(patterns[0].avgWordCount).toBeLessThan(30);
    });

    test('should detect detailed answer preferences', () => {
      const emptySkips = { sessions: [] };
      const answerHistory = {
        sessions: [
          {
            sessionId: 's1',
            answers: answersIn('architecture', [120, 150, 110])
          }
        ]
      };

      const detector = new PatternDetector(emptySkips, answerHistory);
      const patterns = detector.detectUserPreferences();

      expect(patterns.length).toBeGreaterThan(0);
      expect(patterns[0].category).toBe('architecture');
      expect(patterns[0].preference).toBe('detailed_answers');
      expect(patterns[0].avgWordCount).toBeGreaterThan(100);
    });

    test('should aggregate all pattern types', () => {
      const skipHistory = {
        sessions: repeatSessions(3, () => [
          { questionId: 'q1', category: 'deployment', reason: 'manual' }
        ])
      };

      const detector = new PatternDetector(skipHistory, {}, { minSessionsForPattern: 3 });
      const allPatterns = detector.detectAllPatterns();

      expect(allPatterns).toHaveProperty('consistentSkips');
      expect(allPatterns).toHaveProperty('categoryPatterns');
      expect(allPatterns).toHaveProperty('frameworkPatterns');
      expect(allPatterns).toHaveProperty('userPreferences');
      expect(allPatterns.consistentSkips.length).toBeGreaterThan(0);
    });
  });

  describe('detectPatterns', () => {
    test('should detect patterns from project files', async () => {
      const skipHistory = {
        version: '1.0',
        sessions: repeatSessions(3, () => [
          { questionId: 'q1', category: 'deployment', reason: 'manual' }
        ])
      };

      // Seed the on-disk layout detectPatterns reads from.
      const learningDir = path.join(tempDir, '.adf', 'learning');
      await fs.ensureDir(learningDir);
      await fs.writeJSON(path.join(learningDir, 'skip-history.json'), skipHistory);
      await fs.writeJSON(path.join(learningDir, 'answer-history.json'), { version: '1.0', sessions: [] });
      await fs.writeJSON(path.join(learningDir, 'config.json'), { minSessionsForPattern: 3 });

      const patterns = await detectPatterns(tempDir);

      expect(patterns).toHaveProperty('consistentSkips');
      expect(patterns.consistentSkips.length).toBeGreaterThan(0);
    });
  });

  describe('getPatternSummary', () => {
    test('should summarize patterns by confidence level', () => {
      const patterns = {
        consistentSkips: [90, 70, 50].map((confidence) => ({ confidence })),
        categoryPatterns: [{ confidence: 85 }],
        frameworkPatterns: [],
        userPreferences: []
      };

      const summary = getPatternSummary(patterns);

      expect(summary.total).toBe(4);
      expect(summary.highConfidence).toBe(2); // 90, 85
      expect(summary.mediumConfidence).toBe(1); // 70
      expect(summary.lowConfidence).toBe(1); // 50
      expect(summary.byType.consistentSkips).toBe(3);
      expect(summary.byType.categoryPatterns).toBe(1);
    });

    test('should handle empty patterns', () => {
      const summary = getPatternSummary({
        consistentSkips: [],
        categoryPatterns: [],
        frameworkPatterns: [],
        userPreferences: []
      });

      expect(summary.total).toBe(0);
      expect(summary.highConfidence).toBe(0);
      expect(summary.mediumConfidence).toBe(0);
      expect(summary.lowConfidence).toBe(0);
    });
  });
});
|
|
@@ -0,0 +1,221 @@
|
|
|
1
|
+
const fs = require('fs-extra');
const path = require('path');
const { analyzeProject, getProjectSummary, PROJECT_TYPES } = require('../lib/analyzers/project-analyzer');

// Unit tests for lib/analyzers/project-analyzer.js: project-type inference
// from package.json / README / Dockerfile fixtures, human-readable
// summaries, and the confidence score.
describe('Project Analyzer', () => {
  const tempDir = path.join(__dirname, 'temp-test-project');

  // Fixture helpers: write a package.json manifest or an arbitrary file
  // into the throwaway project directory.
  const writeManifest = (manifest) => fs.writeJson(path.join(tempDir, 'package.json'), manifest);
  const writeProjectFile = (name, contents) => fs.writeFile(path.join(tempDir, name), contents);

  beforeEach(async () => {
    await fs.ensureDir(tempDir);
  });

  afterEach(async () => {
    await fs.remove(tempDir);
  });

  describe('analyzeProject', () => {
    test('should detect React web app from package.json', async () => {
      await writeManifest({
        name: 'test-app',
        version: '1.0.0',
        description: 'A React web application',
        dependencies: {
          react: '^18.0.0',
          'react-dom': '^18.0.0'
        }
      });

      const context = await analyzeProject(tempDir);

      expect(context.type).toBe(PROJECT_TYPES.WEB_APP);
      expect(context.frameworks).toContain('React');
      expect(context.languages).toContain('JavaScript/TypeScript');
      expect(context.confidence).toBeGreaterThan(50);
    });

    test('should detect CLI tool from package.json bin field', async () => {
      await writeManifest({
        name: 'test-cli',
        version: '1.0.0',
        description: 'A command-line tool',
        bin: {
          'test-cli': 'bin/cli.js'
        },
        dependencies: {
          commander: '^9.0.0'
        }
      });

      const context = await analyzeProject(tempDir);

      expect(context.type).toBe(PROJECT_TYPES.CLI_TOOL);
      expect(context.confidence).toBeGreaterThan(50);
    });

    test('should detect npm library from package.json main field', async () => {
      await writeManifest({
        name: 'test-library',
        version: '1.0.0',
        description: 'A utility library',
        main: 'index.js',
        private: false
      });

      const context = await analyzeProject(tempDir);

      expect(context.type).toBe(PROJECT_TYPES.LIBRARY);
      expect(context.confidence).toBeGreaterThan(30);
    });

    test('should detect API server from dependencies', async () => {
      await writeManifest({
        name: 'test-api',
        version: '1.0.0',
        description: 'RESTful API server',
        dependencies: {
          express: '^4.18.0',
          cors: '^2.8.5'
        }
      });

      const context = await analyzeProject(tempDir);

      expect(context.type).toBe(PROJECT_TYPES.API_SERVER);
      expect(context.frameworks).toContain('Express');
    });

    test('should detect fullstack app with React and Express', async () => {
      // Frontend + backend frameworks together should classify as fullstack.
      await writeManifest({
        name: 'test-fullstack',
        version: '1.0.0',
        dependencies: {
          react: '^18.0.0',
          express: '^4.18.0'
        }
      });

      const context = await analyzeProject(tempDir);

      expect(context.type).toBe(PROJECT_TYPES.FULLSTACK);
      expect(context.frameworks).toContain('React');
      expect(context.frameworks).toContain('Express');
    });

    test('should extract description from README', async () => {
      const readmeBody = `# Test Project

This is a test project for demonstrating the analyzer. It has a clear description.

## Features

- Feature 1
- Feature 2
`;

      await writeProjectFile('README.md', readmeBody);

      const context = await analyzeProject(tempDir);

      expect(context.description).toBeTruthy();
      expect(context.description).toContain('test project');
    });

    test('should detect test framework presence', async () => {
      await writeManifest({
        name: 'test-project',
        devDependencies: {
          jest: '^29.0.0'
        }
      });

      const context = await analyzeProject(tempDir);

      expect(context.hasTests).toBe(true);
    });

    test('should detect Docker presence', async () => {
      await writeProjectFile('Dockerfile', 'FROM node:18');

      const context = await analyzeProject(tempDir);

      expect(context.hasDocker).toBe(true);
    });

    test('should handle project with no package.json gracefully', async () => {
      // Empty directory: no manifest, README, or Dockerfile.
      const context = await analyzeProject(tempDir);

      expect(context.type).toBe(PROJECT_TYPES.UNKNOWN);
      expect(context.confidence).toBeLessThan(50);
    });
  });

  describe('getProjectSummary', () => {
    test('should return readable summary for web app', () => {
      const webContext = {
        type: PROJECT_TYPES.WEB_APP,
        frameworks: ['React', 'Next.js'],
        languages: ['JavaScript/TypeScript'],
        confidence: 90
      };

      const summary = getProjectSummary(webContext);

      expect(summary).toContain('Web Application');
      expect(summary).toContain('React');
      expect(summary).toContain('Next.js');
    });

    test('should return readable summary for CLI tool', () => {
      const cliContext = {
        type: PROJECT_TYPES.CLI_TOOL,
        frameworks: [],
        languages: ['JavaScript/TypeScript'],
        confidence: 85
      };

      const summary = getProjectSummary(cliContext);

      expect(summary).toContain('CLI Tool');
    });
  });

  describe('Confidence scoring', () => {
    test('should have high confidence with complete information', async () => {
      // Manifest + README + Dockerfile together should score highly.
      await writeManifest({
        name: 'complete-project',
        version: '1.0.0',
        description: 'A complete project with all metadata',
        bin: { cli: 'bin/cli.js' },
        dependencies: { commander: '^9.0.0' },
        devDependencies: { jest: '^29.0.0' }
      });
      await writeProjectFile('README.md', '# Project\n\nDescription here');
      await writeProjectFile('Dockerfile', 'FROM node:18');

      const context = await analyzeProject(tempDir);

      expect(context.confidence).toBeGreaterThanOrEqual(75);
    });

    test('should have low confidence with minimal information', async () => {
      const context = await analyzeProject(tempDir);

      expect(context.confidence).toBeLessThan(50);
    });
  });
});
|