opencode-conductor-cdd-plugin 1.0.0-beta.18 → 1.0.0-beta.20
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +19 -3
- package/dist/prompts/cdd/setup.json +2 -2
- package/dist/prompts/cdd/setup.test.js +40 -118
- package/dist/prompts/cdd/setup.test.ts +40 -143
- package/dist/utils/codebaseAnalysis.d.ts +61 -0
- package/dist/utils/codebaseAnalysis.js +429 -0
- package/dist/utils/codebaseAnalysis.test.d.ts +1 -0
- package/dist/utils/codebaseAnalysis.test.js +556 -0
- package/dist/utils/configDetection.d.ts +12 -0
- package/dist/utils/configDetection.js +23 -9
- package/dist/utils/configDetection.test.js +204 -7
- package/dist/utils/documentGeneration.d.ts +97 -0
- package/dist/utils/documentGeneration.js +301 -0
- package/dist/utils/documentGeneration.test.d.ts +1 -0
- package/dist/utils/documentGeneration.test.js +380 -0
- package/dist/utils/interactiveMenu.d.ts +56 -0
- package/dist/utils/interactiveMenu.js +144 -0
- package/dist/utils/interactiveMenu.test.d.ts +1 -0
- package/dist/utils/interactiveMenu.test.js +231 -0
- package/dist/utils/interactiveSetup.d.ts +43 -0
- package/dist/utils/interactiveSetup.js +131 -0
- package/dist/utils/interactiveSetup.test.d.ts +1 -0
- package/dist/utils/interactiveSetup.test.js +124 -0
- package/dist/utils/projectMaturity.d.ts +53 -0
- package/dist/utils/projectMaturity.js +179 -0
- package/dist/utils/projectMaturity.test.d.ts +1 -0
- package/dist/utils/projectMaturity.test.js +298 -0
- package/dist/utils/questionGenerator.d.ts +51 -0
- package/dist/utils/questionGenerator.js +535 -0
- package/dist/utils/questionGenerator.test.d.ts +1 -0
- package/dist/utils/questionGenerator.test.js +328 -0
- package/dist/utils/setupIntegration.d.ts +72 -0
- package/dist/utils/setupIntegration.js +179 -0
- package/dist/utils/setupIntegration.test.d.ts +1 -0
- package/dist/utils/setupIntegration.test.js +344 -0
- package/dist/utils/synergyState.test.js +17 -3
- package/package.json +2 -1
package/dist/utils/codebaseAnalysis.test.js
@@ -0,0 +1,556 @@
+import { describe, it, expect, vi, beforeEach } from 'vitest';
+import * as fs from 'fs';
+import { execSync } from 'child_process';
+import { parseIgnoreFiles, detectManifests, inferArchitecture, detectLanguages, detectFrameworks, analyzeCodebase, } from './codebaseAnalysis.js';
+/**
+ * Codebase Analysis Module Tests
+ *
+ * This module is responsible for analyzing existing codebases to extract:
+ * - Ignore patterns from .ignore, .geminiignore, .gitignore
+ * - Dependency manifests with versions and metadata
+ * - Architecture patterns (Monorepo, Microservices, MVC, etc.)
+ * - Programming languages and frameworks
+ *
+ * Based on reference implementations from:
+ * - derekbar90/opencode-conductor
+ * - gemini-cli-extensions/conductor
+ */
+// Mock file system and child_process
+vi.mock('fs');
+vi.mock('child_process');
+describe('Codebase Analysis', () => {
+    beforeEach(() => {
+        vi.clearAllMocks();
+    });
+    describe('parseIgnoreFiles', () => {
+        it('should parse .geminiignore with precedence over .gitignore', () => {
+            vi.mocked(fs.existsSync).mockImplementation((p) => {
+                return p.toString().endsWith('.geminiignore');
+            });
+            vi.mocked(fs.readFileSync).mockReturnValue('node_modules/\n*.log\n');
+            const result = parseIgnoreFiles('/test/project');
+            expect(result).toHaveLength(2);
+            expect(result[0].pattern).toBe('node_modules/');
+            expect(result[0].negated).toBe(false);
+        });
+        it('should parse .gitignore if .geminiignore does not exist', () => {
+            vi.mocked(fs.existsSync).mockImplementation((p) => {
+                return p.toString().endsWith('.gitignore');
+            });
+            vi.mocked(fs.readFileSync).mockReturnValue('dist/\n');
+            const result = parseIgnoreFiles('/test/project');
+            expect(result).toHaveLength(1);
+            expect(result[0].pattern).toBe('dist/');
+        });
+        it('should parse .ignore file', () => {
+            vi.mocked(fs.existsSync).mockImplementation((p) => {
+                const pathStr = p.toString();
+                return pathStr.endsWith('.ignore') && !pathStr.endsWith('.geminiignore');
+            });
+            vi.mocked(fs.readFileSync).mockReturnValue('temp/\n');
+            const result = parseIgnoreFiles('/test/project');
+            expect(result).toHaveLength(1);
+        });
+        it('should handle negation patterns with !', () => {
+            vi.mocked(fs.existsSync).mockReturnValue(true);
+            vi.mocked(fs.readFileSync).mockReturnValue('*.log\n!important.log\n');
+            const result = parseIgnoreFiles('/test/project');
+            expect(result).toHaveLength(2);
+            expect(result[1].pattern).toBe('important.log');
+            expect(result[1].negated).toBe(true);
+        });
+        it('should ignore comment lines starting with #', () => {
+            vi.mocked(fs.existsSync).mockReturnValue(true);
+            vi.mocked(fs.readFileSync).mockReturnValue('# Comment\nnode_modules/\n');
+            const result = parseIgnoreFiles('/test/project');
+            expect(result).toHaveLength(1);
+            expect(result[0].pattern).toBe('node_modules/');
+        });
+        it('should ignore empty lines', () => {
+            vi.mocked(fs.existsSync).mockReturnValue(true);
+            vi.mocked(fs.readFileSync).mockReturnValue('node_modules/\n\n\ndist/\n');
+            const result = parseIgnoreFiles('/test/project');
+            expect(result).toHaveLength(2);
+        });
+        it('should return empty array when no ignore files exist', () => {
+            vi.mocked(fs.existsSync).mockReturnValue(false);
+            const result = parseIgnoreFiles('/test/project');
+            expect(result).toEqual([]);
+        });
+        it('should handle patterns with wildcards (* and **)', () => {
+            vi.mocked(fs.existsSync).mockReturnValue(true);
+            vi.mocked(fs.readFileSync).mockReturnValue('*.log\n**/node_modules\n');
+            const result = parseIgnoreFiles('/test/project');
+            expect(result).toHaveLength(2);
+            expect(result[0].pattern).toBe('*.log');
+            expect(result[1].pattern).toBe('**/node_modules');
+        });
+        it('should handle directory patterns ending with /', () => {
+            vi.mocked(fs.existsSync).mockReturnValue(true);
+            vi.mocked(fs.readFileSync).mockReturnValue('dist/\nnode_modules/\n');
+            const result = parseIgnoreFiles('/test/project');
+            expect(result).toHaveLength(2);
+            expect(result[0].pattern).toBe('dist/');
+        });
+    });
+    describe('detectManifests', () => {
+        it('should detect package.json with name and version', () => {
+            vi.mocked(fs.existsSync).mockImplementation((p) => {
+                return p.toString().endsWith('package.json');
+            });
+            vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify({
+                name: 'test-project',
+                version: '1.0.0',
+                description: 'Test project',
+            }));
+            const result = detectManifests('/test/project');
+            expect(result).toHaveLength(1);
+            expect(result[0].type).toBe('package.json');
+            expect(result[0].metadata.name).toBe('test-project');
+            expect(result[0].metadata.version).toBe('1.0.0');
+        });
+        it('should detect package.json dependencies and devDependencies', () => {
+            vi.mocked(fs.existsSync).mockImplementation((p) => {
+                return p.toString().endsWith('package.json');
+            });
+            vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify({
+                name: 'test',
+                dependencies: { react: '^18.0.0' },
+                devDependencies: { vitest: '^4.0.0' },
+            }));
+            const result = detectManifests('/test/project');
+            expect(result[0].dependencies).toEqual({
+                react: '^18.0.0',
+                vitest: '^4.0.0',
+            });
+        });
+        it('should detect pom.xml with Maven metadata', () => {
+            vi.mocked(fs.existsSync).mockImplementation((p) => {
+                return p.toString().endsWith('pom.xml');
+            });
+            vi.mocked(fs.readFileSync).mockReturnValue('<project><groupId>com.example</groupId><artifactId>demo</artifactId><version>1.0</version></project>');
+            const result = detectManifests('/test/project');
+            expect(result).toHaveLength(1);
+            expect(result[0].type).toBe('pom.xml');
+            expect(result[0].metadata.groupId).toBe('com.example');
+            expect(result[0].metadata.artifactId).toBe('demo');
+        });
+        it('should detect requirements.txt with Python packages', () => {
+            vi.mocked(fs.existsSync).mockImplementation((p) => {
+                return p.toString().endsWith('requirements.txt');
+            });
+            vi.mocked(fs.readFileSync).mockReturnValue('django==4.0.0\nflask>=2.0.0\n');
+            const result = detectManifests('/test/project');
+            expect(result).toHaveLength(1);
+            expect(result[0].type).toBe('requirements.txt');
+            expect(result[0].dependencies).toEqual({
+                django: '4.0.0',
+                flask: '2.0.0',
+            });
+        });
+        it('should detect go.mod with Go module information', () => {
+            vi.mocked(fs.existsSync).mockImplementation((p) => {
+                return p.toString().endsWith('go.mod');
+            });
+            vi.mocked(fs.readFileSync).mockReturnValue('module github.com/example/project\n');
+            const result = detectManifests('/test/project');
+            expect(result).toHaveLength(1);
+            expect(result[0].type).toBe('go.mod');
+            expect(result[0].metadata.module).toBe('github.com/example/project');
+        });
+        it('should detect Cargo.toml with Rust metadata', () => {
+            vi.mocked(fs.existsSync).mockImplementation((p) => {
+                return p.toString().endsWith('Cargo.toml');
+            });
+            vi.mocked(fs.readFileSync).mockReturnValue('[package]\nname = "test"\nversion = "0.1.0"\n');
+            const result = detectManifests('/test/project');
+            expect(result).toHaveLength(1);
+            expect(result[0].type).toBe('Cargo.toml');
+            expect(result[0].metadata.name).toBe('test');
+            expect(result[0].metadata.version).toBe('0.1.0');
+        });
+        it('should detect Gemfile with Ruby gems', () => {
+            vi.mocked(fs.existsSync).mockImplementation((p) => {
+                return p.toString().endsWith('Gemfile');
+            });
+            vi.mocked(fs.readFileSync).mockReturnValue('gem "rails", "7.0.0"\ngem "pg"\n');
+            const result = detectManifests('/test/project');
+            expect(result).toHaveLength(1);
+            expect(result[0].type).toBe('Gemfile');
+            expect(result[0].dependencies).toEqual({
+                rails: '7.0.0',
+                pg: '*',
+            });
+        });
+        it('should return empty array when no manifests found', () => {
+            vi.mocked(fs.existsSync).mockReturnValue(false);
+            const result = detectManifests('/test/project');
+            expect(result).toEqual([]);
+        });
+        it('should handle malformed JSON in package.json gracefully', () => {
+            vi.mocked(fs.existsSync).mockImplementation((p) => {
+                return p.toString().endsWith('package.json');
+            });
+            vi.mocked(fs.readFileSync).mockReturnValue('{ invalid json');
+            const result = detectManifests('/test/project');
+            expect(result).toHaveLength(1);
+            expect(result[0].metadata.error).toBe('Malformed JSON');
+        });
+        it('should detect multiple manifests in same project', () => {
+            vi.mocked(fs.existsSync).mockReturnValue(true);
+            vi.mocked(fs.readFileSync).mockImplementation((p) => {
+                const pathStr = p.toString();
+                if (pathStr.endsWith('package.json')) {
+                    return JSON.stringify({ name: 'test' });
+                }
+                if (pathStr.endsWith('requirements.txt')) {
+                    return 'django==4.0.0';
+                }
+                return '';
+            });
+            const result = detectManifests('/test/project');
+            expect(result.length).toBeGreaterThanOrEqual(2);
+        });
+    });
+    describe('inferArchitecture', () => {
+        it('should detect monorepo with packages/ directory', () => {
+            vi.mocked(fs.existsSync).mockImplementation((p) => {
+                return p.toString().endsWith('packages');
+            });
+            vi.mocked(fs.readdirSync).mockReturnValue([]);
+            const result = inferArchitecture('/test/project');
+            expect(result).toContain('monorepo');
+        });
+        it('should detect monorepo with workspaces in package.json', () => {
+            vi.mocked(fs.existsSync).mockImplementation((p) => {
+                return p.toString().endsWith('package.json');
+            });
+            vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify({
+                name: 'test',
+                workspaces: ['packages/*'],
+            }));
+            vi.mocked(fs.readdirSync).mockReturnValue([]);
+            const result = inferArchitecture('/test/project');
+            expect(result).toContain('monorepo');
+        });
+        it('should detect monorepo with lerna.json', () => {
+            vi.mocked(fs.existsSync).mockImplementation((p) => {
+                return p.toString().endsWith('lerna.json');
+            });
+            vi.mocked(fs.readdirSync).mockReturnValue([]);
+            const result = inferArchitecture('/test/project');
+            expect(result).toContain('monorepo');
+        });
+        it('should detect microservices with multiple service directories', () => {
+            vi.mocked(fs.existsSync).mockImplementation((p) => {
+                const pathStr = p.toString();
+                return pathStr.includes('user-service') || pathStr.includes('auth-service') || pathStr.endsWith('package.json');
+            });
+            vi.mocked(fs.readdirSync).mockReturnValue([
+                { name: 'user-service', isDirectory: () => true },
+                { name: 'auth-service', isDirectory: () => true },
+            ]);
+            const result = inferArchitecture('/test/project');
+            expect(result).toContain('microservices');
+        });
+        it('should detect MVC pattern with models/, views/, controllers/ directories', () => {
+            vi.mocked(fs.existsSync).mockImplementation((p) => {
+                const pathStr = p.toString();
+                return pathStr.endsWith('models') || pathStr.endsWith('views') || pathStr.endsWith('controllers');
+            });
+            const result = inferArchitecture('/test/project');
+            expect(result).toContain('mvc');
+        });
+        it('should detect Clean Architecture with domain/, application/, infrastructure/ directories', () => {
+            vi.mocked(fs.existsSync).mockImplementation((p) => {
+                const pathStr = p.toString();
+                return pathStr.endsWith('domain') || pathStr.endsWith('application') || pathStr.endsWith('infrastructure');
+            });
+            const result = inferArchitecture('/test/project');
+            expect(result).toContain('clean-architecture');
+        });
+        it('should detect Hexagonal Architecture with adapters/, ports/ directories', () => {
+            vi.mocked(fs.existsSync).mockImplementation((p) => {
+                const pathStr = p.toString();
+                return pathStr.endsWith('adapters') || pathStr.endsWith('ports');
+            });
+            const result = inferArchitecture('/test/project');
+            expect(result).toContain('hexagonal-architecture');
+        });
+        it('should detect layered architecture with src/api/, src/services/, src/data/ directories', () => {
+            vi.mocked(fs.existsSync).mockImplementation((p) => {
+                const pathStr = p.toString();
+                // Must return true for src/ directory AND its subdirectories
+                if (pathStr.endsWith('/src'))
+                    return true;
+                if (pathStr.endsWith('/src/api'))
+                    return true;
+                if (pathStr.endsWith('/src/services'))
+                    return true;
+                if (pathStr.endsWith('/src/data'))
+                    return true;
+                return false;
+            });
+            vi.mocked(fs.readdirSync).mockReturnValue([]);
+            const result = inferArchitecture('/test/project');
+            expect(result).toContain('layered-architecture');
+        });
+        it('should return "unknown" when no pattern matches', () => {
+            vi.mocked(fs.existsSync).mockReturnValue(false);
+            vi.mocked(fs.readdirSync).mockReturnValue([]);
+            const result = inferArchitecture('/test/project');
+            expect(result).toContain('unknown');
+        });
+        it('should return multiple patterns if project uses hybrid architecture', () => {
+            vi.mocked(fs.existsSync).mockImplementation((p) => {
+                const pathStr = p.toString();
+                return pathStr.endsWith('packages') || pathStr.endsWith('models') || pathStr.endsWith('views') || pathStr.endsWith('controllers');
+            });
+            const result = inferArchitecture('/test/project');
+            expect(result.length).toBeGreaterThan(1);
+            expect(result).toContain('monorepo');
+            expect(result).toContain('mvc');
+        });
+    });
+    describe('detectLanguages', () => {
+        it('should detect TypeScript from .ts files', () => {
+            vi.mocked(fs.readdirSync).mockReturnValue([
+                { name: 'index.ts', isFile: () => true, isDirectory: () => false },
+            ]);
+            const result = detectLanguages('/test/project');
+            expect(result.TypeScript).toBeDefined();
+        });
+        it('should detect JavaScript from .js files', () => {
+            vi.mocked(fs.readdirSync).mockReturnValue([
+                { name: 'app.js', isFile: () => true, isDirectory: () => false },
+            ]);
+            const result = detectLanguages('/test/project');
+            expect(result.JavaScript).toBeDefined();
+        });
+        it('should detect Python from .py files', () => {
+            vi.mocked(fs.readdirSync).mockReturnValue([
+                { name: 'main.py', isFile: () => true, isDirectory: () => false },
+            ]);
+            const result = detectLanguages('/test/project');
+            expect(result.Python).toBeDefined();
+        });
+        it('should detect Go from .go files', () => {
+            vi.mocked(fs.readdirSync).mockReturnValue([
+                { name: 'main.go', isFile: () => true, isDirectory: () => false },
+            ]);
+            const result = detectLanguages('/test/project');
+            expect(result.Go).toBeDefined();
+        });
+        it('should detect Rust from .rs files', () => {
+            vi.mocked(fs.readdirSync).mockReturnValue([
+                { name: 'main.rs', isFile: () => true, isDirectory: () => false },
+            ]);
+            const result = detectLanguages('/test/project');
+            expect(result.Rust).toBeDefined();
+        });
+        it('should detect Ruby from .rb files', () => {
+            vi.mocked(fs.readdirSync).mockReturnValue([
+                { name: 'app.rb', isFile: () => true, isDirectory: () => false },
+            ]);
+            const result = detectLanguages('/test/project');
+            expect(result.Ruby).toBeDefined();
+        });
+        it('should detect Java from .java files', () => {
+            vi.mocked(fs.readdirSync).mockReturnValue([
+                { name: 'Main.java', isFile: () => true, isDirectory: () => false },
+            ]);
+            const result = detectLanguages('/test/project');
+            expect(result.Java).toBeDefined();
+        });
+        it('should detect multiple languages in polyglot projects', () => {
+            vi.mocked(fs.readdirSync).mockReturnValue([
+                { name: 'index.ts', isFile: () => true, isDirectory: () => false },
+                { name: 'main.py', isFile: () => true, isDirectory: () => false },
+            ]);
+            const result = detectLanguages('/test/project');
+            expect(Object.keys(result).length).toBeGreaterThan(1);
+        });
+        it('should return language usage percentage', () => {
+            vi.mocked(fs.readdirSync).mockReturnValue([
+                { name: 'index.ts', isFile: () => true, isDirectory: () => false },
+                { name: 'app.ts', isFile: () => true, isDirectory: () => false },
+                { name: 'main.py', isFile: () => true, isDirectory: () => false },
+            ]);
+            const result = detectLanguages('/test/project');
+            expect(result.TypeScript).toBeGreaterThan(0);
+            expect(result.TypeScript).toBeLessThanOrEqual(100);
+        });
+    });
+    describe('detectFrameworks', () => {
+        it('should detect React from package.json dependencies', () => {
+            const manifests = [{
+                type: 'package.json',
+                path: '/test/package.json',
+                metadata: {},
+                dependencies: { react: '^18.0.0' },
+            }];
+            const result = detectFrameworks(manifests);
+            expect(result.frontend).toContain('React');
+        });
+        it('should detect Next.js from package.json dependencies', () => {
+            const manifests = [{
+                type: 'package.json',
+                path: '/test/package.json',
+                metadata: {},
+                dependencies: { next: '^13.0.0' },
+            }];
+            const result = detectFrameworks(manifests);
+            expect(result.frontend).toContain('Next.js');
+        });
+        it('should detect Vue from package.json dependencies', () => {
+            const manifests = [{
+                type: 'package.json',
+                path: '/test/package.json',
+                metadata: {},
+                dependencies: { vue: '^3.0.0' },
+            }];
+            const result = detectFrameworks(manifests);
+            expect(result.frontend).toContain('Vue');
+        });
+        it('should detect Express from package.json dependencies', () => {
+            const manifests = [{
+                type: 'package.json',
+                path: '/test/package.json',
+                metadata: {},
+                dependencies: { express: '^4.18.0' },
+            }];
+            const result = detectFrameworks(manifests);
+            expect(result.backend).toContain('Express');
+        });
+        it('should detect Django from requirements.txt', () => {
+            const manifests = [{
+                type: 'requirements.txt',
+                path: '/test/requirements.txt',
+                metadata: {},
+                dependencies: { django: '4.0.0' },
+            }];
+            const result = detectFrameworks(manifests);
+            expect(result.backend).toContain('Django');
+        });
+        it('should detect Flask from requirements.txt', () => {
+            const manifests = [{
+                type: 'requirements.txt',
+                path: '/test/requirements.txt',
+                metadata: {},
+                dependencies: { flask: '2.0.0' },
+            }];
+            const result = detectFrameworks(manifests);
+            expect(result.backend).toContain('Flask');
+        });
+        it('should detect Spring Boot from pom.xml dependencies', () => {
+            const manifests = [{
+                type: 'pom.xml',
+                path: '/test/pom.xml',
+                metadata: {},
+                dependencies: { 'spring-boot-starter': '2.7.0' },
+            }];
+            const result = detectFrameworks(manifests);
+            expect(result.backend).toContain('Spring Boot');
+        });
+        it('should categorize frameworks as frontend or backend', () => {
+            const manifests = [{
+                type: 'package.json',
+                path: '/test/package.json',
+                metadata: {},
+                dependencies: { react: '^18.0.0', express: '^4.18.0' },
+            }];
+            const result = detectFrameworks(manifests);
+            expect(result.frontend).toContain('React');
+            expect(result.backend).toContain('Express');
+        });
+        it('should detect database drivers', () => {
+            // This test is for databases, which is handled separately in analyzeCodebase
+            // Just verify the function doesn't crash
+            const manifests = [{
+                type: 'package.json',
+                path: '/test/package.json',
+                metadata: {},
+                dependencies: { mongoose: '^6.0.0' },
+            }];
+            const result = detectFrameworks(manifests);
+            expect(result).toBeDefined();
+        });
+    });
+    describe('analyzeCodebase', () => {
+        it('should perform comprehensive analysis with all sub-functions', () => {
+            vi.mocked(fs.existsSync).mockReturnValue(true);
+            vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify({ name: 'test' }));
+            vi.mocked(fs.readdirSync).mockReturnValue([
+                { name: 'index.ts', isFile: () => true, isDirectory: () => false },
+            ]);
+            const result = analyzeCodebase('/test/project');
+            expect(result).toHaveProperty('languages');
+            expect(result).toHaveProperty('frameworks');
+            expect(result).toHaveProperty('databases');
+            expect(result).toHaveProperty('architecture');
+            expect(result).toHaveProperty('manifests');
+        });
+        it('should respect ignore files during analysis', () => {
+            // This would be tested by integration tests
+            // Unit test just verifies function exists
+            expect(analyzeCodebase).toBeDefined();
+        });
+        it('should use git ls-files when available for efficiency', () => {
+            // This would be tested by integration tests
+            expect(analyzeCodebase).toBeDefined();
+        });
+        it('should fallback to manual directory scanning when git not available', () => {
+            vi.mocked(execSync).mockImplementation(() => {
+                throw new Error('Git not available');
+            });
+            expect(analyzeCodebase).toBeDefined();
+        });
+        it('should handle large files (>1MB) by reading only first/last 20 lines', () => {
+            // This would be tested by integration tests with actual large files
+            expect(analyzeCodebase).toBeDefined();
+        });
+        it('should prioritize README.md analysis', () => {
+            vi.mocked(fs.existsSync).mockImplementation((p) => {
+                return p.toString().endsWith('README.md');
+            });
+            vi.mocked(fs.readFileSync).mockImplementation((p) => {
+                if (p.toString().endsWith('README.md')) {
+                    return '# Test Project\n\nThis is a test project description';
+                }
+                return '';
+            });
+            vi.mocked(fs.readdirSync).mockReturnValue([]);
+            const result = analyzeCodebase('/test/project');
+            expect(result.projectGoal).toContain('test project');
+        });
+        it('should extract project goal from README or package.json description', () => {
+            vi.mocked(fs.existsSync).mockImplementation((p) => {
+                return p.toString().endsWith('package.json');
+            });
+            vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify({
+                name: 'test',
+                description: 'A wonderful test project',
+            }));
+            vi.mocked(fs.readdirSync).mockReturnValue([]);
+            const result = analyzeCodebase('/test/project');
+            expect(result.projectGoal).toBe('A wonderful test project');
+        });
+        it('should return comprehensive analysis object', () => {
+            vi.mocked(fs.existsSync).mockReturnValue(true);
+            vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify({
+                name: 'test',
+                dependencies: { react: '^18.0.0' },
+            }));
+            vi.mocked(fs.readdirSync).mockReturnValue([
+                { name: 'index.ts', isFile: () => true, isDirectory: () => false },
+            ]);
+            const result = analyzeCodebase('/test/project');
+            expect(result.languages).toBeDefined();
+            expect(result.frameworks.frontend).toBeDefined();
+            expect(result.frameworks.backend).toBeDefined();
+            expect(result.databases).toBeDefined();
+            expect(result.architecture).toBeDefined();
+            expect(result.manifests).toBeDefined();
+        });
+    });
+});
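The tests above pin down the public surface of the new codebaseAnalysis module. A minimal usage sketch follows, assuming only the function name and result fields that the assertions above exercise; the sample output values in the comments are illustrative, not taken from the package.

    import { analyzeCodebase } from './codebaseAnalysis.js';

    // Run the full analysis on a project root.
    const analysis = analyzeCodebase('/path/to/project');

    // Per the tests, the result exposes languages (usage percentages),
    // frameworks split into frontend/backend, databases, architecture
    // patterns, the raw manifests, and an optional projectGoal string.
    console.log(Object.keys(analysis.languages));   // e.g. ['TypeScript', 'Python']
    console.log(analysis.frameworks.frontend);      // e.g. ['React']
    console.log(analysis.frameworks.backend);       // e.g. ['Express']
    console.log(analysis.architecture);             // e.g. ['monorepo', 'mvc'] or ['unknown']
    console.log(analysis.projectGoal);              // README heading or package.json description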
package/dist/utils/configDetection.d.ts
@@ -1,10 +1,22 @@
 export type SynergyFramework = 'none' | 'oh-my-opencode' | 'oh-my-opencode-slim';
+/**
+ * Result of CDD configuration detection across multiple config files.
+ * Checks ~/.config/opencode/{opencode.json, oh-my-opencode.json, oh-my-opencode-slim.json}
+ */
 export interface ConfigDetectionResult {
+    /** Whether CDD agent is configured in opencode.json */
     hasCDDInOpenCode: boolean;
+    /** Whether CDD agent is configured in oh-my-opencode.json */
     hasCDDInOMO: boolean;
+    /** Whether CDD agent is configured in oh-my-opencode-slim.json */
+    hasCDDInSlim: boolean;
+    /** Whether synergy mode should be activated (true if any synergy framework detected) */
     synergyActive: boolean;
+    /** The CDD model string extracted from configs (priority: slim > OMO > opencode) */
     cddModel?: string;
+    /** Which synergy framework to use (priority: slim > OMO > none) */
     synergyFramework: SynergyFramework;
+    /** Available agents from slim config (filtered by disabled_agents) */
     slimAgents?: string[];
 }
 export declare function detectCDDConfig(): ConfigDetectionResult;
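The new doc comments encode a priority order across the three config files (slim first, then oh-my-opencode, then opencode.json). As an illustration of the documented semantics, a hypothetical detection result for a machine where both oh-my-opencode-slim.json and opencode.json define a cdd agent might look like this; the model string and flag values are invented for the example.

    const example: ConfigDetectionResult = {
        hasCDDInOpenCode: true,
        hasCDDInOMO: false,
        hasCDDInSlim: true,
        synergyActive: true,                         // a synergy framework was detected
        cddModel: 'anthropic/claude-sonnet-4',       // hypothetical; taken from the slim config, which wins
        synergyFramework: 'oh-my-opencode-slim',     // priority: slim > OMO > none
        slimAgents: ['explorer', 'librarian', 'oracle', 'designer'],
    };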
package/dist/utils/configDetection.js
@@ -8,6 +8,7 @@ export function detectCDDConfig() {
     const slimJsonPath = join(opencodeConfigDir, "oh-my-opencode-slim.json");
     let hasCDDInOpenCode = false;
     let hasCDDInOMO = false;
+    let hasCDDInSlim = false;
     let cddModel;
     let synergyFramework = 'none';
     let slimAgents;
@@ -17,8 +18,16 @@ export function detectCDDConfig() {
             const config = JSON.parse(readFileSync(slimJsonPath, "utf-8"));
             // Check if config is not empty and has actual content
             if (config && Object.keys(config).length > 0) {
-
-
+                // Check for CDD agent in slim config (strict detection)
+                if (config.agents && config.agents.cdd) {
+                    hasCDDInSlim = true;
+                    synergyFramework = 'oh-my-opencode-slim';
+                    // Extract model from slim config (priority over OMO and opencode.json)
+                    if (config.agents.cdd.model) {
+                        cddModel = config.agents.cdd.model;
+                    }
+                }
+                // Extract available agents (filter out disabled ones) regardless of CDD presence
                 const coreAgents = ['explorer', 'librarian', 'oracle', 'designer'];
                 const disabledAgents = new Set(config.disabled_agents ?? []);
                 slimAgents = coreAgents.filter(agent => !disabledAgents.has(agent));
@@ -28,15 +37,19 @@ export function detectCDDConfig() {
             // Silently fail on parse errors
         }
     }
-    // Check oh-my-opencode.json (only if slim
-
+    // Check oh-my-opencode.json (only if slim doesn't have CDD)
+    // NOTE: We still check OMO to set hasCDDInOMO flag even if slim has priority
+    if (existsSync(omoJsonPath)) {
         try {
             const config = JSON.parse(readFileSync(omoJsonPath, "utf-8"));
             if (config.agents && config.agents.cdd) {
                 hasCDDInOMO = true;
-
-
-
+                // Only activate OMO synergy if slim doesn't have CDD (slim takes priority)
+                if (synergyFramework === 'none') {
+                    synergyFramework = 'oh-my-opencode';
+                }
+                // Extract model from oh-my-opencode.json (only if not already set by slim)
+                if (!cddModel && config.agents.cdd.model) {
                     cddModel = config.agents.cdd.model;
                 }
             }
@@ -45,13 +58,13 @@ export function detectCDDConfig() {
             // Silently fail on parse errors
         }
     }
-    // Check opencode.json (fallback if model not found in OMO)
+    // Check opencode.json (fallback if model not found in slim or OMO)
     if (existsSync(opencodeJsonPath)) {
         try {
            const config = JSON.parse(readFileSync(opencodeJsonPath, "utf-8"));
            if (config.agent && config.agent.cdd) {
                hasCDDInOpenCode = true;
-                // Only use this model if we didn't find one in oh-my-opencode.json
+                // Only use this model if we didn't find one in slim or oh-my-opencode.json
                 if (!cddModel && config.agent.cdd.model) {
                     cddModel = config.agent.cdd.model;
                 }
@@ -64,6 +77,7 @@ export function detectCDDConfig() {
     return {
         hasCDDInOpenCode,
         hasCDDInOMO,
+        hasCDDInSlim,
         synergyActive: synergyFramework !== 'none',
         cddModel,
         synergyFramework,
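The comments added in the hunks above spell out a three-level fallback for the model string. Condensed into one expression, and assuming hypothetical variables holding the parsed contents of each config file (slimConfig, omoConfig, opencodeConfig are stand-ins, not names from the package), the resolution order the hunks implement is:

    // Precedence mirrored from detectCDDConfig(): slim, then OMO, then opencode.json.
    // slimConfig / omoConfig / opencodeConfig are hypothetical parsed-JSON stand-ins.
    const cddModel = slimConfig?.agents?.cdd?.model
        ?? omoConfig?.agents?.cdd?.model
        ?? opencodeConfig?.agent?.cdd?.model;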