opencode-conductor-cdd-plugin 1.0.0-beta.17 → 1.0.0-beta.19

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. package/dist/prompts/agent/cdd.md +16 -16
  2. package/dist/prompts/agent/implementer.md +5 -5
  3. package/dist/prompts/agent.md +7 -7
  4. package/dist/prompts/cdd/implement.json +1 -1
  5. package/dist/prompts/cdd/revert.json +1 -1
  6. package/dist/prompts/cdd/setup.json +2 -2
  7. package/dist/prompts/cdd/setup.test.js +40 -118
  8. package/dist/prompts/cdd/setup.test.ts +40 -143
  9. package/dist/test/integration/rebrand.test.js +15 -14
  10. package/dist/utils/agentMapping.js +2 -0
  11. package/dist/utils/archive-tracks.d.ts +28 -0
  12. package/dist/utils/archive-tracks.js +154 -1
  13. package/dist/utils/archive-tracks.test.d.ts +1 -0
  14. package/dist/utils/archive-tracks.test.js +495 -0
  15. package/dist/utils/codebaseAnalysis.d.ts +61 -0
  16. package/dist/utils/codebaseAnalysis.js +429 -0
  17. package/dist/utils/codebaseAnalysis.test.d.ts +1 -0
  18. package/dist/utils/codebaseAnalysis.test.js +556 -0
  19. package/dist/utils/documentGeneration.d.ts +97 -0
  20. package/dist/utils/documentGeneration.js +301 -0
  21. package/dist/utils/documentGeneration.test.d.ts +1 -0
  22. package/dist/utils/documentGeneration.test.js +380 -0
  23. package/dist/utils/interactiveMenu.d.ts +56 -0
  24. package/dist/utils/interactiveMenu.js +144 -0
  25. package/dist/utils/interactiveMenu.test.d.ts +1 -0
  26. package/dist/utils/interactiveMenu.test.js +231 -0
  27. package/dist/utils/interactiveSetup.d.ts +43 -0
  28. package/dist/utils/interactiveSetup.js +131 -0
  29. package/dist/utils/interactiveSetup.test.d.ts +1 -0
  30. package/dist/utils/interactiveSetup.test.js +124 -0
  31. package/dist/utils/metadataTracker.d.ts +39 -0
  32. package/dist/utils/metadataTracker.js +105 -0
  33. package/dist/utils/metadataTracker.test.d.ts +1 -0
  34. package/dist/utils/metadataTracker.test.js +265 -0
  35. package/dist/utils/planParser.d.ts +25 -0
  36. package/dist/utils/planParser.js +107 -0
  37. package/dist/utils/planParser.test.d.ts +1 -0
  38. package/dist/utils/planParser.test.js +119 -0
  39. package/dist/utils/projectMaturity.d.ts +53 -0
  40. package/dist/utils/projectMaturity.js +179 -0
  41. package/dist/utils/projectMaturity.test.d.ts +1 -0
  42. package/dist/utils/projectMaturity.test.js +298 -0
  43. package/dist/utils/questionGenerator.d.ts +51 -0
  44. package/dist/utils/questionGenerator.js +535 -0
  45. package/dist/utils/questionGenerator.test.d.ts +1 -0
  46. package/dist/utils/questionGenerator.test.js +328 -0
  47. package/dist/utils/setupIntegration.d.ts +72 -0
  48. package/dist/utils/setupIntegration.js +179 -0
  49. package/dist/utils/setupIntegration.test.d.ts +1 -0
  50. package/dist/utils/setupIntegration.test.js +344 -0
  51. package/dist/utils/statusDisplay.d.ts +35 -0
  52. package/dist/utils/statusDisplay.js +81 -0
  53. package/dist/utils/statusDisplay.test.d.ts +1 -0
  54. package/dist/utils/statusDisplay.test.js +102 -0
  55. package/package.json +1 -1
package/dist/utils/archive-tracks.test.js
@@ -0,0 +1,495 @@
+ import { describe, it, expect, beforeEach, afterEach } from 'vitest';
+ import { isTrackFinished, parseMetadata, isPlanFullyCompleted, moveTrackToArchive, removeTrackFromRegistry, archiveCompletedTracks } from './archive-tracks.js';
+ import { mkdirSync, writeFileSync, existsSync, readFileSync, rmSync } from 'fs';
+ import { join } from 'path';
+ import { tmpdir } from 'os';
+ describe('archive-tracks', () => {
+     describe('parseMetadata', () => {
+         it('should parse valid metadata.json content', () => {
+             const jsonContent = JSON.stringify({
+                 track_id: 'test_track_001',
+                 type: 'feature',
+                 status: 'completed',
+                 created_at: '2026-01-19T00:00:00Z',
+                 updated_at: '2026-01-19T12:00:00Z',
+                 description: 'Test track'
+             });
+             const result = parseMetadata(jsonContent);
+             expect(result).toEqual({
+                 track_id: 'test_track_001',
+                 type: 'feature',
+                 status: 'completed',
+                 created_at: '2026-01-19T00:00:00Z',
+                 updated_at: '2026-01-19T12:00:00Z',
+                 description: 'Test track'
+             });
+         });
+         it('should throw error for invalid JSON', () => {
+             expect(() => parseMetadata('invalid json')).toThrow();
+         });
+         it('should throw error for missing required fields', () => {
+             const jsonContent = JSON.stringify({
+                 track_id: 'test_track_001'
+             });
+             expect(() => parseMetadata(jsonContent)).toThrow('Invalid metadata');
+         });
+     });
+     describe('isPlanFullyCompleted', () => {
+         it('should return true when all tasks are marked as completed', () => {
+             const planContent = `
+ # Plan
+
+ ## Phase 1
+ - [x] Task 1
+ - [x] Task 2
+
+ ## Phase 2
+ - [x] Task 3
+ `;
+             expect(isPlanFullyCompleted(planContent)).toBe(true);
+         });
+         it('should return false when any task is incomplete', () => {
+             const planContent = `
+ # Plan
+
+ ## Phase 1
+ - [x] Task 1
+ - [ ] Task 2
+
+ ## Phase 2
+ - [x] Task 3
+ `;
+             expect(isPlanFullyCompleted(planContent)).toBe(false);
+         });
+         it('should return false when there are in-progress tasks', () => {
+             const planContent = `
+ # Plan
+
+ ## Phase 1
+ - [x] Task 1
+ - [~] Task 2
+
+ ## Phase 2
+ - [x] Task 3
+ `;
+             expect(isPlanFullyCompleted(planContent)).toBe(false);
+         });
+         it('should return true for plan with no tasks', () => {
+             const planContent = `
+ # Plan
+
+ Just a description, no tasks.
+ `;
+             expect(isPlanFullyCompleted(planContent)).toBe(true);
+         });
+         it('should handle nested sub-tasks correctly', () => {
+             const planContent = `
+ ## Phase 1
+ - [x] Task 1
+   - [x] Subtask 1.1
+   - [x] Subtask 1.2
+ - [x] Task 2
+ `;
+             expect(isPlanFullyCompleted(planContent)).toBe(true);
+         });
+         it('should return false if any sub-task is incomplete', () => {
+             const planContent = `
+ ## Phase 1
+ - [x] Task 1
+   - [x] Subtask 1.1
+   - [ ] Subtask 1.2
+ - [x] Task 2
+ `;
+             expect(isPlanFullyCompleted(planContent)).toBe(false);
+         });
+     });
+     describe('isTrackFinished', () => {
+         it('should return true when metadata is completed and plan is fully checked', () => {
+             const metadata = {
+                 track_id: 'test_001',
+                 type: 'feature',
+                 status: 'completed',
+                 created_at: '2026-01-19T00:00:00Z',
+                 updated_at: '2026-01-19T12:00:00Z',
+                 description: 'Test'
+             };
+             const planContent = `
+ ## Phase 1
+ - [x] Task 1
+ - [x] Task 2
+ `;
+             expect(isTrackFinished(metadata, planContent)).toBe(true);
+         });
+         it('should return false when metadata status is not completed', () => {
+             const metadata = {
+                 track_id: 'test_001',
+                 type: 'feature',
+                 status: 'in_progress',
+                 created_at: '2026-01-19T00:00:00Z',
+                 updated_at: '2026-01-19T12:00:00Z',
+                 description: 'Test'
+             };
+             const planContent = `
+ ## Phase 1
+ - [x] Task 1
+ - [x] Task 2
+ `;
+             expect(isTrackFinished(metadata, planContent)).toBe(false);
+         });
+         it('should return false when plan has incomplete tasks', () => {
+             const metadata = {
+                 track_id: 'test_001',
+                 type: 'feature',
+                 status: 'completed',
+                 created_at: '2026-01-19T00:00:00Z',
+                 updated_at: '2026-01-19T12:00:00Z',
+                 description: 'Test'
+             };
+             const planContent = `
+ ## Phase 1
+ - [x] Task 1
+ - [ ] Task 2
+ `;
+             expect(isTrackFinished(metadata, planContent)).toBe(false);
+         });
+         it('should return false when both metadata and plan are incomplete', () => {
+             const metadata = {
+                 track_id: 'test_001',
+                 type: 'feature',
+                 status: 'new',
+                 created_at: '2026-01-19T00:00:00Z',
+                 updated_at: '2026-01-19T12:00:00Z',
+                 description: 'Test'
+             };
+             const planContent = `
+ ## Phase 1
+ - [ ] Task 1
+ - [ ] Task 2
+ `;
+             expect(isTrackFinished(metadata, planContent)).toBe(false);
+         });
+     });
+     describe('moveTrackToArchive', () => {
+         let testDir;
+         let tracksDir;
+         let archiveDir;
+         beforeEach(() => {
+             // Create temporary test directory structure
+             testDir = join(tmpdir(), `archive-test-${Date.now()}`);
+             tracksDir = join(testDir, 'conductor-cdd', 'tracks');
+             archiveDir = join(testDir, 'conductor-cdd', 'archive');
+             mkdirSync(tracksDir, { recursive: true });
+         });
+         afterEach(() => {
+             // Clean up test directory
+             if (existsSync(testDir)) {
+                 rmSync(testDir, { recursive: true, force: true });
+             }
+         });
+         it('should move track directory to archive', () => {
+             const trackId = 'test_track_001';
+             const trackPath = join(tracksDir, trackId);
+             // Create track directory with files
+             mkdirSync(trackPath);
+             writeFileSync(join(trackPath, 'spec.md'), '# Spec');
+             writeFileSync(join(trackPath, 'plan.md'), '# Plan');
+             // Move to archive
+             moveTrackToArchive(testDir, trackId);
+             // Verify track no longer in tracks/
+             expect(existsSync(trackPath)).toBe(false);
+             // Verify track now in archive/
+             const archivedPath = join(archiveDir, trackId);
+             expect(existsSync(archivedPath)).toBe(true);
+             expect(existsSync(join(archivedPath, 'spec.md'))).toBe(true);
+             expect(existsSync(join(archivedPath, 'plan.md'))).toBe(true);
+         });
+         it('should create archive directory if it does not exist', () => {
+             const trackId = 'test_track_002';
+             const trackPath = join(tracksDir, trackId);
+             mkdirSync(trackPath);
+             writeFileSync(join(trackPath, 'spec.md'), '# Spec');
+             // Archive dir should not exist yet
+             expect(existsSync(archiveDir)).toBe(false);
+             // Move to archive
+             moveTrackToArchive(testDir, trackId);
+             // Archive dir should now exist
+             expect(existsSync(archiveDir)).toBe(true);
+             expect(existsSync(join(archiveDir, trackId))).toBe(true);
+         });
+         it('should throw error if source track does not exist', () => {
+             expect(() => moveTrackToArchive(testDir, 'nonexistent_track')).toThrow();
+         });
+     });
+     describe('removeTrackFromRegistry', () => {
+         let testDir;
+         let tracksFile;
+         beforeEach(() => {
+             testDir = join(tmpdir(), `registry-test-${Date.now()}`);
+             mkdirSync(testDir, { recursive: true });
+             tracksFile = join(testDir, 'tracks.md');
+         });
+         afterEach(() => {
+             if (existsSync(testDir)) {
+                 rmSync(testDir, { recursive: true, force: true });
+             }
+         });
+         it('should remove track entry from tracks.md', () => {
+             const tracksContent = `# Project Tracks
+
+ ---
+
+ - [x] **Track: First Track**
+   *Link: [./tracks/first_track/](./tracks/first_track/)*
+
+ ---
+
+ - [x] **Track: Second Track**
+   *Link: [./tracks/second_track/](./tracks/second_track/)*
+
+ ---
+
+ - [~] **Track: Third Track**
+   *Link: [./tracks/third_track/](./tracks/third_track/)*
+ `;
+             writeFileSync(tracksFile, tracksContent);
+             // Remove second track
+             removeTrackFromRegistry(tracksFile, 'second_track');
+             const updatedContent = readFileSync(tracksFile, 'utf-8');
+             // Should not contain second track
+             expect(updatedContent).not.toContain('Second Track');
+             expect(updatedContent).not.toContain('second_track');
+             // Should still contain other tracks
+             expect(updatedContent).toContain('First Track');
+             expect(updatedContent).toContain('Third Track');
+         });
+         it('should handle track entry with separator lines correctly', () => {
+             const tracksContent = `# Project Tracks
+
+ ---
+
+ - [x] **Track: Keep This**
+   *Link: [./tracks/keep_this/](./tracks/keep_this/)*
+
+ ---
+
+ - [x] **Track: Remove This**
+   *Link: [./tracks/remove_this/](./tracks/remove_this/)*
+
+ ---
+
+ - [~] **Track: Also Keep**
+   *Link: [./tracks/also_keep/](./tracks/also_keep/)*
+ `;
+             writeFileSync(tracksFile, tracksContent);
+             removeTrackFromRegistry(tracksFile, 'remove_this');
+             const updatedContent = readFileSync(tracksFile, 'utf-8');
+             expect(updatedContent).not.toContain('Remove This');
+             expect(updatedContent).toContain('Keep This');
+             expect(updatedContent).toContain('Also Keep');
+             // Should have clean separator lines (no duplicate ---)
+             const separatorCount = (updatedContent.match(/^---$/gm) || []).length;
+             expect(separatorCount).toBe(2); // One separator between each remaining track
+         });
+         it('should do nothing if track not found in registry', () => {
+             const tracksContent = `# Project Tracks
+
+ - [x] **Track: Some Track**
+   *Link: [./tracks/some_track/](./tracks/some_track/)*
+ `;
+             writeFileSync(tracksFile, tracksContent);
+             // Try to remove non-existent track
+             removeTrackFromRegistry(tracksFile, 'nonexistent');
+             const updatedContent = readFileSync(tracksFile, 'utf-8');
+             // Content should be unchanged
+             expect(updatedContent).toBe(tracksContent);
+         });
+     });
+     describe('archiveCompletedTracks (Integration)', () => {
+         let testDir;
+         let tracksDir;
+         let archiveDir;
+         let tracksFile;
+         beforeEach(() => {
+             testDir = join(tmpdir(), `archive-integration-${Date.now()}`);
+             const cddDir = join(testDir, 'conductor-cdd');
+             tracksDir = join(cddDir, 'tracks');
+             archiveDir = join(cddDir, 'archive');
+             tracksFile = join(cddDir, 'tracks.md');
+             mkdirSync(tracksDir, { recursive: true });
+         });
+         afterEach(() => {
+             if (existsSync(testDir)) {
+                 rmSync(testDir, { recursive: true, force: true });
+             }
+         });
+         it('should archive tracks that are fully completed', () => {
+             // Create completed track
+             const completedTrackId = 'completed_track_001';
+             const completedPath = join(tracksDir, completedTrackId);
+             mkdirSync(completedPath);
+             const completedMetadata = {
+                 track_id: completedTrackId,
+                 type: 'feature',
+                 status: 'completed',
+                 created_at: '2026-01-19T00:00:00Z',
+                 updated_at: '2026-01-19T12:00:00Z',
+                 description: 'Completed track'
+             };
+             const completedPlan = `
+ ## Phase 1
+ - [x] Task 1
+ - [x] Task 2
+ `;
+             writeFileSync(join(completedPath, 'metadata.json'), JSON.stringify(completedMetadata, null, 2));
+             writeFileSync(join(completedPath, 'plan.md'), completedPlan);
+             writeFileSync(join(completedPath, 'spec.md'), '# Spec');
+             // Create tracks.md
+             const tracksContent = `# Project Tracks
+
+ ---
+
+ - [x] **Track: Completed Track**
+   *Link: [./tracks/${completedTrackId}/](./tracks/${completedTrackId}/)*
+
+ ---
+ `;
+             writeFileSync(tracksFile, tracksContent);
+             // Run archival
+             const result = archiveCompletedTracks(testDir);
+             // Verify track was archived
+             expect(result.archived).toContain(completedTrackId);
+             expect(result.errors).toHaveLength(0);
+             // Verify track moved to archive
+             expect(existsSync(completedPath)).toBe(false);
+             expect(existsSync(join(archiveDir, completedTrackId))).toBe(true);
+             // Verify removed from tracks.md
+             const updatedTracks = readFileSync(tracksFile, 'utf-8');
+             expect(updatedTracks).not.toContain(completedTrackId);
+         });
+         it('should skip tracks that are not completed', () => {
+             // Create incomplete track (metadata complete but plan incomplete)
+             const incompleteTrackId = 'incomplete_track_001';
+             const incompletePath = join(tracksDir, incompleteTrackId);
+             mkdirSync(incompletePath);
+             const incompleteMetadata = {
+                 track_id: incompleteTrackId,
+                 type: 'feature',
+                 status: 'completed',
+                 created_at: '2026-01-19T00:00:00Z',
+                 updated_at: '2026-01-19T12:00:00Z',
+                 description: 'Incomplete track'
+             };
+             const incompletePlan = `
+ ## Phase 1
+ - [x] Task 1
+ - [ ] Task 2
+ `;
+             writeFileSync(join(incompletePath, 'metadata.json'), JSON.stringify(incompleteMetadata, null, 2));
+             writeFileSync(join(incompletePath, 'plan.md'), incompletePlan);
+             const tracksContent = `# Project Tracks
+
+ ---
+
+ - [~] **Track: Incomplete Track**
+   *Link: [./tracks/${incompleteTrackId}/](./tracks/${incompleteTrackId}/)*
+
+ ---
+ `;
+             writeFileSync(tracksFile, tracksContent);
+             // Run archival
+             const result = archiveCompletedTracks(testDir);
+             // Verify track was skipped
+             expect(result.skipped).toContain(incompleteTrackId);
+             expect(result.archived).not.toContain(incompleteTrackId);
+             // Verify track still in tracks/
+             expect(existsSync(incompletePath)).toBe(true);
+             expect(existsSync(join(archiveDir, incompleteTrackId))).toBe(false);
+         });
+         it('should handle multiple tracks correctly', () => {
+             // Create 2 completed tracks and 1 incomplete
+             const completed1 = 'completed_001';
+             const completed2 = 'completed_002';
+             const incomplete1 = 'incomplete_001';
+             for (const trackId of [completed1, completed2]) {
+                 const trackPath = join(tracksDir, trackId);
+                 mkdirSync(trackPath);
+                 writeFileSync(join(trackPath, 'metadata.json'), JSON.stringify({
+                     track_id: trackId,
+                     type: 'feature',
+                     status: 'completed',
+                     created_at: '2026-01-19T00:00:00Z',
+                     updated_at: '2026-01-19T12:00:00Z',
+                     description: 'Completed'
+                 }, null, 2));
+                 writeFileSync(join(trackPath, 'plan.md'), '## Phase 1\n- [x] Task 1');
+             }
+             const incompletePath = join(tracksDir, incomplete1);
+             mkdirSync(incompletePath);
+             writeFileSync(join(incompletePath, 'metadata.json'), JSON.stringify({
+                 track_id: incomplete1,
+                 type: 'feature',
+                 status: 'in_progress',
+                 created_at: '2026-01-19T00:00:00Z',
+                 updated_at: '2026-01-19T12:00:00Z',
+                 description: 'In progress'
+             }, null, 2));
+             writeFileSync(join(incompletePath, 'plan.md'), '## Phase 1\n- [x] Task 1');
+             const tracksContent = `# Project Tracks
+
+ ---
+
+ - [x] **Track: Completed 1**
+   *Link: [./tracks/${completed1}/](./tracks/${completed1}/)*
+
+ ---
+
+ - [x] **Track: Completed 2**
+   *Link: [./tracks/${completed2}/](./tracks/${completed2}/)*
+
+ ---
+
+ - [~] **Track: Incomplete 1**
+   *Link: [./tracks/${incomplete1}/](./tracks/${incomplete1}/)*
+
+ ---
+ `;
+             writeFileSync(tracksFile, tracksContent);
+             // Run archival
+             const result = archiveCompletedTracks(testDir);
+             // Verify results
+             expect(result.archived).toHaveLength(2);
+             expect(result.archived).toContain(completed1);
+             expect(result.archived).toContain(completed2);
+             expect(result.skipped).toContain(incomplete1);
+             // Verify filesystem state
+             expect(existsSync(join(tracksDir, completed1))).toBe(false);
+             expect(existsSync(join(tracksDir, completed2))).toBe(false);
+             expect(existsSync(join(tracksDir, incomplete1))).toBe(true);
+             expect(existsSync(join(archiveDir, completed1))).toBe(true);
+             expect(existsSync(join(archiveDir, completed2))).toBe(true);
+         });
+         it('should handle tracks with missing metadata.json gracefully', () => {
+             const trackId = 'missing_metadata_001';
+             const trackPath = join(tracksDir, trackId);
+             mkdirSync(trackPath);
+             writeFileSync(join(trackPath, 'plan.md'), '## Phase 1\n- [x] Task 1');
+             const tracksContent = `# Project Tracks
+
+ ---
+
+ - [x] **Track: Missing Metadata**
+   *Link: [./tracks/${trackId}/](./tracks/${trackId}/)*
+
+ ---
+ `;
+             writeFileSync(tracksFile, tracksContent);
+             // Run archival
+             const result = archiveCompletedTracks(testDir);
+             // Should record an error
+             expect(result.errors).toHaveLength(1);
+             expect(result.errors[0].trackId).toBe(trackId);
+             // Track should still exist
+             expect(existsSync(trackPath)).toBe(true);
+         });
+     });
+ });
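The suite above pins down the public surface of the new archive-tracks module: archiveCompletedTracks(projectRoot) scans conductor-cdd/tracks/, archives only tracks whose metadata.json status is 'completed' and whose plan.md has no unchecked tasks, and returns an object with archived, skipped, and errors fields. A minimal caller sketch based only on that observed behaviour follows; the reportArchival wrapper and its log messages are illustrative, not part of the package.

// Sketch of a caller, assuming only the result shape exercised by the tests above.
// archiveCompletedTracks and its archived/skipped/errors fields come from the diff;
// the wrapper function and output strings are hypothetical.
import { archiveCompletedTracks } from './archive-tracks.js';

export function reportArchival(projectRoot: string): void {
    const result = archiveCompletedTracks(projectRoot);
    for (const trackId of result.archived) {
        // Track directory moved from conductor-cdd/tracks/ to conductor-cdd/archive/
        // and its entry removed from tracks.md.
        console.log(`archived: ${trackId}`);
    }
    for (const trackId of result.skipped) {
        // Metadata status is not 'completed' or plan.md still has unchecked tasks.
        console.log(`skipped: ${trackId}`);
    }
    for (const error of result.errors) {
        // e.g. a track directory without a readable metadata.json.
        console.warn(`failed to archive: ${error.trackId}`);
    }
}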
package/dist/utils/codebaseAnalysis.d.ts
@@ -0,0 +1,61 @@
+ /**
+  * Codebase Analysis Module
+  *
+  * Analyzes existing codebases to extract context for brownfield projects:
+  * - Ignore patterns from various ignore files
+  * - Dependency manifests with metadata
+  * - Architecture patterns (Monorepo, MVC, Microservices, etc.)
+  * - Programming languages and frameworks
+  *
+  * Based on reference implementations:
+  * - derekbar90/opencode-conductor
+  * - gemini-cli-extensions/conductor
+  */
+ export interface IgnorePattern {
+     pattern: string;
+     negated: boolean;
+ }
+ export interface ManifestInfo {
+     type: 'package.json' | 'pom.xml' | 'requirements.txt' | 'go.mod' | 'Cargo.toml' | 'Gemfile';
+     path: string;
+     metadata: Record<string, any>;
+     dependencies?: Record<string, string>;
+ }
+ export interface CodebaseAnalysis {
+     languages: Record<string, number>;
+     frameworks: {
+         frontend: string[];
+         backend: string[];
+     };
+     databases: string[];
+     architecture: string[];
+     manifests: ManifestInfo[];
+     projectGoal?: string;
+ }
+ /**
+  * Parse ignore files with precedence: .geminiignore > .ignore > .gitignore
+  */
+ export declare function parseIgnoreFiles(projectPath: string): IgnorePattern[];
+ /**
+  * Detect and parse dependency manifests
+  */
+ export declare function detectManifests(projectPath: string): ManifestInfo[];
+ /**
+  * Infer architecture patterns from project structure
+  */
+ export declare function inferArchitecture(projectPath: string): string[];
+ /**
+  * Detect programming languages by file extensions
+  */
+ export declare function detectLanguages(projectPath: string): Record<string, number>;
+ /**
+  * Detect frameworks from manifests
+  */
+ export declare function detectFrameworks(manifests: ManifestInfo[]): {
+     frontend: string[];
+     backend: string[];
+ };
+ /**
+  * Main function: Analyze entire codebase
+  */
+ export declare function analyzeCodebase(projectPath: string): CodebaseAnalysis;
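These declarations describe the analysis entry point added in this release: analyzeCodebase(projectPath) bundles detected languages, frameworks, databases, architecture guesses, and parsed manifests into one CodebaseAnalysis object. A short consumer sketch against just these type declarations follows; the summary formatting is illustrative, and treating the per-language number as a relative weight is an assumption (the .d.ts does not say whether it is a file count or a share).

// Illustrative consumer of the declared API; only analyzeCodebase and the
// CodebaseAnalysis shape come from the diff, everything else is assumed.
import { analyzeCodebase } from './codebaseAnalysis.js';

const analysis = analyzeCodebase(process.cwd());

// languages is Record<string, number>; the value is treated here as a relative
// weight used only for ordering.
const topLanguages = Object.entries(analysis.languages)
    .sort(([, a], [, b]) => b - a)
    .slice(0, 3)
    .map(([name]) => name);

console.log(`Languages:    ${topLanguages.join(', ') || 'none detected'}`);
console.log(`Frontend:     ${analysis.frameworks.frontend.join(', ') || 'none detected'}`);
console.log(`Backend:      ${analysis.frameworks.backend.join(', ') || 'none detected'}`);
console.log(`Databases:    ${analysis.databases.join(', ') || 'none'}`);
console.log(`Architecture: ${analysis.architecture.join(', ') || 'unknown'}`);
console.log(`Manifests:    ${analysis.manifests.map((m) => m.type).join(', ') || 'none'}`);
if (analysis.projectGoal) {
    console.log(`Goal:         ${analysis.projectGoal}`);
}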