prjct-cli 0.9.1 → 0.10.0
This diff shows the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only and reflects the changes between those versions.
- package/CHANGELOG.md +165 -0
- package/core/__tests__/agentic/agent-router.test.js +398 -0
- package/core/__tests__/agentic/context-filter.test.js +494 -0
- package/core/__tests__/agentic/prompt-builder.test.js +39 -47
- package/core/__tests__/domain/agent-generator.test.js +29 -36
- package/core/__tests__/domain/agent-loader.test.js +179 -0
- package/core/__tests__/domain/analyzer.test.js +324 -0
- package/core/__tests__/infrastructure/author-detector.test.js +103 -0
- package/core/__tests__/infrastructure/config-manager.test.js +454 -0
- package/core/__tests__/infrastructure/path-manager.test.js +412 -0
- package/core/__tests__/utils/jsonl-helper.test.js +387 -0
- package/core/agentic/agent-router.js +253 -186
- package/core/agentic/command-executor.js +61 -13
- package/core/agentic/context-filter.js +92 -88
- package/core/agentic/prompt-builder.js +51 -1
- package/core/commands.js +85 -59
- package/core/domain/agent-generator.js +77 -46
- package/core/domain/agent-loader.js +183 -0
- package/core/domain/agent-matcher.js +217 -0
- package/core/domain/agent-validator.js +217 -0
- package/core/domain/context-estimator.js +175 -0
- package/core/domain/product-standards.js +92 -0
- package/core/domain/smart-cache.js +157 -0
- package/core/domain/task-analyzer.js +353 -0
- package/core/domain/tech-detector.js +365 -0
- package/package.json +3 -2
package/core/__tests__/domain/agent-generator.test.js

@@ -6,13 +6,8 @@ import path from 'path'
 
 describe('Agent Generator', () => {
   const testProjectId = 'test-agent-gen-' + Date.now()
-
-
-
-  beforeEach(() => {
-    generator = new AgentGenerator(testProjectId)
-    agentsDir = path.join(os.homedir(), '.prjct-cli', 'projects', testProjectId, 'agents')
-  })
+  const agentsDir = path.join(os.homedir(), '.prjct-cli', 'projects', testProjectId, 'agents')
+  const generator = new AgentGenerator(testProjectId)
 
   afterEach(async () => {
     // Cleanup test files
@@ -66,13 +61,10 @@ describe('Agent Generator', () => {
 
       const content = await fs.readFile(path.join(agentsDir, 'backend-agent.md'), 'utf-8')
 
-      expect(content).toContain('#
-      expect(content).toContain('
-      expect(content).toContain('
-
-      expect(content).toContain('Node.js, Express, PostgreSQL')
-      expect(content).toContain('## Responsibilities')
-      expect(content).toContain('API development and database management')
+      expect(content).toContain('# AGENT: BACKEND-AGENT')
+      expect(content).toContain('Role: Backend Developer')
+      expect(content).toContain('## META-INSTRUCTION')
+      // Expertise and Responsibilities are now part of the context or analysis instructions
     })
 
     it('should include project context in agent file', async () => {
@@ -88,7 +80,7 @@ describe('Agent Generator', () => {
 
       const content = await fs.readFile(path.join(agentsDir, 'context-agent.md'), 'utf-8')
 
-      expect(content).toContain('##
+      expect(content).toContain('## PROJECT CONTEXT')
       expect(content).toContain('framework')
       expect(content).toContain('React')
       expect(content).toContain('version')
@@ -102,10 +94,10 @@ describe('Agent Generator', () => {
 
       const content = await fs.readFile(path.join(agentsDir, 'minimal-agent.md'), 'utf-8')
 
-      expect(content).toContain('#
-      expect(content).toContain('
-      expect(content).toContain('
-      expect(content).toContain('No
+      expect(content).toContain('# AGENT: MINIMAL-AGENT')
+      expect(content).toContain('## META-INSTRUCTION')
+      expect(content).toContain('ANALYZE the provided PROJECT CONTEXT')
+      expect(content).toContain('No specific project context provided')
     })
 
     it('should create output directory if not exists', async () => {
@@ -149,7 +141,7 @@ describe('Agent Generator', () => {
 
       const content = await fs.readFile(path.join(agentsDir, 'fallback-agent.md'), 'utf-8')
 
-      expect(content).toContain('#
+      expect(content).toContain('# AGENT: FALLBACK-AGENT')
     })
   })
 
@@ -251,17 +243,17 @@ describe('Agent Generator', () => {
       await generator.generateDynamicAgent('remove-me', { role: 'Remove' })
 
       // Verify they exist
-
-      expect(
+      const initialAgents = await generator.listAgents()
+      expect(initialAgents).toHaveLength(2)
 
       // Cleanup obsolete
       const removed = await generator.cleanupObsoleteAgents(['keep-me'])
       expect(removed).toContain('remove-me')
 
       // Verify cleanup
-
-      expect(
-      expect(
+      const finalAgents = await generator.listAgents()
+      expect(finalAgents).toHaveLength(1)
+      expect(finalAgents).toContain('keep-me')
     })
 
     it('should handle agent file content correctly', async () => {
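
The rewritten cleanup test above implies the `AgentGenerator` maintenance flow sketched below. The sketch is reconstructed from the assertions only: the method names, argument shapes, and return values come from the test, while the import path and export style are assumptions.

```js
// Illustrative only - reconstructed from the test assertions, not from the
// package's actual implementation.
import AgentGenerator from './core/domain/agent-generator.js' // path/export style assumed

const generator = new AgentGenerator('my-project-id')

// generateDynamicAgent(name, config) writes <name>.md under the project's agents dir.
await generator.generateDynamicAgent('keep-me', { role: 'Keep' })
await generator.generateDynamicAgent('remove-me', { role: 'Remove' })

// listAgents() appears to return the agent names currently on disk.
console.log(await generator.listAgents()) // e.g. ['keep-me', 'remove-me']

// cleanupObsoleteAgents(activeNames) deletes everything not in the list
// and returns the names it removed.
const removed = await generator.cleanupObsoleteAgents(['keep-me'])
console.log(removed) // ['remove-me']
console.log(await generator.listAgents()) // ['keep-me']
```
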
@@ -278,19 +270,20 @@ describe('Agent Generator', () => {
       const content = await fs.readFile(path.join(agentsDir, 'full-agent.md'), 'utf-8')
 
       // Should have all sections
-      expect(content).toContain('#
-      expect(content).toContain('
-      expect(content).toContain('##
-      expect(content).toContain('##
-      expect(content).toContain('##
-      expect(content).toContain('##
-
-
-
-      expect(content).toContain('React, Node.js, PostgreSQL, Docker')
-      expect(content).toContain('Build and deploy full stack applications')
+      expect(content).toContain('# AGENT: FULL-AGENT')
+      expect(content).toContain('Role: Full Stack Developer')
+      expect(content).toContain('## META-INSTRUCTION')
+      expect(content).toContain('## DOMAIN AUTHORITY')
+      expect(content).toContain('## DYNAMIC STANDARDS')
+      expect(content).toContain('## ORCHESTRATION PROTOCOL')
+      expect(content).toContain('## PROJECT CONTEXT')
+
+      // Should have context content
       expect(content).toContain('MERN')
       expect(content).toContain('AWS')
+
+      // Should NOT have hardcoded tech lists anymore
+      // expect(content).toContain('React, Node.js, PostgreSQL, Docker') // This is no longer explicitly listed in EXPERTISE section as that section is gone
     })
   })
 })
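
The updated assertions across these hunks describe the new agent-file layout: a `# AGENT: <NAME>` heading, a `Role:` line, and `## META-INSTRUCTION`, `## DOMAIN AUTHORITY`, `## DYNAMIC STANDARDS`, `## ORCHESTRATION PROTOCOL` and `## PROJECT CONTEXT` sections, with a `No specific project context provided` fallback. A rough sketch of such a file, built only from the strings the tests check for (section order and all other wording are assumptions):

```js
// Hypothetical template assembled from the quoted assertion strings; the real
// generator in core/domain/agent-generator.js may order or word this differently.
const renderAgentFile = (name, role, projectContext) => `# AGENT: ${name.toUpperCase()}
Role: ${role}

## META-INSTRUCTION
ANALYZE the provided PROJECT CONTEXT before acting on a task.

## DOMAIN AUTHORITY
...

## DYNAMIC STANDARDS
...

## ORCHESTRATION PROTOCOL
...

## PROJECT CONTEXT
${projectContext || 'No specific project context provided'}
`
```
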
package/core/__tests__/domain/agent-loader.test.js (new file)

@@ -0,0 +1,179 @@
+/**
+ * Tests for AgentLoader
+ * Verifies that agents are loaded correctly from project files
+ */
+
+const fs = require('fs').promises
+const path = require('path')
+const os = require('os')
+const { describe, it, expect, beforeEach, afterEach } = require('vitest')
+const AgentLoader = require('../../domain/agent-loader')
+
+describe('AgentLoader', () => {
+  let testProjectId
+  let testAgentsDir
+  let loader
+
+  beforeEach(async () => {
+    // Create unique test project ID
+    testProjectId = `test-${Date.now()}`
+    testAgentsDir = path.join(os.homedir(), '.prjct-cli', 'projects', testProjectId, 'agents')
+    await fs.mkdir(testAgentsDir, { recursive: true })
+    loader = new AgentLoader(testProjectId)
+  })
+
+  afterEach(async () => {
+    // Cleanup: Remove test agents directory
+    try {
+      await fs.rm(testAgentsDir, { recursive: true, force: true })
+    } catch (error) {
+      // Ignore cleanup errors
+    }
+  })
+
+  describe('loadAgent', () => {
+    it('should load an existing agent from file', async () => {
+      // Create test agent file
+      const agentName = 'frontend-specialist'
+      const agentContent = `# AGENT: FRONTEND-SPECIALIST
+Role: Frontend Development Specialist
+
+## META-INSTRUCTION
+You are a frontend specialist.
+
+## DOMAIN AUTHORITY
+You are the owner of the frontend domain.
+`
+
+      const agentPath = path.join(testAgentsDir, `${agentName}.md`)
+      await fs.writeFile(agentPath, agentContent, 'utf-8')
+
+      // Load agent
+      const agent = await loader.loadAgent(agentName)
+
+      // Verify
+      expect(agent).not.toBeNull()
+      expect(agent.name).toBe(agentName)
+      expect(agent.content).toBe(agentContent)
+      expect(agent.role).toBe('Frontend Development Specialist')
+      expect(agent.domain).toBe('frontend')
+    })
+
+    it('should return null for non-existent agent', async () => {
+      const agent = await loader.loadAgent('non-existent-agent')
+      expect(agent).toBeNull()
+    })
+
+    it('should cache loaded agents', async () => {
+      // Create test agent
+      const agentName = 'backend-specialist'
+      const agentPath = path.join(testAgentsDir, `${agentName}.md`)
+      await fs.writeFile(agentPath, '# AGENT: BACKEND-SPECIALIST\nRole: Backend Specialist', 'utf-8')
+
+      // Load twice
+      const agent1 = await loader.loadAgent(agentName)
+      const agent2 = await loader.loadAgent(agentName)
+
+      // Should be same object (cached)
+      expect(agent1).toBe(agent2)
+    })
+
+    it('should extract skills from agent content', async () => {
+      const agentName = 'react-specialist'
+      const agentContent = `# AGENT: REACT-SPECIALIST
+Role: React Development Specialist
+
+This agent specializes in React, TypeScript, and Next.js.
+`
+
+      const agentPath = path.join(testAgentsDir, `${agentName}.md`)
+      await fs.writeFile(agentPath, agentContent, 'utf-8')
+
+      const agent = await loader.loadAgent(agentName)
+
+      expect(agent.skills).toContain('React')
+      expect(agent.skills).toContain('TypeScript')
+      expect(agent.skills).toContain('Next.js')
+    })
+  })
+
+  describe('loadAllAgents', () => {
+    it('should load all agents in the directory', async () => {
+      // Create multiple agent files
+      const agents = [
+        { name: 'frontend-specialist', content: '# AGENT: FRONTEND-SPECIALIST\nRole: Frontend' },
+        { name: 'backend-specialist', content: '# AGENT: BACKEND-SPECIALIST\nRole: Backend' },
+        { name: 'qa-specialist', content: '# AGENT: QA-SPECIALIST\nRole: QA' }
+      ]
+
+      for (const agent of agents) {
+        const agentPath = path.join(testAgentsDir, `${agent.name}.md`)
+        await fs.writeFile(agentPath, agent.content, 'utf-8')
+      }
+
+      // Load all
+      const loadedAgents = await loader.loadAllAgents()
+
+      expect(loadedAgents).toHaveLength(3)
+      expect(loadedAgents.map(a => a.name)).toContain('frontend-specialist')
+      expect(loadedAgents.map(a => a.name)).toContain('backend-specialist')
+      expect(loadedAgents.map(a => a.name)).toContain('qa-specialist')
+    })
+
+    it('should return empty array if no agents exist', async () => {
+      const agents = await loader.loadAllAgents()
+      expect(agents).toEqual([])
+    })
+
+    it('should ignore non-markdown files', async () => {
+      // Create agent file and non-agent file
+      const agentPath = path.join(testAgentsDir, 'frontend-specialist.md')
+      await fs.writeFile(agentPath, '# AGENT', 'utf-8')
+
+      const otherFile = path.join(testAgentsDir, 'config.json')
+      await fs.writeFile(otherFile, '{}', 'utf-8')
+
+      const agents = await loader.loadAllAgents()
+
+      expect(agents).toHaveLength(1)
+      expect(agents[0].name).toBe('frontend-specialist')
+    })
+  })
+
+  describe('agentExists', () => {
+    it('should return true for existing agent', async () => {
+      const agentName = 'test-agent'
+      const agentPath = path.join(testAgentsDir, `${agentName}.md`)
+      await fs.writeFile(agentPath, '# AGENT', 'utf-8')
+
+      const exists = await loader.agentExists(agentName)
+      expect(exists).toBe(true)
+    })
+
+    it('should return false for non-existent agent', async () => {
+      const exists = await loader.agentExists('non-existent')
+      expect(exists).toBe(false)
+    })
+  })
+
+  describe('clearCache', () => {
+    it('should clear the agent cache', async () => {
+      const agentName = 'test-agent'
+      const agentPath = path.join(testAgentsDir, `${agentName}.md`)
+      await fs.writeFile(agentPath, '# AGENT', 'utf-8')
+
+      // Load and cache
+      const agent1 = await loader.loadAgent(agentName)
+      expect(agent1).not.toBeNull()
+
+      // Clear cache
+      loader.clearCache()
+
+      // Load again - should still work but be new object
+      const agent2 = await loader.loadAgent(agentName)
+      expect(agent2).not.toBeNull()
+      // Note: In real usage, they might be same due to file system, but cache is cleared
+    })
+  })
+})
+
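
This new test file pins down the `AgentLoader` contract: it is constructed with a project id, reads `*.md` files from `~/.prjct-cli/projects/<id>/agents/`, returns `null` for missing agents, caches loaded agents by name, and exposes `loadAllAgents()`, `agentExists()` and `clearCache()`. A minimal sketch consistent with those tests (not the module shipped in the package; role/domain parsing is simplified and skill extraction is omitted):

```js
// Minimal sketch consistent with the tests above; core/domain/agent-loader.js
// in the package may differ (skill extraction is omitted here).
const fs = require('fs').promises
const path = require('path')
const os = require('os')

class AgentLoader {
  constructor(projectId) {
    this.agentsDir = path.join(os.homedir(), '.prjct-cli', 'projects', projectId, 'agents')
    this.cache = new Map()
  }

  async loadAgent(name) {
    if (this.cache.has(name)) return this.cache.get(name)
    let content
    try {
      content = await fs.readFile(path.join(this.agentsDir, `${name}.md`), 'utf-8')
    } catch {
      return null // non-existent agent
    }
    // First "Role:" line, if any; domain guessed from the name prefix.
    const role = (content.match(/^Role:\s*(.+)$/m) || [])[1] || null
    const agent = { name, content, role, domain: name.split('-')[0], skills: [] }
    this.cache.set(name, agent)
    return agent
  }

  async loadAllAgents() {
    let files
    try {
      files = await fs.readdir(this.agentsDir)
    } catch {
      return []
    }
    const agents = []
    for (const file of files.filter(f => f.endsWith('.md'))) {
      agents.push(await this.loadAgent(file.replace(/\.md$/, '')))
    }
    return agents
  }

  async agentExists(name) {
    return (await this.loadAgent(name)) !== null
  }

  clearCache() {
    this.cache.clear()
  }
}

module.exports = AgentLoader
```
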
package/core/__tests__/domain/analyzer.test.js (new file)

@@ -0,0 +1,324 @@
+import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest'
+import { createRequire } from 'module'
+import path from 'path'
+import fs from 'fs/promises'
+import os from 'os'
+
+const require = createRequire(import.meta.url)
+
+describe('Codebase Analyzer', () => {
+  let analyzer
+  let testProjectPath
+  let tempDir
+
+  beforeEach(async () => {
+    analyzer = require('../../domain/analyzer.js')
+
+    // Create temporary test directory
+    tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'prjct-test-'))
+    testProjectPath = tempDir
+
+    analyzer.init(testProjectPath)
+  })
+
+  afterEach(async () => {
+    if (tempDir) {
+      try {
+        await fs.rm(tempDir, { recursive: true, force: true })
+      } catch (error) {
+        // Ignore cleanup errors
+      }
+    }
+  })
+
+  describe('init()', () => {
+    it('should initialize with project path', () => {
+      analyzer.init('/test/path')
+      expect(analyzer.projectPath).toBe('/test/path')
+    })
+
+    it('should use current directory if no path provided', () => {
+      analyzer.init()
+      expect(analyzer.projectPath).toBeDefined()
+    })
+  })
+
+  describe('readPackageJson()', () => {
+    it('should read package.json when it exists', async () => {
+      const packageJson = {
+        name: 'test-project',
+        version: '1.0.0',
+        dependencies: { express: '^4.18.0' }
+      }
+      await fs.writeFile(
+        path.join(testProjectPath, 'package.json'),
+        JSON.stringify(packageJson, null, 2)
+      )
+
+      const result = await analyzer.readPackageJson()
+
+      expect(result).toBeDefined()
+      expect(result.name).toBe('test-project')
+      expect(result.dependencies.express).toBeDefined()
+    })
+
+    it('should return null when package.json does not exist', async () => {
+      const result = await analyzer.readPackageJson()
+
+      expect(result).toBeNull()
+    })
+
+    it('should return null for invalid JSON', async () => {
+      await fs.writeFile(path.join(testProjectPath, 'package.json'), 'invalid json{')
+
+      const result = await analyzer.readPackageJson()
+
+      expect(result).toBeNull()
+    })
+  })
+
+  describe('readCargoToml()', () => {
+    it('should read Cargo.toml when it exists', async () => {
+      const cargoContent = '[package]\nname = "test-project"\nversion = "0.1.0"'
+      await fs.writeFile(path.join(testProjectPath, 'Cargo.toml'), cargoContent)
+
+      const result = await analyzer.readCargoToml()
+
+      expect(result).toBeDefined()
+      expect(result).toContain('test-project')
+    })
+
+    it('should return null when Cargo.toml does not exist', async () => {
+      const result = await analyzer.readCargoToml()
+
+      expect(result).toBeNull()
+    })
+  })
+
+  describe('readRequirements()', () => {
+    it('should read requirements.txt when it exists', async () => {
+      const requirements = 'flask==2.0.0\nrequests==2.28.0'
+      await fs.writeFile(path.join(testProjectPath, 'requirements.txt'), requirements)
+
+      const result = await analyzer.readRequirements()
+
+      expect(result).toBeDefined()
+      expect(result).toContain('flask')
+    })
+
+    it('should return null when requirements.txt does not exist', async () => {
+      const result = await analyzer.readRequirements()
+
+      expect(result).toBeNull()
+    })
+  })
+
+  describe('readGoMod()', () => {
+    it('should read go.mod when it exists', async () => {
+      const goMod = 'module test-project\n\ngo 1.19'
+      await fs.writeFile(path.join(testProjectPath, 'go.mod'), goMod)
+
+      const result = await analyzer.readGoMod()
+
+      expect(result).toBeDefined()
+      expect(result).toContain('test-project')
+    })
+
+    it('should return null when go.mod does not exist', async () => {
+      const result = await analyzer.readGoMod()
+
+      expect(result).toBeNull()
+    })
+  })
+
+  describe('listDirectories()', () => {
+    it('should list directories in project root', async () => {
+      await fs.mkdir(path.join(testProjectPath, 'src'), { recursive: true })
+      await fs.mkdir(path.join(testProjectPath, 'tests'), { recursive: true })
+      await fs.writeFile(path.join(testProjectPath, 'file.txt'), 'content')
+
+      const directories = await analyzer.listDirectories()
+
+      expect(directories).toContain('src')
+      expect(directories).toContain('tests')
+      expect(directories).not.toContain('file.txt')
+    })
+
+    it('should exclude hidden directories', async () => {
+      await fs.mkdir(path.join(testProjectPath, '.git'), { recursive: true })
+      await fs.mkdir(path.join(testProjectPath, 'src'), { recursive: true })
+
+      const directories = await analyzer.listDirectories()
+
+      expect(directories).not.toContain('.git')
+      expect(directories).toContain('src')
+    })
+
+    it('should exclude node_modules', async () => {
+      await fs.mkdir(path.join(testProjectPath, 'node_modules'), { recursive: true })
+      await fs.mkdir(path.join(testProjectPath, 'src'), { recursive: true })
+
+      const directories = await analyzer.listDirectories()
+
+      expect(directories).not.toContain('node_modules')
+      expect(directories).toContain('src')
+    })
+
+    it('should return empty array for non-existent directory', async () => {
+      analyzer.init('/nonexistent/path')
+
+      const directories = await analyzer.listDirectories()
+
+      expect(directories).toEqual([])
+    })
+  })
+
+  describe('fileExists()', () => {
+    it('should return true for existing file', async () => {
+      await fs.writeFile(path.join(testProjectPath, 'test.txt'), 'content')
+
+      const exists = await analyzer.fileExists('test.txt')
+
+      expect(exists).toBe(true)
+    })
+
+    it('should return false for non-existent file', async () => {
+      const exists = await analyzer.fileExists('nonexistent.txt')
+
+      expect(exists).toBe(false)
+    })
+  })
+
+  describe('readFile()', () => {
+    it('should read file content', async () => {
+      const content = 'file content here'
+      await fs.writeFile(path.join(testProjectPath, 'test.txt'), content)
+
+      const result = await analyzer.readFile('test.txt')
+
+      expect(result).toBe(content)
+    })
+
+    it('should return null for non-existent file', async () => {
+      const result = await analyzer.readFile('nonexistent.txt')
+
+      expect(result).toBeNull()
+    })
+
+    it('should read nested files', async () => {
+      await fs.mkdir(path.join(testProjectPath, 'src'), { recursive: true })
+      await fs.writeFile(path.join(testProjectPath, 'src', 'app.js'), 'console.log("test")')
+
+      const result = await analyzer.readFile('src/app.js')
+
+      expect(result).toBe('console.log("test")')
+    })
+  })
+
+  describe('getGitLog()', () => {
+    it('should return git log when git repo exists', async () => {
+      // Initialize git repo for testing
+      try {
+        const { exec } = require('child_process')
+        const { promisify } = require('util')
+        const execAsync = promisify(exec)
+
+        await execAsync('git init', { cwd: testProjectPath })
+        await execAsync('git config user.email "test@test.com"', { cwd: testProjectPath })
+        await execAsync('git config user.name "Test User"', { cwd: testProjectPath })
+        await fs.writeFile(path.join(testProjectPath, 'test.txt'), 'content')
+        await execAsync('git add test.txt', { cwd: testProjectPath })
+        await execAsync('git commit -m "Initial commit"', { cwd: testProjectPath })
+
+        const log = await analyzer.getGitLog(10)
+
+        expect(typeof log).toBe('string')
+      } catch (error) {
+        // Git might not be available, skip test
+        expect(true).toBe(true)
+      }
+    })
+
+    it('should return empty string when git repo does not exist', async () => {
+      const log = await analyzer.getGitLog()
+
+      expect(log).toBe('')
+    })
+  })
+
+  describe('getGitStats()', () => {
+    it('should return git statistics when git repo exists', async () => {
+      try {
+        const { exec } = require('child_process')
+        const { promisify } = require('util')
+        const execAsync = promisify(exec)
+
+        await execAsync('git init', { cwd: testProjectPath })
+        await execAsync('git config user.email "test@test.com"', { cwd: testProjectPath })
+        await execAsync('git config user.name "Test User"', { cwd: testProjectPath })
+        await fs.writeFile(path.join(testProjectPath, 'test.txt'), 'content')
+        await execAsync('git add test.txt', { cwd: testProjectPath })
+        await execAsync('git commit -m "Initial commit"', { cwd: testProjectPath })
+
+        const stats = await analyzer.getGitStats()
+
+        expect(stats).toBeDefined()
+        expect(stats.totalCommits).toBeGreaterThanOrEqual(0)
+        expect(stats.contributors).toBeGreaterThanOrEqual(0)
+      } catch (error) {
+        // Git might not be available, skip test
+        expect(true).toBe(true)
+      }
+    })
+
+    it('should return default stats when git repo does not exist', async () => {
+      const stats = await analyzer.getGitStats()
+
+      expect(stats).toBeDefined()
+      expect(stats.totalCommits).toBe(0)
+      expect(stats.contributors).toBe(0)
+      expect(stats.age).toBe('unknown')
+    })
+  })
+
+  describe('countFiles()', () => {
+    it('should count files in project', async () => {
+      await fs.writeFile(path.join(testProjectPath, 'file1.txt'), 'content')
+      await fs.writeFile(path.join(testProjectPath, 'file2.txt'), 'content')
+      await fs.mkdir(path.join(testProjectPath, 'src'), { recursive: true })
+      await fs.writeFile(path.join(testProjectPath, 'src', 'app.js'), 'content')
+
+      const count = await analyzer.countFiles()
+
+      expect(count).toBeGreaterThan(0)
+    })
+
+    it('should return 0 for empty directory', async () => {
+      const count = await analyzer.countFiles()
+
+      // May have some files, so just check it's a number
+      expect(typeof count).toBe('number')
+    })
+  })
+
+  describe('findFiles()', () => {
+    it('should find files matching pattern', async () => {
+      await fs.writeFile(path.join(testProjectPath, 'app.js'), 'content')
+      await fs.writeFile(path.join(testProjectPath, 'test.js'), 'content')
+      await fs.mkdir(path.join(testProjectPath, 'src'), { recursive: true })
+      await fs.writeFile(path.join(testProjectPath, 'src', 'app.js'), 'content')
+
+      const files = await analyzer.findFiles('app.js')
+
+      expect(files.length).toBeGreaterThan(0)
+      expect(files.some(f => f.includes('app.js'))).toBe(true)
+    })
+
+    it('should return empty array when no files match', async () => {
+      const files = await analyzer.findFiles('nonexistent-pattern-xyz')
+
+      expect(files).toEqual([])
+    })
+  })
+})
+
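
Read together, the analyzer tests describe a module-level singleton with an `init(projectPath)` entry point and helpers that return `null`, `''`, `[]`, or default objects instead of throwing. A usage sketch based only on those assertions (the require path is an assumption, and the commented return values are examples, not guaranteed output):

```js
// Usage sketch derived from the tests above; the require path is assumed.
const analyzer = require('prjct-cli/core/domain/analyzer.js')

async function inspectProject(projectPath) {
  analyzer.init(projectPath) // falls back to the current directory when omitted

  const pkg = await analyzer.readPackageJson()    // parsed object, or null if missing/invalid JSON
  const cargo = await analyzer.readCargoToml()    // raw Cargo.toml text, or null
  const reqs = await analyzer.readRequirements()  // raw requirements.txt text, or null
  const goMod = await analyzer.readGoMod()        // raw go.mod text, or null

  const dirs = await analyzer.listDirectories()   // top-level dirs, excluding hidden dirs and node_modules
  const hasApp = await analyzer.fileExists('src/app.js')  // boolean
  const appSource = await analyzer.readFile('src/app.js') // file text, or null

  const gitLog = await analyzer.getGitLog(10)     // recent log as a string; '' outside a git repo
  const gitStats = await analyzer.getGitStats()   // { totalCommits: 0, contributors: 0, age: 'unknown' } outside a repo
  const fileCount = await analyzer.countFiles()   // number of files in the project
  const appFiles = await analyzer.findFiles('app.js') // matching paths; [] when nothing matches

  return { pkg, cargo, reqs, goMod, dirs, hasApp, appSource, gitLog, gitStats, fileCount, appFiles }
}
```
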
|