ai-sprint-kit 1.3.1 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +35 -123
- package/README.md +39 -207
- package/bin/ai-sprint.js +105 -0
- package/lib/auth.js +73 -0
- package/lib/installer.js +59 -195
- package/lib/messages.js +53 -0
- package/package.json +15 -18
- package/bin/cli.js +0 -135
- package/lib/scanner.js +0 -321
- package/templates/.claude/.env.example +0 -13
- package/templates/.claude/agents/debugger.md +0 -668
- package/templates/.claude/agents/devops.md +0 -728
- package/templates/.claude/agents/docs.md +0 -662
- package/templates/.claude/agents/implementer.md +0 -288
- package/templates/.claude/agents/planner.md +0 -273
- package/templates/.claude/agents/researcher.md +0 -454
- package/templates/.claude/agents/reviewer.md +0 -644
- package/templates/.claude/agents/security.md +0 -203
- package/templates/.claude/agents/tester.md +0 -647
- package/templates/.claude/commands/ai-sprint-auto.md +0 -150
- package/templates/.claude/commands/ai-sprint-code.md +0 -316
- package/templates/.claude/commands/ai-sprint-debug.md +0 -453
- package/templates/.claude/commands/ai-sprint-deploy.md +0 -475
- package/templates/.claude/commands/ai-sprint-docs.md +0 -519
- package/templates/.claude/commands/ai-sprint-plan.md +0 -136
- package/templates/.claude/commands/ai-sprint-review.md +0 -433
- package/templates/.claude/commands/ai-sprint-scan.md +0 -146
- package/templates/.claude/commands/ai-sprint-secure.md +0 -88
- package/templates/.claude/commands/ai-sprint-test.md +0 -352
- package/templates/.claude/commands/ai-sprint-validate.md +0 -253
- package/templates/.claude/settings.json +0 -27
- package/templates/.claude/skills/codebase-context/SKILL.md +0 -68
- package/templates/.claude/skills/codebase-context/references/reading-context.md +0 -68
- package/templates/.claude/skills/codebase-context/references/refresh-triggers.md +0 -82
- package/templates/.claude/skills/implementation/SKILL.md +0 -70
- package/templates/.claude/skills/implementation/references/error-handling.md +0 -106
- package/templates/.claude/skills/implementation/references/security-patterns.md +0 -73
- package/templates/.claude/skills/implementation/references/validation-patterns.md +0 -107
- package/templates/.claude/skills/memory/SKILL.md +0 -67
- package/templates/.claude/skills/memory/references/decisions-format.md +0 -68
- package/templates/.claude/skills/memory/references/learning-format.md +0 -74
- package/templates/.claude/skills/planning/SKILL.md +0 -72
- package/templates/.claude/skills/planning/references/plan-templates.md +0 -81
- package/templates/.claude/skills/planning/references/research-phase.md +0 -62
- package/templates/.claude/skills/planning/references/solution-design.md +0 -66
- package/templates/.claude/skills/quality-assurance/SKILL.md +0 -79
- package/templates/.claude/skills/quality-assurance/references/review-checklist.md +0 -72
- package/templates/.claude/skills/quality-assurance/references/security-checklist.md +0 -70
- package/templates/.claude/skills/quality-assurance/references/testing-strategy.md +0 -85
- package/templates/.claude/skills/quality-assurance/scripts/check-size.py +0 -333
- package/templates/.claude/statusline.sh +0 -126
- package/templates/.claude/workflows/development-rules.md +0 -133
- package/templates/.claude/workflows/orchestration-protocol.md +0 -194
- package/templates/.mcp.json.example +0 -36
- package/templates/CLAUDE.md +0 -412
- package/templates/README.md +0 -331
- package/templates/ai_context/codebase/.gitkeep +0 -0
- package/templates/ai_context/memory/active.md +0 -15
- package/templates/ai_context/memory/decisions.md +0 -18
- package/templates/ai_context/memory/learning.md +0 -22
- package/templates/ai_context/plans/.gitkeep +0 -0
- package/templates/ai_context/reports/.gitkeep +0 -0
- package/templates/docs/user-guide-th.md +0 -454
- package/templates/docs/user-guide.md +0 -595
package/lib/scanner.js
DELETED
@@ -1,321 +0,0 @@
-const fs = require('fs-extra');
-const path = require('path');
-const ora = require('ora');
-const chalk = require('chalk');
-const execa = require('execa');
-
-// Common source code directories to detect
-const SOURCE_DIRS = ['src', 'app', 'lib', 'pages', 'components', 'packages', 'modules'];
-const SOURCE_EXTENSIONS = ['.js', '.ts', '.jsx', '.tsx', '.py', '.go', '.rs', '.java', '.rb', '.php'];
-
-/**
- * Check if target directory contains source code
- * @param {string} targetDir - Directory to check
- * @returns {Promise<boolean>} - True if source code detected
- */
-async function detectSourceCode(targetDir) {
-  // Check for common source directories
-  for (const dir of SOURCE_DIRS) {
-    const dirPath = path.join(targetDir, dir);
-    if (await fs.pathExists(dirPath)) {
-      const stats = await fs.stat(dirPath);
-      if (stats.isDirectory()) {
-        return true;
-      }
-    }
-  }
-
-  // Check for source files in root
-  try {
-    const files = await fs.readdir(targetDir);
-    for (const file of files) {
-      const ext = path.extname(file).toLowerCase();
-      if (SOURCE_EXTENSIONS.includes(ext)) {
-        return true;
-      }
-    }
-  } catch (error) {
-    // Ignore read errors
-  }
-
-  return false;
-}
-
-/**
- * Check if repomix is available
- * @returns {Promise<{available: boolean, command: string}>}
- */
-async function checkRepomix() {
-  // Check global installation
-  try {
-    await execa('repomix', ['--version']);
-    return { available: true, command: 'repomix' };
-  } catch (error) {
-    // Not globally installed, will use npx
-  }
-
-  // Check if npx is available
-  try {
-    await execa('npx', ['--version']);
-    return { available: true, command: 'npx repomix' };
-  } catch (error) {
-    return { available: false, command: null };
-  }
-}
-
-/**
- * Run repomix scan on target directory
- */
-async function runRepomixScan(targetDir, outputDir, options = {}) {
-  const { useNpx = false } = options;
-  await fs.ensureDir(outputDir);
-
-  const xmlOutput = path.join(outputDir, 'repomix-output.xml');
-  const mdOutput = path.join(outputDir, 'overview.md');
-
-  await runRepomixCommand(targetDir, xmlOutput, 'xml', useNpx);
-  await runRepomixCommand(targetDir, mdOutput, 'markdown', useNpx);
-
-  return parseRepomixStats(xmlOutput);
-}
-
-/**
- * Execute repomix command with specified style
- */
-async function runRepomixCommand(targetDir, outputPath, style, useNpx) {
-  const baseCmd = useNpx ? 'npx' : 'repomix';
-  const baseArgs = useNpx ? ['repomix'] : [];
-
-  const args = [...baseArgs, '--compress', '--style', style, '-o', outputPath];
-
-  await execa(baseCmd, args, {
-    cwd: targetDir,
-    timeout: 300000
-  });
-}
-
-/**
- * Parse repomix XML output for statistics
- */
-async function parseRepomixStats(xmlPath) {
-  const stats = { totalFiles: 0, totalTokens: 0, compressedTokens: 0 };
-
-  try {
-    const xmlContent = await fs.readFile(xmlPath, 'utf-8');
-    const fileMatches = xmlContent.match(/<file path="/g);
-    if (fileMatches) {
-      stats.totalFiles = fileMatches.length;
-    }
-  } catch (error) {
-    // Stats extraction failed, continue with defaults
-  }
-
-  return stats;
-}
-
-/**
- * Generate directory structure file
- * @param {string} targetDir - Directory to scan
- * @param {string} outputDir - Output directory
- */
-async function generateStructure(targetDir, outputDir) {
-  const structureFile = path.join(outputDir, 'structure.md');
-  let content = '# Project Structure\n\n```\n';
-
-  try {
-    // Try using tree command
-    const { stdout } = await execa('tree', [
-      '-I', 'node_modules|.git|.venv|__pycache__|dist|build|.next|coverage',
-      '-L', '4',
-      '--noreport'
-    ], {
-      cwd: targetDir,
-      timeout: 30000
-    });
-    content += stdout;
-  } catch (error) {
-    // Fallback: simple directory listing
-    content += await generateSimpleTree(targetDir, '', 0, 4);
-  }
-
-  content += '\n```\n';
-  await fs.writeFile(structureFile, content);
-}
-
-/**
- * Simple tree generator fallback
- */
-async function generateSimpleTree(dir, prefix, depth, maxDepth) {
-  if (depth >= maxDepth) return '';
-
-  const ignoreDirs = ['node_modules', '.git', '.venv', '__pycache__', 'dist', 'build', '.next', 'coverage'];
-  let result = '';
-
-  try {
-    const entries = await fs.readdir(dir, { withFileTypes: true });
-    const filtered = entries.filter(e => !ignoreDirs.includes(e.name) && !e.name.startsWith('.'));
-
-    for (let i = 0; i < filtered.length; i++) {
-      const entry = filtered[i];
-      const isLast = i === filtered.length - 1;
-      const connector = isLast ? '└── ' : '├── ';
-      const newPrefix = prefix + (isLast ? '    ' : '│   ');
-
-      result += prefix + connector + entry.name + '\n';
-
-      if (entry.isDirectory()) {
-        result += await generateSimpleTree(
-          path.join(dir, entry.name),
-          newPrefix,
-          depth + 1,
-          maxDepth
-        );
-      }
-    }
-  } catch (error) {
-    // Ignore errors
-  }
-
-  return result;
-}
-
-/**
- * Create default .repomixignore file
- * @param {string} outputDir - Output directory
- */
-async function createRepomixIgnore(outputDir) {
-  const ignorePath = path.join(outputDir, '.repomixignore');
-
-  if (await fs.pathExists(ignorePath)) {
-    return; // Don't overwrite existing
-  }
-
-  const content = `# AI Sprint default ignore patterns
-node_modules/
-.git/
-dist/
-build/
-.next/
-.venv/
-__pycache__/
-*.pyc
-.env*
-*.log
-coverage/
-.nyc_output/
-*.min.js
-*.min.css
-package-lock.json
-yarn.lock
-pnpm-lock.yaml
-`;
-
-  await fs.writeFile(ignorePath, content);
-}
-
-/**
- * Write scan metadata
- * @param {string} outputDir - Output directory
- * @param {object} stats - Scan statistics
- */
-async function writeMetadata(outputDir, stats) {
-  const metadataPath = path.join(outputDir, 'scan-metadata.json');
-
-  const metadata = {
-    scanDate: new Date().toISOString(),
-    scanDuration: stats.duration || 0,
-    totalFiles: stats.totalFiles || 0,
-    totalTokens: stats.totalTokens || 0,
-    compressedTokens: stats.compressedTokens || 0,
-    compressionRatio: stats.totalTokens > 0
-      ? Math.round((1 - stats.compressedTokens / stats.totalTokens) * 100) / 100
-      : 0,
-    securityIssues: stats.securityIssues || 0
-  };
-
-  await fs.writeFile(metadataPath, JSON.stringify(metadata, null, 2));
-  return metadata;
-}
-
-/**
- * Main entry point for codebase scanning
- */
-async function scanCodebase(targetDir, options = {}) {
-  const { silent = false } = options;
-  const outputDir = path.join(targetDir, 'ai_context', 'codebase');
-
-  // Pre-flight checks
-  const preflight = await runPreflightChecks(targetDir, silent);
-  if (preflight.skipped) return preflight;
-
-  const spinner = silent ? null : ora('Scanning codebase...').start();
-
-  try {
-    const result = await executeScan(targetDir, outputDir, preflight.command, spinner);
-    if (spinner) spinner.succeed(`Codebase scanned (${result.stats.totalFiles} files, ${result.stats.duration}s)`);
-    return result;
-  } catch (error) {
-    return handleScanError(error, spinner, silent);
-  }
-}
-
-/**
- * Run pre-flight checks before scanning
- */
-async function runPreflightChecks(targetDir, silent) {
-  const hasSource = await detectSourceCode(targetDir);
-  if (!hasSource) {
-    if (!silent) console.log(chalk.gray('  No source code detected. Skipping scan.'));
-    return { skipped: true, reason: 'no-source' };
-  }
-
-  const { available, command } = await checkRepomix();
-  if (!available) {
-    if (!silent) {
-      console.log(chalk.yellow('  ⚠️  Repomix not available. Skipping scan.'));
-      console.log(chalk.gray('  Install with: npm install -g repomix'));
-    }
-    return { skipped: true, reason: 'no-repomix' };
-  }
-
-  return { skipped: false, command };
-}
-
-/**
- * Execute the actual scan operation
- */
-async function executeScan(targetDir, outputDir, command, spinner) {
-  const startTime = Date.now();
-
-  await fs.ensureDir(outputDir);
-  await createRepomixIgnore(outputDir);
-
-  const useNpx = command.includes('npx');
-  const stats = await runRepomixScan(targetDir, outputDir, { useNpx });
-
-  if (spinner) spinner.text = 'Generating structure...';
-  await generateStructure(targetDir, outputDir);
-
-  stats.duration = Math.round((Date.now() - startTime) / 1000 * 10) / 10;
-  const metadata = await writeMetadata(outputDir, stats);
-
-  return { success: true, outputDir, stats: metadata };
-}
-
-/**
- * Handle scan errors
- */
-function handleScanError(error, spinner, silent) {
-  if (spinner) spinner.fail('Codebase scan failed');
-  if (!silent) {
-    console.log(chalk.yellow(`  ⚠️  ${error.message}`));
-    console.log(chalk.gray('  Run /scan manually after fixing the issue.'));
-  }
-  return { success: false, error: error.message };
-}
-
-module.exports = {
-  scanCodebase,
-  detectSourceCode,
-  checkRepomix
-};
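For context on what 2.0.0 drops, here is a minimal, hypothetical sketch of how the removed scanner module's exports could have been driven. The old bin/cli.js that actually called it is deleted in this same release, so the call site below is an assumption inferred only from the module's own exports and return shapes:

```js
// Hypothetical driver for the deleted lib/scanner.js (CommonJS) -- not the
// package's actual CLI, which is also removed in this release.
const { scanCodebase, checkRepomix } = require('./lib/scanner');

(async () => {
  // checkRepomix() reports whether `repomix` or `npx repomix` is usable.
  const { available, command } = await checkRepomix();
  console.log(available ? `Scanner command: ${command}` : 'repomix unavailable');

  // scanCodebase() writes repomix output, structure.md and scan-metadata.json
  // under <targetDir>/ai_context/codebase/ and resolves to one of:
  //   { skipped: true, reason: 'no-source' | 'no-repomix' }
  //   { success: true, outputDir, stats }   // stats = scan-metadata.json contents
  //   { success: false, error }
  const result = await scanCodebase(process.cwd(), { silent: false });
  if (result.success) {
    console.log(`Scanned ${result.stats.totalFiles} files into ${result.outputDir}`);
  }
})();
```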
package/templates/.claude/.env.example
DELETED

@@ -1,13 +0,0 @@
-# AI Sprint Framework - Environment Variables
-
-# Security Scanning (Optional)
-# Get tokens from: https://snyk.io, https://semgrep.dev
-SNYK_TOKEN=
-SEMGREP_APP_TOKEN=
-
-# Claude API (Optional - only if using custom models)
-# Get from: https://console.anthropic.com
-ANTHROPIC_API_KEY=
-
-# Project Settings
-NODE_ENV=development