claude-cli-advanced-starter-pack 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/OVERVIEW.md +597 -0
- package/README.md +439 -0
- package/bin/gtask.js +282 -0
- package/bin/postinstall.js +53 -0
- package/package.json +69 -0
- package/src/agents/phase-dev-templates.js +1011 -0
- package/src/agents/templates.js +668 -0
- package/src/analysis/checklist-parser.js +414 -0
- package/src/analysis/codebase.js +481 -0
- package/src/cli/menu.js +958 -0
- package/src/commands/claude-audit.js +1482 -0
- package/src/commands/claude-settings.js +2243 -0
- package/src/commands/create-agent.js +681 -0
- package/src/commands/create-command.js +337 -0
- package/src/commands/create-hook.js +262 -0
- package/src/commands/create-phase-dev/codebase-analyzer.js +813 -0
- package/src/commands/create-phase-dev/documentation-generator.js +352 -0
- package/src/commands/create-phase-dev/post-completion.js +404 -0
- package/src/commands/create-phase-dev/scale-calculator.js +344 -0
- package/src/commands/create-phase-dev/wizard.js +492 -0
- package/src/commands/create-phase-dev.js +481 -0
- package/src/commands/create-skill.js +313 -0
- package/src/commands/create.js +446 -0
- package/src/commands/decompose.js +392 -0
- package/src/commands/detect-tech-stack.js +768 -0
- package/src/commands/explore-mcp/claude-md-updater.js +252 -0
- package/src/commands/explore-mcp/mcp-installer.js +346 -0
- package/src/commands/explore-mcp/mcp-registry.js +438 -0
- package/src/commands/explore-mcp.js +638 -0
- package/src/commands/gtask-init.js +641 -0
- package/src/commands/help.js +128 -0
- package/src/commands/init.js +1890 -0
- package/src/commands/install.js +250 -0
- package/src/commands/list.js +116 -0
- package/src/commands/roadmap.js +750 -0
- package/src/commands/setup-wizard.js +482 -0
- package/src/commands/setup.js +351 -0
- package/src/commands/sync.js +534 -0
- package/src/commands/test-run.js +456 -0
- package/src/commands/test-setup.js +456 -0
- package/src/commands/validate.js +67 -0
- package/src/config/tech-stack.defaults.json +182 -0
- package/src/config/tech-stack.schema.json +502 -0
- package/src/github/client.js +359 -0
- package/src/index.js +84 -0
- package/src/templates/claude-command.js +244 -0
- package/src/templates/issue-body.js +284 -0
- package/src/testing/config.js +411 -0
- package/src/utils/template-engine.js +398 -0
- package/src/utils/validate-templates.js +223 -0
- package/src/utils.js +396 -0
- package/templates/commands/ccasp-setup.template.md +113 -0
- package/templates/commands/context-audit.template.md +97 -0
- package/templates/commands/create-task-list.template.md +382 -0
- package/templates/commands/deploy-full.template.md +261 -0
- package/templates/commands/github-task-start.template.md +99 -0
- package/templates/commands/github-update.template.md +69 -0
- package/templates/commands/happy-start.template.md +117 -0
- package/templates/commands/phase-track.template.md +142 -0
- package/templates/commands/tunnel-start.template.md +127 -0
- package/templates/commands/tunnel-stop.template.md +106 -0
- package/templates/hooks/context-guardian.template.js +173 -0
- package/templates/hooks/deployment-orchestrator.template.js +219 -0
- package/templates/hooks/github-progress-hook.template.js +197 -0
- package/templates/hooks/happy-checkpoint-manager.template.js +222 -0
- package/templates/hooks/phase-dev-enforcer.template.js +183 -0
package/src/commands/create-phase-dev/documentation-generator.js
@@ -0,0 +1,352 @@
/**
 * Documentation Generator
 *
 * Generates all phased development documentation:
 * - PROGRESS.json
 * - EXECUTIVE_SUMMARY.md
 * - API specifications
 * - RAG execution agent
 * - Interactive command
 * - Enforcement hooks
 */

import chalk from 'chalk';
import ora from 'ora';
import { existsSync, mkdirSync, writeFileSync } from 'fs';
import { join, dirname } from 'path';
import {
  generateProgressJson,
  generateExecutiveSummary,
  generateMiddlewareSpec,
  generateApiEndpoints,
  generateDatabaseSchema,
  generateDeploymentConfig,
  generatePhaseExecutorAgent,
  generatePhaseDevCommand,
  generateTestDefinitions,
  generatePhaseDevEnforcerHook,
} from '../../agents/phase-dev-templates.js';

/**
 * Generate all documentation for a phased development plan
 *
 * @param {Object} config - Project configuration
 * @param {Array} enhancements - Enabled enhancements
 * @returns {Object} Generation results
 */
export async function generatePhaseDevDocumentation(config, enhancements = []) {
  const spinner = ora('Generating documentation...').start();
  const results = { files: [], errors: [] };

  const { projectSlug } = config;
  const cwd = process.cwd();

  // Define output directories
  const docsDir = join(cwd, '.claude', 'docs', projectSlug);
  const agentsDir = join(cwd, '.claude', 'agents');
  const commandsDir = join(cwd, '.claude', 'commands');
  const hooksDir = join(cwd, '.claude', 'hooks', 'tools');

  // Ensure directories exist
  [docsDir, agentsDir, commandsDir, hooksDir].forEach((dir) => {
    if (!existsSync(dir)) {
      mkdirSync(dir, { recursive: true });
    }
  });

  try {
    // 1. Generate PROGRESS.json
    spinner.text = 'Creating PROGRESS.json...';
    const progressPath = join(docsDir, 'PROGRESS.json');
    const progressContent = generateProgressJson({
      ...config,
      enhancements,
    });
    writeFileSync(progressPath, progressContent, 'utf8');
    results.files.push({ name: 'PROGRESS.json', path: progressPath });

    // 2. Generate EXECUTIVE_SUMMARY.md
    spinner.text = 'Creating EXECUTIVE_SUMMARY.md...';
    const summaryPath = join(docsDir, 'EXECUTIVE_SUMMARY.md');
    const summaryContent = generateExecutiveSummary(config);
    writeFileSync(summaryPath, summaryContent, 'utf8');
    results.files.push({ name: 'EXECUTIVE_SUMMARY.md', path: summaryPath });

    // 3. Generate API specifications (if backend required)
    if (config.architecture?.backend !== 'none') {
      spinner.text = 'Creating API specifications...';

      // MIDDLEWARE_SPEC.md
      const middlewarePath = join(docsDir, 'MIDDLEWARE_SPEC.md');
      writeFileSync(middlewarePath, generateMiddlewareSpec(config), 'utf8');
      results.files.push({ name: 'MIDDLEWARE_SPEC.md', path: middlewarePath });

      // API_ENDPOINTS.md
      const apiPath = join(docsDir, 'API_ENDPOINTS.md');
      writeFileSync(apiPath, generateApiEndpoints(config), 'utf8');
      results.files.push({ name: 'API_ENDPOINTS.md', path: apiPath });

      // DATABASE_SCHEMA.md
      const schemaPath = join(docsDir, 'DATABASE_SCHEMA.md');
      writeFileSync(schemaPath, generateDatabaseSchema(config), 'utf8');
      results.files.push({ name: 'DATABASE_SCHEMA.md', path: schemaPath });

      // DEPLOYMENT_CONFIG.md
      const deployPath = join(docsDir, 'DEPLOYMENT_CONFIG.md');
      writeFileSync(deployPath, generateDeploymentConfig(config), 'utf8');
      results.files.push({ name: 'DEPLOYMENT_CONFIG.md', path: deployPath });
    }

    // 4. Generate RAG Phase Executor Agent
    spinner.text = 'Creating phase executor agent...';
    const agentPath = join(agentsDir, `${projectSlug}-phase-executor-agent.md`);
    const agentContent = generatePhaseExecutorAgent(config);
    writeFileSync(agentPath, agentContent, 'utf8');
    results.files.push({
      name: `${projectSlug}-phase-executor-agent.md`,
      path: agentPath,
    });

    // 5. Generate Interactive Slash Command
    spinner.text = 'Creating slash command...';
    const commandPath = join(commandsDir, `phase-dev-${projectSlug}.md`);
    const commandContent = generatePhaseDevCommand(config);
    writeFileSync(commandPath, commandContent, 'utf8');
    results.files.push({
      name: `phase-dev-${projectSlug}.md`,
      path: commandPath,
    });

    // 6. Generate Test Definitions (if testing enhancement enabled)
    if (enhancements.includes('testing')) {
      spinner.text = 'Creating test definitions...';
      const testPath = join(docsDir, 'TEST_DEFINITIONS.json');
      const testContent = generateTestDefinitions(config);
      writeFileSync(testPath, testContent, 'utf8');
      results.files.push({ name: 'TEST_DEFINITIONS.json', path: testPath });
    }

    // 7. Generate Enforcement Hook (if hooks enhancement enabled)
    if (enhancements.includes('hooks')) {
      spinner.text = 'Creating enforcement hook...';
      const hookPath = join(hooksDir, `${projectSlug}-enforcer.js`);
      const hookContent = generatePhaseDevEnforcerHook(config);
      writeFileSync(hookPath, hookContent, 'utf8');
      results.files.push({
        name: `${projectSlug}-enforcer.js`,
        path: hookPath,
      });
    }

    spinner.succeed(`Generated ${results.files.length} files`);
  } catch (error) {
    spinner.fail('Error generating documentation');
    results.errors.push(error.message);
    throw error;
  }

  return results;
}
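
/*
 * Usage sketch (editor's example, not part of the published module). It assumes
 * only the fields the function above actually reads (projectSlug and the
 * optional architecture.backend flag); any other config fields are passed
 * straight through to the template generators. Run from an ESM script at the
 * project root:
 *
 *   import {
 *     generatePhaseDevDocumentation,
 *     displayGenerationResults,
 *   } from './src/commands/create-phase-dev/documentation-generator.js';
 *
 *   const results = await generatePhaseDevDocumentation(
 *     { projectSlug: 'my-app', architecture: { backend: 'none' } },
 *     ['testing', 'hooks']
 *   );
 *   displayGenerationResults(results); // lists the files written under .claude/
 */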

/**
 * Display generation results
 */
export function displayGenerationResults(results) {
  console.log('');
  console.log(chalk.green.bold('📁 Generated Files:'));
  console.log('');

  results.files.forEach((file) => {
    const relativePath = file.path.replace(process.cwd(), '.');
    console.log(`  ${chalk.green('✓')} ${file.name}`);
    console.log(`    ${chalk.dim(relativePath)}`);
  });

  if (results.errors.length > 0) {
    console.log('');
    console.log(chalk.red.bold('⚠️ Errors:'));
    results.errors.forEach((err) => {
      console.log(`  ${chalk.red('✗')} ${err}`);
    });
  }

  console.log('');
}

/**
 * Generate backend configuration based on architecture
 * Adapts to user's detected/specified stack - no hardcoded defaults
 */
export function generateBackendConfig(architecture) {
  if (!architecture?.backend || architecture.backend === 'none') {
    return {};
  }

  // Use detected/specified deployment and database, or null if not specified
  const deploymentPlatform = architecture.deployment?.platform || null;
  const databaseType = architecture.database?.type || null;

  const config = {
    middleware: ['Authentication', 'Rate Limiting', 'Error Handling', 'CORS'],
    apiEndpoints: [],
    databaseTables: [],
    websocketEvents: [],
    deployment: {
      platform: deploymentPlatform,
      database: databaseType,
    },
  };

  // Add auth endpoints if needed
  if (architecture.needsAuth) {
    config.apiEndpoints.push(
      {
        method: 'POST',
        path: '/auth/login',
        description: 'User login',
        auth: 'None',
        request: { email: 'string', password: 'string' },
        response: { token: 'string', user: 'object' },
      },
      {
        method: 'POST',
        path: '/auth/logout',
        description: 'User logout',
        auth: 'Required',
        request: {},
        response: { success: true },
      },
      {
        method: 'GET',
        path: '/auth/me',
        description: 'Get current user',
        auth: 'Required',
        request: {},
        response: { user: 'object' },
      }
    );

    // Generate database-appropriate column types
    const idType = getIdType(databaseType);
    const timestampType = getTimestampType(databaseType);
    const timestampDefault = getTimestampDefault(databaseType);

    config.databaseTables.push({
      name: 'users',
      purpose: 'User accounts and authentication',
      columns: [
        { name: 'id', type: idType, constraints: 'PRIMARY KEY' },
        { name: 'email', type: 'VARCHAR(255)', constraints: 'UNIQUE NOT NULL' },
        { name: 'password_hash', type: 'VARCHAR(255)', constraints: 'NOT NULL' },
        { name: 'created_at', type: timestampType, constraints: timestampDefault },
        { name: 'updated_at', type: timestampType, constraints: timestampDefault },
      ],
      indexes: databaseType === 'mongodb' ? [] : ['CREATE UNIQUE INDEX idx_users_email ON users(email)'],
      relationships: [],
    });
  }

  // Add WebSocket events if needed
  if (architecture.needsRealtime) {
    config.websocketEvents.push(
      { name: 'connect', description: 'Client connected' },
      { name: 'disconnect', description: 'Client disconnected' },
      { name: 'update', description: 'Data update broadcast' }
    );
  }

  return config;
}
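
/*
 * Shape sketch (editor's example with hypothetical values, not part of the
 * published module): for an architecture that needs auth and realtime on a
 * PostgreSQL-style database, generateBackendConfig would return roughly:
 *
 *   const backend = generateBackendConfig({
 *     backend: 'express',
 *     needsAuth: true,
 *     needsRealtime: true,
 *     database: { type: 'postgres' },
 *     deployment: { platform: 'railway' },
 *   });
 *   // backend.apiEndpoints    -> POST /auth/login, POST /auth/logout, GET /auth/me
 *   // backend.databaseTables  -> one 'users' table (UUID id, TIMESTAMP columns)
 *   // backend.websocketEvents -> connect, disconnect, update
 *   // backend.deployment      -> { platform: 'railway', database: 'postgres' }
 */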

/**
 * Get appropriate ID type for database
 */
function getIdType(databaseType) {
  switch (databaseType) {
    case 'mongodb':
      return 'ObjectId';
    case 'mysql':
    case 'mariadb':
      return 'INT AUTO_INCREMENT';
    case 'sqlite':
      return 'INTEGER';
    default:
      // PostgreSQL and others
      return 'UUID';
  }
}

/**
 * Get appropriate timestamp type for database
 */
function getTimestampType(databaseType) {
  switch (databaseType) {
    case 'mongodb':
      return 'Date';
    case 'mysql':
    case 'mariadb':
      return 'DATETIME';
    case 'sqlite':
      return 'TEXT';
    default:
      return 'TIMESTAMP';
  }
}

/**
 * Get appropriate timestamp default for database
 */
function getTimestampDefault(databaseType) {
  switch (databaseType) {
    case 'mongodb':
      return '';
    case 'mysql':
    case 'mariadb':
      return 'DEFAULT CURRENT_TIMESTAMP';
    case 'sqlite':
      return "DEFAULT (datetime('now'))";
    default:
      return 'DEFAULT NOW()';
  }
}
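
/*
 * Mapping check (editor's note): these helpers are module-private, so the calls
 * below would only run inside this file.
 *
 *   getIdType('mysql');            // 'INT AUTO_INCREMENT'
 *   getIdType('postgres');         // 'UUID' (default branch)
 *   getTimestampType('sqlite');    // 'TEXT'
 *   getTimestampDefault('sqlite'); // "DEFAULT (datetime('now'))"
 */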

/**
 * Create a git checkpoint before generation
 */
export async function createGitCheckpoint(projectSlug) {
  const spinner = ora('Creating git checkpoint...').start();

  try {
    const { execSync } = await import('child_process');

    // Check if in git repo
    try {
      execSync('git rev-parse --git-dir', { stdio: 'ignore' });
    } catch {
      spinner.info('Not a git repository, skipping checkpoint');
      return null;
    }

    // Create checkpoint commit if there are changes
    const status = execSync('git status --porcelain', {
      encoding: 'utf8',
    }).trim();

    if (status) {
      // Stash any uncommitted changes
      const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
      const stashName = `gtask-phase-dev-${projectSlug}-${timestamp}`;

      execSync(`git stash push -m "${stashName}"`, { stdio: 'ignore' });
      spinner.succeed(`Created checkpoint: ${stashName}`);

      return stashName;
    } else {
      spinner.info('No uncommitted changes to checkpoint');
      return null;
    }
  } catch (error) {
    spinner.warn(`Checkpoint skipped: ${error.message}`);
    return null;
  }
}
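
/*
 * End-to-end sketch (editor's example, not part of the published module):
 * checkpoint the working tree, generate the docs, then report. The project
 * slug and enhancement list here are hypothetical.
 *
 *   const stashName = await createGitCheckpoint('my-app'); // null outside a git repo
 *   const results = await generatePhaseDevDocumentation(config, ['hooks']);
 *   displayGenerationResults(results);
 *   // If generation needs to be undone, the stashed working tree can be
 *   // restored with: git stash pop
 */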