@iservu-inc/adf-cli 0.3.0 → 0.3.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.project/chats/{current → complete}/2025-10-03_AGENTS-MD-AND-TOOL-GENERATORS.md +82 -17
- package/.project/chats/current/2025-10-03_AI-PROVIDER-INTEGRATION.md +569 -0
- package/.project/chats/current/2025-10-03_FRAMEWORK-UPDATE-SYSTEM.md +497 -0
- package/.project/chats/current/SESSION-STATUS.md +127 -0
- package/.project/docs/AI-PROVIDER-INTEGRATION.md +600 -0
- package/.project/docs/FRAMEWORK-UPDATE-INTEGRATION.md +421 -0
- package/.project/docs/FRAMEWORK-UPDATE-SYSTEM.md +832 -0
- package/.project/docs/PROJECT-STRUCTURE-EXPLANATION.md +500 -0
- package/.project/docs/architecture/SYSTEM-DESIGN.md +122 -1
- package/.project/docs/goals/PROJECT-VISION.md +33 -28
- package/CHANGELOG.md +148 -0
- package/README.md +100 -11
- package/bin/adf.js +7 -0
- package/lib/ai/ai-client.js +328 -0
- package/lib/ai/ai-config.js +398 -0
- package/lib/commands/config.js +154 -0
- package/lib/commands/init.js +56 -10
- package/lib/frameworks/interviewer.js +89 -11
- package/lib/frameworks/progress-tracker.js +8 -1
- package/package.json +15 -3
|
@@ -0,0 +1,398 @@
|
|
|
1
|
+
const inquirer = require('inquirer');
|
|
2
|
+
const autocomplete = require('inquirer-autocomplete-prompt');
|
|
3
|
+
const chalk = require('chalk');
|
|
4
|
+
const fs = require('fs-extra');
|
|
5
|
+
const path = require('path');
|
|
6
|
+
const os = require('os');
|
|
7
|
+
|
|
8
|
+
// Register autocomplete prompt
|
|
9
|
+
inquirer.registerPrompt('autocomplete', autocomplete);
|
|
10
|
+
|
|
11
|
+
/**
|
|
12
|
+
* AI Provider Configuration
|
|
13
|
+
* Handles setup and validation of AI providers for the interview system
|
|
14
|
+
* - Saves API keys to .env file for persistence
|
|
15
|
+
* - Fetches available models dynamically from providers
|
|
16
|
+
* - Autocomplete model selection with filtering
|
|
17
|
+
*/
|
|
18
|
+
|
|
19
|
+
/**
 * Registry of supported AI providers and their configuration metadata.
 *
 * Each entry describes:
 *  - id:             internal provider identifier (used in switch dispatch)
 *  - name:           human-readable display label
 *  - envVar:         environment variable the API key is read from / saved to
 *  - requiredFormat: expected key prefix for validation ('' = no prefix check)
 *  - website/setup:  user-facing help for obtaining a key
 *  - defaultModels:  fallback model list used when the provider has no
 *                    model-listing API or the fetch fails
 */
const AI_PROVIDERS = {
  ANTHROPIC: {
    id: 'anthropic',
    name: 'Anthropic Claude',
    envVar: 'ANTHROPIC_API_KEY',
    requiredFormat: 'sk-ant-',
    website: 'https://console.anthropic.com/',
    setup: 'Get your API key from https://console.anthropic.com/',
    defaultModels: ['claude-sonnet-4-5-20250929', 'claude-3-5-sonnet-20241022', 'claude-3-opus-20240229']
  },
  OPENAI: {
    id: 'openai',
    name: 'OpenAI GPT',
    envVar: 'OPENAI_API_KEY',
    requiredFormat: 'sk-',
    website: 'https://platform.openai.com/',
    setup: 'Get your API key from https://platform.openai.com/api-keys',
    defaultModels: ['gpt-4-turbo', 'gpt-4o', 'gpt-4', 'gpt-3.5-turbo']
  },
  GOOGLE: {
    id: 'google',
    name: 'Google Gemini',
    envVar: 'GOOGLE_API_KEY',
    requiredFormat: '', // Google keys don't have consistent prefix
    website: 'https://ai.google.dev/',
    setup: 'Get your API key from https://aistudio.google.com/app/apikey',
    defaultModels: ['gemini-2.0-flash-exp', 'gemini-1.5-pro', 'gemini-1.5-flash']
  },
  OPENROUTER: {
    id: 'openrouter',
    name: 'OpenRouter (Multi-Model)',
    envVar: 'OPENROUTER_API_KEY',
    requiredFormat: 'sk-or-',
    website: 'https://openrouter.ai/',
    setup: 'Get your API key from https://openrouter.ai/keys',
    defaultModels: [
      'anthropic/claude-sonnet-4-5',
      'openai/gpt-4-turbo',
      'google/gemini-pro-1.5',
      'meta-llama/llama-3.1-70b-instruct'
    ]
  }
};
|
|
62
|
+
|
|
63
|
+
/**
|
|
64
|
+
* Get or create .env file path
|
|
65
|
+
*/
|
|
66
|
+
/**
 * Resolve the location of the project's .env file (kept under .adf/ so it
 * lives alongside the framework's other state).
 *
 * @param {string} projectPath - Project root directory.
 * @returns {string} Path to <projectPath>/.adf/.env
 */
function getEnvFilePath(projectPath) {
  return path.join(projectPath, '.adf', '.env');
}
|
|
70
|
+
|
|
71
|
+
/**
|
|
72
|
+
* Load existing .env file
|
|
73
|
+
*/
|
|
74
|
+
/**
 * Parse a .env file into a plain key/value object.
 *
 * Blank lines and '#' comments are skipped. Values keep everything after the
 * first '='. Surrounding quotes are stripped only when they form a matched
 * pair — the previous regex (/^["']|["']$/g) also removed *mismatched*
 * quotes (e.g. `"abc'` became `abc`), silently corrupting such values.
 *
 * @param {string} envPath - Path to the .env file.
 * @returns {Promise<Object<string, string>>} Parsed variables, or {} when the
 *   file does not exist.
 */
async function loadEnvFile(envPath) {
  if (!(await fs.pathExists(envPath))) {
    return {};
  }

  const content = await fs.readFile(envPath, 'utf-8');
  const env = {};

  for (const line of content.split('\n')) {
    const trimmed = line.trim();

    // Skip blanks and comment lines
    if (!trimmed || trimmed.startsWith('#')) {
      continue;
    }

    const eq = trimmed.indexOf('=');
    // Require a non-empty key and a '=' separator (eq === 0 means empty key)
    if (eq <= 0) {
      continue;
    }

    const key = trimmed.slice(0, eq).trim();
    let value = trimmed.slice(eq + 1).trim();

    // Strip surrounding quotes only when they are a matched pair
    if (
      value.length >= 2 &&
      ((value.startsWith('"') && value.endsWith('"')) ||
        (value.startsWith("'") && value.endsWith("'")))
    ) {
      value = value.slice(1, -1);
    }

    env[key] = value;
  }

  return env;
}
|
|
91
|
+
|
|
92
|
+
/**
|
|
93
|
+
* Save API key to .env file
|
|
94
|
+
*/
|
|
95
|
+
/**
 * Persist a single key/value into the .env file, preserving every other
 * entry already present. The whole file is rewritten with a standard header.
 *
 * @param {string} envPath - Path to the .env file.
 * @param {string} key - Variable name (e.g. 'ANTHROPIC_API_KEY').
 * @param {string} value - Variable value; written wrapped in double quotes.
 * @returns {Promise<void>}
 */
async function saveToEnvFile(envPath, key, value) {
  // Merge the new entry into whatever the file already contains
  const env = await loadEnvFile(envPath);
  env[key] = value;

  const header = [
    '# AI Provider API Keys for adf-cli',
    '# DO NOT commit this file to version control',
    ''
  ];
  const entries = Object.entries(env).map(([k, v]) => `${k}="${v}"`);

  await fs.ensureDir(path.dirname(envPath));
  await fs.writeFile(envPath, header.concat(entries).join('\n'), 'utf-8');
}
|
|
112
|
+
|
|
113
|
+
/**
|
|
114
|
+
* Load .env file into process.env
|
|
115
|
+
*/
|
|
116
|
+
/**
 * Load variables from a .env file into process.env using dotenv.
 * dotenv is required lazily so the module loads even when the dependency is
 * only needed on this path.
 *
 * @param {string} envPath - Path to the .env file to load.
 */
function loadEnvIntoProcess(envPath) {
  const dotenv = require('dotenv');
  dotenv.config({ path: envPath });
}
|
|
119
|
+
|
|
120
|
+
/**
|
|
121
|
+
* Fetch available models from provider API
|
|
122
|
+
*/
|
|
123
|
+
/**
 * Fetch the list of available model ids for a provider.
 *
 * Falls back to provider.defaultModels when the provider has no public
 * model-listing API, the listing comes back empty, or the request fails.
 * Never throws — failures degrade to the default list with a warning.
 *
 * Fixes vs. original: case bodies with lexical `const` declarations are now
 * braced (no-case-declarations hazard), and the OpenRouter response is
 * checked for HTTP success before parsing its body.
 *
 * @param {object} provider - An entry from AI_PROVIDERS.
 * @param {string} apiKey - API key used to authenticate the list request.
 * @returns {Promise<string[]>} Sorted model ids, or the provider defaults.
 */
async function fetchAvailableModels(provider, apiKey) {
  const ora = require('ora');
  const spinner = ora('Fetching available models...').start();

  try {
    switch (provider.id) {
      case 'anthropic':
        // Anthropic doesn't have a models list API, use defaults
        spinner.succeed('Using known Anthropic models');
        return provider.defaultModels;

      case 'openai': {
        const OpenAI = require('openai');
        const openai = new OpenAI({ apiKey });
        const response = await openai.models.list();
        const gptModels = response.data
          .filter(m => m.id.startsWith('gpt-'))
          .map(m => m.id)
          .sort();
        spinner.succeed(`Found ${gptModels.length} OpenAI models`);
        return gptModels.length > 0 ? gptModels : provider.defaultModels;
      }

      case 'google':
        // Google doesn't have a public models list API, use defaults
        spinner.succeed('Using known Google Gemini models');
        return provider.defaultModels;

      case 'openrouter': {
        const fetch = require('node-fetch');
        const orResponse = await fetch('https://openrouter.ai/api/v1/models', {
          headers: {
            'Authorization': `Bearer ${apiKey}`
          }
        });
        // Fail fast on HTTP errors instead of parsing an error payload
        if (!orResponse.ok) {
          throw new Error(`OpenRouter responded with HTTP ${orResponse.status}`);
        }
        const orData = await orResponse.json();
        // Guard against a missing `data` array in an unexpected payload
        const orModels = (orData.data || []).map(m => m.id).sort();
        spinner.succeed(`Found ${orModels.length} OpenRouter models`);
        return orModels.length > 0 ? orModels : provider.defaultModels;
      }

      default:
        spinner.warn('Model fetching not supported, using defaults');
        return provider.defaultModels;
    }
  } catch (error) {
    spinner.fail(`Failed to fetch models: ${error.message}`);
    console.log(chalk.yellow(' Using default model list\n'));
    return provider.defaultModels;
  }
}
|
|
172
|
+
|
|
173
|
+
/**
|
|
174
|
+
* Prompt user to select and configure AI provider
|
|
175
|
+
*/
|
|
176
|
+
/**
 * Interactively configure an AI provider for the interview system.
 *
 * Flow: detect existing API keys (process.env or .adf/.env) → let the user
 * pick a provider → prompt for a key if missing (persisted to .env and
 * mirrored into process.env) → select a model via autocomplete → optionally
 * test the connection with AIClient.
 *
 * @param {string} [projectPath=process.cwd()] - Project root; the .env file
 *   lives under <root>/.adf/.env.
 * @returns {Promise<object>} Config object:
 *   { provider, providerName, model, apiKey, envVar, envPath }
 *   NOTE(review): if the connection test fails and the user declines a retry,
 *   this function calls process.exit(1) and never returns; on retry it
 *   recurses into itself.
 */
async function configureAIProvider(projectPath = process.cwd()) {
  console.log(chalk.cyan.bold('\n🤖 AI Provider Configuration\n'));
  console.log(chalk.gray('This interview requires an AI assistant to analyze your answers and provide insights.\n'));

  const envPath = getEnvFilePath(projectPath);

  // Load existing .env file
  const existingEnv = await loadEnvFile(envPath);

  // Check for existing API keys (from process.env or .env file)
  const availableProviders = [];
  for (const [key, provider] of Object.entries(AI_PROVIDERS)) {
    const apiKey = process.env[provider.envVar] || existingEnv[provider.envVar];
    if (apiKey) {
      availableProviders.push(provider);
    }
  }

  // Show the user which providers are already usable before the picker
  if (availableProviders.length > 0) {
    console.log(chalk.green('✓ Detected API keys for:'));
    availableProviders.forEach(p => {
      console.log(chalk.gray(` • ${p.name} (${p.envVar})`));
    });
    console.log('');
  }

  // Provider selection — each entry carries a "✓ Configured" badge when a
  // key was detected for it above
  const providerChoices = [
    {
      name: `${AI_PROVIDERS.ANTHROPIC.name} ${availableProviders.find(p => p.id === 'anthropic') ? chalk.green('✓ Configured') : ''}`,
      value: 'anthropic',
      short: 'Anthropic'
    },
    {
      name: `${AI_PROVIDERS.OPENAI.name} ${availableProviders.find(p => p.id === 'openai') ? chalk.green('✓ Configured') : ''}`,
      value: 'openai',
      short: 'OpenAI'
    },
    {
      name: `${AI_PROVIDERS.GOOGLE.name} ${availableProviders.find(p => p.id === 'google') ? chalk.green('✓ Configured') : ''}`,
      value: 'google',
      short: 'Google'
    },
    {
      name: `${AI_PROVIDERS.OPENROUTER.name} ${availableProviders.find(p => p.id === 'openrouter') ? chalk.green('✓ Configured') : ''}`,
      value: 'openrouter',
      short: 'OpenRouter'
    }
  ];

  const { provider } = await inquirer.prompt([
    {
      type: 'list',
      name: 'provider',
      message: 'Select AI provider:',
      choices: providerChoices
    }
  ]);

  // The prompt returns the lowercase id; AI_PROVIDERS is keyed in uppercase
  const selectedProvider = AI_PROVIDERS[provider.toUpperCase()];

  // Check if API key exists
  let apiKey = process.env[selectedProvider.envVar] || existingEnv[selectedProvider.envVar];

  if (!apiKey) {
    console.log(chalk.yellow(`\n⚠️ ${selectedProvider.envVar} not found\n`));
    console.log(chalk.gray(`Setup instructions:`));
    console.log(chalk.gray(` ${selectedProvider.setup}\n`));

    const { manualKey } = await inquirer.prompt([
      {
        type: 'password',
        name: 'manualKey',
        message: `Enter your ${selectedProvider.name} API key:`,
        mask: '*',
        validate: (input) => {
          if (!input || input.trim().length === 0) {
            return 'API key cannot be empty';
          }
          // Prefix check only applies to providers with a known key format
          if (selectedProvider.requiredFormat && !input.startsWith(selectedProvider.requiredFormat)) {
            return `API key should start with "${selectedProvider.requiredFormat}"`;
          }
          return true;
        }
      }
    ]);

    apiKey = manualKey.trim();

    // Save to .env file
    await saveToEnvFile(envPath, selectedProvider.envVar, apiKey);

    console.log(chalk.green(`\n✓ API key saved to: ${path.relative(projectPath, envPath)}`));
    console.log(chalk.gray(' This file is gitignored and will persist across sessions.\n'));

    // Load into current process
    process.env[selectedProvider.envVar] = apiKey;
  } else {
    const keySource = existingEnv[selectedProvider.envVar] ? '.env file' : 'environment';
    console.log(chalk.green(`\n✓ Using API key from ${keySource}`));
  }

  // Fetch available models (falls back to defaults on failure)
  const availableModels = await fetchAvailableModels(selectedProvider, apiKey);

  // Model selection with autocomplete
  console.log('');
  const { model } = await inquirer.prompt([
    {
      type: 'autocomplete',
      name: 'model',
      message: `Select ${selectedProvider.name} model (type to filter):`,
      // Case-insensitive substring filter over the fetched model list
      source: async (answersSoFar, input) => {
        const filtered = input
          ? availableModels.filter(m => m.toLowerCase().includes(input.toLowerCase()))
          : availableModels;
        return filtered;
      },
      pageSize: 10
    }
  ]);

  // NOTE(review): apiKey is included here for immediate use by AIClient, but
  // callers appear to avoid persisting it in session files (see init.js).
  const config = {
    provider: selectedProvider.id,
    providerName: selectedProvider.name,
    model,
    apiKey,
    envVar: selectedProvider.envVar,
    envPath
  };

  // Test connection (optional but recommended)
  const { testConnection } = await inquirer.prompt([
    {
      type: 'confirm',
      name: 'testConnection',
      message: 'Test AI connection before starting?',
      default: true
    }
  ]);

  if (testConnection) {
    const ora = require('ora');
    const spinner = ora('Testing AI connection...').start();

    try {
      const AIClient = require('./ai-client');
      const client = new AIClient(config);
      await client.test();
      spinner.succeed(chalk.green('AI connection successful!'));
    } catch (error) {
      spinner.fail(chalk.red('AI connection failed'));
      console.log(chalk.red(`\nError: ${error.message}\n`));

      const { retry } = await inquirer.prompt([
        {
          type: 'confirm',
          name: 'retry',
          message: 'Try again with different configuration?',
          default: true
        }
      ]);

      if (retry) {
        return configureAIProvider(projectPath); // Recursive retry
      } else {
        process.exit(1);
      }
    }
  }

  console.log(chalk.gray('\n' + '━'.repeat(60)) + '\n');

  return config;
}
|
|
351
|
+
|
|
352
|
+
/**
|
|
353
|
+
* Detect which providers have API keys in environment or .env file
|
|
354
|
+
*/
|
|
355
|
+
/**
 * List the providers that already have an API key available, either in
 * process.env or in the project's .adf/.env file.
 *
 * @param {string} [projectPath=process.cwd()] - Project root directory.
 * @returns {Promise<object[]>} Matching entries from AI_PROVIDERS.
 */
async function detectAvailableProviders(projectPath = process.cwd()) {
  const existingEnv = await loadEnvFile(getEnvFilePath(projectPath));

  // A provider counts as available when its key is set in either location
  return Object.values(AI_PROVIDERS).filter(
    provider => Boolean(process.env[provider.envVar] || existingEnv[provider.envVar])
  );
}
|
|
370
|
+
|
|
371
|
+
/**
|
|
372
|
+
* Validate API key format
|
|
373
|
+
*/
|
|
374
|
+
/**
 * Validate the format of an API key for a given provider.
 *
 * @param {string} provider - Provider id (e.g. 'anthropic'); matched
 *   case-insensitively against AI_PROVIDERS keys.
 * @param {string} apiKey - Candidate API key.
 * @returns {{valid: boolean, error?: string}} Validation result.
 */
function validateAPIKey(provider, apiKey) {
  const config = AI_PROVIDERS[provider.toUpperCase()];

  // Guard: an unknown provider id previously crashed with a TypeError on
  // `config.requiredFormat`; report it as a validation failure instead.
  if (!config) {
    return { valid: false, error: `Unknown provider: ${provider}` };
  }

  if (!apiKey || apiKey.trim().length === 0) {
    return { valid: false, error: 'API key cannot be empty' };
  }

  // Empty requiredFormat (e.g. Google) means "no prefix check"
  if (config.requiredFormat && !apiKey.startsWith(config.requiredFormat)) {
    return {
      valid: false,
      error: `API key should start with "${config.requiredFormat}"`
    };
  }

  return { valid: true };
}
|
|
390
|
+
|
|
391
|
+
module.exports = {
|
|
392
|
+
configureAIProvider,
|
|
393
|
+
detectAvailableProviders,
|
|
394
|
+
validateAPIKey,
|
|
395
|
+
AI_PROVIDERS,
|
|
396
|
+
loadEnvIntoProcess,
|
|
397
|
+
getEnvFilePath
|
|
398
|
+
};
|
|
@@ -0,0 +1,154 @@
|
|
|
1
|
+
const inquirer = require('inquirer');
|
|
2
|
+
const chalk = require('chalk');
|
|
3
|
+
const fs = require('fs-extra');
|
|
4
|
+
const path = require('path');
|
|
5
|
+
const { configureAIProvider, getEnvFilePath, loadEnvFile } = require('../ai/ai-config');
|
|
6
|
+
|
|
7
|
+
/**
|
|
8
|
+
* Configuration categories available in ADF CLI
|
|
9
|
+
*/
|
|
10
|
+
/**
 * Configuration categories available in ADF CLI.
 * Each entry: name (menu label), description (help text), value (the id the
 * `config` command's switch dispatches on).
 */
const CONFIG_CATEGORIES = {
  AI_PROVIDER: {
    name: 'AI Provider Setup',
    description: 'Configure AI provider (Anthropic, OpenAI, Google Gemini, OpenRouter)',
    value: 'ai-provider'
  }
  // Future config categories can be added here:
  // PROJECT_SETTINGS: { name: 'Project Settings', description: '...', value: 'project' },
  // DEPLOYMENT: { name: 'Deployment Preferences', description: '...', value: 'deployment' },
};
|
|
20
|
+
|
|
21
|
+
/**
|
|
22
|
+
* Check if AI provider is already configured
|
|
23
|
+
*/
|
|
24
|
+
/**
 * Determine whether any AI provider API key is present in the project's
 * .adf/.env file (process.env is intentionally not consulted here).
 *
 * @param {string} [projectPath=process.cwd()] - Project root directory.
 * @returns {Promise<{configured: boolean, provider?: string}>} The first
 *   provider (by list order) with a non-empty key, or { configured: false }.
 */
async function isAIConfigured(projectPath = process.cwd()) {
  const envPath = getEnvFilePath(projectPath);

  if (!(await fs.pathExists(envPath))) {
    return { configured: false };
  }

  const envVars = await loadEnvFile(envPath);

  // Check if any AI provider key exists; first non-empty one wins
  const aiKeys = [
    'ANTHROPIC_API_KEY',
    'OPENAI_API_KEY',
    'GOOGLE_API_KEY',
    'OPENROUTER_API_KEY'
  ];

  const found = aiKeys.find(key => envVars[key] && envVars[key].length > 0);

  if (!found) {
    return { configured: false };
  }

  return {
    configured: true,
    provider: found.replace('_API_KEY', '').toLowerCase()
  };
}
|
|
52
|
+
|
|
53
|
+
/**
|
|
54
|
+
* Display configuration status for AI provider
|
|
55
|
+
*/
|
|
56
|
+
/**
 * Render a one-line colored status label for the AI provider configuration,
 * used in the config command's menu.
 *
 * @param {{configured: boolean, provider?: string}} status - Result of isAIConfigured().
 * @returns {string} e.g. "✓ Configured (Anthropic)" or "○ Not configured".
 */
function displayAIStatus(status) {
  if (!status.configured) {
    return chalk.yellow('○ Not configured');
  }

  // Capitalize the provider id for display (e.g. 'openai' -> 'Openai')
  const providerName = status.provider.charAt(0).toUpperCase() + status.provider.slice(1);
  return `${chalk.green('✓ Configured')} ${chalk.gray(`(${providerName})`)}`;
}
|
|
64
|
+
|
|
65
|
+
/**
|
|
66
|
+
* Main config command
|
|
67
|
+
*/
|
|
68
|
+
/**
 * Main `adf config` command: show a menu of configuration categories with
 * live status indicators and dispatch to the selected category's handler.
 *
 * Currently only the AI Provider category is implemented; the choices list
 * and the switch below are the two places to extend for new categories.
 *
 * @returns {Promise<void>}
 */
async function config() {
  console.log(chalk.cyan.bold('\n⚙️ ADF Configuration\n'));

  const cwd = process.cwd();

  // Check AI configuration status
  const aiStatus = await isAIConfigured(cwd);

  // Build choices with status indicators
  const choices = [
    {
      name: `${CONFIG_CATEGORIES.AI_PROVIDER.name} - ${displayAIStatus(aiStatus)}`,
      value: CONFIG_CATEGORIES.AI_PROVIDER.value,
      short: CONFIG_CATEGORIES.AI_PROVIDER.name
    },
    new inquirer.Separator(),
    {
      name: chalk.gray('← Back'),
      value: 'back'
    }
  ];

  const { category } = await inquirer.prompt([
    {
      type: 'list',
      name: 'category',
      message: 'Select configuration category:',
      choices: choices,
      pageSize: 10
    }
  ]);

  // 'Back' aborts without touching any configuration
  if (category === 'back') {
    console.log(chalk.yellow('\n✋ Configuration cancelled.\n'));
    return;
  }

  // Handle selected category
  switch (category) {
    case 'ai-provider':
      await configureAIProviderCategory(cwd, aiStatus);
      break;

    // Future categories will be handled here
    default:
      console.log(chalk.red('\n❌ Configuration category not implemented yet.\n'));
  }
}
|
|
116
|
+
|
|
117
|
+
/**
|
|
118
|
+
* Configure AI Provider category
|
|
119
|
+
*/
|
|
120
|
+
/**
 * Handle the "AI Provider Setup" configuration category.
 *
 * If a provider is already configured, asks for confirmation before
 * reconfiguring; otherwise (or on confirmation) runs the full interactive
 * configureAIProvider flow and prints a summary of the saved settings.
 *
 * @param {string} cwd - Project root directory.
 * @param {{configured: boolean, provider?: string}} aiStatus - Result of isAIConfigured().
 * @returns {Promise<void>}
 */
async function configureAIProviderCategory(cwd, aiStatus) {
  console.log(chalk.gray('\n' + '─'.repeat(60) + '\n'));

  if (aiStatus.configured) {
    // Capitalize the provider id for display
    const providerName = aiStatus.provider.charAt(0).toUpperCase() + aiStatus.provider.slice(1);
    console.log(chalk.green(`✓ AI Provider already configured: ${providerName}\n`));

    const { reconfigure } = await inquirer.prompt([
      {
        type: 'confirm',
        name: 'reconfigure',
        message: 'Do you want to reconfigure your AI provider?',
        default: false
      }
    ]);

    // Keep the existing configuration untouched if declined
    if (!reconfigure) {
      console.log(chalk.yellow('\n✋ Configuration unchanged.\n'));
      return;
    }
  }

  // Run AI provider configuration
  console.log('');
  const aiConfig = await configureAIProvider(cwd);

  // NOTE(review): configureAIProvider either returns a config object or does
  // not return at all (retry recursion / process.exit), so this check is a
  // defensive guard rather than an expected branch.
  if (aiConfig) {
    console.log(chalk.green.bold('\n✅ AI Provider configured successfully!\n'));
    console.log(chalk.gray(` Provider: ${aiConfig.providerName}`));
    console.log(chalk.gray(` Model: ${aiConfig.model}`));
    console.log(chalk.gray(` Config saved to: ${getEnvFilePath(cwd)}\n`));
  }
}
|
|
153
|
+
|
|
154
|
+
module.exports = config;
|
package/lib/commands/init.js
CHANGED
|
@@ -10,6 +10,7 @@ const {
|
|
|
10
10
|
const Interviewer = require('../frameworks/interviewer');
|
|
11
11
|
const SessionManager = require('../frameworks/session-manager');
|
|
12
12
|
const { deployToTool } = require('./deploy');
|
|
13
|
+
const { configureAIProvider, loadEnvIntoProcess, getEnvFilePath } = require('../ai/ai-config');
|
|
13
14
|
|
|
14
15
|
async function init(options) {
|
|
15
16
|
console.log(chalk.cyan.bold('\n🚀 AgentDevFramework - Software Development Requirements\n'));
|
|
@@ -17,13 +18,41 @@ async function init(options) {
|
|
|
17
18
|
const cwd = process.cwd();
|
|
18
19
|
const adfDir = path.join(cwd, '.adf');
|
|
19
20
|
|
|
21
|
+
// Load .env file if it exists (for API keys)
|
|
22
|
+
const envPath = getEnvFilePath(cwd);
|
|
23
|
+
if (await fs.pathExists(envPath)) {
|
|
24
|
+
loadEnvIntoProcess(envPath);
|
|
25
|
+
}
|
|
26
|
+
|
|
20
27
|
// Check for resumable sessions FIRST (before asking to overwrite)
|
|
21
28
|
const sessionManager = new SessionManager(cwd);
|
|
22
29
|
const existingSession = await sessionManager.promptToResume();
|
|
23
30
|
|
|
24
31
|
if (existingSession) {
|
|
25
32
|
// Resume existing session
|
|
26
|
-
|
|
33
|
+
// Check if session has AI config (from resumed session)
|
|
34
|
+
let aiConfig = existingSession.progress.aiConfig;
|
|
35
|
+
|
|
36
|
+
if (aiConfig) {
|
|
37
|
+
// We have AI config from session, but need to verify API key exists
|
|
38
|
+
const apiKey = process.env[aiConfig.envVar];
|
|
39
|
+
if (!apiKey) {
|
|
40
|
+
console.log(chalk.yellow(`\n⚠️ Previous session used ${aiConfig.providerName}`));
|
|
41
|
+
console.log(chalk.yellow(`Please configure API key to resume...\n`));
|
|
42
|
+
aiConfig = await configureAIProvider(cwd);
|
|
43
|
+
} else {
|
|
44
|
+
// Add API key to config (it's not stored in session for security)
|
|
45
|
+
aiConfig.apiKey = apiKey;
|
|
46
|
+
console.log(chalk.green(`\n✓ Resuming with ${aiConfig.providerName} (${aiConfig.model})\n`));
|
|
47
|
+
}
|
|
48
|
+
} else {
|
|
49
|
+
// Old session without AI config, configure now
|
|
50
|
+
console.log(chalk.yellow('\n⚠️ This session was created before AI provider integration.'));
|
|
51
|
+
console.log(chalk.yellow('Please configure AI provider to continue...\n'));
|
|
52
|
+
aiConfig = await configureAIProvider(cwd);
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
const interviewer = new Interviewer(existingSession.progress.framework || 'balanced', cwd, existingSession, aiConfig);
|
|
27
56
|
const sessionPath = await interviewer.start();
|
|
28
57
|
|
|
29
58
|
console.log(chalk.green.bold('\n✨ Requirements gathering complete!\n'));
|
|
@@ -76,17 +105,34 @@ async function init(options) {
|
|
|
76
105
|
// Create .adf directory
|
|
77
106
|
await fs.ensureDir(adfDir);
|
|
78
107
|
|
|
79
|
-
//
|
|
108
|
+
// Configure AI Provider (OPTIONAL - can be done later with 'adf config')
|
|
109
|
+
let aiConfig = null;
|
|
110
|
+
|
|
111
|
+
const { configureAI } = await inquirer.prompt([
|
|
112
|
+
{
|
|
113
|
+
type: 'confirm',
|
|
114
|
+
name: 'configureAI',
|
|
115
|
+
message: 'Configure AI provider now? (Enables intelligent follow-up questions)',
|
|
116
|
+
default: true
|
|
117
|
+
}
|
|
118
|
+
]);
|
|
119
|
+
|
|
120
|
+
if (configureAI) {
|
|
121
|
+
aiConfig = await configureAIProvider(cwd);
|
|
122
|
+
} else {
|
|
123
|
+
console.log(chalk.yellow('\n💡 You can configure AI later by running: adf config\n'));
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
// Start interview (with or without AI)
|
|
80
127
|
console.log(chalk.gray('\n' + '━'.repeat(60)) + '\n');
|
|
81
128
|
|
|
82
|
-
const interviewer = new Interviewer(workflow, cwd);
|
|
129
|
+
const interviewer = new Interviewer(workflow, cwd, null, aiConfig);
|
|
83
130
|
const sessionPath = await interviewer.start();
|
|
84
131
|
|
|
85
|
-
// Show
|
|
86
|
-
console.log(chalk.cyan('📋
|
|
87
|
-
console.log(chalk.gray(`
|
|
88
|
-
console.log(chalk.gray(`
|
|
89
|
-
console.log(chalk.gray(` 3. Start building based on the detailed requirements\n`));
|
|
132
|
+
// Show completion message
|
|
133
|
+
console.log(chalk.cyan('📋 Requirements Complete!\n'));
|
|
134
|
+
console.log(chalk.gray(` ✓ Files saved to: ${sessionPath}/outputs/`));
|
|
135
|
+
console.log(chalk.gray(` ✓ You can review your requirements anytime\n`));
|
|
90
136
|
|
|
91
137
|
// Optional: Deploy to tool
|
|
92
138
|
if (options.tool) {
|
|
@@ -97,8 +143,8 @@ async function init(options) {
|
|
|
97
143
|
{
|
|
98
144
|
type: 'confirm',
|
|
99
145
|
name: 'deployNow',
|
|
100
|
-
message: '
|
|
101
|
-
default:
|
|
146
|
+
message: 'Automatically deploy to your IDE? (I\'ll configure everything for you)',
|
|
147
|
+
default: true
|
|
102
148
|
}
|
|
103
149
|
]);
|
|
104
150
|
|