@iservu-inc/adf-cli 0.3.0 → 0.4.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.project/chats/{current → complete}/2025-10-03_AGENTS-MD-AND-TOOL-GENERATORS.md +82 -17
- package/.project/chats/complete/2025-10-03_AI-PROVIDER-INTEGRATION.md +568 -0
- package/.project/chats/complete/2025-10-03_FRAMEWORK-UPDATE-SYSTEM.md +497 -0
- package/.project/chats/complete/2025-10-04_CONFIG-COMMAND.md +503 -0
- package/.project/chats/current/2025-10-04_PHASE-4-1-SMART-FILTERING.md +381 -0
- package/.project/chats/current/SESSION-STATUS.md +168 -0
- package/.project/docs/AI-PROVIDER-INTEGRATION.md +600 -0
- package/.project/docs/FRAMEWORK-UPDATE-INTEGRATION.md +421 -0
- package/.project/docs/FRAMEWORK-UPDATE-SYSTEM.md +832 -0
- package/.project/docs/PHASE-4-2-LEARNING-SYSTEM.md +881 -0
- package/.project/docs/PROJECT-STRUCTURE-EXPLANATION.md +500 -0
- package/.project/docs/SMART-FILTERING-SYSTEM.md +385 -0
- package/.project/docs/architecture/SYSTEM-DESIGN.md +122 -1
- package/.project/docs/goals/PROJECT-VISION.md +61 -34
- package/CHANGELOG.md +257 -1
- package/README.md +476 -292
- package/bin/adf.js +7 -0
- package/lib/ai/ai-client.js +328 -0
- package/lib/ai/ai-config.js +398 -0
- package/lib/analyzers/project-analyzer.js +380 -0
- package/lib/commands/config.js +221 -0
- package/lib/commands/init.js +56 -10
- package/lib/filters/question-filter.js +480 -0
- package/lib/frameworks/interviewer.js +271 -12
- package/lib/frameworks/progress-tracker.js +8 -1
- package/lib/learning/learning-manager.js +447 -0
- package/lib/learning/pattern-detector.js +376 -0
- package/lib/learning/rule-generator.js +304 -0
- package/lib/learning/skip-tracker.js +260 -0
- package/lib/learning/storage.js +296 -0
- package/package.json +70 -57
- package/tests/learning-storage.test.js +184 -0
- package/tests/pattern-detector.test.js +297 -0
- package/tests/project-analyzer.test.js +221 -0
- package/tests/question-filter.test.js +297 -0
- package/tests/skip-tracker.test.js +198 -0
package/lib/ai/ai-config.js
@@ -0,0 +1,398 @@
+const inquirer = require('inquirer');
+const autocomplete = require('inquirer-autocomplete-prompt');
+const chalk = require('chalk');
+const fs = require('fs-extra');
+const path = require('path');
+const os = require('os');
+
+// Register autocomplete prompt
+inquirer.registerPrompt('autocomplete', autocomplete);
+
+/**
+ * AI Provider Configuration
+ * Handles setup and validation of AI providers for the interview system
+ * - Saves API keys to .env file for persistence
+ * - Fetches available models dynamically from providers
+ * - Autocomplete model selection with filtering
+ */
+
+const AI_PROVIDERS = {
+  ANTHROPIC: {
+    id: 'anthropic',
+    name: 'Anthropic Claude',
+    envVar: 'ANTHROPIC_API_KEY',
+    requiredFormat: 'sk-ant-',
+    website: 'https://console.anthropic.com/',
+    setup: 'Get your API key from https://console.anthropic.com/',
+    defaultModels: ['claude-sonnet-4-5-20250929', 'claude-3-5-sonnet-20241022', 'claude-3-opus-20240229']
+  },
+  OPENAI: {
+    id: 'openai',
+    name: 'OpenAI GPT',
+    envVar: 'OPENAI_API_KEY',
+    requiredFormat: 'sk-',
+    website: 'https://platform.openai.com/',
+    setup: 'Get your API key from https://platform.openai.com/api-keys',
+    defaultModels: ['gpt-4-turbo', 'gpt-4o', 'gpt-4', 'gpt-3.5-turbo']
+  },
+  GOOGLE: {
+    id: 'google',
+    name: 'Google Gemini',
+    envVar: 'GOOGLE_API_KEY',
+    requiredFormat: '', // Google keys don't have consistent prefix
+    website: 'https://ai.google.dev/',
+    setup: 'Get your API key from https://aistudio.google.com/app/apikey',
+    defaultModels: ['gemini-2.0-flash-exp', 'gemini-1.5-pro', 'gemini-1.5-flash']
+  },
+  OPENROUTER: {
+    id: 'openrouter',
+    name: 'OpenRouter (Multi-Model)',
+    envVar: 'OPENROUTER_API_KEY',
+    requiredFormat: 'sk-or-',
+    website: 'https://openrouter.ai/',
+    setup: 'Get your API key from https://openrouter.ai/keys',
+    defaultModels: [
+      'anthropic/claude-sonnet-4-5',
+      'openai/gpt-4-turbo',
+      'google/gemini-pro-1.5',
+      'meta-llama/llama-3.1-70b-instruct'
+    ]
+  }
+};
+
+/**
+ * Get or create .env file path
+ */
+function getEnvFilePath(projectPath) {
+  const adfDir = path.join(projectPath, '.adf');
+  return path.join(adfDir, '.env');
+}
+
+/**
+ * Load existing .env file
+ */
+async function loadEnvFile(envPath) {
+  if (await fs.pathExists(envPath)) {
+    const content = await fs.readFile(envPath, 'utf-8');
+    const env = {};
+    content.split('\n').forEach(line => {
+      const trimmed = line.trim();
+      if (trimmed && !trimmed.startsWith('#')) {
+        const [key, ...valueParts] = trimmed.split('=');
+        if (key && valueParts.length > 0) {
+          env[key.trim()] = valueParts.join('=').trim().replace(/^["']|["']$/g, '');
+        }
+      }
+    });
+    return env;
+  }
+  return {};
+}
+
+/**
+ * Save API key to .env file
+ */
+async function saveToEnvFile(envPath, key, value) {
+  const env = await loadEnvFile(envPath);
+  env[key] = value;
+
+  const lines = [
+    '# AI Provider API Keys for adf-cli',
+    '# DO NOT commit this file to version control',
+    ''
+  ];
+
+  for (const [k, v] of Object.entries(env)) {
+    lines.push(`${k}="${v}"`);
+  }
+
+  await fs.ensureDir(path.dirname(envPath));
+  await fs.writeFile(envPath, lines.join('\n'), 'utf-8');
+}
+
+/**
+ * Load .env file into process.env
+ */
+function loadEnvIntoProcess(envPath) {
+  require('dotenv').config({ path: envPath });
+}
+
+/**
+ * Fetch available models from provider API
+ */
+async function fetchAvailableModels(provider, apiKey) {
+  const ora = require('ora');
+  const spinner = ora('Fetching available models...').start();
+
+  try {
+    switch (provider.id) {
+      case 'anthropic':
+        // Anthropic doesn't have a models list API, use defaults
+        spinner.succeed('Using known Anthropic models');
+        return provider.defaultModels;
+
+      case 'openai':
+        const OpenAI = require('openai');
+        const openai = new OpenAI({ apiKey });
+        const response = await openai.models.list();
+        const gptModels = response.data
+          .filter(m => m.id.startsWith('gpt-'))
+          .map(m => m.id)
+          .sort();
+        spinner.succeed(`Found ${gptModels.length} OpenAI models`);
+        return gptModels.length > 0 ? gptModels : provider.defaultModels;
+
+      case 'google':
+        // Google doesn't have a public models list API, use defaults
+        spinner.succeed('Using known Google Gemini models');
+        return provider.defaultModels;
+
+      case 'openrouter':
+        const fetch = require('node-fetch');
+        const orResponse = await fetch('https://openrouter.ai/api/v1/models', {
+          headers: {
+            'Authorization': `Bearer ${apiKey}`
+          }
+        });
+        const orData = await orResponse.json();
+        const orModels = orData.data.map(m => m.id).sort();
+        spinner.succeed(`Found ${orModels.length} OpenRouter models`);
+        return orModels.length > 0 ? orModels : provider.defaultModels;
+
+      default:
+        spinner.warn('Model fetching not supported, using defaults');
+        return provider.defaultModels;
+    }
+  } catch (error) {
+    spinner.fail(`Failed to fetch models: ${error.message}`);
+    console.log(chalk.yellow(' Using default model list\n'));
+    return provider.defaultModels;
+  }
+}
+
+/**
+ * Prompt user to select and configure AI provider
+ */
+async function configureAIProvider(projectPath = process.cwd()) {
+  console.log(chalk.cyan.bold('\n🤖 AI Provider Configuration\n'));
+  console.log(chalk.gray('This interview requires an AI assistant to analyze your answers and provide insights.\n'));
+
+  const envPath = getEnvFilePath(projectPath);
+
+  // Load existing .env file
+  const existingEnv = await loadEnvFile(envPath);
+
+  // Check for existing API keys (from process.env or .env file)
+  const availableProviders = [];
+  for (const [key, provider] of Object.entries(AI_PROVIDERS)) {
+    const apiKey = process.env[provider.envVar] || existingEnv[provider.envVar];
+    if (apiKey) {
+      availableProviders.push(provider);
+    }
+  }
+
+  if (availableProviders.length > 0) {
+    console.log(chalk.green('✓ Detected API keys for:'));
+    availableProviders.forEach(p => {
+      console.log(chalk.gray(` • ${p.name} (${p.envVar})`));
+    });
+    console.log('');
+  }
+
+  // Provider selection
+  const providerChoices = [
+    {
+      name: `${AI_PROVIDERS.ANTHROPIC.name} ${availableProviders.find(p => p.id === 'anthropic') ? chalk.green('✓ Configured') : ''}`,
+      value: 'anthropic',
+      short: 'Anthropic'
+    },
+    {
+      name: `${AI_PROVIDERS.OPENAI.name} ${availableProviders.find(p => p.id === 'openai') ? chalk.green('✓ Configured') : ''}`,
+      value: 'openai',
+      short: 'OpenAI'
+    },
+    {
+      name: `${AI_PROVIDERS.GOOGLE.name} ${availableProviders.find(p => p.id === 'google') ? chalk.green('✓ Configured') : ''}`,
+      value: 'google',
+      short: 'Google'
+    },
+    {
+      name: `${AI_PROVIDERS.OPENROUTER.name} ${availableProviders.find(p => p.id === 'openrouter') ? chalk.green('✓ Configured') : ''}`,
+      value: 'openrouter',
+      short: 'OpenRouter'
+    }
+  ];
+
+  const { provider } = await inquirer.prompt([
+    {
+      type: 'list',
+      name: 'provider',
+      message: 'Select AI provider:',
+      choices: providerChoices
+    }
+  ]);
+
+  const selectedProvider = AI_PROVIDERS[provider.toUpperCase()];
+
+  // Check if API key exists
+  let apiKey = process.env[selectedProvider.envVar] || existingEnv[selectedProvider.envVar];
+
+  if (!apiKey) {
+    console.log(chalk.yellow(`\n⚠️ ${selectedProvider.envVar} not found\n`));
+    console.log(chalk.gray(`Setup instructions:`));
+    console.log(chalk.gray(` ${selectedProvider.setup}\n`));
+
+    const { manualKey } = await inquirer.prompt([
+      {
+        type: 'password',
+        name: 'manualKey',
+        message: `Enter your ${selectedProvider.name} API key:`,
+        mask: '*',
+        validate: (input) => {
+          if (!input || input.trim().length === 0) {
+            return 'API key cannot be empty';
+          }
+          if (selectedProvider.requiredFormat && !input.startsWith(selectedProvider.requiredFormat)) {
+            return `API key should start with "${selectedProvider.requiredFormat}"`;
+          }
+          return true;
+        }
+      }
+    ]);
+
+    apiKey = manualKey.trim();
+
+    // Save to .env file
+    await saveToEnvFile(envPath, selectedProvider.envVar, apiKey);
+
+    console.log(chalk.green(`\n✓ API key saved to: ${path.relative(projectPath, envPath)}`));
+    console.log(chalk.gray(' This file is gitignored and will persist across sessions.\n'));
+
+    // Load into current process
+    process.env[selectedProvider.envVar] = apiKey;
+  } else {
+    const keySource = existingEnv[selectedProvider.envVar] ? '.env file' : 'environment';
+    console.log(chalk.green(`\n✓ Using API key from ${keySource}`));
+  }
+
+  // Fetch available models
+  const availableModels = await fetchAvailableModels(selectedProvider, apiKey);
+
+  // Model selection with autocomplete
+  console.log('');
+  const { model } = await inquirer.prompt([
+    {
+      type: 'autocomplete',
+      name: 'model',
+      message: `Select ${selectedProvider.name} model (type to filter):`,
+      source: async (answersSoFar, input) => {
+        const filtered = input
+          ? availableModels.filter(m => m.toLowerCase().includes(input.toLowerCase()))
+          : availableModels;
+        return filtered;
+      },
+      pageSize: 10
+    }
+  ]);
+
+  const config = {
+    provider: selectedProvider.id,
+    providerName: selectedProvider.name,
+    model,
+    apiKey,
+    envVar: selectedProvider.envVar,
+    envPath
+  };
+
+  // Test connection (optional but recommended)
+  const { testConnection } = await inquirer.prompt([
+    {
+      type: 'confirm',
+      name: 'testConnection',
+      message: 'Test AI connection before starting?',
+      default: true
+    }
+  ]);
+
+  if (testConnection) {
+    const ora = require('ora');
+    const spinner = ora('Testing AI connection...').start();
+
+    try {
+      const AIClient = require('./ai-client');
+      const client = new AIClient(config);
+      await client.test();
+      spinner.succeed(chalk.green('AI connection successful!'));
+    } catch (error) {
+      spinner.fail(chalk.red('AI connection failed'));
+      console.log(chalk.red(`\nError: ${error.message}\n`));
+
+      const { retry } = await inquirer.prompt([
+        {
+          type: 'confirm',
+          name: 'retry',
+          message: 'Try again with different configuration?',
+          default: true
+        }
+      ]);
+
+      if (retry) {
+        return configureAIProvider(projectPath); // Recursive retry
+      } else {
+        process.exit(1);
+      }
+    }
+  }
+
+  console.log(chalk.gray('\n' + '━'.repeat(60)) + '\n');
+
+  return config;
+}
+
+/**
+ * Detect which providers have API keys in environment or .env file
+ */
+async function detectAvailableProviders(projectPath = process.cwd()) {
+  const envPath = getEnvFilePath(projectPath);
+  const existingEnv = await loadEnvFile(envPath);
+
+  const available = [];
+
+  for (const [key, provider] of Object.entries(AI_PROVIDERS)) {
+    const apiKey = process.env[provider.envVar] || existingEnv[provider.envVar];
+    if (apiKey) {
+      available.push(provider);
+    }
+  }
+
+  return available;
+}
+
+/**
+ * Validate API key format
+ */
+function validateAPIKey(provider, apiKey) {
+  const config = AI_PROVIDERS[provider.toUpperCase()];
+
+  if (!apiKey || apiKey.trim().length === 0) {
+    return { valid: false, error: 'API key cannot be empty' };
+  }
+
+  if (config.requiredFormat && !apiKey.startsWith(config.requiredFormat)) {
+    return {
+      valid: false,
+      error: `API key should start with "${config.requiredFormat}"`
+    };
+  }
+
+  return { valid: true };
+}
+
+module.exports = {
+  configureAIProvider,
+  detectAvailableProviders,
+  validateAPIKey,
+  AI_PROVIDERS,
+  loadEnvIntoProcess,
+  getEnvFilePath
+};
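The file added above exports configureAIProvider, detectAvailableProviders, validateAPIKey, loadEnvIntoProcess, and getEnvFilePath. A minimal sketch of how a consumer might call them is shown below; the require path and the wrapper script are illustrative assumptions, not code shipped in this diff.

// example.js — hypothetical consumer of lib/ai/ai-config.js (not part of the package)
const {
  configureAIProvider,
  detectAvailableProviders,
  loadEnvIntoProcess,
  getEnvFilePath
} = require('@iservu-inc/adf-cli/lib/ai/ai-config'); // assumed require path

async function main() {
  const projectPath = process.cwd();

  // Load any keys previously saved to <project>/.adf/.env into process.env
  loadEnvIntoProcess(getEnvFilePath(projectPath));

  // List providers that already have an API key in the environment or .env file
  const providers = await detectAvailableProviders(projectPath);
  console.log(`Configured providers: ${providers.map(p => p.name).join(', ') || 'none'}`);

  // Interactive setup: pick a provider, enter or reuse a key, choose a model.
  // Resolves to { provider, providerName, model, apiKey, envVar, envPath }.
  const config = await configureAIProvider(projectPath);
  console.log(`Using ${config.providerName} model ${config.model}`);
}

main().catch(err => {
  console.error(err);
  process.exit(1);
});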