@juspay/neurolink 7.34.0 → 7.36.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/README.md +64 -7
- package/dist/adapters/providerImageAdapter.d.ts +56 -0
- package/dist/adapters/providerImageAdapter.js +257 -0
- package/dist/cli/commands/config.d.ts +20 -20
- package/dist/cli/commands/setup-anthropic.d.ts +16 -0
- package/dist/cli/commands/setup-anthropic.js +414 -0
- package/dist/cli/commands/setup-azure.d.ts +17 -0
- package/dist/cli/commands/setup-azure.js +415 -0
- package/dist/cli/commands/setup-bedrock.d.ts +13 -0
- package/dist/cli/commands/setup-bedrock.js +487 -0
- package/dist/cli/commands/setup-gcp.d.ts +18 -0
- package/dist/cli/commands/setup-gcp.js +569 -0
- package/dist/cli/commands/setup-google-ai.d.ts +16 -0
- package/dist/cli/commands/setup-google-ai.js +369 -0
- package/dist/cli/commands/setup-huggingface.d.ts +8 -0
- package/dist/cli/commands/setup-huggingface.js +200 -0
- package/dist/cli/commands/setup-mistral.d.ts +8 -0
- package/dist/cli/commands/setup-mistral.js +233 -0
- package/dist/cli/commands/setup-openai.d.ts +16 -0
- package/dist/cli/commands/setup-openai.js +402 -0
- package/dist/cli/commands/setup.d.ts +19 -0
- package/dist/cli/commands/setup.js +539 -0
- package/dist/cli/factories/commandFactory.d.ts +5 -0
- package/dist/cli/factories/commandFactory.js +67 -3
- package/dist/cli/factories/setupCommandFactory.d.ts +18 -0
- package/dist/cli/factories/setupCommandFactory.js +137 -0
- package/dist/cli/parser.js +4 -1
- package/dist/cli/utils/envManager.d.ts +3 -2
- package/dist/cli/utils/envManager.js +18 -4
- package/dist/core/baseProvider.js +99 -45
- package/dist/core/types.d.ts +3 -0
- package/dist/lib/adapters/providerImageAdapter.d.ts +56 -0
- package/dist/lib/adapters/providerImageAdapter.js +257 -0
- package/dist/lib/core/baseProvider.js +99 -45
- package/dist/lib/core/types.d.ts +3 -0
- package/dist/lib/neurolink.js +8 -3
- package/dist/lib/types/content.d.ts +78 -0
- package/dist/lib/types/content.js +5 -0
- package/dist/lib/types/conversation.d.ts +19 -0
- package/dist/lib/types/generateTypes.d.ts +4 -1
- package/dist/lib/types/streamTypes.d.ts +6 -3
- package/dist/lib/utils/imageProcessor.d.ts +84 -0
- package/dist/lib/utils/imageProcessor.js +362 -0
- package/dist/lib/utils/messageBuilder.d.ts +8 -1
- package/dist/lib/utils/messageBuilder.js +279 -0
- package/dist/neurolink.js +8 -3
- package/dist/types/content.d.ts +78 -0
- package/dist/types/content.js +5 -0
- package/dist/types/conversation.d.ts +19 -0
- package/dist/types/generateTypes.d.ts +4 -1
- package/dist/types/streamTypes.d.ts +6 -3
- package/dist/utils/imageProcessor.d.ts +84 -0
- package/dist/utils/imageProcessor.js +362 -0
- package/dist/utils/messageBuilder.d.ts +8 -1
- package/dist/utils/messageBuilder.js +279 -0
- package/package.json +1 -1
package/dist/cli/commands/setup-mistral.js
@@ -0,0 +1,233 @@
+import chalk from "chalk";
+import ora from "ora";
+import inquirer from "inquirer";
+import fs from "fs";
+import path from "path";
+import { logger } from "../../lib/utils/logger.js";
+/**
+ * Validates Mistral API key format
+ * Mistral keys typically start with "sk-" and contain base62 plus "-" or "_".
+ */
+function validateMistralApiKey(apiKey) {
+    const key = apiKey.trim();
+    return /^sk-[A-Za-z0-9_-]{20,}$/.test(key);
+}
+/**
+ * Safely parse and reconstruct .env file content
+ */
+function updateEnvFile(updates) {
+    const envPath = path.resolve(process.cwd(), ".env");
+    let envContent = "";
+    // Read existing .env if it exists
+    if (fs.existsSync(envPath)) {
+        envContent = fs.readFileSync(envPath, "utf-8");
+    }
+    // Parse existing environment variables
+    const envLines = envContent.split("\n");
+    const envVars = new Map();
+    // Track which variables we found and their positions
+    envLines.forEach((line, index) => {
+        const trimmed = line.trim();
+        if (trimmed && !trimmed.startsWith("#")) {
+            const equalIndex = trimmed.indexOf("=");
+            if (equalIndex > 0) {
+                const key = trimmed.substring(0, equalIndex);
+                const value = trimmed.substring(equalIndex + 1);
+                envVars.set(key, { value, lineIndex: index });
+            }
+        }
+    });
+    // Update existing variables or add new ones
+    for (const [key, newValue] of Object.entries(updates)) {
+        const existing = envVars.get(key);
+        if (existing) {
+            // Update existing variable
+            envLines[existing.lineIndex] = `${key}=${newValue}`;
+        }
+        else {
+            // Add new variable
+            envLines.push(`${key}=${newValue}`);
+        }
+    }
+    // Write updated content
+    fs.writeFileSync(envPath, envLines.join("\n"));
+}
+/**
+ * Check current Mistral AI configuration
+ */
+function checkMistralConfig() {
+    const apiKey = process.env.MISTRAL_API_KEY;
+    const model = process.env.MISTRAL_MODEL;
+    const hasApiKey = !!apiKey;
+    const hasModel = !!model;
+    const isValid = typeof apiKey === "string" && validateMistralApiKey(apiKey);
+    return {
+        hasApiKey,
+        hasModel,
+        apiKey: apiKey ? `${apiKey.substring(0, 8)}...` : undefined,
+        model,
+        isValid,
+    };
+}
+export const handleMistralSetup = async (argv) => {
+    const spinner = ora();
+    try {
+        spinner.start("Checking Mistral AI configuration...");
+        const config = checkMistralConfig();
+        spinner.stop();
+        // Display current status
+        logger.always(chalk.bold.blue("\n🧠 Mistral AI Configuration Status\n"));
+        logger.always(`${config.hasApiKey ? "✅" : "❌"} API Key: ${config.apiKey || "Not set"}`);
+        logger.always(`${config.hasModel ? "✅" : "⚠️"} Model: ${config.model || "Not set (will use default)"}`);
+        if (config.isValid) {
+            logger.always(chalk.green("\n✅ Mistral AI is properly configured!"));
+            if (argv.check) {
+                return;
+            }
+            const { shouldReconfigure } = await inquirer.prompt([
+                {
+                    type: "confirm",
+                    name: "shouldReconfigure",
+                    message: "Configuration looks good. Do you want to reconfigure anyway?",
+                    default: false,
+                },
+            ]);
+            if (!shouldReconfigure) {
+                logger.always(chalk.green("✅ Keeping existing configuration."));
+                return;
+            }
+        }
+        else {
+            logger.always(chalk.yellow("\n⚠️ Mistral AI configuration needs setup."));
+            if (argv.check) {
+                process.exit(1);
+            }
+        }
+        if (argv["non-interactive"]) {
+            logger.always(chalk.yellow("Non-interactive mode: Skipping configuration setup."));
+            logger.always(chalk.blue("Please set MISTRAL_API_KEY manually."));
+            return;
+        }
+        // Interactive setup
+        logger.always(chalk.blue("\n🛠️ Let's configure Mistral AI!\n"));
+        // Show instructions for getting API key
+        logger.always(chalk.yellow("📋 To get your Mistral AI API key:"));
+        logger.always("1. Visit: https://console.mistral.ai/");
+        logger.always("2. Sign up or sign in to your account");
+        logger.always("3. Go to 'API Keys' section");
+        logger.always("4. Create a new API key");
+        logger.always("5. Copy the API key\n");
+        // Step 1: API Key
+        const { apiKey } = await inquirer.prompt([
+            {
+                type: "password",
+                name: "apiKey",
+                message: "Enter your Mistral AI API key:",
+                mask: "*",
+                validate: (input) => {
+                    if (!input.trim()) {
+                        return "API key is required";
+                    }
+                    if (!validateMistralApiKey(input.trim())) {
+                        return "Invalid API key format. Should be alphanumeric and at least 20 characters";
+                    }
+                    return true;
+                },
+            },
+        ]);
+        // Step 2: Model Selection
+        const { modelChoice } = await inquirer.prompt([
+            {
+                type: "list",
+                name: "modelChoice",
+                message: "Select a Mistral model:",
+                choices: [
+                    {
+                        name: "mistral-small (Balanced performance - Recommended)",
+                        value: "mistral-small",
+                    },
+                    {
+                        name: "mistral-tiny (Fastest, most cost-effective)",
+                        value: "mistral-tiny",
+                    },
+                    {
+                        name: "mistral-medium (Enhanced capabilities)",
+                        value: "mistral-medium",
+                    },
+                    {
+                        name: "mistral-large (Most capable model)",
+                        value: "mistral-large",
+                    },
+                    {
+                        name: "open-mistral-7b (Open source model)",
+                        value: "open-mistral-7b",
+                    },
+                    {
+                        name: "open-mixtral-8x7b (Open source mixture of experts)",
+                        value: "open-mixtral-8x7b",
+                    },
+                    {
+                        name: "Custom model name",
+                        value: "custom",
+                    },
+                ],
+                default: "mistral-small",
+            },
+        ]);
+        let selectedModel = modelChoice;
+        if (modelChoice === "custom") {
+            const { customModel } = await inquirer.prompt([
+                {
+                    type: "input",
+                    name: "customModel",
+                    message: "Enter custom Mistral model name:",
+                    validate: (input) => {
+                        const trimmed = input.trim();
+                        if (!trimmed) {
+                            return "Model name is required";
+                        }
+                        return true;
+                    },
+                },
+            ]);
+            selectedModel = customModel.trim();
+        }
+        // Save configuration
+        spinner.start("Saving configuration...");
+        const updates = {
+            MISTRAL_API_KEY: apiKey.trim(),
+            MISTRAL_MODEL: selectedModel,
+        };
+        updateEnvFile(updates);
+        spinner.stop();
+        logger.always(chalk.green("\n✅ Mistral AI configuration saved successfully!"));
+        logger.always(chalk.blue("\n📖 Usage examples:"));
+        logger.always(chalk.gray(' neurolink generate "Hello, how are you?" --provider mistral'));
+        logger.always(chalk.gray(` neurolink generate "Explain quantum physics" --provider mistral --model ${selectedModel}`));
+        logger.always(chalk.blue("\n🔗 Resources:"));
+        logger.always(chalk.gray(" • Mistral AI Console: https://console.mistral.ai/"));
+        logger.always(chalk.gray(" • API Documentation: https://docs.mistral.ai/"));
+        logger.always(chalk.gray(" • Model Information: https://docs.mistral.ai/getting-started/models/"));
+        logger.always(chalk.blue("\n💡 Features:"));
+        logger.always(chalk.gray(" • European GDPR-compliant AI"));
+        logger.always(chalk.gray(" • Multilingual support"));
+        logger.always(chalk.gray(" • Fast inference speeds"));
+    }
+    catch (error) {
+        spinner.stop();
+        logger.error("Mistral AI setup failed", error);
+        throw error;
+    }
+};
+export const setupMistralBuilder = {
+    check: {
+        type: "boolean",
+        describe: "Only check existing configuration without prompting",
+        default: false,
+    },
+    "non-interactive": {
+        type: "boolean",
+        describe: "Skip interactive prompts",
+        default: false,
+    },
+};
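For reference, a successful run of the handler above would leave entries like the following in the project's .env file (the values shown here are placeholders, not real credentials):

    MISTRAL_API_KEY=sk-<your-mistral-key>
    MISTRAL_MODEL=mistral-small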
package/dist/cli/commands/setup-openai.d.ts
@@ -0,0 +1,16 @@
+#!/usr/bin/env node
+/**
+ * OpenAI Setup Command
+ *
+ * Simple setup for OpenAI API integration:
+ * - OPENAI_API_KEY (required)
+ * - OPENAI_MODEL (optional, with popular choices)
+ *
+ * Follows the same UX patterns as setup-gcp and setup-bedrock
+ */
+interface OpenAISetupArgv {
+    check?: boolean;
+    nonInteractive?: boolean;
+}
+export declare function handleOpenAISetup(argv: OpenAISetupArgv): Promise<void>;
+export {};
package/dist/cli/commands/setup-openai.js
@@ -0,0 +1,402 @@
+#!/usr/bin/env node
+/**
+ * OpenAI Setup Command
+ *
+ * Simple setup for OpenAI API integration:
+ * - OPENAI_API_KEY (required)
+ * - OPENAI_MODEL (optional, with popular choices)
+ *
+ * Follows the same UX patterns as setup-gcp and setup-bedrock
+ */
+import fs from "fs";
+import path from "path";
+import inquirer from "inquirer";
+import chalk from "chalk";
+import ora from "ora";
+import { logger } from "../../lib/utils/logger.js";
+export async function handleOpenAISetup(argv) {
+    try {
+        const options = {
+            checkOnly: argv.check || false,
+            interactive: !argv.nonInteractive,
+        };
+        logger.always(chalk.blue("🔍 Checking OpenAI configuration..."));
+        // Step 1: Check for existing configuration
+        const hasApiKey = !!process.env.OPENAI_API_KEY;
+        const hasModel = !!process.env.OPENAI_MODEL;
+        // Display current status
+        displayCurrentStatus(hasApiKey, hasModel);
+        // Check-only mode - show status and exit
+        if (options.checkOnly) {
+            if (hasApiKey && process.env.OPENAI_API_KEY) {
+                logger.always(chalk.green("✅ OpenAI setup complete"));
+                logger.always(` API Key: ${maskCredential(process.env.OPENAI_API_KEY)}`);
+                if (hasModel) {
+                    logger.always(` Model: ${process.env.OPENAI_MODEL}`);
+                }
+                else {
+                    logger.always(" Model: (using provider default)");
+                }
+            }
+            else {
+                logger.always(chalk.yellow("⚠️ OpenAI setup incomplete"));
+            }
+            return;
+        }
+        const config = {};
+        // Step 2: Handle existing configuration
+        if (hasApiKey && process.env.OPENAI_API_KEY) {
+            logger.always(chalk.green("✅ OpenAI API key found in environment"));
+            logger.always(` API Key: ${maskCredential(process.env.OPENAI_API_KEY)}`);
+            if (hasModel) {
+                logger.always(` Model: ${process.env.OPENAI_MODEL}`);
+            }
+            else {
+                logger.always(" Model: (using provider default)");
+            }
+            if (options.interactive) {
+                const { reconfigure } = await inquirer.prompt([
+                    {
+                        type: "confirm",
+                        name: "reconfigure",
+                        message: "OpenAI is already configured. Do you want to reconfigure?",
+                        default: false,
+                    },
+                ]);
+                if (!reconfigure) {
+                    // Still offer model selection if no model is set
+                    if (!hasModel) {
+                        const { wantsCustomModel } = await inquirer.prompt([
+                            {
+                                type: "confirm",
+                                name: "wantsCustomModel",
+                                message: "Do you want to specify a custom OpenAI model? (optional)",
+                                default: false,
+                            },
+                        ]);
+                        if (wantsCustomModel) {
+                            config.model = await promptForModel();
+                        }
+                    }
+                    else {
+                        // Offer to change existing model
+                        const { wantsChangeModel } = await inquirer.prompt([
+                            {
+                                type: "confirm",
+                                name: "wantsChangeModel",
+                                message: `Do you want to change the OpenAI model? (current: ${process.env.OPENAI_MODEL})`,
+                                default: false,
+                            },
+                        ]);
+                        if (wantsChangeModel) {
+                            config.model = await promptForModel();
+                        }
+                    }
+                    if (config.model) {
+                        await updateEnvFile(config);
+                        logger.always(chalk.green("✅ Model configuration updated!"));
+                        logger.always(` OPENAI_MODEL=${config.model}`);
+                    }
+                    else {
+                        logger.always(chalk.blue("👍 Keeping existing configuration."));
+                    }
+                    // Show usage example
+                    showUsageExample();
+                    return;
+                }
+                else {
+                    // User chose to reconfigure - mark this for proper handling
+                    logger.always(chalk.blue("📝 Reconfiguring OpenAI setup..."));
+                    config.isReconfiguring = true;
+                }
+            }
+            else {
+                // Non-interactive mode - just use existing credentials
+                logger.always(chalk.green("✅ Setup complete! Using existing OpenAI configuration."));
+                return;
+            }
+        }
+        // Step 3: Interactive setup for missing or reconfiguring credentials
+        if (options.interactive) {
+            const isReconfiguring = config.isReconfiguring === true;
+            // Handle API key setup/reconfiguration
+            if (!hasApiKey) {
+                // No API key exists - prompt for it
+                logger.always("");
+                logger.always(chalk.yellow("📋 To get your OpenAI API key:"));
+                logger.always("1. Visit: https://platform.openai.com/api-keys");
+                logger.always("2. Log in to your OpenAI account");
+                logger.always("3. Click 'Create new secret key'");
+                logger.always("4. Copy the API key (starts with sk-)");
+                logger.always("");
+                const { apiKey } = await inquirer.prompt([
+                    {
+                        type: "password",
+                        name: "apiKey",
+                        message: "Enter your OpenAI API key:",
+                        validate: validateApiKey,
+                    },
+                ]);
+                config.apiKey = apiKey.trim();
+            }
+            else if (isReconfiguring) {
+                // API key exists and user is reconfiguring - ask if they want to change it
+                const { wantsChangeApiKey } = await inquirer.prompt([
+                    {
+                        type: "confirm",
+                        name: "wantsChangeApiKey",
+                        message: `Do you want to change the OpenAI API key? (current: ${process.env.OPENAI_API_KEY ? maskCredential(process.env.OPENAI_API_KEY) : "****"})`,
+                        default: false,
+                    },
+                ]);
+                if (wantsChangeApiKey) {
+                    logger.always("");
+                    logger.always(chalk.yellow("📋 To get your OpenAI API key:"));
+                    logger.always("1. Visit: https://platform.openai.com/api-keys");
+                    logger.always("2. Log in to your OpenAI account");
+                    logger.always("3. Click 'Create new secret key'");
+                    logger.always("4. Copy the API key (starts with sk-)");
+                    logger.always("");
+                    const { apiKey } = await inquirer.prompt([
+                        {
+                            type: "password",
+                            name: "apiKey",
+                            message: "Enter your new OpenAI API key (replacing existing):",
+                            validate: validateApiKey,
+                        },
+                    ]);
+                    config.apiKey = apiKey.trim();
+                }
+            }
+            // Prompt for model selection
+            const { wantsCustomModel } = await inquirer.prompt([
+                {
+                    type: "confirm",
+                    name: "wantsCustomModel",
+                    message: hasModel
+                        ? `Do you want to change the OpenAI model? (current: ${process.env.OPENAI_MODEL})`
+                        : "Do you want to specify a custom OpenAI model? (optional - will use default if not specified)",
+                    default: false,
+                },
+            ]);
+            if (wantsCustomModel) {
+                config.model = await promptForModel();
+            }
+        }
+        else {
+            // Non-interactive mode
+            logger.always(chalk.yellow("⚠️ Non-interactive mode: setup incomplete"));
+            logger.always(chalk.yellow("💡 Run without --non-interactive to configure OpenAI"));
+            return;
+        }
+        // Step 4: Update .env file
+        if (config.apiKey || config.model) {
+            await updateEnvFile(config);
+            logger.always(chalk.green("✅ OpenAI setup complete!"));
+            if (config.apiKey) {
+                logger.always(` API Key: ${maskCredential(config.apiKey)}`);
+            }
+            if (config.model) {
+                logger.always(` Model: ${config.model}`);
+            }
+            // Show usage example
+            showUsageExample();
+        }
+        else if (options.interactive && !options.checkOnly) {
+            logger.always(chalk.green("✅ Setup complete!"));
+            showUsageExample();
+        }
+    }
+    catch (error) {
+        logger.error(chalk.red("❌ OpenAI setup failed:"));
+        logger.error(chalk.red(error instanceof Error ? error.message : "Unknown error"));
+        process.exit(1);
+    }
+}
+/**
+ * Display current configuration status
+ */
+function displayCurrentStatus(hasApiKey, hasModel) {
+    if (hasApiKey) {
+        logger.always(chalk.green("✔ OPENAI_API_KEY found in environment"));
+    }
+    else {
+        logger.always(chalk.red("✘ OPENAI_API_KEY not found"));
+    }
+    if (hasModel) {
+        logger.always(chalk.green(`✔ OPENAI_MODEL found: ${process.env.OPENAI_MODEL}`));
+    }
+    else {
+        logger.always(chalk.yellow("⚠ OPENAI_MODEL not set (will use provider default)"));
+    }
+}
+/**
+ * Validate OpenAI API key format
+ */
+function validateApiKey(input) {
+    if (!input.trim()) {
+        return "OpenAI API key is required";
+    }
+    const trimmed = input.trim();
+    if (!trimmed.startsWith("sk-")) {
+        return "OpenAI API key should start with 'sk-'";
+    }
+    if (trimmed.length < 20) {
+        return "OpenAI API key seems too short";
+    }
+    // Basic format check: sk-[project id or old format][32+ char random string]
+    if (!/^sk-[a-zA-Z0-9_-]{20,}$/.test(trimmed)) {
+        return "Invalid OpenAI API key format";
+    }
+    return true;
+}
+/**
+ * Prompt user for model selection
+ */
+async function promptForModel() {
+    const { modelChoice } = await inquirer.prompt([
+        {
+            type: "list",
+            name: "modelChoice",
+            message: "Select an OpenAI model:",
+            choices: [
+                {
+                    name: "gpt-4o (Recommended - Latest multimodal model)",
+                    value: "gpt-4o",
+                },
+                {
+                    name: "gpt-4o-mini (Cost-effective, fast)",
+                    value: "gpt-4o-mini",
+                },
+                {
+                    name: "gpt-4-turbo (Previous generation)",
+                    value: "gpt-4-turbo",
+                },
+                {
+                    name: "gpt-3.5-turbo (Legacy, most cost-effective)",
+                    value: "gpt-3.5-turbo",
+                },
+                {
+                    name: "Custom model (enter manually)",
+                    value: "custom",
+                },
+            ],
+        },
+    ]);
+    if (modelChoice === "custom") {
+        const { customModel } = await inquirer.prompt([
+            {
+                type: "input",
+                name: "customModel",
+                message: "Enter your custom OpenAI model name:",
+                validate: (input) => {
+                    if (!input.trim()) {
+                        return "Model name is required";
+                    }
+                    // Basic validation - OpenAI models typically follow certain patterns
+                    const trimmed = input.trim();
+                    if (!/^[a-z0-9-._]+$/i.test(trimmed)) {
+                        return "Model name should contain only letters, numbers, hyphens, dots, and underscores";
+                    }
+                    return true;
+                },
+            },
+        ]);
+        return customModel.trim();
+    }
+    return modelChoice;
+}
+/**
+ * Update .env file with OpenAI configuration
+ */
+async function updateEnvFile(config) {
+    const envPath = path.join(process.cwd(), ".env");
+    const spinner = ora("💾 Updating .env file...").start();
+    try {
+        let envContent = "";
+        // Read existing .env file if it exists
+        if (fs.existsSync(envPath)) {
+            envContent = fs.readFileSync(envPath, "utf8");
+        }
+        // Parse existing environment variables
+        const envLines = envContent.split("\n");
+        const existingVars = new Map();
+        const otherLines = [];
+        for (const line of envLines) {
+            const trimmed = line.trim();
+            if (trimmed && !trimmed.startsWith("#")) {
+                const equalsIndex = trimmed.indexOf("=");
+                if (equalsIndex > 0) {
+                    const key = trimmed.substring(0, equalsIndex);
+                    const value = trimmed.substring(equalsIndex + 1);
+                    existingVars.set(key, value);
+                }
+                else {
+                    otherLines.push(line);
+                }
+            }
+            else {
+                otherLines.push(line);
+            }
+        }
+        // Update OpenAI variables
+        if (config.apiKey) {
+            existingVars.set("OPENAI_API_KEY", config.apiKey);
+        }
+        if (config.model) {
+            existingVars.set("OPENAI_MODEL", config.model);
+        }
+        // Reconstruct .env content preserving structure
+        const newEnvLines = [];
+        // Add non-variable lines first (comments, empty lines)
+        for (const line of otherLines) {
+            newEnvLines.push(line);
+        }
+        // Add separator comment for OpenAI if needed
+        if ((config.apiKey || config.model) &&
+            !envContent.includes("OPENAI CONFIGURATION") &&
+            !envContent.includes("# OpenAI")) {
+            if (newEnvLines.length > 0 &&
+                newEnvLines[newEnvLines.length - 1].trim()) {
+                newEnvLines.push("");
+            }
+            newEnvLines.push("# OpenAI Configuration");
+        }
+        // Add all environment variables
+        for (const [key, value] of existingVars.entries()) {
+            newEnvLines.push(`${key}=${value}`);
+        }
+        // Write updated content
+        const finalContent = newEnvLines.join("\n") + (newEnvLines.length > 0 ? "\n" : "");
+        fs.writeFileSync(envPath, finalContent, "utf8");
+        spinner.succeed(chalk.green("✔ .env file updated successfully"));
+    }
+    catch (error) {
+        spinner.fail(chalk.red("❌ Failed to update .env file"));
+        logger.error(chalk.red(`Error: ${error instanceof Error ? error.message : "Unknown error"}`));
+        throw error;
+    }
+}
+/**
+ * Mask API key for display
+ */
+function maskCredential(credential) {
+    if (!credential || credential.length < 8) {
+        return "****";
+    }
+    const knownPrefixes = ["sk-"];
+    const prefix = knownPrefixes.find((p) => credential.startsWith(p)) ??
+        credential.slice(0, 3);
+    const end = credential.slice(-4);
+    const stars = "*".repeat(Math.max(4, credential.length - prefix.length - 4));
+    return `${prefix}${stars}${end}`;
+}
+/**
+ * Show usage example
+ */
+function showUsageExample() {
+    logger.always("");
+    logger.always(chalk.green("🚀 You can now use OpenAI with the NeuroLink CLI:"));
+    logger.always(chalk.cyan(" pnpm cli generate 'Hello from OpenAI!' --provider openai"));
+    logger.always(chalk.cyan(" pnpm cli generate 'Explain quantum computing' --provider openai"));
+}
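Because handleOpenAISetup is exported, it can also be driven programmatically rather than through the CLI. A minimal sketch follows; the import path is an assumption (the package's export map is not shown in this diff), while the argument shape matches OpenAISetupArgv from setup-openai.d.ts:

    // Hypothetical direct invocation; import path is an assumption, not taken from the diff.
    import { handleOpenAISetup } from "@juspay/neurolink/dist/cli/commands/setup-openai.js";

    // Report current OPENAI_API_KEY / OPENAI_MODEL status only.
    await handleOpenAISetup({ check: true });

    // Run the full interactive wizard (prompts for key and model, updates .env).
    await handleOpenAISetup({ nonInteractive: false });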
package/dist/cli/commands/setup.d.ts
@@ -0,0 +1,19 @@
+#!/usr/bin/env node
+/**
+ * NeuroLink Setup Command - Main Provider Setup Wizard
+ *
+ * Provides a beautiful welcome experience for new users and guided
+ * provider selection, while delegating to existing setup commands.
+ */
+interface SetupArgs {
+    provider?: string;
+    list?: boolean;
+    status?: boolean;
+    interactive?: boolean;
+    help?: boolean;
+}
+/**
+ * Main setup command handler
+ */
+export declare function handleSetup(argv: SetupArgs): Promise<void>;
+export {};
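Each new setup-*.js module exports a handler plus a yargs-style builder object (see setupMistralBuilder above), and the setupCommandFactory.js / commandFactory.js entries in the file listing suggest these pairs are registered as CLI subcommands. Below is a minimal sketch of that wiring under a plain yargs setup; the command name and description are illustrative assumptions, not the package's actual registration code:

    // Hypothetical registration sketch; not taken from the published diff.
    import yargs from "yargs";
    import { hideBin } from "yargs/helpers";
    import { handleMistralSetup, setupMistralBuilder } from "./commands/setup-mistral.js";

    yargs(hideBin(process.argv))
      .command(
        "setup-mistral",                    // assumed command name
        "Configure Mistral AI credentials", // assumed description
        setupMistralBuilder,                // --check / --non-interactive flags from the diff
        handleMistralSetup,
      )
      .demandCommand()
      .parse();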