@xelauvas/xela-cli 0.1.2 → 0.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/bin/xela.js +61 -11
  2. package/package.json +1 -1
package/bin/xela.js CHANGED
@@ -12,13 +12,47 @@ const XELA_HOME = join(homedir(), '.xela');
12
12
  const XELA_CONFIG = join(XELA_HOME, 'config.json');
13
13
 
14
14
  const PROVIDERS = {
15
- openrouter: { baseUrl: 'https://openrouter.ai/api/v1', model: 'qwen/qwen3.6-plus-preview:free' },
16
- groq: { baseUrl: 'https://api.groq.com/openai/v1', model: 'qwen-qwq-32b' },
17
- ollama: { baseUrl: 'http://localhost:11434/v1', model: 'qwen2.5-coder:7b' },
18
- deepseek: { baseUrl: 'https://api.deepseek.com', model: 'deepseek-chat' },
19
- openai: { baseUrl: undefined, model: 'gpt-4o' },
20
- cerebras: { baseUrl: 'https://api.cerebras.ai/v1', model: 'llama-3.3-70b' },
21
- sambanova: { baseUrl: 'https://api.sambanova.ai/v1', model: 'Meta-Llama-3.3-70B-Instruct' },
15
+ openrouter: {
16
+ baseUrl: 'https://openrouter.ai/api/v1',
17
+ models: [
18
+ 'qwen/qwen3.6-plus-preview:free',
19
+ 'deepseek/deepseek-chat-v3-0324:free',
20
+ 'google/gemini-2.5-pro-exp-03-25:free',
21
+ 'meta-llama/llama-4-maverick:free',
22
+ 'nvidia/llama-3.1-nemotron-ultra-253b:free',
23
+ ],
24
+ default: 'qwen/qwen3.6-plus-preview:free',
25
+ },
26
+ groq: {
27
+ baseUrl: 'https://api.groq.com/openai/v1',
28
+ models: ['qwen-qwq-32b', 'llama-3.3-70b-versatile', 'gemma2-9b-it', 'mixtral-8x7b-32768'],
29
+ default: 'qwen-qwq-32b',
30
+ },
31
+ ollama: {
32
+ baseUrl: 'http://localhost:11434/v1',
33
+ models: ['qwen2.5-coder:7b', 'llama3.2:latest', 'codellama:latest', 'deepseek-coder-v2:latest'],
34
+ default: 'qwen2.5-coder:7b',
35
+ },
36
+ deepseek: {
37
+ baseUrl: 'https://api.deepseek.com',
38
+ models: ['deepseek-chat', 'deepseek-reasoner'],
39
+ default: 'deepseek-chat',
40
+ },
41
+ openai: {
42
+ baseUrl: undefined,
43
+ models: ['gpt-4o', 'gpt-4o-mini', 'gpt-4.1', 'o4-mini'],
44
+ default: 'gpt-4o',
45
+ },
46
+ cerebras: {
47
+ baseUrl: 'https://api.cerebras.ai/v1',
48
+ models: ['llama-3.3-70b', 'llama-3.1-8b'],
49
+ default: 'llama-3.3-70b',
50
+ },
51
+ sambanova: {
52
+ baseUrl: 'https://api.sambanova.ai/v1',
53
+ models: ['Meta-Llama-3.3-70B-Instruct', 'DeepSeek-R1-Distill-Llama-70B'],
54
+ default: 'Meta-Llama-3.3-70B-Instruct',
55
+ },
22
56
  };
23
57
 
24
58
  function ask(question) {
@@ -60,10 +94,26 @@ async function setup() {
60
94
  }
61
95
  }
62
96
 
97
+ // Pick model
98
+ const providerInfo = PROVIDERS[provider];
99
+ const models = providerInfo.models;
100
+ console.log('');
101
+ console.log(' Models:');
102
+ models.forEach((m, i) => {
103
+ const tag = m === providerInfo.default ? ' (default)' : '';
104
+ console.log(` ${i + 1}. ${m}${tag}`);
105
+ });
106
+ console.log('');
107
+ const modelChoice = await ask(' Pick a model [1]: ');
108
+ const modelIdx = (parseInt(modelChoice) || 1) - 1;
109
+ const model = models[Math.min(modelIdx, models.length - 1)];
110
+
63
111
  mkdirSync(XELA_HOME, { recursive: true });
64
- const config = { provider, apiKey, model: '', baseUrl: '' };
112
+ const config = { provider, apiKey, model, baseUrl: '' };
65
113
  writeFileSync(XELA_CONFIG, JSON.stringify(config, null, 2));
66
114
  console.log('');
115
+ console.log(` Provider: ${provider}`);
116
+ console.log(` Model: ${model}`);
67
117
  console.log(` Saved to ${XELA_CONFIG}`);
68
118
  console.log(' You can edit it anytime: nano ~/.xela/config.json');
69
119
  console.log('');
@@ -85,17 +135,17 @@ if (!existsSync(XELA_CONFIG)) {
85
135
  }
86
136
 
87
137
  const provider = config.provider || 'openrouter';
88
- const providerInfo = PROVIDERS[provider] || PROVIDERS.openrouter;
138
+ const pInfo = PROVIDERS[provider] || PROVIDERS.openrouter;
89
139
 
90
140
  // Set env vars
91
141
  if (config.apiKey) {
92
142
  process.env.OPENAI_API_KEY = config.apiKey;
93
143
  }
94
144
  if (!process.env.OPENAI_BASE_URL) {
95
- process.env.OPENAI_BASE_URL = config.baseUrl || providerInfo.baseUrl;
145
+ process.env.OPENAI_BASE_URL = config.baseUrl || pInfo.baseUrl;
96
146
  }
97
147
  if (!process.env.OPENAI_MODEL) {
98
- process.env.OPENAI_MODEL = config.model || providerInfo.model || 'gpt-4o';
148
+ process.env.OPENAI_MODEL = config.model || pInfo.default || 'gpt-4o';
99
149
  }
100
150
 
101
151
  // Handle -m/--model flag
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@xelauvas/xela-cli",
3
- "version": "0.1.2",
3
+ "version": "0.1.3",
4
4
  "description": "Xela — AI coding assistant powered by any model (OpenRouter, Groq, Ollama, DeepSeek, OpenAI)",
5
5
  "author": "xelauvas",
6
6
  "license": "MIT",