@xelauvas/xela-cli 0.1.1 → 0.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/bin/xela.js +132 -43
  2. package/package.json +1 -1
package/bin/xela.js CHANGED
@@ -4,67 +4,148 @@ import { existsSync, mkdirSync, writeFileSync, readFileSync } from 'fs';
4
4
  import { join } from 'path';
5
5
  import { fileURLToPath } from 'url';
6
6
  import { homedir } from 'os';
7
+ import { createInterface } from 'readline';
7
8
 
8
9
  const __dirname = fileURLToPath(new URL('.', import.meta.url));
9
10
  const INSTALL_DIR = join(__dirname, '..');
10
11
  const XELA_HOME = join(homedir(), '.xela');
11
12
  const XELA_CONFIG = join(XELA_HOME, 'config.json');
12
13
 
13
- // Create config on first run
14
- if (!existsSync(XELA_CONFIG)) {
14
+ const PROVIDERS = {
15
+ openrouter: {
16
+ baseUrl: 'https://openrouter.ai/api/v1',
17
+ models: [
18
+ 'qwen/qwen3.6-plus-preview:free',
19
+ 'deepseek/deepseek-chat-v3-0324:free',
20
+ 'google/gemini-2.5-pro-exp-03-25:free',
21
+ 'meta-llama/llama-4-maverick:free',
22
+ 'nvidia/llama-3.1-nemotron-ultra-253b:free',
23
+ ],
24
+ default: 'qwen/qwen3.6-plus-preview:free',
25
+ },
26
+ groq: {
27
+ baseUrl: 'https://api.groq.com/openai/v1',
28
+ models: ['qwen-qwq-32b', 'llama-3.3-70b-versatile', 'gemma2-9b-it', 'mixtral-8x7b-32768'],
29
+ default: 'qwen-qwq-32b',
30
+ },
31
+ ollama: {
32
+ baseUrl: 'http://localhost:11434/v1',
33
+ models: ['qwen2.5-coder:7b', 'llama3.2:latest', 'codellama:latest', 'deepseek-coder-v2:latest'],
34
+ default: 'qwen2.5-coder:7b',
35
+ },
36
+ deepseek: {
37
+ baseUrl: 'https://api.deepseek.com',
38
+ models: ['deepseek-chat', 'deepseek-reasoner'],
39
+ default: 'deepseek-chat',
40
+ },
41
+ openai: {
42
+ baseUrl: undefined,
43
+ models: ['gpt-4o', 'gpt-4o-mini', 'gpt-4.1', 'o4-mini'],
44
+ default: 'gpt-4o',
45
+ },
46
+ cerebras: {
47
+ baseUrl: 'https://api.cerebras.ai/v1',
48
+ models: ['llama-3.3-70b', 'llama-3.1-8b'],
49
+ default: 'llama-3.3-70b',
50
+ },
51
+ sambanova: {
52
+ baseUrl: 'https://api.sambanova.ai/v1',
53
+ models: ['Meta-Llama-3.3-70B-Instruct', 'DeepSeek-R1-Distill-Llama-70B'],
54
+ default: 'Meta-Llama-3.3-70B-Instruct',
55
+ },
56
+ };
57
+
58
+ function ask(question) {
59
+ const rl = createInterface({ input: process.stdin, output: process.stdout });
60
+ return new Promise(resolve => rl.question(question, answer => { rl.close(); resolve(answer.trim()); }));
61
+ }
62
+
63
+ async function setup() {
64
+ console.log('');
65
+ console.log(' Welcome to Xela! Let\'s get you set up.');
66
+ console.log('');
67
+
68
+ // Pick provider
69
+ const providerNames = Object.keys(PROVIDERS);
70
+ console.log(' Providers:');
71
+ providerNames.forEach((p, i) => console.log(` ${i + 1}. ${p}`));
72
+ console.log('');
73
+ const choice = await ask(' Pick a provider [1]: ');
74
+ const idx = (parseInt(choice) || 1) - 1;
75
+ const provider = providerNames[Math.min(idx, providerNames.length - 1)];
76
+
77
+ // Get API key (skip for ollama)
78
+ let apiKey = '';
79
+ if (provider === 'ollama') {
80
+ apiKey = 'ollama';
81
+ } else {
82
+ console.log('');
83
+ if (provider === 'openrouter') {
84
+ console.log(' Get a free key at: https://openrouter.ai/keys');
85
+ } else if (provider === 'groq') {
86
+ console.log(' Get a free key at: https://console.groq.com/keys');
87
+ } else if (provider === 'deepseek') {
88
+ console.log(' Get a key at: https://platform.deepseek.com/api_keys');
89
+ }
90
+ apiKey = await ask(' API key: ');
91
+ if (!apiKey) {
92
+ console.log(' No key provided. You can add it later in ~/.xela/config.json');
93
+ apiKey = '';
94
+ }
95
+ }
96
+
97
+ // Pick model
98
+ const providerInfo = PROVIDERS[provider];
99
+ const models = providerInfo.models;
100
+ console.log('');
101
+ console.log(' Models:');
102
+ models.forEach((m, i) => {
103
+ const tag = m === providerInfo.default ? ' (default)' : '';
104
+ console.log(` ${i + 1}. ${m}${tag}`);
105
+ });
106
+ console.log('');
107
+ const modelChoice = await ask(' Pick a model [1]: ');
108
+ const modelIdx = (parseInt(modelChoice) || 1) - 1;
109
+ const model = models[Math.min(modelIdx, models.length - 1)];
110
+
15
111
  mkdirSync(XELA_HOME, { recursive: true });
16
- writeFileSync(XELA_CONFIG, JSON.stringify({
17
- provider: 'openrouter',
18
- apiKey: 'sk-or-your-key-here',
19
- model: '',
20
- baseUrl: '',
21
- }, null, 2));
112
+ const config = { provider, apiKey, model, baseUrl: '' };
113
+ writeFileSync(XELA_CONFIG, JSON.stringify(config, null, 2));
22
114
  console.log('');
23
- console.log(' Welcome to Xela!');
24
- console.log(` Config created at ${XELA_CONFIG}`);
25
- console.log(` Edit it to add your API key`);
115
+ console.log(` Provider: ${provider}`);
116
+ console.log(` Model: ${model}`);
117
+ console.log(` Saved to ${XELA_CONFIG}`);
118
+ console.log(' You can edit it anytime: nano ~/.xela/config.json');
26
119
  console.log('');
120
+ return config;
27
121
  }
28
122
 
29
- // Load config
123
+ // Load or create config
30
124
  let config = {};
31
- try {
32
- config = JSON.parse(readFileSync(XELA_CONFIG, 'utf-8'));
33
- } catch {}
125
+ if (!existsSync(XELA_CONFIG)) {
126
+ config = await setup();
127
+ } else {
128
+ try {
129
+ config = JSON.parse(readFileSync(XELA_CONFIG, 'utf-8'));
130
+ } catch {}
131
+ // If key is still placeholder, run setup again
132
+ if (!config.apiKey || config.apiKey === 'sk-or-your-key-here') {
133
+ config = await setup();
134
+ }
135
+ }
34
136
 
35
137
  const provider = config.provider || 'openrouter';
138
+ const pInfo = PROVIDERS[provider] || PROVIDERS.openrouter;
36
139
 
37
- // Set env vars from config
38
- if (config.apiKey && config.apiKey !== 'sk-or-your-key-here') {
140
+ // Set env vars
141
+ if (config.apiKey) {
39
142
  process.env.OPENAI_API_KEY = config.apiKey;
40
143
  }
41
-
42
- // Auto-detect base URL
43
144
  if (!process.env.OPENAI_BASE_URL) {
44
- const baseUrls = {
45
- openrouter: 'https://openrouter.ai/api/v1',
46
- groq: 'https://api.groq.com/openai/v1',
47
- ollama: 'http://localhost:11434/v1',
48
- deepseek: 'https://api.deepseek.com',
49
- openai: undefined,
50
- cerebras: 'https://api.cerebras.ai/v1',
51
- sambanova: 'https://api.sambanova.ai/v1',
52
- };
53
- if (baseUrls[provider]) process.env.OPENAI_BASE_URL = config.baseUrl || baseUrls[provider];
145
+ process.env.OPENAI_BASE_URL = config.baseUrl || pInfo.baseUrl;
54
146
  }
55
-
56
- // Auto-detect model
57
147
  if (!process.env.OPENAI_MODEL) {
58
- const defaultModels = {
59
- openrouter: 'qwen/qwen3.6-plus-preview:free',
60
- groq: 'qwen-qwq-32b',
61
- ollama: 'qwen2.5-coder:7b',
62
- deepseek: 'deepseek-chat',
63
- openai: 'gpt-4o',
64
- cerebras: 'llama-3.3-70b',
65
- sambanova: 'Meta-Llama-3.3-70B-Instruct',
66
- };
67
- process.env.OPENAI_MODEL = config.model || defaultModels[provider] || 'gpt-4o';
148
+ process.env.OPENAI_MODEL = config.model || pInfo.default || 'gpt-4o';
68
149
  }
69
150
 
70
151
  // Handle -m/--model flag
@@ -77,5 +158,13 @@ for (let i = 0; i < args.length; i++) {
77
158
  }
78
159
  }
79
160
 
80
- // Import and run the CLI
81
- await import(join(INSTALL_DIR, 'src', 'entrypoints', 'cli.tsx'));
161
+ // Launch node with the TSX loader shim
162
+ try {
163
+ execFileSync(process.execPath, [
164
+ '--import', join(INSTALL_DIR, 'src', '_shims', 'register.js'),
165
+ join(INSTALL_DIR, 'start.js'),
166
+ ...args,
167
+ ], { stdio: 'inherit', env: process.env });
168
+ } catch (e) {
169
+ process.exit(e.status || 1);
170
+ }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@xelauvas/xela-cli",
3
- "version": "0.1.1",
3
+ "version": "0.1.3",
4
4
  "description": "Xela — AI coding assistant powered by any model (OpenRouter, Groq, Ollama, DeepSeek, OpenAI)",
5
5
  "author": "xelauvas",
6
6
  "license": "MIT",