limbo-ai 1.11.0 → 1.13.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/README.md +24 -0
  2. package/cli.js +260 -20
  3. package/package.json +1 -1
package/README.md CHANGED
@@ -25,6 +25,30 @@ This will:
25
25
 
26
26
  Limbo binds to `127.0.0.1:18789`.
27
27
 
28
+ ### Agent Installation
29
+
30
+ AI agents can install Limbo non-interactively using CLI flags:
31
+
32
+ ```bash
33
+ npx limbo-ai start --provider openrouter --api-key sk-or-v1-xxx --model auto
34
+ ```
35
+
36
+ **Required flags:**
37
+ | Flag | Description |
38
+ |------|-------------|
39
+ | `--provider` | `openai`, `anthropic`, or `openrouter` |
40
+ | `--api-key` | Your provider API key |
41
+
42
+ **Optional flags:**
43
+ | Flag | Default | Description |
44
+ |------|---------|-------------|
45
+ | `--model` | Provider default | Model name (e.g. `anthropic/claude-sonnet-4-6`) |
46
+ | `--language` | `en` | CLI language (`en` or `es`) |
47
+
48
+ Headless mode skips Telegram setup. To add Telegram later, run `npx limbo-ai start --reconfigure`.
49
+
50
+ > **Note:** Subscription-based auth (ChatGPT/Codex, Claude Code) requires interactive setup because it involves browser-based OAuth or token pasting. Use `npx limbo-ai start` without flags for subscription auth.
51
+
28
52
  ### Available commands
29
53
 
30
54
  ```sh
package/cli.js CHANGED
@@ -19,7 +19,74 @@ const ENV_FILE = path.join(LIMBO_DIR, '.env');
19
19
  const COMPOSE_FILE = path.join(LIMBO_DIR, 'docker-compose.yml');
20
20
  const GHCR_IMAGE = 'ghcr.io/tomasward1/limbo';
21
21
  const DEFAULT_TAG = require('./package.json').version;
22
- const PORT = 18789;
22
+ const DEFAULT_PORT = 18789;
23
+ const COEXIST_PORT = 18900;
24
+ let PORT = DEFAULT_PORT;
25
+
26
+ // ─── OpenClaw Detection ─────────────────────────────────────────────────────
27
+
28
+ function isPortInUse(port) {
29
+ try {
30
+ execSync(
31
+ `node -e "const s=require('net').connect(${port},'127.0.0.1');s.on('connect',()=>{s.destroy();process.exit(0)});s.on('error',()=>process.exit(1));setTimeout(()=>process.exit(1),1500);"`,
32
+ { stdio: 'pipe', timeout: 3000 }
33
+ );
34
+ return true;
35
+ } catch {
36
+ return false;
37
+ }
38
+ }
39
+
40
+ function detectExistingOpenClaw() {
41
+ if (!isPortInUse(DEFAULT_PORT)) return null;
42
+
43
+ let processInfo = 'unknown process';
44
+ try {
45
+ const lsof = execSync(`lsof -i :${DEFAULT_PORT} -t 2>/dev/null`, { encoding: 'utf8', stdio: 'pipe' }).trim();
46
+ if (lsof) {
47
+ const pid = lsof.split('\n')[0];
48
+ const cmdline = execSync(`ps -p ${pid} -o command= 2>/dev/null`, { encoding: 'utf8', stdio: 'pipe' }).trim();
49
+ if (cmdline.includes('openclaw')) processInfo = 'OpenClaw';
50
+ else if (cmdline.includes('docker')) processInfo = 'Docker container';
51
+ else processInfo = cmdline.slice(0, 60);
52
+ }
53
+ } catch { /* lsof not available or no match */ }
54
+
55
+ return { port: DEFAULT_PORT, processInfo };
56
+ }
57
+
58
+ function findExistingApiKeys() {
59
+ const searchPaths = [
60
+ '/opt/openclaw/.env',
61
+ '/opt/openclaw/secrets/llm_api_key',
62
+ path.join(os.homedir(), '.openclaw', '.env'),
63
+ ];
64
+
65
+ for (const envPath of searchPaths) {
66
+ try {
67
+ if (!fs.existsSync(envPath)) continue;
68
+
69
+ // If it's a secrets file (single value), read directly
70
+ if (envPath.endsWith('llm_api_key')) {
71
+ const key = fs.readFileSync(envPath, 'utf8').trim();
72
+ if (key) return { source: path.dirname(envPath), keys: { LLM_API_KEY: key } };
73
+ continue;
74
+ }
75
+
76
+ // Parse .env file
77
+ const content = fs.readFileSync(envPath, 'utf8');
78
+ const keys = {};
79
+ for (const line of content.split('\n')) {
80
+ const match = line.match(/^(LLM_API_KEY|ANTHROPIC_API_KEY|OPENAI_API_KEY|OPENROUTER_API_KEY|MODEL_PROVIDER|MODEL_NAME)=(.+)$/);
81
+ if (match) keys[match[1]] = match[2].replace(/^["']|["']$/g, '').trim();
82
+ }
83
+ const hasKey = keys.LLM_API_KEY || keys.ANTHROPIC_API_KEY || keys.OPENAI_API_KEY || keys.OPENROUTER_API_KEY;
84
+ if (hasKey) return { source: path.dirname(envPath), keys };
85
+ } catch { /* permission denied etc — skip */ }
86
+ }
87
+
88
+ return null;
89
+ }
23
90
 
24
91
  // OpenClaw compatibility snapshots from official docs:
25
92
  // - https://docs.openclaw.ai/providers/openai
@@ -50,6 +117,12 @@ const MODEL_CATALOG = {
50
117
  menuModels: ['claude-opus-4-6', 'claude-sonnet-4-6'],
51
118
  supportedModels: ['claude-opus-4-6', 'claude-sonnet-4-6', 'claude-opus-4-1', 'claude-sonnet-4'],
52
119
  },
120
+ 'openrouter:api-key': {
121
+ provider: 'openrouter',
122
+ defaultModel: 'auto',
123
+ menuModels: [],
124
+ supportedModels: [],
125
+ },
53
126
  };
54
127
 
55
128
  const ASCII_ART = String.raw`
@@ -61,7 +134,8 @@ const ASCII_ART = String.raw`
61
134
  `;
62
135
 
63
136
  // docker-compose.yml written to ~/.limbo on install
64
- const COMPOSE_CONTENT = `services:
137
+ function composeContent() {
138
+ return `services:
65
139
  limbo:
66
140
  image: ${GHCR_IMAGE}:${DEFAULT_TAG}
67
141
  restart: unless-stopped
@@ -92,6 +166,7 @@ const COMPOSE_CONTENT = `services:
92
166
  environment:
93
167
  OPENCLAW_CONFIG_PATH: /home/limbo/.openclaw/openclaw.json
94
168
  OPENCLAW_STATE_DIR: /home/limbo/.openclaw
169
+ LIMBO_PORT: "${PORT}"
95
170
  healthcheck:
96
171
  test:
97
172
  - CMD-SHELL
@@ -115,9 +190,11 @@ volumes:
115
190
  limbo-data:
116
191
  limbo-openclaw-state:
117
192
  `;
193
+ }
118
194
 
119
195
  // Hardened variant: adds Squid egress proxy sidecar with domain allowlist
120
- const COMPOSE_CONTENT_HARDENED = `services:
196
+ function composeContentHardened() {
197
+ return `services:
121
198
  limbo:
122
199
  image: ${GHCR_IMAGE}:${DEFAULT_TAG}
123
200
  restart: unless-stopped
@@ -148,6 +225,7 @@ const COMPOSE_CONTENT_HARDENED = `services:
148
225
  environment:
149
226
  OPENCLAW_CONFIG_PATH: /home/limbo/.openclaw/openclaw.json
150
227
  OPENCLAW_STATE_DIR: /home/limbo/.openclaw
228
+ LIMBO_PORT: "${PORT}"
151
229
  HTTP_PROXY: http://squid:3128
152
230
  HTTPS_PROXY: http://squid:3128
153
231
  NO_PROXY: "127.0.0.1,localhost"
@@ -202,6 +280,7 @@ volumes:
202
280
  limbo-data:
203
281
  limbo-openclaw-state:
204
282
  `;
283
+ }
205
284
 
206
285
  const TEXT = {
207
286
  en: {
@@ -211,6 +290,7 @@ const TEXT = {
211
290
  providerQuestion: 'AI Provider',
212
291
  providerOpenAI: 'Codex (OpenAI)',
213
292
  providerAnthropic: 'Claude (Anthropic)',
293
+ providerOpenRouter: 'OpenRouter (100+ models)',
214
294
  accessMethodQuestion: 'Access method',
215
295
  accessSubscriptionOpenAI: 'ChatGPT / Codex subscription',
216
296
  accessSubscriptionAnthropic: 'Claude Code subscription',
@@ -225,6 +305,11 @@ const TEXT = {
225
305
  requiredField: 'This field is required.',
226
306
  invalidOpenAIKey: 'OpenAI API keys usually start with "sk-".',
227
307
  invalidAnthropicKey: 'Anthropic API keys usually start with "sk-ant-".',
308
+ openRouterApiKeyPrompt: ' OpenRouter API key (sk-or-...): ',
309
+ openRouterKeyWarn: 'OpenRouter API keys usually start with "sk-or-". Proceeding anyway.',
310
+ openRouterKeyHint: 'Get your key at: https://openrouter.ai/keys',
311
+ openRouterModelPrompt: ' Model name (blank = auto-routing): ',
312
+ openRouterModelHint: 'Examples: anthropic/claude-sonnet-4-6, openai/gpt-4o, google/gemini-2.5-pro',
228
313
  telegramQuestion: 'Want to speak to Limbo through Telegram?',
229
314
  telegramBotFatherSteps: [
230
315
  'To create a Telegram bot:',
@@ -304,6 +389,13 @@ const TEXT = {
304
389
  helpReconfigure: 'Reconfigure auth and onboarding settings (use with start)',
305
390
  securityNotice: 'Security notice: Limbo runs AI agents inside a Docker container with access to your API keys and vault data. The container can make network requests to AI provider APIs. Do not store sensitive secrets (passwords, private keys) in your vault notes.',
306
391
  unknownCommand: (cmd) => `Unknown command: ${cmd}`,
392
+ headlessMissingApiKey: '--provider requires --api-key. Subscription auth needs interactive setup: npx limbo-ai start',
393
+ headlessInvalidProvider: 'Invalid --provider. Use: openai, anthropic, or openrouter',
394
+ headlessStarting: 'Headless mode: configuring...',
395
+ helpProvider: 'Set provider for headless install (openai, anthropic, openrouter)',
396
+ helpApiKey: 'API key for headless install',
397
+ helpModel: 'Model name (optional, uses provider default)',
398
+ helpLanguage: 'Language: en, es (default: en)',
307
399
  },
308
400
  es: {
309
401
  languageName: 'Espanol',
@@ -312,6 +404,7 @@ const TEXT = {
312
404
  providerQuestion: 'AI Provider',
313
405
  providerOpenAI: 'Codex (OpenAI)',
314
406
  providerAnthropic: 'Claude (Anthropic)',
407
+ providerOpenRouter: 'OpenRouter (100+ modelos)',
315
408
  accessMethodQuestion: 'Metodo de acceso',
316
409
  accessSubscriptionOpenAI: 'Suscripcion ChatGPT / Codex',
317
410
  accessSubscriptionAnthropic: 'Suscripcion Claude Code',
@@ -326,6 +419,11 @@ const TEXT = {
326
419
  requiredField: 'Este campo es obligatorio.',
327
420
  invalidOpenAIKey: 'Las API keys de OpenAI normalmente empiezan con "sk-".',
328
421
  invalidAnthropicKey: 'Las API keys de Anthropic normalmente empiezan con "sk-ant-".',
422
+ openRouterApiKeyPrompt: ' OpenRouter API key (sk-or-...): ',
423
+ openRouterKeyWarn: 'Las API keys de OpenRouter normalmente empiezan con "sk-or-". Continuando igual.',
424
+ openRouterKeyHint: 'Consegui tu key en: https://openrouter.ai/keys',
425
+ openRouterModelPrompt: ' Nombre del modelo (vacio = auto-routing): ',
426
+ openRouterModelHint: 'Ejemplos: anthropic/claude-sonnet-4-6, openai/gpt-4o, google/gemini-2.5-pro',
329
427
  telegramQuestion: 'Quieres hablar con Limbo por Telegram?',
330
428
  telegramBotFatherSteps: [
331
429
  'Para crear un bot de Telegram:',
@@ -405,6 +503,13 @@ const TEXT = {
405
503
  helpReconfigure: 'Reconfigura auth y onboarding (usar con start)',
406
504
  securityNotice: 'Aviso de seguridad: Limbo corre agentes de IA dentro de un container Docker con acceso a tus API keys y datos del vault. El container puede hacer requests a las APIs de los proveedores de IA. No guardes secretos sensibles (passwords, claves privadas) en las notas del vault.',
407
505
  unknownCommand: (cmd) => `Comando desconocido: ${cmd}`,
506
+ headlessMissingApiKey: '--provider requiere --api-key. La autenticacion por suscripcion necesita setup interactivo: npx limbo-ai start',
507
+ headlessInvalidProvider: '--provider invalido. Usa: openai, anthropic, o openrouter',
508
+ headlessStarting: 'Modo headless: configurando...',
509
+ helpProvider: 'Setea el provider para instalacion headless (openai, anthropic, openrouter)',
510
+ helpApiKey: 'API key para instalacion headless',
511
+ helpModel: 'Nombre del modelo (opcional, usa el default del provider)',
512
+ helpLanguage: 'Idioma: en, es (default: en)',
408
513
  },
409
514
  };
410
515
 
@@ -433,6 +538,11 @@ function t(lang, key, ...args) {
433
538
  return typeof value === 'function' ? value(...args) : value;
434
539
  }
435
540
 
541
+ function parseFlag(flag) {
542
+ const idx = process.argv.indexOf(flag);
543
+ return idx !== -1 && idx + 1 < process.argv.length ? process.argv[idx + 1] : undefined;
544
+ }
545
+
436
546
  function sleep(ms) {
437
547
  Atomics.wait(new Int32Array(new SharedArrayBuffer(4)), 0, 0, ms);
438
548
  }
@@ -564,6 +674,7 @@ function normalizeConfig(cfg, existingEnv = {}) {
564
674
  AUTH_MODE: cfg.authMode || existingEnv.AUTH_MODE || 'api-key',
565
675
  MODEL_PROVIDER: cfg.provider || existingEnv.MODEL_PROVIDER || 'anthropic',
566
676
  MODEL_NAME: cfg.modelName || existingEnv.MODEL_NAME || 'claude-opus-4-6',
677
+ LIMBO_PORT: String(PORT),
567
678
  OPENAI_API_KEY: cfg.provider === 'openai' && cfg.apiKey ? cfg.apiKey : (cfg.keepExisting ? existingEnv.OPENAI_API_KEY || '' : ''),
568
679
  ANTHROPIC_API_KEY: cfg.provider === 'anthropic' && cfg.apiKey ? cfg.apiKey : (cfg.keepExisting ? existingEnv.ANTHROPIC_API_KEY || '' : ''),
569
680
  LLM_API_KEY: cfg.apiKey || (cfg.keepExisting ? existingEnv.LLM_API_KEY || '' : ''),
@@ -590,7 +701,7 @@ function writeSecrets(cfg, existingEnv = {}) {
590
701
  }
591
702
 
592
703
  const SECRET_KEYS = new Set([
593
- 'LLM_API_KEY', 'OPENAI_API_KEY', 'ANTHROPIC_API_KEY',
704
+ 'LLM_API_KEY', 'OPENAI_API_KEY', 'ANTHROPIC_API_KEY', 'OPENROUTER_API_KEY',
594
705
  'TELEGRAM_BOT_TOKEN', 'OPENCLAW_GATEWAY_TOKEN',
595
706
  ]);
596
707
 
@@ -615,12 +726,29 @@ function waitForHealthy(lang, maxAttempts = 12) {
615
726
  return false;
616
727
  }
617
728
 
729
+ function deriveProviderFamily(provider) {
730
+ if (!provider) return 'anthropic';
731
+ if (provider.startsWith('openai')) return 'openai';
732
+ if (provider === 'openrouter') return 'openrouter';
733
+ return 'anthropic';
734
+ }
735
+
618
736
  function getModelCatalog(providerFamily, authMode) {
619
737
  return MODEL_CATALOG[`${providerFamily}:${authMode}`];
620
738
  }
621
739
 
622
740
  async function chooseModel(lang, providerFamily, authMode) {
623
741
  const catalog = getModelCatalog(providerFamily, authMode);
742
+
743
+ if (!catalog.menuModels.length) {
744
+ console.log(` ${c.dim}${t(lang, 'openRouterModelHint')}${c.reset}`);
745
+ const modelName = await promptValidated(
746
+ t(lang, 'openRouterModelPrompt'),
747
+ (value) => ({ ok: true, value: value || catalog.defaultModel }),
748
+ );
749
+ return modelName;
750
+ }
751
+
624
752
  const options = catalog.menuModels.map((model) => ({ label: model, value: model }));
625
753
  options.push({ label: t(lang, 'customModel'), value: '__custom__' });
626
754
 
@@ -646,6 +774,32 @@ async function chooseModel(lang, providerFamily, authMode) {
646
774
  async function collectConfig(existingEnv = {}) {
647
775
  console.log(`${c.cyan}${ASCII_ART}${c.reset}`);
648
776
 
777
+ // Check for existing API keys from another OpenClaw installation
778
+ const existingKeys = findExistingApiKeys();
779
+ if (existingKeys && !existingEnv.LLM_API_KEY && !existingEnv.ANTHROPIC_API_KEY && !existingEnv.OPENAI_API_KEY) {
780
+ const keyValue = existingKeys.keys.LLM_API_KEY || existingKeys.keys.ANTHROPIC_API_KEY || existingKeys.keys.OPENAI_API_KEY || existingKeys.keys.OPENROUTER_API_KEY || '';
781
+ const maskedKey = keyValue ? keyValue.slice(0, 10) + '...' : 'found';
782
+
783
+ console.log(`
784
+ ${c.cyan}Found existing API keys${c.reset} from ${existingKeys.source}
785
+ ${c.dim}Key: ${maskedKey}${c.reset}
786
+ `);
787
+
788
+ const { select } = await getClack();
789
+ const reuseChoice = await select({
790
+ message: 'Reuse existing API configuration?',
791
+ options: [
792
+ { value: 'yes', label: 'Yes, use existing keys' },
793
+ { value: 'no', label: 'No, configure new keys' },
794
+ ],
795
+ });
796
+ await maybeHandleClackCancel(reuseChoice);
797
+
798
+ if (reuseChoice === 'yes') {
799
+ Object.assign(existingEnv, existingKeys.keys);
800
+ }
801
+ }
802
+
649
803
  const language = (await selectMenu(t('en', 'chooseLanguage'), [
650
804
  { label: TEXT.en.languageName, value: 'en' },
651
805
  { label: TEXT.es.languageName, value: 'es' },
@@ -654,24 +808,38 @@ async function collectConfig(existingEnv = {}) {
654
808
  const providerFamily = (await selectMenu(t(language, 'providerQuestion'), [
655
809
  { label: t(language, 'providerOpenAI'), value: 'openai' },
656
810
  { label: t(language, 'providerAnthropic'), value: 'anthropic' },
811
+ { label: t(language, 'providerOpenRouter'), value: 'openrouter' },
657
812
  ], language)).value;
658
813
 
659
- const accessMethod = (await selectMenu(t(language, 'accessMethodQuestion'), [
660
- {
661
- label: providerFamily === 'openai'
662
- ? t(language, 'accessSubscriptionOpenAI')
663
- : t(language, 'accessSubscriptionAnthropic'),
664
- value: 'subscription',
665
- },
666
- { label: t(language, 'accessApiKey'), value: 'api-key' },
667
- ], language)).value;
814
+ let accessMethod;
815
+ if (providerFamily === 'openrouter') {
816
+ accessMethod = 'api-key';
817
+ } else {
818
+ accessMethod = (await selectMenu(t(language, 'accessMethodQuestion'), [
819
+ {
820
+ label: providerFamily === 'openai'
821
+ ? t(language, 'accessSubscriptionOpenAI')
822
+ : t(language, 'accessSubscriptionAnthropic'),
823
+ value: 'subscription',
824
+ },
825
+ { label: t(language, 'accessApiKey'), value: 'api-key' },
826
+ ], language)).value;
827
+ }
668
828
 
669
829
  const modelName = await chooseModel(language, providerFamily, accessMethod);
670
830
  const provider = getModelCatalog(providerFamily, accessMethod).provider;
671
831
  let apiKey = '';
672
832
 
673
833
  if (accessMethod === 'api-key') {
674
- if (providerFamily === 'openai') {
834
+ const reusedKey = existingEnv.LLM_API_KEY
835
+ || (providerFamily === 'openai' && existingEnv.OPENAI_API_KEY)
836
+ || (providerFamily === 'anthropic' && existingEnv.ANTHROPIC_API_KEY)
837
+ || (providerFamily === 'openrouter' && existingEnv.OPENROUTER_API_KEY)
838
+ || '';
839
+
840
+ if (reusedKey) {
841
+ apiKey = reusedKey;
842
+ } else if (providerFamily === 'openai') {
675
843
  apiKey = await promptValidated(
676
844
  t(language, 'openAiApiKeyPrompt'),
677
845
  (value) => {
@@ -680,6 +848,16 @@ async function collectConfig(existingEnv = {}) {
680
848
  return { ok: true, value };
681
849
  },
682
850
  );
851
+ } else if (providerFamily === 'openrouter') {
852
+ console.log(` ${c.dim}${t(language, 'openRouterKeyHint')}${c.reset}`);
853
+ apiKey = await promptValidated(
854
+ t(language, 'openRouterApiKeyPrompt'),
855
+ (value) => {
856
+ if (!value) return { ok: false, message: t(language, 'requiredField') };
857
+ if (!value.startsWith('sk-or-')) warn(t(language, 'openRouterKeyWarn'));
858
+ return { ok: true, value };
859
+ },
860
+ );
683
861
  } else {
684
862
  apiKey = await promptValidated(
685
863
  t(language, 'anthropicApiKeyPrompt'),
@@ -750,7 +928,7 @@ function ensureComposeFile(hardened = false) {
750
928
  if (fs.existsSync(src)) fs.copyFileSync(src, dest);
751
929
  }
752
930
  }
753
- fs.writeFileSync(COMPOSE_FILE, hardened ? COMPOSE_CONTENT_HARDENED : COMPOSE_CONTENT);
931
+ fs.writeFileSync(COMPOSE_FILE, hardened ? composeContentHardened() : composeContent());
754
932
  }
755
933
 
756
934
  function readSecretFile(name) {
@@ -1121,14 +1299,72 @@ async function cmdStart() {
1121
1299
  if (!hasDocker()) die(t('en', 'dockerMissing'));
1122
1300
 
1123
1301
  const hardened = process.argv.includes('--hardened');
1124
- ensureComposeFile(hardened);
1125
1302
 
1303
+ // ── Detect existing OpenClaw ──────────────────────────────────────────────
1126
1304
  const existingEnv = parseEnvFile();
1127
1305
  const alreadyHasEnv = fs.existsSync(ENV_FILE);
1306
+
1307
+ if (existingEnv.LIMBO_PORT) {
1308
+ const parsed = parseInt(existingEnv.LIMBO_PORT, 10);
1309
+ if (!Number.isFinite(parsed) || parsed < 1 || parsed > 65535) {
1310
+ warn(`Invalid LIMBO_PORT="${existingEnv.LIMBO_PORT}" in .env, using default ${DEFAULT_PORT}`);
1311
+ } else {
1312
+ PORT = parsed;
1313
+ }
1314
+ } else {
1315
+ const existing = detectExistingOpenClaw();
1316
+ if (existing) {
1317
+ console.log(`
1318
+ ${c.yellow}${c.bold}Existing OpenClaw detected${c.reset}
1319
+ ${c.dim}Port ${existing.port} is in use (${existing.processInfo})${c.reset}
1320
+
1321
+ Limbo will run its own OpenClaw instance on port ${c.bold}${COEXIST_PORT}${c.reset}.
1322
+ Both can coexist safely — separate containers, separate data.
1323
+ `);
1324
+ PORT = COEXIST_PORT;
1325
+ }
1326
+ }
1327
+
1328
+ ensureComposeFile(hardened);
1128
1329
  let cfg;
1129
1330
  let lang = existingEnv.CLI_LANGUAGE || 'en';
1130
1331
 
1131
- if (alreadyHasEnv) {
1332
+ // ── Headless mode ──────────────────────────────────────────────────────────
1333
+ const flagProvider = parseFlag('--provider');
1334
+ const flagApiKey = parseFlag('--api-key');
1335
+ const flagModel = parseFlag('--model');
1336
+ const flagLang = parseFlag('--language') || 'en';
1337
+
1338
+ if (flagProvider) {
1339
+ const validProviders = ['openai', 'anthropic', 'openrouter'];
1340
+ if (!validProviders.includes(flagProvider)) {
1341
+ die(t(flagLang, 'headlessInvalidProvider'));
1342
+ }
1343
+ if (!flagApiKey) {
1344
+ die(t(flagLang, 'headlessMissingApiKey'));
1345
+ }
1346
+
1347
+ lang = flagLang;
1348
+ const providerFamily = deriveProviderFamily(flagProvider);
1349
+ const catalog = getModelCatalog(providerFamily, 'api-key');
1350
+ const modelName = flagModel || catalog.defaultModel;
1351
+
1352
+ log(t(lang, 'headlessStarting'));
1353
+ cfg = {
1354
+ language: lang,
1355
+ authMode: 'api-key',
1356
+ provider: catalog.provider,
1357
+ providerFamily,
1358
+ modelName,
1359
+ apiKey: flagApiKey,
1360
+ telegramEnabled: 'false',
1361
+ telegramToken: '',
1362
+ telegramAutoPair: 'false',
1363
+ gatewayToken: ensureGatewayToken(existingEnv),
1364
+ };
1365
+ writeEnv({ ...cfg, CLI_LANGUAGE: cfg.language }, existingEnv);
1366
+ ok(t(cfg.language, 'envWritten'));
1367
+ } else if (alreadyHasEnv) {
1132
1368
  log(existingEnv.MODEL_PROVIDER ? t(lang, 'foundExistingConfig') : `Found existing config at ${ENV_FILE}`);
1133
1369
  const reconfig = process.argv.includes('--reconfigure');
1134
1370
  if (!reconfig) {
@@ -1138,7 +1374,7 @@ async function cmdStart() {
1138
1374
  cfg = {
1139
1375
  language: lang,
1140
1376
  provider: existingEnv.MODEL_PROVIDER || 'anthropic',
1141
- providerFamily: (existingEnv.MODEL_PROVIDER || 'anthropic').startsWith('openai') ? 'openai' : 'anthropic',
1377
+ providerFamily: deriveProviderFamily(existingEnv.MODEL_PROVIDER),
1142
1378
  authMode: existingEnv.AUTH_MODE || 'api-key',
1143
1379
  modelName: existingEnv.MODEL_NAME || 'claude-opus-4-6',
1144
1380
  telegramEnabled: existingEnv.TELEGRAM_ENABLED || 'false',
@@ -1245,8 +1481,12 @@ ${c.bold}Commands:${c.reset}
1245
1481
  help Show this help
1246
1482
 
1247
1483
  ${c.bold}Flags:${c.reset}
1248
- --reconfigure Reconfigure auth and onboarding settings (use with start)
1249
- --hardened Enable egress proxy (restricts outbound to AI provider APIs only)
1484
+ --reconfigure Reconfigure auth and onboarding settings (use with start)
1485
+ --hardened Enable egress proxy (restricts outbound to AI provider APIs only)
1486
+ --provider <name> Set provider for headless install (openai, anthropic, openrouter)
1487
+ --api-key <key> API key for headless install
1488
+ --model <name> Model name (optional, uses provider default)
1489
+ --language <code> Language: en, es (default: en)
1250
1490
 
1251
1491
  ${c.bold}Data directory:${c.reset} ${LIMBO_DIR}
1252
1492
  `);
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "limbo-ai",
3
- "version": "1.11.0",
3
+ "version": "1.13.0",
4
4
  "description": "Your personal AI memory agent — install and manage Limbo via npx",
5
5
  "type": "commonjs",
6
6
  "bin": {