limbo-ai 1.12.0 → 1.13.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/README.md +24 -0
  2. package/cli.js +198 -9
  3. package/package.json +1 -1
package/README.md CHANGED
@@ -25,6 +25,30 @@ This will:
25
25
 
26
26
  Limbo binds to `127.0.0.1:18789`.
27
27
 
28
+ ### Agent Installation
29
+
30
+ AI agents can install Limbo non-interactively using CLI flags:
31
+
32
+ ```bash
33
+ npx limbo-ai start --provider openrouter --api-key sk-or-v1-xxx --model auto
34
+ ```
35
+
36
+ **Required flags:**
37
+ | Flag | Description |
38
+ |------|-------------|
39
+ | `--provider` | `openai`, `anthropic`, or `openrouter` |
40
+ | `--api-key` | Your provider API key |
41
+
42
+ **Optional flags:**
43
+ | Flag | Default | Description |
44
+ |------|---------|-------------|
45
+ | `--model` | Provider default | Model name (e.g. `anthropic/claude-sonnet-4-6`) |
46
+ | `--language` | `en` | CLI language (`en` or `es`) |
47
+
48
+ Headless mode skips Telegram setup. To add Telegram later, run `npx limbo-ai start --reconfigure`.
49
+
50
+ > **Note:** Subscription-based auth (ChatGPT/Codex, Claude Code) requires interactive setup because it involves browser-based OAuth or token pasting. Use `npx limbo-ai start` without flags for subscription auth.
51
+
28
52
  ### Available commands
29
53
 
30
54
  ```sh
package/cli.js CHANGED
@@ -19,7 +19,74 @@ const ENV_FILE = path.join(LIMBO_DIR, '.env');
19
19
  const COMPOSE_FILE = path.join(LIMBO_DIR, 'docker-compose.yml');
20
20
  const GHCR_IMAGE = 'ghcr.io/tomasward1/limbo';
21
21
  const DEFAULT_TAG = require('./package.json').version;
22
- const PORT = 18789;
22
+ const DEFAULT_PORT = 18789;
23
+ const COEXIST_PORT = 18900;
24
+ let PORT = DEFAULT_PORT;
25
+
26
+ // ─── OpenClaw Detection ─────────────────────────────────────────────────────
27
+
28
+ function isPortInUse(port) {
29
+ try {
30
+ execSync(
31
+ `node -e "const s=require('net').connect(${port},'127.0.0.1');s.on('connect',()=>{s.destroy();process.exit(0)});s.on('error',()=>process.exit(1));setTimeout(()=>process.exit(1),1500);"`,
32
+ { stdio: 'pipe', timeout: 3000 }
33
+ );
34
+ return true;
35
+ } catch {
36
+ return false;
37
+ }
38
+ }
39
+
40
+ function detectExistingOpenClaw() {
41
+ if (!isPortInUse(DEFAULT_PORT)) return null;
42
+
43
+ let processInfo = 'unknown process';
44
+ try {
45
+ const lsof = execSync(`lsof -i :${DEFAULT_PORT} -t 2>/dev/null`, { encoding: 'utf8', stdio: 'pipe' }).trim();
46
+ if (lsof) {
47
+ const pid = lsof.split('\n')[0];
48
+ const cmdline = execSync(`ps -p ${pid} -o command= 2>/dev/null`, { encoding: 'utf8', stdio: 'pipe' }).trim();
49
+ if (cmdline.includes('openclaw')) processInfo = 'OpenClaw';
50
+ else if (cmdline.includes('docker')) processInfo = 'Docker container';
51
+ else processInfo = cmdline.slice(0, 60);
52
+ }
53
+ } catch { /* lsof not available or no match */ }
54
+
55
+ return { port: DEFAULT_PORT, processInfo };
56
+ }
57
+
58
+ function findExistingApiKeys() {
59
+ const searchPaths = [
60
+ '/opt/openclaw/.env',
61
+ '/opt/openclaw/secrets/llm_api_key',
62
+ path.join(os.homedir(), '.openclaw', '.env'),
63
+ ];
64
+
65
+ for (const envPath of searchPaths) {
66
+ try {
67
+ if (!fs.existsSync(envPath)) continue;
68
+
69
+ // If it's a secrets file (single value), read directly
70
+ if (envPath.endsWith('llm_api_key')) {
71
+ const key = fs.readFileSync(envPath, 'utf8').trim();
72
+ if (key) return { source: path.dirname(envPath), keys: { LLM_API_KEY: key } };
73
+ continue;
74
+ }
75
+
76
+ // Parse .env file
77
+ const content = fs.readFileSync(envPath, 'utf8');
78
+ const keys = {};
79
+ for (const line of content.split('\n')) {
80
+ const match = line.match(/^(LLM_API_KEY|ANTHROPIC_API_KEY|OPENAI_API_KEY|OPENROUTER_API_KEY|MODEL_PROVIDER|MODEL_NAME)=(.+)$/);
81
+ if (match) keys[match[1]] = match[2].replace(/^["']|["']$/g, '').trim();
82
+ }
83
+ const hasKey = keys.LLM_API_KEY || keys.ANTHROPIC_API_KEY || keys.OPENAI_API_KEY || keys.OPENROUTER_API_KEY;
84
+ if (hasKey) return { source: path.dirname(envPath), keys };
85
+ } catch { /* permission denied etc — skip */ }
86
+ }
87
+
88
+ return null;
89
+ }
23
90
 
24
91
  // OpenClaw compatibility snapshots from official docs:
25
92
  // - https://docs.openclaw.ai/providers/openai
@@ -67,7 +134,8 @@ const ASCII_ART = String.raw`
67
134
  `;
68
135
 
69
136
  // docker-compose.yml written to ~/.limbo on install
70
- const COMPOSE_CONTENT = `services:
137
+ function composeContent() {
138
+ return `services:
71
139
  limbo:
72
140
  image: ${GHCR_IMAGE}:${DEFAULT_TAG}
73
141
  restart: unless-stopped
@@ -98,6 +166,7 @@ const COMPOSE_CONTENT = `services:
98
166
  environment:
99
167
  OPENCLAW_CONFIG_PATH: /home/limbo/.openclaw/openclaw.json
100
168
  OPENCLAW_STATE_DIR: /home/limbo/.openclaw
169
+ LIMBO_PORT: "${PORT}"
101
170
  healthcheck:
102
171
  test:
103
172
  - CMD-SHELL
@@ -121,9 +190,11 @@ volumes:
121
190
  limbo-data:
122
191
  limbo-openclaw-state:
123
192
  `;
193
+ }
124
194
 
125
195
  // Hardened variant: adds Squid egress proxy sidecar with domain allowlist
126
- const COMPOSE_CONTENT_HARDENED = `services:
196
+ function composeContentHardened() {
197
+ return `services:
127
198
  limbo:
128
199
  image: ${GHCR_IMAGE}:${DEFAULT_TAG}
129
200
  restart: unless-stopped
@@ -154,6 +225,7 @@ const COMPOSE_CONTENT_HARDENED = `services:
154
225
  environment:
155
226
  OPENCLAW_CONFIG_PATH: /home/limbo/.openclaw/openclaw.json
156
227
  OPENCLAW_STATE_DIR: /home/limbo/.openclaw
228
+ LIMBO_PORT: "${PORT}"
157
229
  HTTP_PROXY: http://squid:3128
158
230
  HTTPS_PROXY: http://squid:3128
159
231
  NO_PROXY: "127.0.0.1,localhost"
@@ -208,6 +280,7 @@ volumes:
208
280
  limbo-data:
209
281
  limbo-openclaw-state:
210
282
  `;
283
+ }
211
284
 
212
285
  const TEXT = {
213
286
  en: {
@@ -316,6 +389,13 @@ const TEXT = {
316
389
  helpReconfigure: 'Reconfigure auth and onboarding settings (use with start)',
317
390
  securityNotice: 'Security notice: Limbo runs AI agents inside a Docker container with access to your API keys and vault data. The container can make network requests to AI provider APIs. Do not store sensitive secrets (passwords, private keys) in your vault notes.',
318
391
  unknownCommand: (cmd) => `Unknown command: ${cmd}`,
392
+ headlessMissingApiKey: '--provider requires --api-key. Subscription auth needs interactive setup: npx limbo-ai start',
393
+ headlessInvalidProvider: 'Invalid --provider. Use: openai, anthropic, or openrouter',
394
+ headlessStarting: 'Headless mode: configuring...',
395
+ helpProvider: 'Set provider for headless install (openai, anthropic, openrouter)',
396
+ helpApiKey: 'API key for headless install',
397
+ helpModel: 'Model name (optional, uses provider default)',
398
+ helpLanguage: 'Language: en, es (default: en)',
319
399
  },
320
400
  es: {
321
401
  languageName: 'Espanol',
@@ -423,6 +503,13 @@ const TEXT = {
423
503
  helpReconfigure: 'Reconfigura auth y onboarding (usar con start)',
424
504
  securityNotice: 'Aviso de seguridad: Limbo corre agentes de IA dentro de un container Docker con acceso a tus API keys y datos del vault. El container puede hacer requests a las APIs de los proveedores de IA. No guardes secretos sensibles (passwords, claves privadas) en las notas del vault.',
425
505
  unknownCommand: (cmd) => `Comando desconocido: ${cmd}`,
506
+ headlessMissingApiKey: '--provider requiere --api-key. La autenticacion por suscripcion necesita setup interactivo: npx limbo-ai start',
507
+ headlessInvalidProvider: '--provider invalido. Usa: openai, anthropic, o openrouter',
508
+ headlessStarting: 'Modo headless: configurando...',
509
+ helpProvider: 'Setea el provider para instalacion headless (openai, anthropic, openrouter)',
510
+ helpApiKey: 'API key para instalacion headless',
511
+ helpModel: 'Nombre del modelo (opcional, usa el default del provider)',
512
+ helpLanguage: 'Idioma: en, es (default: en)',
426
513
  },
427
514
  };
428
515
 
@@ -451,6 +538,11 @@ function t(lang, key, ...args) {
451
538
  return typeof value === 'function' ? value(...args) : value;
452
539
  }
453
540
 
541
+ function parseFlag(flag) {
542
+ const idx = process.argv.indexOf(flag);
543
+ return idx !== -1 && idx + 1 < process.argv.length ? process.argv[idx + 1] : undefined;
544
+ }
545
+
454
546
  function sleep(ms) {
455
547
  Atomics.wait(new Int32Array(new SharedArrayBuffer(4)), 0, 0, ms);
456
548
  }
@@ -582,6 +674,7 @@ function normalizeConfig(cfg, existingEnv = {}) {
582
674
  AUTH_MODE: cfg.authMode || existingEnv.AUTH_MODE || 'api-key',
583
675
  MODEL_PROVIDER: cfg.provider || existingEnv.MODEL_PROVIDER || 'anthropic',
584
676
  MODEL_NAME: cfg.modelName || existingEnv.MODEL_NAME || 'claude-opus-4-6',
677
+ LIMBO_PORT: String(PORT),
585
678
  OPENAI_API_KEY: cfg.provider === 'openai' && cfg.apiKey ? cfg.apiKey : (cfg.keepExisting ? existingEnv.OPENAI_API_KEY || '' : ''),
586
679
  ANTHROPIC_API_KEY: cfg.provider === 'anthropic' && cfg.apiKey ? cfg.apiKey : (cfg.keepExisting ? existingEnv.ANTHROPIC_API_KEY || '' : ''),
587
680
  LLM_API_KEY: cfg.apiKey || (cfg.keepExisting ? existingEnv.LLM_API_KEY || '' : ''),
@@ -681,6 +774,32 @@ async function chooseModel(lang, providerFamily, authMode) {
681
774
  async function collectConfig(existingEnv = {}) {
682
775
  console.log(`${c.cyan}${ASCII_ART}${c.reset}`);
683
776
 
777
+ // Check for existing API keys from another OpenClaw installation
778
+ const existingKeys = findExistingApiKeys();
779
+ if (existingKeys && !existingEnv.LLM_API_KEY && !existingEnv.ANTHROPIC_API_KEY && !existingEnv.OPENAI_API_KEY) {
780
+ const keyValue = existingKeys.keys.LLM_API_KEY || existingKeys.keys.ANTHROPIC_API_KEY || existingKeys.keys.OPENAI_API_KEY || existingKeys.keys.OPENROUTER_API_KEY || '';
781
+ const maskedKey = keyValue ? keyValue.slice(0, 10) + '...' : 'found';
782
+
783
+ console.log(`
784
+ ${c.cyan}Found existing API keys${c.reset} from ${existingKeys.source}
785
+ ${c.dim}Key: ${maskedKey}${c.reset}
786
+ `);
787
+
788
+ const { select } = await getClack();
789
+ const reuseChoice = await select({
790
+ message: 'Reuse existing API configuration?',
791
+ options: [
792
+ { value: 'yes', label: 'Yes, use existing keys' },
793
+ { value: 'no', label: 'No, configure new keys' },
794
+ ],
795
+ });
796
+ await maybeHandleClackCancel(reuseChoice);
797
+
798
+ if (reuseChoice === 'yes') {
799
+ Object.assign(existingEnv, existingKeys.keys);
800
+ }
801
+ }
802
+
684
803
  const language = (await selectMenu(t('en', 'chooseLanguage'), [
685
804
  { label: TEXT.en.languageName, value: 'en' },
686
805
  { label: TEXT.es.languageName, value: 'es' },
@@ -712,7 +831,15 @@ async function collectConfig(existingEnv = {}) {
712
831
  let apiKey = '';
713
832
 
714
833
  if (accessMethod === 'api-key') {
715
- if (providerFamily === 'openai') {
834
+ const reusedKey = existingEnv.LLM_API_KEY
835
+ || (providerFamily === 'openai' && existingEnv.OPENAI_API_KEY)
836
+ || (providerFamily === 'anthropic' && existingEnv.ANTHROPIC_API_KEY)
837
+ || (providerFamily === 'openrouter' && existingEnv.OPENROUTER_API_KEY)
838
+ || '';
839
+
840
+ if (reusedKey) {
841
+ apiKey = reusedKey;
842
+ } else if (providerFamily === 'openai') {
716
843
  apiKey = await promptValidated(
717
844
  t(language, 'openAiApiKeyPrompt'),
718
845
  (value) => {
@@ -801,7 +928,7 @@ function ensureComposeFile(hardened = false) {
801
928
  if (fs.existsSync(src)) fs.copyFileSync(src, dest);
802
929
  }
803
930
  }
804
- fs.writeFileSync(COMPOSE_FILE, hardened ? COMPOSE_CONTENT_HARDENED : COMPOSE_CONTENT);
931
+ fs.writeFileSync(COMPOSE_FILE, hardened ? composeContentHardened() : composeContent());
805
932
  }
806
933
 
807
934
  function readSecretFile(name) {
@@ -1172,14 +1299,72 @@ async function cmdStart() {
1172
1299
  if (!hasDocker()) die(t('en', 'dockerMissing'));
1173
1300
 
1174
1301
  const hardened = process.argv.includes('--hardened');
1175
- ensureComposeFile(hardened);
1176
1302
 
1303
+ // ── Detect existing OpenClaw ──────────────────────────────────────────────
1177
1304
  const existingEnv = parseEnvFile();
1178
1305
  const alreadyHasEnv = fs.existsSync(ENV_FILE);
1306
+
1307
+ if (existingEnv.LIMBO_PORT) {
1308
+ const parsed = parseInt(existingEnv.LIMBO_PORT, 10);
1309
+ if (!Number.isFinite(parsed) || parsed < 1 || parsed > 65535) {
1310
+ warn(`Invalid LIMBO_PORT="${existingEnv.LIMBO_PORT}" in .env, using default ${DEFAULT_PORT}`);
1311
+ } else {
1312
+ PORT = parsed;
1313
+ }
1314
+ } else {
1315
+ const existing = detectExistingOpenClaw();
1316
+ if (existing) {
1317
+ console.log(`
1318
+ ${c.yellow}${c.bold}Existing OpenClaw detected${c.reset}
1319
+ ${c.dim}Port ${existing.port} is in use (${existing.processInfo})${c.reset}
1320
+
1321
+ Limbo will run its own OpenClaw instance on port ${c.bold}${COEXIST_PORT}${c.reset}.
1322
+ Both can coexist safely — separate containers, separate data.
1323
+ `);
1324
+ PORT = COEXIST_PORT;
1325
+ }
1326
+ }
1327
+
1328
+ ensureComposeFile(hardened);
1179
1329
  let cfg;
1180
1330
  let lang = existingEnv.CLI_LANGUAGE || 'en';
1181
1331
 
1182
- if (alreadyHasEnv) {
1332
+ // ── Headless mode ──────────────────────────────────────────────────────────
1333
+ const flagProvider = parseFlag('--provider');
1334
+ const flagApiKey = parseFlag('--api-key');
1335
+ const flagModel = parseFlag('--model');
1336
+ const flagLang = parseFlag('--language') || 'en';
1337
+
1338
+ if (flagProvider) {
1339
+ const validProviders = ['openai', 'anthropic', 'openrouter'];
1340
+ if (!validProviders.includes(flagProvider)) {
1341
+ die(t(flagLang, 'headlessInvalidProvider'));
1342
+ }
1343
+ if (!flagApiKey) {
1344
+ die(t(flagLang, 'headlessMissingApiKey'));
1345
+ }
1346
+
1347
+ lang = flagLang;
1348
+ const providerFamily = deriveProviderFamily(flagProvider);
1349
+ const catalog = getModelCatalog(providerFamily, 'api-key');
1350
+ const modelName = flagModel || catalog.defaultModel;
1351
+
1352
+ log(t(lang, 'headlessStarting'));
1353
+ cfg = {
1354
+ language: lang,
1355
+ authMode: 'api-key',
1356
+ provider: catalog.provider,
1357
+ providerFamily,
1358
+ modelName,
1359
+ apiKey: flagApiKey,
1360
+ telegramEnabled: 'false',
1361
+ telegramToken: '',
1362
+ telegramAutoPair: 'false',
1363
+ gatewayToken: ensureGatewayToken(existingEnv),
1364
+ };
1365
+ writeEnv({ ...cfg, CLI_LANGUAGE: cfg.language }, existingEnv);
1366
+ ok(t(cfg.language, 'envWritten'));
1367
+ } else if (alreadyHasEnv) {
1183
1368
  log(existingEnv.MODEL_PROVIDER ? t(lang, 'foundExistingConfig') : `Found existing config at ${ENV_FILE}`);
1184
1369
  const reconfig = process.argv.includes('--reconfigure');
1185
1370
  if (!reconfig) {
@@ -1296,8 +1481,12 @@ ${c.bold}Commands:${c.reset}
1296
1481
  help Show this help
1297
1482
 
1298
1483
  ${c.bold}Flags:${c.reset}
1299
- --reconfigure Reconfigure auth and onboarding settings (use with start)
1300
- --hardened Enable egress proxy (restricts outbound to AI provider APIs only)
1484
+ --reconfigure Reconfigure auth and onboarding settings (use with start)
1485
+ --hardened Enable egress proxy (restricts outbound to AI provider APIs only)
1486
+ --provider <name> Set provider for headless install (openai, anthropic, openrouter)
1487
+ --api-key <key> API key for headless install
1488
+ --model <name> Model name (optional, uses provider default)
1489
+ --language <code> Language: en, es (default: en)
1301
1490
 
1302
1491
  ${c.bold}Data directory:${c.reset} ${LIMBO_DIR}
1303
1492
  `);
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "limbo-ai",
3
- "version": "1.12.0",
3
+ "version": "1.13.0",
4
4
  "description": "Your personal AI memory agent — install and manage Limbo via npx",
5
5
  "type": "commonjs",
6
6
  "bin": {