@darksol/terminal 0.9.0 → 0.9.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -9,6 +9,7 @@ import { join, dirname } from 'path';
9
9
  import { homedir } from 'os';
10
10
  import { spawn } from 'child_process';
11
11
  import { fileURLToPath } from 'url';
12
+ import { getConfiguredModel, getModelSelectionMeta, getProviderDefaultModel } from '../llm/models.js';
12
13
 
13
14
  // ══════════════════════════════════════════════════
14
15
  // CHAT LOG PERSISTENCE
@@ -416,12 +417,32 @@ export async function handleMenuSelect(id, value, item, ws) {
416
417
  })));
417
418
  return {};
418
419
  }
420
+ if (value === 'model') {
421
+ return showModelSelectionMenu(ws);
422
+ }
419
423
  if (value === 'keys') {
420
424
  return await handleCommand('keys', ws);
421
425
  }
422
426
  ws.sendLine('');
423
427
  return {};
424
428
 
429
+ case 'config_model':
430
+ if (value === 'back') {
431
+ ws.sendLine('');
432
+ return {};
433
+ }
434
+ if (value === '__custom__') {
435
+ ws.sendPrompt('config_model_input', 'Model:', { provider: getConfig('llm.provider') || 'openai' });
436
+ return {};
437
+ }
438
+ saveSelectedModel(value);
439
+ chatEngines.delete(ws);
440
+ ws.sendLine('');
441
+ ws.sendLine(` ${ANSI.green}✓ Model set to ${value}${ANSI.reset}`);
442
+ ws.sendLine(` ${ANSI.dim}AI session refreshed.${ANSI.reset}`);
443
+ ws.sendLine('');
444
+ return {};
445
+
425
446
  case 'main_menu':
426
447
  if (value === 'back') {
427
448
  ws.sendLine('');
@@ -429,7 +450,6 @@ export async function handleMenuSelect(id, value, item, ws) {
429
450
  }
430
451
  return await handleCommand(value, ws);
431
452
  }
432
-
433
453
  return {};
434
454
  }
435
455
 
@@ -452,6 +472,7 @@ export async function handlePromptResponse(id, value, meta, ws) {
452
472
  if (service === 'openai') ws.sendLine(` ${ANSI.dim}Key should start with sk-${ANSI.reset}`);
453
473
  if (service === 'anthropic') ws.sendLine(` ${ANSI.dim}Key should start with sk-ant-${ANSI.reset}`);
454
474
  if (service === 'openrouter') ws.sendLine(` ${ANSI.dim}Key should start with sk-or-${ANSI.reset}`);
475
+ if (service === 'minimax') ws.sendLine(` ${ANSI.dim}Get a key: ${svc.docsUrl}${ANSI.reset}`);
455
476
  if (service === 'ollama') ws.sendLine(` ${ANSI.dim}Should be a URL like http://localhost:11434${ANSI.reset}`);
456
477
  ws.sendLine('');
457
478
  return {};
@@ -472,6 +493,23 @@ export async function handlePromptResponse(id, value, meta, ws) {
472
493
  return {};
473
494
  }
474
495
 
496
+ if (id === 'config_model_input') {
497
+ const provider = meta?.provider || getConfig('llm.provider') || 'openai';
498
+ const model = String(value || '').trim();
499
+ if (!model) {
500
+ ws.sendLine(` ${ANSI.red}✗ Model is required${ANSI.reset}`);
501
+ ws.sendLine('');
502
+ return {};
503
+ }
504
+
505
+ saveSelectedModel(model, provider);
506
+ chatEngines.delete(ws);
507
+ ws.sendLine(` ${ANSI.green}✓ Model set to ${model}${ANSI.reset}`);
508
+ ws.sendLine(` ${ANSI.dim}AI session refreshed.${ANSI.reset}`);
509
+ ws.sendLine('');
510
+ return {};
511
+ }
512
+
475
513
  if (id === 'cards_status_id') {
476
514
  if (!value) { ws.sendLine(` ${ANSI.red}✗ Cancelled${ANSI.reset}`); ws.sendLine(''); return {}; }
477
515
  return await showCardStatus(value.trim(), ws);
@@ -782,13 +820,15 @@ export function getAIStatus() {
782
820
  const dim = '\x1b[38;2;102;102;102m';
783
821
  const reset = '\x1b[0m';
784
822
 
785
- const providers = ['openai', 'anthropic', 'openrouter', 'ollama', 'bankr'];
823
+ const providers = ['openai', 'anthropic', 'openrouter', 'minimax', 'ollama', 'bankr'];
786
824
  const connected = providers.filter(p => hasKey(p));
787
825
  const soul = hasSoul() ? getSoul() : null;
788
826
 
789
827
  if (connected.length > 0) {
790
828
  const names = connected.map(p => SERVICES[p]?.name || p).join(', ');
791
- return ` ${green}● AI ready${reset} ${dim}(${names})${reset}\r\n ${dim}Type ${gold}ai <question>${dim} to start chatting. Chat logs saved to ~/.darksol/chat-logs/${reset}\r\n\r\n`;
829
+ const provider = getConfig('llm.provider') || connected[0];
830
+ const model = provider === 'bankr' ? 'gateway managed' : (getConfiguredModel(provider) || getProviderDefaultModel(provider) || 'default');
831
+ return ` ${green}● AI ready${reset} ${dim}(${names} | ${provider}/${model})${reset}\r\n ${dim}Type ${gold}ai <question>${dim} to start chatting. Chat logs saved to ~/.darksol/chat-logs/${reset}\r\n\r\n`;
792
832
  }
793
833
 
794
834
  return [
@@ -798,6 +838,7 @@ export function getAIStatus() {
798
838
  ` ${green}keys add openai sk-...${reset} ${dim}OpenAI (GPT-4o)${reset}`,
799
839
  ` ${green}keys add anthropic sk-ant-...${reset} ${dim}Anthropic (Claude)${reset}`,
800
840
  ` ${green}keys add openrouter sk-or-...${reset} ${dim}OpenRouter (any model)${reset}`,
841
+ ` ${green}keys add minimax <key>${reset} ${dim}MiniMax (MiniMax-M2.5)${reset}`,
801
842
  ` ${green}keys add bankr bk_...${reset} ${dim}Bankr LLM Gateway (crypto credits)${reset}`,
802
843
  ` ${green}keys add ollama http://...${reset} ${dim}Ollama (free, local)${reset}`,
803
844
  '',
@@ -834,7 +875,7 @@ export async function handleCommand(cmd, ws) {
834
875
  case 'mail':
835
876
  return await cmdMail(args, ws);
836
877
  case 'config':
837
- return await cmdConfig(ws);
878
+ return await cmdConfig(args, ws);
838
879
  case 'oracle':
839
880
  return await cmdOracle(args, ws);
840
881
  case 'cards':
@@ -850,6 +891,8 @@ export async function handleCommand(cmd, ws) {
850
891
  case 'agent':
851
892
  case 'signer':
852
893
  return await cmdAgent(args, ws);
894
+ case 'task':
895
+ return await cmdAgent(['task', ...args], ws);
853
896
  case 'ai':
854
897
  case 'ask':
855
898
  case 'chat':
@@ -1339,7 +1382,84 @@ async function showWalletDetail(name, ws) {
1339
1382
  async function cmdAgent(args, ws) {
1340
1383
  const sub = (args[0] || 'menu').toLowerCase();
1341
1384
 
1385
+ if (sub === 'task') {
1386
+ const goal = args.slice(1).join(' ').trim();
1387
+ if (!goal) {
1388
+ return {
1389
+ output: `\r\n ${ANSI.dim}Usage: agent task <goal> [--max-steps N] [--allow-actions]${ANSI.reset}\r\n ${ANSI.dim}Shortcut: task <goal>${ANSI.reset}\r\n\r\n`,
1390
+ };
1391
+ }
1392
+
1393
+ const allowActions = args.includes('--allow-actions');
1394
+ const maxIndex = args.findIndex((arg) => arg === '--max-steps');
1395
+ const maxSteps = maxIndex >= 0 ? parseInt(args[maxIndex + 1], 10) || 10 : 10;
1396
+ const filteredGoal = args
1397
+ .slice(1)
1398
+ .filter((arg, index, arr) => arg !== '--allow-actions' && !(arg === '--max-steps' || arr[index - 1] === '--max-steps'))
1399
+ .join(' ')
1400
+ .trim();
1401
+
1402
+ const { runAgentTask } = await import('../agent/index.js');
1403
+ ws.sendLine(`${ANSI.gold} ◆ AGENT TASK${ANSI.reset}`);
1404
+ ws.sendLine(`${ANSI.dim} ${'─'.repeat(50)}${ANSI.reset}`);
1405
+ ws.sendLine(` ${ANSI.white}Goal:${ANSI.reset} ${filteredGoal}`);
1406
+ ws.sendLine(` ${ANSI.darkGold}Mode:${ANSI.reset} ${allowActions ? 'actions enabled' : 'safe mode'}`);
1407
+ ws.sendLine('');
1408
+
1409
+ const result = await runAgentTask(filteredGoal, {
1410
+ maxSteps,
1411
+ allowActions,
1412
+ onProgress: (event) => {
1413
+ if (event.type === 'thought') {
1414
+ ws.sendLine(` ${ANSI.darkGold}[step ${event.step}]${ANSI.reset} ${ANSI.white}${event.action}${ANSI.reset}`);
1415
+ if (event.thought) {
1416
+ ws.sendLine(` ${ANSI.dim}${event.thought}${ANSI.reset}`);
1417
+ }
1418
+ }
1419
+ if (event.type === 'observation') {
1420
+ const summary = event.observation?.summary || event.observation?.error;
1421
+ if (summary) ws.sendLine(` ${ANSI.dim}${summary}${ANSI.reset}`);
1422
+ ws.sendLine('');
1423
+ }
1424
+ },
1425
+ });
1426
+
1427
+ ws.sendLine(` ${ANSI.green}Final:${ANSI.reset} ${result.final}`);
1428
+ ws.sendLine(` ${ANSI.dim}Status ${result.status} • ${result.stepsTaken}/${result.maxSteps} steps • ${result.stopReason}${ANSI.reset}`);
1429
+ ws.sendLine('');
1430
+ return {};
1431
+ }
1432
+
1433
+ if (sub === 'plan') {
1434
+ const goal = args.slice(1).join(' ').trim();
1435
+ if (!goal) {
1436
+ return { output: ` ${ANSI.dim}Usage: agent plan <goal>${ANSI.reset}\r\n` };
1437
+ }
1438
+ const { planAgentGoal } = await import('../agent/index.js');
1439
+ const plan = await planAgentGoal(goal);
1440
+ ws.sendLine(`${ANSI.gold} ◆ AGENT PLAN${ANSI.reset}`);
1441
+ ws.sendLine(`${ANSI.dim} ${'─'.repeat(50)}${ANSI.reset}`);
1442
+ ws.sendLine(` ${ANSI.white}${plan.summary}${ANSI.reset}`);
1443
+ ws.sendLine('');
1444
+ plan.steps.forEach((step, index) => ws.sendLine(` ${ANSI.darkGold}${index + 1}.${ANSI.reset} ${step}`));
1445
+ ws.sendLine('');
1446
+ return {};
1447
+ }
1448
+
1342
1449
  if (sub === 'status') {
1450
+ const { getAgentStatus } = await import('../agent/index.js');
1451
+ const status = getAgentStatus();
1452
+ if (status?.goal || status?.summary) {
1453
+ ws.sendLine(`${ANSI.gold} ◆ AGENT STATUS${ANSI.reset}`);
1454
+ ws.sendLine(`${ANSI.dim} ${'─'.repeat(50)}${ANSI.reset}`);
1455
+ ws.sendLine(` ${ANSI.darkGold}Status${ANSI.reset} ${ANSI.white}${status.status || '-'}${ANSI.reset}`);
1456
+ ws.sendLine(` ${ANSI.darkGold}Goal${ANSI.reset} ${ANSI.white}${status.goal || '-'}${ANSI.reset}`);
1457
+ ws.sendLine(` ${ANSI.darkGold}Summary${ANSI.reset} ${ANSI.white}${status.summary || '-'}${ANSI.reset}`);
1458
+ ws.sendLine(` ${ANSI.darkGold}Steps${ANSI.reset} ${ANSI.white}${status.stepsTaken || 0}${status.maxSteps ? `/${status.maxSteps}` : ''}${ANSI.reset}`);
1459
+ ws.sendLine(` ${ANSI.darkGold}Actions${ANSI.reset} ${ANSI.white}${status.allowActions ? 'enabled' : 'safe mode'}${ANSI.reset}`);
1460
+ ws.sendLine('');
1461
+ return {};
1462
+ }
1343
1463
  return await showSignerStatus(ws);
1344
1464
  }
1345
1465
 
@@ -1713,11 +1833,20 @@ async function cmdFacilitator(args, ws) {
1713
1833
  return {};
1714
1834
  }
1715
1835
 
1716
- async function cmdConfig(ws) {
1836
+ async function cmdConfig(args, ws) {
1837
+ const sub = args[0]?.toLowerCase();
1838
+ if (sub === 'model') {
1839
+ return showModelSelectionMenu(ws);
1840
+ }
1841
+
1717
1842
  const chain = getConfig('chain') || 'base';
1718
1843
  const wallet = getConfig('activeWallet') || '(none)';
1719
1844
  const slippage = getConfig('slippage') || '0.5';
1720
1845
  const email = getConfig('mailEmail') || '(none)';
1846
+ const provider = getConfig('llm.provider') || '(not set)';
1847
+ const model = provider === 'bankr'
1848
+ ? 'gateway managed'
1849
+ : getConfiguredModel(provider === '(not set)' ? 'openai' : provider) || '(default)';
1721
1850
 
1722
1851
  ws.sendLine(`${ANSI.gold} ◆ CONFIG${ANSI.reset}`);
1723
1852
  ws.sendLine(`${ANSI.dim} ${'─'.repeat(50)}${ANSI.reset}`);
@@ -1725,12 +1854,15 @@ async function cmdConfig(ws) {
1725
1854
  ws.sendLine(` ${ANSI.darkGold}Wallet${ANSI.reset} ${ANSI.white}${wallet}${ANSI.reset}`);
1726
1855
  ws.sendLine(` ${ANSI.darkGold}Slippage${ANSI.reset} ${ANSI.white}${slippage}%${ANSI.reset}`);
1727
1856
  ws.sendLine(` ${ANSI.darkGold}Mail${ANSI.reset} ${ANSI.white}${email}${ANSI.reset}`);
1857
+ ws.sendLine(` ${ANSI.darkGold}LLM Provider${ANSI.reset} ${ANSI.white}${provider}${ANSI.reset}`);
1858
+ ws.sendLine(` ${ANSI.darkGold}LLM Model${ANSI.reset} ${ANSI.white}${model}${ANSI.reset}`);
1728
1859
  ws.sendLine(` ${ANSI.darkGold}AI${ANSI.reset} ${hasAnyLLM() ? `${ANSI.green}● Ready${ANSI.reset}` : `${ANSI.dim}○ Not configured${ANSI.reset}`}`);
1729
1860
  ws.sendLine('');
1730
1861
 
1731
1862
  // Offer interactive config
1732
1863
  ws.sendMenu('config_action', '◆ Configure', [
1733
1864
  { value: 'chain', label: '🔗 Change chain', desc: `Currently: ${chain}` },
1865
+ { value: 'model', label: '🧠 Change model', desc: `Currently: ${model}` },
1734
1866
  { value: 'keys', label: '🔑 LLM / API keys', desc: '' },
1735
1867
  { value: 'back', label: '← Back', desc: '' },
1736
1868
  ]);
@@ -1738,6 +1870,49 @@ async function cmdConfig(ws) {
1738
1870
  return {};
1739
1871
  }
1740
1872
 
1873
/**
 * Present a model-picker for the currently configured LLM provider.
 *
 * Behavior depends on the provider's selection metadata:
 *  - `managed`   → no choice to make (Bankr picks server-side); prints a note.
 *  - `textInput` → prompts for a free-form model string instead of a menu.
 *  - otherwise   → sends a menu of `choices`, optionally with a custom-entry
 *                  item, always terminated by a Back item.
 *
 * @param {object} ws - Terminal session with sendLine/sendPrompt/sendMenu.
 * @returns {object} Empty result object (menu/prompt continues async).
 */
function showModelSelectionMenu(ws) {
  const provider = getConfig('llm.provider') || 'openai';
  const { managed, textInput, choices, allowCustom } = getModelSelectionMeta(provider);

  // Bankr-style gateways manage the backing model themselves — nothing to pick.
  if (managed) {
    ws.sendLine(` ${ANSI.dim}Bankr selects the backing model automatically.${ANSI.reset}`);
    ws.sendLine('');
    return {};
  }

  // Providers without a fixed catalogue take free-form text input.
  if (textInput) {
    ws.sendPrompt('config_model_input', 'Model:', { provider });
    return {};
  }

  const items = [];
  for (const choice of choices || []) {
    // Label mirrors the raw model id; desc carries the human-readable blurb.
    items.push({ value: choice.value, label: choice.value, desc: choice.desc });
  }
  if (allowCustom) {
    items.push({ value: '__custom__', label: 'Custom model', desc: 'Type any model string' });
  }
  items.push({ value: 'back', label: '← Back', desc: '' });

  ws.sendMenu('config_model', '🧠 Select Model', items);
  return {};
}
1905
+
1906
/**
 * Persist the chosen LLM model to config.
 *
 * Always writes `llm.model`; when the provider is Ollama, the model is
 * additionally stored under the separate `ollamaModel` key.
 *
 * @param {string} model - Model identifier to store.
 * @param {string} [provider] - Provider the model belongs to; defaults to the
 *   configured `llm.provider`, falling back to 'openai'.
 */
function saveSelectedModel(model, provider = getConfig('llm.provider') || 'openai') {
  setConfig('llm.model', model);
  if (provider !== 'ollama') return;
  setConfig('ollamaModel', model);
}
1915
+
1741
1916
  // ══════════════════════════════════════════════════
1742
1917
  // AI CHAT — LLM-powered assistant in the web shell
1743
1918
  // ══════════════════════════════════════════════════
@@ -1938,7 +2113,7 @@ async function cmdKeys(args, ws) {
1938
2113
 
1939
2114
  if (!svc) {
1940
2115
  ws.sendLine(` ${ANSI.red}✗ Unknown service: ${service}${ANSI.reset}`);
1941
- ws.sendLine(` ${ANSI.dim}Available: openai, anthropic, openrouter, ollama${ANSI.reset}`);
2116
+ ws.sendLine(` ${ANSI.dim}Available: openai, anthropic, openrouter, minimax, ollama, bankr${ANSI.reset}`);
1942
2117
  ws.sendLine('');
1943
2118
  return {};
1944
2119
  }
@@ -1986,7 +2161,7 @@ async function cmdKeys(args, ws) {
1986
2161
  ws.sendLine(`${ANSI.dim} ${'─'.repeat(50)}${ANSI.reset}`);
1987
2162
  ws.sendLine('');
1988
2163
 
1989
- const llmProviders = ['openai', 'anthropic', 'openrouter', 'ollama', 'bankr'];
2164
+ const llmProviders = ['openai', 'anthropic', 'openrouter', 'minimax', 'ollama', 'bankr'];
1990
2165
  ws.sendLine(` ${ANSI.gold}LLM Providers:${ANSI.reset}`);
1991
2166
  for (const p of llmProviders) {
1992
2167
  const svc = SERVICES[p];
@@ -2000,6 +2175,7 @@ async function cmdKeys(args, ws) {
2000
2175
  ws.sendLine(` ${ANSI.green}keys add openai sk-...${ANSI.reset} ${ANSI.dim}Add OpenAI key${ANSI.reset}`);
2001
2176
  ws.sendLine(` ${ANSI.green}keys add anthropic sk-ant-...${ANSI.reset} ${ANSI.dim}Add Anthropic key${ANSI.reset}`);
2002
2177
  ws.sendLine(` ${ANSI.green}keys add openrouter sk-or-...${ANSI.reset} ${ANSI.dim}Add OpenRouter key${ANSI.reset}`);
2178
+ ws.sendLine(` ${ANSI.green}keys add minimax <key>${ANSI.reset} ${ANSI.dim}Add MiniMax key${ANSI.reset}`);
2003
2179
  ws.sendLine(` ${ANSI.green}keys add ollama http://...${ANSI.reset} ${ANSI.dim}Add Ollama host${ANSI.reset}`);
2004
2180
  ws.sendLine('');
2005
2181