@otonix/cli 1.1.0 → 1.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/README.md +80 -17
  2. package/dist/cli.js +148 -6
  3. package/package.json +2 -2
package/README.md CHANGED
@@ -2,7 +2,7 @@
2
2
 
3
3
  Command-line tool for the [Otonix](https://otonix.tech) sovereign compute platform.
4
4
 
5
- Initialize, register, and manage autonomous AI agents from your terminal.
5
+ Initialize, register, and manage autonomous AI agents from your terminal. Includes built-in access to the Bankr LLM Gateway for multi-model AI inference.
6
6
 
7
7
  ## Install
8
8
 
@@ -69,8 +69,48 @@ otonix log "Trade executed BTC/USDC" --category trading
69
69
  | Command | Description |
70
70
  |---------|-------------|
71
71
  | `otonix engine` | Show autonomic engine status |
72
+ | `otonix marketplace` | List marketplace services |
72
73
  | `otonix x402` | Show x402 payment configuration |
73
74
 
75
+ ### LLM Gateway (Bankr)
76
+
77
+ Access multi-model AI inference directly from your terminal. Supports Claude, GPT, Gemini, Kimi, Qwen, and more via the [Bankr LLM Gateway](https://bankr.bot/llm).
78
+
79
+ | Command | Description |
80
+ |---------|-------------|
81
+ | `otonix llm:init <key>` | Configure Bankr LLM Gateway API key |
82
+ | `otonix llm:models` | List available AI models |
83
+ | `otonix llm:chat <prompt>` | Send a chat completion request |
84
+ | `otonix llm:usage` | Show LLM usage summary |
85
+ | `otonix llm:health` | Check gateway health status |
86
+
87
+ #### LLM Setup
88
+
89
+ Get your Bankr API key from [bankr.bot/api](https://bankr.bot/api) (make sure LLM Gateway access is enabled):
90
+
91
+ ```bash
92
+ otonix llm:init bk_YOUR_API_KEY
93
+ ```
94
+
95
+ #### LLM Examples
96
+
97
+ ```bash
98
+ # List available models
99
+ otonix llm:models
100
+
101
+ # Chat with default model (claude-haiku-4.5)
102
+ otonix llm:chat "What is sovereign compute?"
103
+
104
+ # Chat with a specific model
105
+ otonix llm:chat "Analyze BTC price action" --model gpt-5-mini
106
+
107
+ # Check usage (last 7 days)
108
+ otonix llm:usage --days 7
109
+
110
+ # Check gateway health
111
+ otonix llm:health
112
+ ```
113
+
74
114
  ## Command Options
75
115
 
76
116
  ### `otonix register`
@@ -107,6 +147,26 @@ otonix actions --limit 50
107
147
  |--------|---------|-------------|
108
148
  | `--limit` | `20` | Number of actions to show |
109
149
 
150
+ ### `otonix llm:chat`
151
+
152
+ ```bash
153
+ otonix llm:chat "Your prompt here" --model claude-haiku-4.5
154
+ ```
155
+
156
+ | Option | Default | Description |
157
+ |--------|---------|-------------|
158
+ | `--model` | `claude-haiku-4.5` | AI model to use |
159
+
160
+ ### `otonix llm:usage`
161
+
162
+ ```bash
163
+ otonix llm:usage --days 7
164
+ ```
165
+
166
+ | Option | Default | Description |
167
+ |--------|---------|-------------|
168
+ | `--days` | `30` | Number of days to show |
169
+
110
170
  ## Configuration
111
171
 
112
172
  Config is stored at `~/.otonix/config.json` with file permissions `600`.
@@ -116,7 +176,8 @@ Config is stored at `~/.otonix/config.json` with file permissions `600`.
116
176
  "apiKey": "otonix_xxxx",
117
177
  "endpoint": "https://app.otonix.tech",
118
178
  "agentId": "uuid-xxxx",
119
- "agentName": "my-agent"
179
+ "agentName": "my-agent",
180
+ "bankrApiKey": "bk_xxxx"
120
181
  }
121
182
  ```
122
183
 
@@ -141,24 +202,26 @@ $ otonix register --name sentinel-01
141
202
  Tier: active
142
203
  Interval: 60s
143
204
 
205
+ $ otonix llm:init bk_4GYZ...WXFKD
206
+ Bankr LLM Gateway key saved.
207
+ Gateway: https://llm.bankr.bot
208
+ Key: bk_4GY...XFKD
209
+
210
+ $ otonix llm:chat "What infrastructure should I monitor?"
211
+ Model: claude-haiku-4.5
212
+ Prompt: What infrastructure should I monitor?
213
+
214
+ Thinking...
215
+
216
+ Focus on CPU usage, memory consumption, disk I/O, network latency,
217
+ and service uptime. Set up alerts for anomalies in each metric.
218
+
219
+ Tokens: 24 in / 87 out
220
+ Model: claude-haiku-4.5
221
+
144
222
  $ otonix heartbeat
145
223
  Heartbeat sent — sentinel-01 [active] tier:active credits:$50.00
146
224
 
147
- $ otonix log "Deployed monitoring stack" --category infra
148
- Logged: [infra] Deployed monitoring stack (completed)
149
-
150
- $ otonix status
151
- Agent Status:
152
- ID: e2998be4-b77c-495e-a535-a6e4ca9dc768
153
- Name: sentinel-01
154
- Status: active
155
- Tier: full
156
- Credits: $50.00
157
- Model: claude-opus-4-6
158
- VPS IP: 10.0.1.1
159
- Heartbeat: 12s ago
160
- Actions: 3
161
-
162
225
  $ otonix engine
163
226
  Autonomic Engine Status:
164
227
  Running: yes
package/dist/cli.js CHANGED
@@ -28,7 +28,7 @@ var import_sdk = require("@otonix/sdk");
28
28
  var fs = __toESM(require("fs"));
29
29
  var path = __toESM(require("path"));
30
30
  var readline = __toESM(require("readline"));
31
- var VERSION = "1.1.0";
31
+ var VERSION = "1.2.0";
32
32
  var CONFIG_DIR = path.join(process.env.HOME || "~", ".otonix");
33
33
  var CONFIG_FILE = path.join(CONFIG_DIR, "config.json");
34
34
  function loadConfig() {
@@ -108,6 +108,11 @@ var HELP = `
108
108
  engine Show autonomic engine status
109
109
  marketplace List marketplace services
110
110
  x402 Show x402 payment config
111
+ llm:init <key> Configure Bankr LLM Gateway API key
112
+ llm:models List available LLM models
113
+ llm:chat <prompt> Send a chat completion request
114
+ llm:usage [--days N] Show LLM usage summary
115
+ llm:health Check gateway health
111
116
  whoami Show current configuration
112
117
  version Show CLI version
113
118
  help Show this help
@@ -119,6 +124,8 @@ var HELP = `
119
124
  otonix heartbeat:loop
120
125
  otonix log "Trade executed BTC/USDC" --category trading
121
126
  otonix actions --limit 20
127
+ otonix llm:init bk_YOUR_API_KEY
128
+ otonix llm:chat "Explain smart contracts" --model claude-haiku-4.5
122
129
  `;
123
130
  async function cmdInit() {
124
131
  console.log("\n Otonix CLI Setup\n");
@@ -476,6 +483,125 @@ async function cmdX402() {
476
483
  console.log(` Facilitator: ${config.facilitatorUrl}
477
484
  `);
478
485
  }
486
+ function getBankrClient() {
487
+ const config = loadConfig();
488
+ if (!config?.bankrApiKey) {
489
+ console.error(" Error: Bankr LLM key not configured. Run 'otonix llm:init <key>' first.");
490
+ process.exit(1);
491
+ }
492
+ return new import_sdk.BankrLLM({ apiKey: config.bankrApiKey });
493
+ }
494
+ function cmdLlmInit(key) {
495
+ if (!key) {
496
+ console.error(" Usage: otonix llm:init <bankr_api_key>");
497
+ process.exit(1);
498
+ }
499
+ if (!key.startsWith("bk_")) {
500
+ console.error(" Error: Bankr API key must start with 'bk_'");
501
+ process.exit(1);
502
+ }
503
+ const config = loadConfig();
504
+ if (!config) {
505
+ console.error(" Error: Run 'otonix init' first to configure Otonix.");
506
+ process.exit(1);
507
+ }
508
+ config.bankrApiKey = key;
509
+ saveConfig(config);
510
+ console.log(`
511
+ Bankr LLM Gateway key saved.`);
512
+ console.log(` Gateway: https://llm.bankr.bot`);
513
+ console.log(` Key: ${key.slice(0, 6)}...${key.slice(-4)}
514
+ `);
515
+ }
516
+ async function cmdLlmModels() {
517
+ const llm = getBankrClient();
518
+ const models = await llm.listModels();
519
+ if (!models.length) {
520
+ console.log("\n No models available.\n");
521
+ return;
522
+ }
523
+ console.log(`
524
+ Available Models \u2014 ${models.length} total
525
+ `);
526
+ printTable(
527
+ models.map((m) => ({
528
+ Model: m.id,
529
+ Provider: m.owned_by
530
+ }))
531
+ );
532
+ console.log();
533
+ }
534
+ async function cmdLlmChat(args) {
535
+ const llm = getBankrClient();
536
+ let model = "claude-haiku-4.5";
537
+ const modelIdx = args.indexOf("--model");
538
+ if (modelIdx !== -1 && args[modelIdx + 1]) {
539
+ model = args[modelIdx + 1];
540
+ args.splice(modelIdx, 2);
541
+ }
542
+ const promptText = args.join(" ");
543
+ if (!promptText) {
544
+ console.error(" Usage: otonix llm:chat <prompt> [--model <model>]");
545
+ process.exit(1);
546
+ }
547
+ console.log(`
548
+ Model: ${model}`);
549
+ console.log(` Prompt: ${promptText}
550
+ `);
551
+ console.log(" Thinking...\n");
552
+ const result = await llm.chat({
553
+ model,
554
+ messages: [{ role: "user", content: promptText }]
555
+ });
556
+ const content = result.choices?.[0]?.message?.content || "No response";
557
+ console.log(` ${content.split("\n").join("\n ")}`);
558
+ console.log(`
559
+ Tokens: ${result.usage?.prompt_tokens || 0} in / ${result.usage?.completion_tokens || 0} out`);
560
+ console.log(` Model: ${result.model}
561
+ `);
562
+ }
563
+ async function cmdLlmUsage(args) {
564
+ const llm = getBankrClient();
565
+ let days = 30;
566
+ const daysIdx = args.indexOf("--days");
567
+ if (daysIdx !== -1 && args[daysIdx + 1]) {
568
+ days = parseInt(args[daysIdx + 1]) || 30;
569
+ }
570
+ const usage = await llm.getUsage(days);
571
+ console.log(`
572
+ LLM Usage (${days} days)
573
+ `);
574
+ console.log(` Total Requests: ${usage.totals.totalRequests}`);
575
+ console.log(` Input Tokens: ${usage.totals.totalInputTokens.toLocaleString()}`);
576
+ console.log(` Output Tokens: ${usage.totals.totalOutputTokens.toLocaleString()}`);
577
+ console.log(` Total Cost: $${usage.totals.totalCost.toFixed(2)}`);
578
+ if (usage.byModel?.length) {
579
+ console.log(`
580
+ By Model:`);
581
+ printTable(
582
+ usage.byModel.map((m) => ({
583
+ Model: m.model,
584
+ Provider: m.provider,
585
+ Requests: m.requests,
586
+ Cost: `$${m.totalCost.toFixed(2)}`
587
+ }))
588
+ );
589
+ }
590
+ console.log();
591
+ }
592
+ async function cmdLlmHealth() {
593
+ const llm = getBankrClient();
594
+ const health = await llm.checkHealth();
595
+ console.log(`
596
+ LLM Gateway Health:`);
597
+ console.log(` Status: ${health.status}`);
598
+ if (health.providers) {
599
+ for (const [name, ok] of Object.entries(health.providers)) {
600
+ console.log(` ${name}: ${ok ? "online" : "offline"}`);
601
+ }
602
+ }
603
+ console.log();
604
+ }
479
605
  function cmdWhoami() {
480
606
  const config = loadConfig();
481
607
  if (!config) {
@@ -484,11 +610,12 @@ function cmdWhoami() {
484
610
  }
485
611
  console.log(`
486
612
  Current Config:`);
487
- console.log(` Endpoint: ${config.endpoint}`);
488
- console.log(` API Key: ${config.apiKey.slice(0, 12)}...${config.apiKey.slice(-4)}`);
489
- console.log(` Agent ID: ${config.agentId || "not registered"}`);
490
- console.log(` Agent: ${config.agentName || "\u2014"}`);
491
- console.log(` Config: ${CONFIG_FILE}
613
+ console.log(` Endpoint: ${config.endpoint}`);
614
+ console.log(` API Key: ${config.apiKey.slice(0, 12)}...${config.apiKey.slice(-4)}`);
615
+ console.log(` Agent ID: ${config.agentId || "not registered"}`);
616
+ console.log(` Agent: ${config.agentName || "\u2014"}`);
617
+ console.log(` Bankr LLM: ${config.bankrApiKey ? config.bankrApiKey.slice(0, 6) + "..." + config.bankrApiKey.slice(-4) : "not configured"}`);
618
+ console.log(` Config: ${CONFIG_FILE}
492
619
  `);
493
620
  }
494
621
  async function main() {
@@ -539,6 +666,21 @@ async function main() {
539
666
  case "x402":
540
667
  await cmdX402();
541
668
  break;
669
+ case "llm:init":
670
+ cmdLlmInit(rest[0]);
671
+ break;
672
+ case "llm:models":
673
+ await cmdLlmModels();
674
+ break;
675
+ case "llm:chat":
676
+ await cmdLlmChat(rest);
677
+ break;
678
+ case "llm:usage":
679
+ await cmdLlmUsage(rest);
680
+ break;
681
+ case "llm:health":
682
+ await cmdLlmHealth();
683
+ break;
542
684
  case "whoami":
543
685
  cmdWhoami();
544
686
  break;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@otonix/cli",
3
- "version": "1.1.0",
3
+ "version": "1.2.1",
4
4
  "description": "CLI tool for the Otonix sovereign compute platform — initialize agents, generate API keys, register, monitor status, and manage infrastructure from the terminal.",
5
5
  "main": "dist/cli.js",
6
6
  "bin": {
@@ -38,7 +38,7 @@
38
38
  "node": ">=18"
39
39
  },
40
40
  "dependencies": {
41
- "@otonix/sdk": "^1.1.0"
41
+ "@otonix/sdk": "^1.2.0"
42
42
  },
43
43
  "devDependencies": {
44
44
  "tsup": "^8.0.0",