@otonix/cli 1.0.0 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +178 -6
- package/package.json +2 -2
package/dist/cli.js
CHANGED
|
@@ -28,7 +28,7 @@ var import_sdk = require("@otonix/sdk");
|
|
|
28
28
|
var fs = __toESM(require("fs"));
|
|
29
29
|
var path = __toESM(require("path"));
|
|
30
30
|
var readline = __toESM(require("readline"));
|
|
31
|
-
var VERSION = "1.0.0";
|
|
31
|
+
var VERSION = "1.2.0";
|
|
32
32
|
var CONFIG_DIR = path.join(process.env.HOME || "~", ".otonix");
|
|
33
33
|
var CONFIG_FILE = path.join(CONFIG_DIR, "config.json");
|
|
34
34
|
function loadConfig() {
|
|
@@ -106,7 +106,13 @@ var HELP = `
|
|
|
106
106
|
domains List registered domains
|
|
107
107
|
sandboxes List VPS sandboxes
|
|
108
108
|
engine Show autonomic engine status
|
|
109
|
+
marketplace List marketplace services
|
|
109
110
|
x402 Show x402 payment config
|
|
111
|
+
llm:init <key> Configure Bankr LLM Gateway API key
|
|
112
|
+
llm:models List available LLM models
|
|
113
|
+
llm:chat <prompt> Send a chat completion request
|
|
114
|
+
llm:usage [--days N] Show LLM usage summary
|
|
115
|
+
llm:health Check gateway health
|
|
110
116
|
whoami Show current configuration
|
|
111
117
|
version Show CLI version
|
|
112
118
|
help Show this help
|
|
@@ -118,6 +124,8 @@ var HELP = `
|
|
|
118
124
|
otonix heartbeat:loop
|
|
119
125
|
otonix log "Trade executed BTC/USDC" --category trading
|
|
120
126
|
otonix actions --limit 20
|
|
127
|
+
otonix llm:init bk_YOUR_API_KEY
|
|
128
|
+
otonix llm:chat "Explain smart contracts" --model claude-haiku-4.5
|
|
121
129
|
`;
|
|
122
130
|
async function cmdInit() {
|
|
123
131
|
console.log("\n Otonix CLI Setup\n");
|
|
@@ -437,6 +445,32 @@ async function cmdEngine() {
|
|
|
437
445
|
}
|
|
438
446
|
console.log();
|
|
439
447
|
}
|
|
448
|
+
async function cmdMarketplace() {
|
|
449
|
+
const client = getClient();
|
|
450
|
+
const services = await client.listMarketplaceServices();
|
|
451
|
+
if (!services.length) {
|
|
452
|
+
console.log("\n No marketplace services available.\n");
|
|
453
|
+
return;
|
|
454
|
+
}
|
|
455
|
+
console.log(`
|
|
456
|
+
Agent Marketplace \u2014 ${services.length} services
|
|
457
|
+
`);
|
|
458
|
+
printTable(
|
|
459
|
+
services.map((s) => ({
|
|
460
|
+
ID: s.id,
|
|
461
|
+
Name: s.name,
|
|
462
|
+
Category: s.category,
|
|
463
|
+
Price: `${s.priceOtx} $OTX/${s.priceUnit}`,
|
|
464
|
+
Uptime: `${s.uptime}%`,
|
|
465
|
+
Rating: `${s.rating}/5`,
|
|
466
|
+
Jobs: s.jobsCompleted,
|
|
467
|
+
Status: s.status
|
|
468
|
+
}))
|
|
469
|
+
);
|
|
470
|
+
console.log(`
|
|
471
|
+
Use 'curl https://app.otonix.tech/api/marketplace/services/<id>' for details.
|
|
472
|
+
`);
|
|
473
|
+
}
|
|
440
474
|
async function cmdX402() {
|
|
441
475
|
const client = getClient();
|
|
442
476
|
const config = await client.getX402Config();
|
|
@@ -449,6 +483,125 @@ async function cmdX402() {
|
|
|
449
483
|
console.log(` Facilitator: ${config.facilitatorUrl}
|
|
450
484
|
`);
|
|
451
485
|
}
|
|
486
|
+
function getBankrClient() {
|
|
487
|
+
const config = loadConfig();
|
|
488
|
+
if (!config?.bankrApiKey) {
|
|
489
|
+
console.error(" Error: Bankr LLM key not configured. Run 'otonix llm:init <key>' first.");
|
|
490
|
+
process.exit(1);
|
|
491
|
+
}
|
|
492
|
+
return new import_sdk.BankrLLM({ apiKey: config.bankrApiKey });
|
|
493
|
+
}
|
|
494
|
+
function cmdLlmInit(key) {
|
|
495
|
+
if (!key) {
|
|
496
|
+
console.error(" Usage: otonix llm:init <bankr_api_key>");
|
|
497
|
+
process.exit(1);
|
|
498
|
+
}
|
|
499
|
+
if (!key.startsWith("bk_")) {
|
|
500
|
+
console.error(" Error: Bankr API key must start with 'bk_'");
|
|
501
|
+
process.exit(1);
|
|
502
|
+
}
|
|
503
|
+
const config = loadConfig();
|
|
504
|
+
if (!config) {
|
|
505
|
+
console.error(" Error: Run 'otonix init' first to configure Otonix.");
|
|
506
|
+
process.exit(1);
|
|
507
|
+
}
|
|
508
|
+
config.bankrApiKey = key;
|
|
509
|
+
saveConfig(config);
|
|
510
|
+
console.log(`
|
|
511
|
+
Bankr LLM Gateway key saved.`);
|
|
512
|
+
console.log(` Gateway: https://llm.bankr.bot`);
|
|
513
|
+
console.log(` Key: ${key.slice(0, 6)}...${key.slice(-4)}
|
|
514
|
+
`);
|
|
515
|
+
}
|
|
516
|
+
async function cmdLlmModels() {
|
|
517
|
+
const llm = getBankrClient();
|
|
518
|
+
const models = await llm.listModels();
|
|
519
|
+
if (!models.length) {
|
|
520
|
+
console.log("\n No models available.\n");
|
|
521
|
+
return;
|
|
522
|
+
}
|
|
523
|
+
console.log(`
|
|
524
|
+
Available Models \u2014 ${models.length} total
|
|
525
|
+
`);
|
|
526
|
+
printTable(
|
|
527
|
+
models.map((m) => ({
|
|
528
|
+
Model: m.id,
|
|
529
|
+
Provider: m.owned_by
|
|
530
|
+
}))
|
|
531
|
+
);
|
|
532
|
+
console.log();
|
|
533
|
+
}
|
|
534
|
+
async function cmdLlmChat(args) {
|
|
535
|
+
const llm = getBankrClient();
|
|
536
|
+
let model = "claude-haiku-4.5";
|
|
537
|
+
const modelIdx = args.indexOf("--model");
|
|
538
|
+
if (modelIdx !== -1 && args[modelIdx + 1]) {
|
|
539
|
+
model = args[modelIdx + 1];
|
|
540
|
+
args.splice(modelIdx, 2);
|
|
541
|
+
}
|
|
542
|
+
const promptText = args.join(" ");
|
|
543
|
+
if (!promptText) {
|
|
544
|
+
console.error(" Usage: otonix llm:chat <prompt> [--model <model>]");
|
|
545
|
+
process.exit(1);
|
|
546
|
+
}
|
|
547
|
+
console.log(`
|
|
548
|
+
Model: ${model}`);
|
|
549
|
+
console.log(` Prompt: ${promptText}
|
|
550
|
+
`);
|
|
551
|
+
console.log(" Thinking...\n");
|
|
552
|
+
const result = await llm.chat({
|
|
553
|
+
model,
|
|
554
|
+
messages: [{ role: "user", content: promptText }]
|
|
555
|
+
});
|
|
556
|
+
const content = result.choices?.[0]?.message?.content || "No response";
|
|
557
|
+
console.log(` ${content.split("\n").join("\n ")}`);
|
|
558
|
+
console.log(`
|
|
559
|
+
Tokens: ${result.usage?.prompt_tokens || 0} in / ${result.usage?.completion_tokens || 0} out`);
|
|
560
|
+
console.log(` Model: ${result.model}
|
|
561
|
+
`);
|
|
562
|
+
}
|
|
563
|
+
async function cmdLlmUsage(args) {
|
|
564
|
+
const llm = getBankrClient();
|
|
565
|
+
let days = 30;
|
|
566
|
+
const daysIdx = args.indexOf("--days");
|
|
567
|
+
if (daysIdx !== -1 && args[daysIdx + 1]) {
|
|
568
|
+
days = parseInt(args[daysIdx + 1]) || 30;
|
|
569
|
+
}
|
|
570
|
+
const usage = await llm.getUsage(days);
|
|
571
|
+
console.log(`
|
|
572
|
+
LLM Usage (${days} days)
|
|
573
|
+
`);
|
|
574
|
+
console.log(` Total Requests: ${usage.totals.totalRequests}`);
|
|
575
|
+
console.log(` Input Tokens: ${usage.totals.totalInputTokens.toLocaleString()}`);
|
|
576
|
+
console.log(` Output Tokens: ${usage.totals.totalOutputTokens.toLocaleString()}`);
|
|
577
|
+
console.log(` Total Cost: $${usage.totals.totalCost.toFixed(2)}`);
|
|
578
|
+
if (usage.byModel?.length) {
|
|
579
|
+
console.log(`
|
|
580
|
+
By Model:`);
|
|
581
|
+
printTable(
|
|
582
|
+
usage.byModel.map((m) => ({
|
|
583
|
+
Model: m.model,
|
|
584
|
+
Provider: m.provider,
|
|
585
|
+
Requests: m.requests,
|
|
586
|
+
Cost: `$${m.totalCost.toFixed(2)}`
|
|
587
|
+
}))
|
|
588
|
+
);
|
|
589
|
+
}
|
|
590
|
+
console.log();
|
|
591
|
+
}
|
|
592
|
+
async function cmdLlmHealth() {
|
|
593
|
+
const llm = getBankrClient();
|
|
594
|
+
const health = await llm.checkHealth();
|
|
595
|
+
console.log(`
|
|
596
|
+
LLM Gateway Health:`);
|
|
597
|
+
console.log(` Status: ${health.status}`);
|
|
598
|
+
if (health.providers) {
|
|
599
|
+
for (const [name, ok] of Object.entries(health.providers)) {
|
|
600
|
+
console.log(` ${name}: ${ok ? "online" : "offline"}`);
|
|
601
|
+
}
|
|
602
|
+
}
|
|
603
|
+
console.log();
|
|
604
|
+
}
|
|
452
605
|
function cmdWhoami() {
|
|
453
606
|
const config = loadConfig();
|
|
454
607
|
if (!config) {
|
|
@@ -457,11 +610,12 @@ function cmdWhoami() {
|
|
|
457
610
|
}
|
|
458
611
|
console.log(`
|
|
459
612
|
Current Config:`);
|
|
460
|
-
console.log(` Endpoint:
|
|
461
|
-
console.log(` API Key:
|
|
462
|
-
console.log(` Agent ID:
|
|
463
|
-
console.log(` Agent:
|
|
464
|
-
console.log(`
|
|
613
|
+
console.log(` Endpoint: ${config.endpoint}`);
|
|
614
|
+
console.log(` API Key: ${config.apiKey.slice(0, 12)}...${config.apiKey.slice(-4)}`);
|
|
615
|
+
console.log(` Agent ID: ${config.agentId || "not registered"}`);
|
|
616
|
+
console.log(` Agent: ${config.agentName || "\u2014"}`);
|
|
617
|
+
console.log(` Bankr LLM: ${config.bankrApiKey ? config.bankrApiKey.slice(0, 6) + "..." + config.bankrApiKey.slice(-4) : "not configured"}`);
|
|
618
|
+
console.log(` Config: ${CONFIG_FILE}
|
|
465
619
|
`);
|
|
466
620
|
}
|
|
467
621
|
async function main() {
|
|
@@ -506,9 +660,27 @@ async function main() {
|
|
|
506
660
|
case "engine":
|
|
507
661
|
await cmdEngine();
|
|
508
662
|
break;
|
|
663
|
+
case "marketplace":
|
|
664
|
+
await cmdMarketplace();
|
|
665
|
+
break;
|
|
509
666
|
case "x402":
|
|
510
667
|
await cmdX402();
|
|
511
668
|
break;
|
|
669
|
+
case "llm:init":
|
|
670
|
+
cmdLlmInit(rest[0]);
|
|
671
|
+
break;
|
|
672
|
+
case "llm:models":
|
|
673
|
+
await cmdLlmModels();
|
|
674
|
+
break;
|
|
675
|
+
case "llm:chat":
|
|
676
|
+
await cmdLlmChat(rest);
|
|
677
|
+
break;
|
|
678
|
+
case "llm:usage":
|
|
679
|
+
await cmdLlmUsage(rest);
|
|
680
|
+
break;
|
|
681
|
+
case "llm:health":
|
|
682
|
+
await cmdLlmHealth();
|
|
683
|
+
break;
|
|
512
684
|
case "whoami":
|
|
513
685
|
cmdWhoami();
|
|
514
686
|
break;
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@otonix/cli",
|
|
3
|
-
"version": "1.0.0",
|
|
3
|
+
"version": "1.2.0",
|
|
4
4
|
"description": "CLI tool for the Otonix sovereign compute platform — initialize agents, generate API keys, register, monitor status, and manage infrastructure from the terminal.",
|
|
5
5
|
"main": "dist/cli.js",
|
|
6
6
|
"bin": {
|
|
@@ -38,7 +38,7 @@
|
|
|
38
38
|
"node": ">=18"
|
|
39
39
|
},
|
|
40
40
|
"dependencies": {
|
|
41
|
-
"@otonix/sdk": "^1.
|
|
41
|
+
"@otonix/sdk": "^1.2.0"
|
|
42
42
|
},
|
|
43
43
|
"devDependencies": {
|
|
44
44
|
"tsup": "^8.0.0",
|