@johpaz/hive-cli 1.0.4 → 1.0.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +2 -2
- package/src/commands/gateway.ts +1 -1
- package/src/commands/onboard.ts +69 -55
- package/src/commands/update.ts +11 -11
- package/src/index.ts +1 -1
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@johpaz/hive-cli",
-  "version": "1.0.4",
+  "version": "1.0.6",
   "description": "Hive CLI — Command line interface for the Hive AI Gateway",
   "bin": {
     "hive": "src/index.ts"
@@ -15,7 +15,7 @@
   },
   "dependencies": {
     "@clack/prompts": "^1.0.1",
-    "@johpaz/hive-core": "^1.0.
+    "@johpaz/hive-core": "^1.0.6",
     "js-yaml": "latest"
   },
   "devDependencies": {
package/src/commands/gateway.ts
CHANGED
@@ -51,7 +51,7 @@ export async function start(flags: string[]): Promise<void> {
 ║ ██║ ██║██║ ╚████╔╝ ███████╗ ║
 ║ ╚═╝ ╚═╝╚═╝ ╚═══╝ ╚══════╝ ║
 ║ ║
-║ Personal AI Gateway — v1.0.
+║ Personal AI Gateway — v1.0.6 ║
 ╚════════════════════════════════════════════╝
 `);

package/src/commands/onboard.ts
CHANGED
@@ -3,16 +3,16 @@ import * as fs from "fs";
 import * as path from "path";
 import * as yaml from "js-yaml";

-const VERSION = "1.0.
+const VERSION = "1.0.6";

 const DEFAULT_MODELS: Record<string, string> = {
-  anthropic: "claude-sonnet-4-
-  openai: "gpt-
-  gemini: "gemini-2.
+  anthropic: "claude-sonnet-4-6",
+  openai: "gpt-5.2",
+  gemini: "gemini-2.5-flash",
   deepseek: "deepseek-chat",
-  kimi: "
-  openrouter: "
-  ollama: "llama3.
+  kimi: "kimi-k2.5",
+  openrouter: "meta-llama/llama-3.3-70b-instruct",
+  ollama: "llama3.3:8b",
 };

 const PROVIDER_BASE_URLS: Record<string, string> = {
@@ -47,40 +47,43 @@ const API_KEY_LINKS: Record<string, string> = {

 const AVAILABLE_MODELS: Record<string, Array<{ value: string; label: string; hint?: string }>> = {
   anthropic: [
-    { value: "claude-sonnet-4-
-    { value: "claude-opus-4-
-    { value: "claude-haiku-4-
+    { value: "claude-sonnet-4-6", label: "Claude Sonnet 4.6", hint: "Recomendado — mejor equilibrio, 1M contexto" },
+    { value: "claude-opus-4-6", label: "Claude Opus 4.6", hint: "Más potente — agentic coding, 1M contexto" },
+    { value: "claude-haiku-4-6", label: "Claude Haiku 4.6", hint: "Más rápido y económico" },
   ],
   openai: [
-    { value: "gpt-
-    { value: "gpt-
-    { value: "
+    { value: "gpt-5.2", label: "GPT-5.2", hint: "Recomendado — 400K contexto, latest" },
+    { value: "gpt-5.1", label: "GPT-5.1", hint: "Versión anterior estable" },
+    { value: "gpt-5.2-codex", label: "GPT-5.2 Codex", hint: "Especializado en código" },
+    { value: "o4-mini", label: "o4-mini", hint: "Razonamiento avanzado, económico" },
   ],
   gemini: [
-    { value: "gemini-
-    { value: "gemini-2.
-    { value: "gemini-
+    { value: "gemini-3-flash-preview", label: "Gemini 3 Flash (Preview)", hint: "Frontier-class, muy económico" },
+    { value: "gemini-2.5-flash", label: "Gemini 2.5 Flash", hint: "Recomendado — estable, rápido" },
+    { value: "gemini-2.5-pro", label: "Gemini 2.5 Pro", hint: "Más potente — razonamiento profundo" },
+    { value: "gemini-3.1-pro-preview", label: "Gemini 3.1 Pro (Preview)", hint: "Latest — tareas complejas" },
   ],
   deepseek: [
-    { value: "deepseek-chat", label: "DeepSeek-V3", hint: "Recomendado — muy económico" },
-    { value: "deepseek-reasoner", label: "DeepSeek-R1", hint: "Razonamiento
+    { value: "deepseek-chat", label: "DeepSeek-V3", hint: "Recomendado — muy económico, capaz" },
+    { value: "deepseek-reasoner", label: "DeepSeek-R1", hint: "Razonamiento profundo" },
+    { value: "deepseek-coder", label: "DeepSeek Coder", hint: "Especializado en código" },
   ],
   kimi: [
-    { value: "
-    { value: "
-    { value: "
+    { value: "kimi-k2.5", label: "Kimi K2.5", hint: "Recomendado — multimodal, agentic, 1T params" },
+    { value: "kimi-k2-thinking", label: "Kimi K2 Thinking", hint: "Largo razonamiento" },
+    { value: "kimi-k2-turbo-preview", label: "Kimi K2 Turbo", hint: "Rápido, preview" },
   ],
   openrouter: [
-    { value: "
-    { value: "google/gemini-2.0-flash", label: "Gemini 2.0 Flash
-    { value: "deepseek/deepseek-
-    { value: "
+    { value: "meta-llama/llama-3.3-70b-instruct", label: "Llama 3.3 70B", hint: "Gratis — GPT-4 level" },
+    { value: "google/gemini-2.0-flash-exp:free", label: "Gemini 2.0 Flash", hint: "Gratis — 1M contexto" },
+    { value: "deepseek/deepseek-r1:free", label: "DeepSeek R1", hint: "Gratis — razonamiento fuerte" },
+    { value: "anthropic/claude-sonnet-4-6", label: "Claude Sonnet 4.6", hint: "Vía OpenRouter" },
   ],
   ollama: [
-    { value: "llama3.
-    { value: "
-    { value: "mistral", label: "Mistral 7B", hint: "
-    { value: "
+    { value: "llama3.3:8b", label: "Llama 3.3 8B", hint: "Recomendado — general, ~5GB RAM" },
+    { value: "qwen2.5:7b", label: "Qwen 2.5 7B", hint: "Multilingual, código, ~4.5GB RAM" },
+    { value: "mistral:7b", label: "Mistral 7B", hint: "Rápido, ~4GB RAM" },
+    { value: "phi4:14b", label: "Phi-4 14B", hint: "Mejor calidad, ~8GB RAM" },
   ],
 };

@@ -226,18 +229,42 @@ async function generateConfig(config: OnboardConfig): Promise<void> {
     fs.mkdirSync(hiveDir, { recursive: true });
   }

+  const baseUrlMap: Record<string, string> = {
+    gemini: "https://generativelanguage.googleapis.com/v1beta",
+    deepseek: "https://api.deepseek.com/v1",
+    kimi: "https://api.moonshot.cn/v1",
+    ollama: "http://localhost:11434/api",
+  };
+
+  const providersConfig: Record<string, Record<string, unknown>> = {};
+
+  if (config.provider !== "ollama" && config.apiKey) {
+    providersConfig[config.provider] = {
+      apiKey: config.apiKey,
+    };
+    if (baseUrlMap[config.provider]) {
+      providersConfig[config.provider].baseUrl = baseUrlMap[config.provider];
+    }
+  } else if (config.provider === "ollama") {
+    providersConfig[config.provider] = {
+      baseUrl: baseUrlMap[config.provider],
+    };
+  }
+
   const configObj: Record<string, unknown> = {
     name: config.agentName,
     version: VERSION,
     gateway: {
-      port: 18790,
       host: "127.0.0.1",
-
+      port: 18790,
+      authToken: generateToken(),
     },
-
-
-
-
+    models: {
+      defaultProvider: config.provider,
+      defaults: {
+        [config.provider]: config.model,
+      },
+      providers: providersConfig,
     },
     agents: {
       list: [
@@ -252,13 +279,10 @@ async function generateConfig(config: OnboardConfig): Promise<void> {
     },
     channels: {},
     skills: {
-      watch: true,
       allowBundled: [],
-      denyBundled: [],
     },
     sessions: {
       pruneAfterHours: 168,
-      pruneInterval: 24,
     },
     logging: {
       level: "info",
@@ -267,16 +291,6 @@ async function generateConfig(config: OnboardConfig): Promise<void> {
     },
   };

-  if (config.provider === "gemini") {
-    (configObj.model as Record<string, unknown>).baseUrl = "https://generativelanguage.googleapis.com/v1beta";
-  } else if (config.provider === "deepseek") {
-    (configObj.model as Record<string, unknown>).baseUrl = "https://api.deepseek.com/v1";
-  } else if (config.provider === "kimi") {
-    (configObj.model as Record<string, unknown>).baseUrl = "https://api.moonshot.cn/v1";
-  } else if (config.provider === "ollama") {
-    (configObj.model as Record<string, unknown>).baseUrl = "http://localhost:11434/api";
-  }
-
   if (config.channel === "telegram" && config.channelToken) {
     configObj.channels = {
       telegram: {
@@ -438,13 +452,13 @@ export async function onboard(): Promise<void> {
   const provider = await p.select({
     message: "¿Qué proveedor LLM quieres usar?",
     options: [
-      { value: "anthropic", label: "Anthropic (Claude)", hint: "Recomendado" },
-      { value: "openai", label: "OpenAI (GPT-
-      { value: "gemini", label: "Google Gemini", hint: "Gemini
-      { value: "deepseek", label: "DeepSeek", hint: "
-      { value: "kimi", label: "Kimi (Moonshot AI)", hint: "
-      { value: "openrouter", label: "OpenRouter", hint: "
-      { value: "ollama", label: "Ollama (local, sin costo
+      { value: "anthropic", label: "Anthropic (Claude)", hint: "Recomendado — Claude 4.6, 1M contexto" },
+      { value: "openai", label: "OpenAI (GPT-5)", hint: "GPT-5.2, 400K contexto" },
+      { value: "gemini", label: "Google Gemini", hint: "Gemini 3 Flash/Pro, 1M contexto" },
+      { value: "deepseek", label: "DeepSeek", hint: "V3/R1 — muy económico, 1/100 costo" },
+      { value: "kimi", label: "Kimi (Moonshot AI)", hint: "K2.5 — multimodal, agentic, 1T params" },
+      { value: "openrouter", label: "OpenRouter", hint: "Gratis: Llama 3.3 70B, Gemini Flash" },
+      { value: "ollama", label: "Ollama (local)", hint: "Llama 3.3, Qwen 2.5, sin costo" },
     ],
   });

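Note on the generateConfig hunks above: provider credentials and base URLs now live under models.providers instead of being patched onto configObj.model after the fact. A minimal sketch of the resulting models block, assuming the user picks deepseek, keeps the default deepseek-chat model, and supplies an API key (the key value is a placeholder):

    // Sketch only: shape of configObj.models after the new onboard.ts logic,
    // for provider = "deepseek", model = "deepseek-chat", apiKey supplied.
    const models = {
      defaultProvider: "deepseek",
      defaults: {
        deepseek: "deepseek-chat",
      },
      providers: {
        deepseek: {
          apiKey: "sk-...",                        // from config.apiKey (placeholder)
          baseUrl: "https://api.deepseek.com/v1",  // from baseUrlMap
        },
      },
    };

For ollama the same path emits only the baseUrl entry, since no API key is collected.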
package/src/commands/update.ts
CHANGED
@@ -1,12 +1,4 @@
-
-
-const PACKAGES = [
-  "@johpaz/hive-cli",
-  "@johpaz/hive-core",
-  "@johpaz/hive-sdk",
-  "@johpaz/hive-mcp",
-  "@johpaz/hive-skills",
-];
+const PACKAGES = ["@johpaz/hive-cli"];

 export async function update(): Promise<void> {
   console.log("🔄 Actualizando Hive...\n");
@@ -14,8 +6,16 @@ export async function update(): Promise<void> {
   for (const pkg of PACKAGES) {
     console.log(`Actualizando ${pkg}...`);
     try {
-
-
+      const proc = Bun.spawn(["bun", "install", "-g", `${pkg}@latest`], {
+        stdout: "inherit",
+        stderr: "inherit",
+      });
+      const exitCode = await proc.exited;
+      if (exitCode === 0) {
+        console.log(`✅ ${pkg} actualizado\n`);
+      } else {
+        console.log(`⚠️ Error actualizando ${pkg}\n`);
+      }
     } catch (e) {
       console.log(`⚠️ Error actualizando ${pkg}: ${(e as Error).message}\n`);
     }
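Note on the update.ts hunks above: PACKAGES now contains only @johpaz/hive-cli; the matching @johpaz/hive-core ^1.0.6 comes in through the package.json dependency bump, so one global install updates both. If the other packages ever need explicit updating again, the same spawn-and-check pattern extends directly; a minimal sketch (the helper name is hypothetical, the package names are the ones removed in this release):

    // Hypothetical helper reusing the Bun.spawn pattern from update.ts.
    async function updatePackages(packages: string[]): Promise<void> {
      for (const pkg of packages) {
        const proc = Bun.spawn(["bun", "install", "-g", `${pkg}@latest`], {
          stdout: "inherit",
          stderr: "inherit",
        });
        const exitCode = await proc.exited;
        console.log(exitCode === 0 ? `✅ ${pkg} actualizado` : `⚠️ Error actualizando ${pkg}`);
      }
    }

    // Example: restore the previous multi-package behaviour.
    // await updatePackages(["@johpaz/hive-cli", "@johpaz/hive-core", "@johpaz/hive-sdk",
    //                       "@johpaz/hive-mcp", "@johpaz/hive-skills"]);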
package/src/index.ts
CHANGED
@@ -14,7 +14,7 @@ import { securityAudit } from "./commands/security";
 import { installService } from "./commands/service";
 import { update } from "./commands/update";

-const VERSION = "1.0.
+const VERSION = "1.0.6";

 const HELP = `
 🐝 Hive — Personal AI Gateway v${VERSION}