gemini-cli-pro 0.0.3-snapshot

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,56 @@
1
+ # Gemini CLI
2
+
3
+ CLI em Python para automacao de codificacao com Gemini, roteamento dinamico Flash/Pro e sincronizacao de RAG local.
4
+
5
+ ## Requisitos
6
+
7
+ - Node.js 18+
8
+ - Python 3.10+
9
+ - `GOOGLE_API_KEY` configurada
10
+ - Dependencia Python: `google-generativeai`
11
+
12
+ ## Instalacao global (npm)
13
+
14
+ Opcao direta:
15
+
16
+ ```bash
17
+ npm install -g gemini-cli-pro
18
+ ```
19
+
20
+ Com fallback automatico para `gemini-cli`:
21
+
22
+ ```bash
23
+ npm run install:global
24
+ ```
25
+
26
+ ## Publicacao no npm
27
+
28
+ Antes de publicar, o script garante o nome:
29
+ - `gemini-cli-pro`
30
+
31
+ ```bash
32
+ npm run publish:npm
33
+ ```
34
+
35
+ ## Execucao
36
+
37
+ ```bash
38
+ gemini
39
+ ```
40
+
41
+ Comandos do chat:
42
+ - `/auto`
43
+ - `/pro`
44
+ - `/flash`
45
+ - `/sync`
46
+ - `/reload-mcps`
47
+ - `/status`
48
+ - `/help`
49
+ - `/exit`
50
+
51
+ ## MCPs opcionais
52
+
53
+ O arquivo `~/.cache/gemini-history-chats/config.json` controla os MCPs (`local_rag`, `github`, `memory`).
54
+ Se algum MCP nao estiver disponivel, o CLI continua funcionando no modo API.
55
+ O MCP local de RAG usa `own-rag-cli` como nativo e cai para `own-rag` se existir.
56
+ Em toda nova sessao, os MCPs sao recarregados automaticamente no startup.
package/bin/gemini ADDED
@@ -0,0 +1,19 @@
1
#!/usr/bin/env bash
# Launcher for the Gemini CLI: validates the Python runtime and the
# google-generativeai dependency, then execs the Python tool in-place.
set -euo pipefail

# Resolve the package root relative to this script so it works from any CWD.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_DIR="$(cd "${SCRIPT_DIR}/.." && pwd)"
# The interpreter can be overridden via the PYTHON_BIN environment variable.
PYTHON_BIN="${PYTHON_BIN:-python3}"

if ! command -v "${PYTHON_BIN}" >/dev/null 2>&1; then
echo "Erro: python3 nao encontrado no PATH." >&2
exit 1
fi

# Probe the required Python dependency up front for a clearer error message.
if ! "${PYTHON_BIN}" -c "import google.generativeai" >/dev/null 2>&1; then
echo "Dependencia ausente: google-generativeai" >&2
echo "Instale com: python3 -m pip install --upgrade google-generativeai" >&2
exit 1
fi

# exec replaces this shell so signals and exit codes pass through unchanged.
exec "${PYTHON_BIN}" "${PROJECT_DIR}/gemini_tool.py" "$@"
package/gemini_tool.py ADDED
@@ -0,0 +1,623 @@
1
#!/usr/bin/env python3
"""Gemini CLI with coding automation and a dynamic local RAG."""

from __future__ import annotations

import hashlib
import json
import os
import shutil
import subprocess
import sys
from dataclasses import dataclass
from pathlib import Path
from typing import Any, Callable

import google.generativeai as genai

# All persistent state (per-project histories, MCP config, memory store)
# lives under a single cache directory in the user's home.
CACHE_DIR = Path.home() / ".cache" / "gemini-history-chats"
CONFIG_PATH = CACHE_DIR / "config.json"
MEMORY_PATH = CACHE_DIR / "memory_store.json"

# Model ids used by the Flash/Pro router.
MODEL_FLASH = "gemini-2.5-flash"
MODEL_PRO = "gemini-2.5-pro"
# RAG CLI binary names: the native CLI is preferred, the fallback is probed second.
RAG_NATIVE_COMMAND = "own-rag-cli"
RAG_FALLBACK_COMMAND = "own-rag"
26
+
27
+
28
def ensure_cache_layout() -> None:
    """Create the cache directory, the MCP config file and the memory store.

    An existing config is deep-merged over the defaults (user keys win), so
    new default MCP entries appear without clobbering user customizations.
    """
    CACHE_DIR.mkdir(parents=True, exist_ok=True)

    default_config = {
        "version": 1,
        "mcps": {
            "local_rag": {
                "enabled": True,
                "transport": "cli",
                "command": RAG_NATIVE_COMMAND,
                "description": "Local RAG via own-rag-cli (fallback own-rag)",
            },
            "github": {
                "enabled": True,
                "transport": "simulated",
                "description": "GitHub MCP mock adapter",
            },
            "memory": {
                "enabled": True,
                "transport": "local-file",
                "file": str(MEMORY_PATH),
                "description": "Memory MCP mock adapter",
            },
        },
    }

    if CONFIG_PATH.exists():
        # A corrupt config degrades to {} so the defaults are restored.
        try:
            existing = json.loads(CONFIG_PATH.read_text(encoding="utf-8"))
        except json.JSONDecodeError:
            existing = {}
        payload = merge_dict(default_config, existing)
    else:
        payload = default_config

    CONFIG_PATH.write_text(
        json.dumps(payload, indent=2, ensure_ascii=False),
        encoding="utf-8",
    )

    if not MEMORY_PATH.exists():
        MEMORY_PATH.write_text("{}\n", encoding="utf-8")
73
+
74
+
75
def merge_dict(base: dict[str, Any], override: dict[str, Any]) -> dict[str, Any]:
    """Return a copy of *base* with *override* applied on top.

    Values that are dicts on both sides are merged recursively; any other
    value in *override* replaces the one in *base*. Neither input is mutated.
    """
    merged: dict[str, Any] = dict(base)
    for key, incoming in override.items():
        current = merged.get(key)
        if isinstance(incoming, dict) and isinstance(current, dict):
            merged[key] = merge_dict(current, incoming)
        else:
            merged[key] = incoming
    return merged
83
+
84
+
85
def require_api_key() -> str:
    """Return GOOGLE_API_KEY from the environment, exiting(1) when unset/empty."""
    api_key = os.getenv("GOOGLE_API_KEY")
    if api_key:
        return api_key
    print("Erro: defina GOOGLE_API_KEY antes de iniciar.", file=sys.stderr)
    sys.exit(1)
91
+
92
+
93
def load_config() -> dict[str, Any]:
    """Read the MCP config file; return {} when missing, malformed or not a dict."""
    try:
        raw = json.loads(CONFIG_PATH.read_text(encoding="utf-8"))
    except (FileNotFoundError, json.JSONDecodeError):
        return {}
    return raw if isinstance(raw, dict) else {}
101
+
102
+
103
def get_project_hash() -> str:
    """Return a stable SHA-256 hex digest identifying the current working directory."""
    cwd = os.path.abspath(os.getcwd())
    digest = hashlib.sha256(cwd.encode("utf-8"))
    return digest.hexdigest()
106
+
107
+
108
def history_path_for_cwd() -> Path:
    """Path of the chat-history JSON file for this project (one file per cwd hash)."""
    filename = f"{get_project_hash()}.json"
    return CACHE_DIR / filename
110
+
111
+
112
def load_history() -> list[dict[str, Any]]:
    """Load and sanitize the saved chat history for the current project.

    Returns [] when the file is missing, unparsable, or not a JSON list.
    Only well-formed turns survive: a role of "user"/"model" plus a list
    of parts, each part coerced to str.
    """
    path = history_path_for_cwd()
    if not path.exists():
        return []

    try:
        raw = json.loads(path.read_text(encoding="utf-8"))
    except json.JSONDecodeError:
        return []
    if not isinstance(raw, list):
        return []

    return [
        {"role": entry["role"], "parts": [str(part) for part in entry["parts"]]}
        for entry in raw
        if isinstance(entry, dict)
        and entry.get("role") in {"user", "model"}
        and isinstance(entry.get("parts"), list)
    ]
134
+
135
+
136
def save_history(history: list[dict[str, Any]]) -> None:
    """Persist the chat history as pretty-printed JSON in the per-project cache file."""
    serialized = json.dumps(history, indent=2, ensure_ascii=False)
    history_path_for_cwd().write_text(serialized, encoding="utf-8")
142
+
143
+
144
def load_memory_store() -> dict[str, str]:
    """Load the memory-MCP key/value store; {} when missing or malformed.

    Keys and values are coerced to str so callers always see a str->str map.
    """
    try:
        raw = json.loads(MEMORY_PATH.read_text(encoding="utf-8"))
    except (FileNotFoundError, json.JSONDecodeError):
        return {}

    if isinstance(raw, dict):
        return {str(key): str(value) for key, value in raw.items()}
    return {}
154
+
155
+
156
def save_memory_store(data: dict[str, str]) -> None:
    """Write the memory-MCP store to disk as pretty-printed JSON."""
    serialized = json.dumps(data, indent=2, ensure_ascii=False)
    MEMORY_PATH.write_text(serialized, encoding="utf-8")
161
+
162
+
163
def _run_command(cmd: list[str], timeout: int = 90) -> subprocess.CompletedProcess[str]:
    """Run *cmd* capturing text output; a non-zero exit code does not raise.

    NOTE(review): subprocess.TimeoutExpired (and OSError on spawn failure)
    still propagate to the caller.
    """
    return subprocess.run(
        cmd,
        check=False,
        text=True,
        capture_output=True,
        timeout=timeout,
    )
171
+
172
+
173
def emit_plin() -> None:
    """Play a short notification sound, falling back to the terminal bell.

    Tries the stock Ubuntu/freedesktop sounds via paplay, then
    canberra-gtk-play, and finally prints an ASCII bell. Playback is
    fire-and-forget (Popen with output discarded).
    """
    sound_candidates = [
        "/usr/share/sounds/ubuntu/stereo/message.ogg",
        "/usr/share/sounds/freedesktop/stereo/complete.oga",
    ]

    for sound in sound_candidates:
        if Path(sound).exists() and shutil.which("paplay"):
            subprocess.Popen(  # noqa: S603
                ["paplay", sound],
                stdout=subprocess.DEVNULL,
                stderr=subprocess.DEVNULL,
            )
            return

    if shutil.which("canberra-gtk-play"):
        subprocess.Popen(  # noqa: S603
            ["canberra-gtk-play", "-i", "message"],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        return

    # Last resort: ring the terminal bell.
    print("\a", end="", flush=True)
201
+
202
+
203
def _config_rag_candidates(config: dict[str, Any]) -> list[str]:
    """Ordered, de-duplicated RAG CLI names to probe: native, configured, fallback."""
    candidates: list[str] = [RAG_NATIVE_COMMAND]

    mcps = config.get("mcps", {})
    local_rag = mcps.get("local_rag", {}) if isinstance(mcps, dict) else {}
    if isinstance(local_rag, dict):
        configured = local_rag.get("command")
        # "command" may be a single name or a list of names; ignore other shapes.
        if isinstance(configured, str) and configured.strip():
            candidates.append(configured.strip())
        elif isinstance(configured, list):
            candidates.extend(
                entry.strip()
                for entry in configured
                if isinstance(entry, str) and entry.strip()
            )

    candidates.append(RAG_FALLBACK_COMMAND)
    # dict.fromkeys keeps first-seen order while dropping duplicates.
    return list(dict.fromkeys(candidates))
219
+
220
+
221
def resolve_rag_command(config: dict[str, Any] | None = None) -> str | None:
    """Return the first RAG CLI candidate found on PATH, or None if none is installed.

    When *config* is not a dict (including None), the on-disk config is loaded.
    """
    effective = config if isinstance(config, dict) else load_config()
    return next(
        (name for name in _config_rag_candidates(effective) if shutil.which(name)),
        None,
    )
227
+
228
+
229
def check_own_rag_installed(config: dict[str, Any] | None = None) -> bool:
    """True when some RAG CLI (own-rag-cli, own-rag, or a configured one) is on PATH."""
    return resolve_rag_command(config) is not None
231
+
232
+
233
def rag_sync_current_dir(config: dict[str, Any] | None = None) -> tuple[bool, str]:
    """Run `<rag> sync .` against the current directory.

    Returns:
        (ok, status): status is "OK" on success, "MISSING" when no RAG CLI is
        found on PATH, and "FAIL" on a non-zero exit, a timeout, or a spawn
        error.
    """
    rag_command = resolve_rag_command(config)
    if not rag_command:
        return False, "MISSING"

    # _run_command raises TimeoutExpired/OSError; callers consume only the
    # status tuple, so a hung or unspawnable RAG CLI must not crash the CLI.
    try:
        result = _run_command([rag_command, "sync", "."])
    except (subprocess.TimeoutExpired, OSError):
        return False, "FAIL"

    if result.returncode == 0:
        return True, "OK"
    return False, "FAIL"
242
+
243
+
244
def search_local_context(query: str) -> str:
    """Search the local RAG index via `<rag> search <query>`.

    Tool-call contract: always returns a human-readable string. Missing CLI,
    empty query, non-zero exit, timeout and spawn errors are all reported as
    messages instead of raising.
    """
    rag_command = resolve_rag_command()
    if not rag_command:
        return "own-rag-cli/own-rag nao encontrado no PATH."

    query = (query or "").strip()
    if not query:
        return "Consulta vazia."

    # _run_command raises TimeoutExpired/OSError; keep the string contract.
    try:
        result = _run_command([rag_command, "search", query])
    except (subprocess.TimeoutExpired, OSError) as exc:
        return f"Erro no {rag_command} search: {exc}"

    stdout = (result.stdout or "").strip()
    stderr = (result.stderr or "").strip()

    if result.returncode == 0:
        return stdout or "Nenhum resultado no RAG local."
    return (
        f"Erro no {rag_command} search (code={result.returncode}): "
        f"{stderr or stdout or 'sem detalhes'}"
    )
264
+
265
+
266
def github_tool(action: str, repo: str, path: str = "") -> str:
    """Mock GitHub MCP adapter: echoes the normalized request back as JSON.

    *action* is lowercased and all arguments trimmed; requires action and repo.
    """
    action = (action or "").strip().lower()
    repo = (repo or "").strip()
    path = (path or "").strip()

    if not (action and repo):
        return "github_tool: informe action e repo."

    return json.dumps(
        {
            "mcp": "github",
            "mode": "simulated",
            "action": action,
            "repo": repo,
            "path": path,
            "status": "ok",
            "hint": "Integre aqui chamadas reais ao MCP de GitHub quando necessario.",
        },
        ensure_ascii=False,
    )
285
+
286
+
287
def memory_tool(action: str, key: str, value: str = "") -> str:
    """Mock Memory MCP: persists key/value facts across sessions in MEMORY_PATH.

    Supported actions: set/save/upsert, get/read, delete/del/remove, list/keys.
    Always returns a human-readable string (JSON for list).
    """
    action = (action or "").strip().lower()
    key = (key or "").strip()
    value = (value or "").strip()

    store = load_memory_store()

    if action in {"set", "save", "upsert"}:
        if not key:
            return "memory_tool(set): informe key."
        store[key] = value
        save_memory_store(store)
        return f"memory_tool: salvo '{key}'."

    if action in {"get", "read"}:
        if not key:
            return "memory_tool(get): informe key."
        # Membership test (not truthiness): a key stored with an empty value
        # must not be misreported as missing.
        if key in store:
            return store[key]
        return f"memory_tool: chave '{key}' nao encontrada."

    if action in {"delete", "del", "remove"}:
        if not key:
            return "memory_tool(delete): informe key."
        existed = key in store
        store.pop(key, None)
        save_memory_store(store)
        return "memory_tool: chave removida." if existed else "memory_tool: chave nao existia."

    if action in {"list", "keys"}:
        keys = sorted(store.keys())
        return json.dumps({"keys": keys}, ensure_ascii=False)

    return "memory_tool: action invalida. Use set/get/delete/list."
320
+
321
+
322
def as_executable(func: Callable[..., str]) -> Any:
    """Register a tool as genai.Executable when the SDK provides that class.

    Returns the wrapped executable on success, otherwise the plain function
    (which the SDK also accepts as a tool).
    """
    executable_cls = getattr(genai, "Executable", None)
    if executable_cls is None:
        return func

    # Compatibility with SDK constructor variants: probe the direct
    # constructor first, then the factory classmethods when present.
    constructors: list[Callable[[], Any]] = [lambda: executable_cls(func)]
    if hasattr(executable_cls, "from_function"):
        constructors.append(lambda: executable_cls.from_function(func))
    if hasattr(executable_cls, "from_callable"):
        constructors.append(lambda: executable_cls.from_callable(func))

    # First constructor that builds a non-None object wins; any failure
    # falls through to the next variant.
    for build in constructors:
        try:
            built = build()
            if built is not None:
                return built
        except Exception:
            continue

    return func
344
+
345
+
346
def _mcp_enabled(config: dict[str, Any], mcp_name: str, default: bool = True) -> bool:
    """Read mcps.<mcp_name>.enabled from *config*; malformed sections yield *default*."""
    mcps = config.get("mcps", {})
    if not isinstance(mcps, dict):
        return default
    section = mcps.get(mcp_name, {})
    if not isinstance(section, dict):
        return default
    return bool(section.get("enabled", default))
355
+
356
+
357
def build_tools(config: dict[str, Any]) -> list[Any]:
    """Assemble the tool list for enabled MCPs.

    local_rag is added only when a RAG CLI is actually installed; github and
    memory adapters depend solely on their enabled flags.
    """
    tools: list[Any] = []
    rag_wanted = _mcp_enabled(config, "local_rag", default=True)
    if rag_wanted and check_own_rag_installed(config):
        tools.append(as_executable(search_local_context))
    if _mcp_enabled(config, "github", default=True):
        tools.append(as_executable(github_tool))
    if _mcp_enabled(config, "memory", default=True):
        tools.append(as_executable(memory_tool))
    return tools
366
+
367
+
368
@dataclass
class RouteDecision:
    """Outcome of model routing: the model to use plus a short reason string."""

    # Target model id (MODEL_FLASH or MODEL_PRO).
    model_name: str
    # Why this model was chosen (locked mode, Flash router answer, or heuristic).
    reason: str
372
+
373
+
374
class GeminiToolCLI:
    """Interactive chat loop with Flash/Pro routing, MCP tools and RAG sync."""

    def __init__(self) -> None:
        # Order matters: cache layout and API key must exist before the SDK
        # is configured and before MCPs/tools are loaded.
        ensure_cache_layout()
        genai.configure(api_key=require_api_key())
        self.mode = "AUTO"  # AUTO routes per message; PRO/FLASH are locked modes.
        self.current_model_name = MODEL_FLASH
        self.history = load_history()
        self.config: dict[str, Any] = {}
        self.tools: list[Any] = []
        self.tools_active = False
        self.tools_error = ""
        self.rag_command: str | None = None
        self.rag_ok = False
        self.rag_status = "MISSING"

        # Loads config/tools and syncs the RAG index silently on startup.
        self.reload_mcps(sync_rag=True, announce=False)

        self.model = self._build_model(self.current_model_name)
        self.chat = self.model.start_chat(history=self.history)

    def _build_model(self, model_name: str) -> genai.GenerativeModel:
        """Build a GenerativeModel, attaching tools when any are configured."""
        if not self.tools:
            return genai.GenerativeModel(model_name=model_name)
        try:
            return genai.GenerativeModel(model_name=model_name, tools=self.tools)
        except Exception as exc:
            # If the SDK does not support/accept tools in the current
            # environment, continue without MCPs and record the error.
            self.tools_active = False
            self.tools_error = str(exc)
            return genai.GenerativeModel(model_name=model_name)

    def _prompt_model_tag(self) -> str:
        """Short label for the active model, shown in the prompt."""
        if self.current_model_name == MODEL_PRO:
            return "PRO"
        return "FLASH"

    def _prompt(self) -> str:
        """Interactive prompt string, e.g. ``[FLASH][RAG:OK] >> ``."""
        return f"[{self._prompt_model_tag()}][RAG:{self.rag_status}] >> "

    def _switch_model_if_needed(self, model_name: str) -> None:
        """Rebuild model and chat session only when the target model changed."""
        if model_name == self.current_model_name:
            return
        self.current_model_name = model_name
        self.model = self._build_model(model_name)
        # A new chat is started from the accumulated history, so context carries over.
        self.chat = self.model.start_chat(history=self.history)

    def _heuristic_route(self, user_text: str) -> RouteDecision:
        """Keyword fallback router (Portuguese terms) used when the Flash router fails."""
        normalized = (user_text or "").lower()
        pro_keywords = [
            "cria",
            "criar",
            "criacao",
            "criação",
            "refatora",
            "refatoracao",
            "refatoração",
            "analisar bug",
            "analise de bug",
            "análise de bug",
            "debug",
            "arquitetura",
            "complex",
            "complexa",
        ]
        if any(keyword in normalized for keyword in pro_keywords):
            return RouteDecision(MODEL_PRO, "heuristica: tarefa potencialmente complexa")
        return RouteDecision(MODEL_FLASH, "heuristica: tarefa comum")

    def _flash_router(self, user_text: str) -> RouteDecision:
        """In /auto mode, ask Flash itself to decide between FLASH and PRO."""
        router_prompt = (
            "Voce e um roteador de modelos.\n"
            "Classifique a tarefa do usuario em apenas UMA palavra: PRO ou FLASH.\n"
            "Use PRO quando envolver criacao extensa, refatoracao complexa, analise profunda de bugs,"
            "ou design arquitetural.\n"
            "Caso contrario, use FLASH.\n"
            f"Tarefa: {user_text}"
        )

        try:
            router = genai.GenerativeModel(model_name=MODEL_FLASH)
            response = router.generate_content(router_prompt)
            answer = ((response.text if hasattr(response, "text") else "") or "").upper()
            # NOTE(review): "PRO" is checked first, so an answer containing
            # both words resolves to PRO.
            if "PRO" in answer:
                return RouteDecision(MODEL_PRO, "router flash")
            if "FLASH" in answer:
                return RouteDecision(MODEL_FLASH, "router flash")
        except Exception:
            pass

        # Any router failure or unparseable answer falls back to keywords.
        return self._heuristic_route(user_text)

    def route_for_input(self, user_text: str) -> RouteDecision:
        """Pick a model for this input: locked mode wins, otherwise the Flash router."""
        if self.mode == "PRO":
            return RouteDecision(MODEL_PRO, "modo travado /pro")
        if self.mode == "FLASH":
            return RouteDecision(MODEL_FLASH, "modo travado /flash")
        return self._flash_router(user_text)

    def _extract_text(self, chunk: Any) -> str:
        """Safely pull the text off a stream chunk; non-text chunks yield ""."""
        text = getattr(chunk, "text", None)
        if isinstance(text, str):
            return text
        return ""

    def _append_to_history(self, role: str, text: str) -> None:
        """Append one turn in the SDK's history format ({"role", "parts"})."""
        self.history.append({"role": role, "parts": [text]})

    def _print_help(self) -> None:
        """Print the list of slash commands."""
        print("Comandos: /auto, /pro, /flash, /sync, /reload-mcps, /status, /help, /exit")

    def _show_status(self) -> None:
        """Print a one-line session summary plus any recorded tools error."""
        tools_state = "ON" if self.tools_active else "OFF"
        rag_cmd = self.rag_command or "none"
        print(
            f"mode={self.mode} model={self._prompt_model_tag()} rag={self.rag_status} "
            f"rag_cmd={rag_cmd} tools={tools_state} history={history_path_for_cwd()}"
        )
        if self.tools_error:
            print(f"tools_error={self.tools_error}")

    def _resync_rag(self) -> None:
        """Re-run the RAG sync for the current directory and report the status."""
        rag_ok, rag_status = rag_sync_current_dir(self.config)
        self.rag_ok = rag_ok
        self.rag_status = rag_status
        print(f"RAG sync status: {self.rag_status}")

    def reload_mcps(self, sync_rag: bool = True, announce: bool = True) -> None:
        """Reload MCPs/config at session start and on the manual /reload command."""
        self.config = load_config()
        self.rag_command = resolve_rag_command(self.config)
        self.tools = build_tools(self.config)
        self.tools_active = bool(self.tools)
        self.tools_error = ""

        if sync_rag:
            self.rag_ok, self.rag_status = rag_sync_current_dir(self.config)
        else:
            # Without a sync, report OK purely on CLI availability.
            self.rag_ok = bool(self.rag_command)
            self.rag_status = "OK" if self.rag_ok else "MISSING"

        # During __init__ model/chat do not exist yet; afterwards rebuild both
        # so the new tool set takes effect.
        if hasattr(self, "model") and hasattr(self, "chat"):
            self.model = self._build_model(self.current_model_name)
            self.chat = self.model.start_chat(history=self.history)

        if announce:
            rag_info = self.rag_command or "indisponivel"
            print(
                f"MCPs recarregados. tools={'ON' if self.tools_active else 'OFF'} "
                f"rag={self.rag_status} cmd={rag_info}"
            )

    def run(self) -> int:
        """Main REPL: slash commands, per-message routing, streamed responses.

        Returns 0; exits on /exit, EOF, or Ctrl-C.
        """
        print(f"Gemini Tool iniciado em: {os.path.abspath(os.getcwd())}")
        print(f"Historico: {history_path_for_cwd()}")

        if not self.rag_ok:
            # NOTE(review): print with two comma-separated args inserts a
            # separator space, yielding a double space before "(".
            print(
                "Aviso: RAG nao esta OK. Use /sync para tentar novamente ",
                "(ou instale own-rag-cli/own-rag).",
            )

        self._print_help()

        while True:
            try:
                raw = input(self._prompt()).strip()
            except EOFError:
                print()
                break
            except KeyboardInterrupt:
                print("\nInterrompido.")
                break

            if not raw:
                continue

            # Slash commands are handled locally and never reach the model.
            if raw in {"/exit", "/quit", "sair"}:
                break
            if raw == "/help":
                self._print_help()
                continue
            if raw == "/status":
                self._show_status()
                continue
            if raw == "/sync":
                self._resync_rag()
                continue
            if raw in {"/reload-mcps", "/reload"}:
                self.reload_mcps(sync_rag=True, announce=True)
                continue
            if raw == "/auto":
                self.mode = "AUTO"
                print("Modo alterado para AUTO")
                continue
            if raw == "/pro":
                self.mode = "PRO"
                self._switch_model_if_needed(MODEL_PRO)
                print("Modo alterado para PRO")
                continue
            if raw == "/flash":
                self.mode = "FLASH"
                self._switch_model_if_needed(MODEL_FLASH)
                print("Modo alterado para FLASH")
                continue

            # Route, possibly swap model, then send the message.
            decision = self.route_for_input(raw)
            self._switch_model_if_needed(decision.model_name)

            self._append_to_history("user", raw)

            full_response = ""
            try:
                stream = self.chat.send_message(raw, stream=True)
                for chunk in stream:
                    piece = self._extract_text(chunk)
                    if piece:
                        full_response += piece
                        print(piece, end="", flush=True)
                print()
                emit_plin()
            except Exception as exc:
                print(f"Erro ao gerar resposta: {exc}")
                # Drop the user turn that produced the failure so the saved
                # history stays consistent.
                self.history.pop()
                emit_plin()
                continue

            self._append_to_history("model", full_response)
            save_history(self.history)

        # Final flush so the last state is persisted even after break paths.
        save_history(self.history)
        return 0
606
+
607
+
608
def main() -> int:
    """CLI entry point: construct the tool and run the interactive loop."""
    return GeminiToolCLI().run()
611
+
612
+
613
+ if __name__ == "__main__":
614
+ raise SystemExit(main())
615
+
616
+
617
+ # --- COMANDOS DE INSTALACAO E GIT ---
618
+ # chmod +x gemini_tool.py
619
+ # python3 -m pip install --upgrade google-generativeai
620
+ # git remote add origin https://github.com/JocsaPB/gemini-cli.git
621
+ # git add .
622
+ # git commit -m "Initial release of Gemini-CLI with RAG Sync"
623
+ # git push origin main
package/package.json ADDED
@@ -0,0 +1,34 @@
1
+ {
2
+ "name": "gemini-cli-pro",
3
+ "version": "0.0.3-snapshot",
4
+ "description": "Gemini CLI em Python com sync de RAG local e roteamento Flash/Pro",
5
+ "license": "MIT",
6
+ "bin": {
7
+ "gemini": "bin/gemini"
8
+ },
9
+ "files": [
10
+ "bin",
11
+ "scripts",
12
+ "gemini_tool.py",
13
+ "README.md"
14
+ ],
15
+ "scripts": {
16
+ "prepare:publish": "node scripts/prepare-publish.js",
17
+ "publish:npm": "npm run prepare:publish && npm publish --access public",
18
+ "install:global": "node scripts/install-global.js",
19
+ "pack:check": "npm pack --dry-run"
20
+ },
21
+ "publishConfig": {
22
+ "access": "public"
23
+ },
24
+ "engines": {
25
+ "node": ">=18"
26
+ },
27
+ "keywords": [
28
+ "gemini",
29
+ "cli",
30
+ "python",
31
+ "rag",
32
+ "automation"
33
+ ]
34
+ }
@@ -0,0 +1,23 @@
1
#!/usr/bin/env node
// Global installer with fallback: try the preferred package name first and
// fall back to the legacy name when that install fails.
const { spawnSync } = require("child_process");

// Run `npm install -g <pkgName>` inheriting stdio; true on exit code 0.
function install(pkgName) {
  const result = spawnSync("npm", ["install", "-g", pkgName], { stdio: "inherit" });
  return result.status === 0;
}

const preferred = "gemini-cli-pro";
const fallback = "gemini-cli";

console.log(`Tentando instalar: ${preferred}`);
if (install(preferred)) {
  process.exit(0);
}

console.log(`Falha ao instalar ${preferred}. Tentando ${fallback}...`);
if (install(fallback)) {
  process.exit(0);
}

// Both installs failed: report and exit non-zero.
console.error("Nao foi possivel instalar nem gemini-cli-pro nem gemini-cli.");
process.exit(1);
@@ -0,0 +1,13 @@
1
#!/usr/bin/env node
// Pre-publish step: force package.json "name" to the preferred publish name.
const fs = require("fs");
const path = require("path");

// package.json lives one directory above this scripts/ folder.
const projectDir = path.resolve(__dirname, "..");
const packageJsonPath = path.join(projectDir, "package.json");
const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, "utf8"));

const preferred = "gemini-cli-pro";
packageJson.name = preferred;
// Preserve 2-space formatting and the trailing newline npm expects.
fs.writeFileSync(packageJsonPath, `${JSON.stringify(packageJson, null, 2)}\n`, "utf8");

console.log(`package.json atualizado para publish com nome: ${packageJson.name}`);