codegpt-ai 1.2.0 → 1.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/bin/setup.js +27 -18
  2. package/chat.py +100 -48
  3. package/package.json +1 -1
package/bin/setup.js CHANGED
@@ -1,5 +1,10 @@
1
1
  #!/usr/bin/env node
2
- // Post-install: ensure Python deps are installed
2
+ /**
3
+ * Post-install: check environment, DO NOT auto-install pip packages.
4
+ * Users must explicitly run `ai setup` or `pip install` themselves.
5
+ * This prevents supply chain attacks via transitive dependency hijacking.
6
+ */
7
+
3
8
  const { execSync } = require("child_process");
4
9
 
5
10
  const pythonCmds = process.platform === "win32"
@@ -18,23 +23,27 @@ function findPython() {
18
23
 
19
24
  const python = findPython();
20
25
 
21
- if (!python) {
22
- console.log("\n CodeGPT installed but Python not found.");
23
- console.log(" Install Python from https://python.org");
24
- console.log(" Then run: pip install requests rich prompt-toolkit\n");
25
- process.exit(0);
26
- }
26
+ console.log("\n CodeGPT installed successfully.\n");
27
27
 
28
- // Install Python deps
29
- console.log(" Installing Python dependencies...");
30
- try {
31
- execSync(`${python} -m pip install requests rich prompt-toolkit --quiet`, {
32
- stdio: "inherit",
33
- });
34
- console.log(" Python dependencies installed.");
35
- } catch {
36
- console.log(" Warning: Could not install Python deps.");
37
- console.log(" Run manually: pip install requests rich prompt-toolkit");
28
+ if (python) {
29
+ // Check if deps are already installed
30
+ let depsOk = true;
31
+ try {
32
+ execSync(`${python} -c "import requests, rich, prompt_toolkit"`, { stdio: "pipe" });
33
+ } catch {
34
+ depsOk = false;
35
+ }
36
+
37
+ if (depsOk) {
38
+ console.log(" Python dependencies: ready");
39
+ } else {
40
+ console.log(" Python found but dependencies missing.");
41
+ console.log(" Run: pip install requests rich prompt-toolkit");
42
+ }
43
+ } else {
44
+ console.log(" Python not found — Node.js mode will be used.");
45
+ console.log(" Install Python for the full 80+ command experience.");
38
46
  }
39
47
 
40
- console.log("\n CodeGPT ready! Type: ai\n");
48
+ console.log("\n Type: ai");
49
+ console.log(" Docs: https://github.com/CCguvycu/codegpt\n");
package/chat.py CHANGED
@@ -803,8 +803,8 @@ def build_sidebar():
803
803
 
804
804
 
805
805
  def print_with_sidebar(panel):
806
- """Print a panel with sidebar if enabled."""
807
- if not sidebar_enabled or console.width < 80:
806
+ """Print a panel with sidebar if enabled. Auto-disabled on small screens."""
807
+ if not sidebar_enabled or is_compact() or console.width < 80:
808
808
  console.print(panel)
809
809
  return
810
810
 
@@ -832,11 +832,16 @@ def tw():
832
832
  return min(console.width, 100)
833
833
 
834
834
 
835
+ def is_compact():
836
+ """Check if terminal is small (Termux, narrow window)."""
837
+ return console.width < 60
838
+
839
+
835
840
  def clear_screen():
836
841
  os.system("cls" if os.name == "nt" else "clear")
837
842
 
838
843
 
839
- LOGO = """
844
+ LOGO_FULL = """
840
845
  [bright_cyan] ██████╗ ██████╗ ██████╗ ███████╗[/][bold white] ██████╗ ██████╗ ████████╗[/]
841
846
  [bright_cyan] ██╔════╝██╔═══██╗██╔══██╗██╔════╝[/][bold white] ██╔════╝ ██╔══██╗╚══██╔══╝[/]
842
847
  [bright_cyan] ██║ ██║ ██║██║ ██║█████╗ [/][bold white] ██║ ███╗██████╔╝ ██║ [/]
@@ -845,6 +850,14 @@ LOGO = """
845
850
  [bright_cyan] ╚═════╝ ╚═════╝ ╚═════╝ ╚══════╝[/][bold white] ╚═════╝ ╚═╝ ╚═╝ [/]
846
851
  [dim] Your Local AI Assistant — Powered by Ollama[/]"""
847
852
 
853
+ LOGO_COMPACT = """
854
+ [bold bright_cyan]╔═══════════════════════╗[/]
855
+ [bold bright_cyan]║[/] [bold white]C O D E[/][bold bright_cyan] G P T[/] [bold bright_cyan]║[/]
856
+ [bold bright_cyan]╚═══════════════════════╝[/]
857
+ [dim] Local AI · Ollama[/]"""
858
+
859
+ LOGO = LOGO_FULL
860
+
848
861
  # --- Command Aliases ---
849
862
  ALIASES = {
850
863
  "/q": "/quit", "/x": "/quit", "/exit": "/quit",
@@ -892,30 +905,39 @@ HISTORY_FILE = Path.home() / ".codegpt" / "input_history"
892
905
  def print_header(model):
893
906
  clear_screen()
894
907
  w = tw()
908
+ compact = is_compact()
895
909
  console.print()
910
+
911
+ # Responsive logo
912
+ logo = LOGO_COMPACT if compact else LOGO_FULL
896
913
  console.print(Panel(
897
- Text.from_markup(LOGO),
914
+ Text.from_markup(logo),
898
915
  border_style="bright_cyan",
899
- padding=(1, 2),
916
+ padding=(0 if compact else 1, 1 if compact else 2),
900
917
  width=w,
901
918
  ))
902
919
 
903
- # Status bar
920
+ # Status bar — compact version for small screens
904
921
  now = datetime.now().strftime("%H:%M")
905
922
  elapsed = int(time.time() - session_stats["start"])
906
923
  uptime = f"{elapsed // 60}m"
907
924
  tok = session_stats["tokens_out"]
908
925
 
909
926
  bar = Text()
910
- bar.append(f" {model}", style="bright_cyan")
911
- bar.append(" | ", style="dim")
912
- bar.append(f"{session_stats['messages']} msgs", style="dim")
913
- bar.append(" | ", style="dim")
914
- bar.append(f"{tok} tokens", style="dim")
915
- bar.append(" | ", style="dim")
916
- bar.append(f"{uptime}", style="dim")
917
- bar.append(" | ", style="dim")
918
- bar.append(now, style="dim")
927
+ if compact:
928
+ bar.append(f" {model}", style="bright_cyan")
929
+ bar.append(f" {session_stats['messages']}msg", style="dim")
930
+ bar.append(f" {now}", style="dim")
931
+ else:
932
+ bar.append(f" {model}", style="bright_cyan")
933
+ bar.append(" | ", style="dim")
934
+ bar.append(f"{session_stats['messages']} msgs", style="dim")
935
+ bar.append(" | ", style="dim")
936
+ bar.append(f"{tok} tokens", style="dim")
937
+ bar.append(" | ", style="dim")
938
+ bar.append(f"{uptime}", style="dim")
939
+ bar.append(" | ", style="dim")
940
+ bar.append(now, style="dim")
919
941
 
920
942
  console.print(Panel(bar, border_style="dim", padding=0, width=w))
921
943
  console.print()
@@ -934,6 +956,8 @@ def print_welcome(model, available_models):
934
956
  else:
935
957
  greeting = "Good evening"
936
958
 
959
+ compact = is_compact()
960
+
937
961
  console.print(Align.center(Text(f"\n{greeting}.\n", style="bold white")), width=w)
938
962
 
939
963
  # Connection status bar
@@ -945,75 +969,101 @@ def print_welcome(model, available_models):
945
969
  streak = profile.get("total_sessions", 0)
946
970
 
947
971
  status = Text()
948
- status.append(" ◈ ", style="bright_cyan")
949
- status.append(f"{model}", style="bold bright_cyan")
950
- status.append("", style="dim")
951
- status.append(f" {server_type}", style="green" if model_count > 0 else "red")
952
- status.append(" │ ", style="dim")
953
- status.append(f" {model_count} models", style="dim")
954
- status.append("", style="dim")
955
- status.append(f"◇ {mem_count} memories", style="dim")
956
- if streak > 1:
972
+ if compact:
973
+ status.append(f" {model}", style="bold bright_cyan")
974
+ status.append(f" {server_type}", style="green" if model_count > 0 else "red")
975
+ status.append(f" {model_count}m", style="dim")
976
+ else:
977
+ status.append(" ", style="bright_cyan")
978
+ status.append(f"{model}", style="bold bright_cyan")
957
979
  status.append(" │ ", style="dim")
958
- status.append(f" {streak} sessions", style="dim")
980
+ status.append(f" {server_type}", style="green" if model_count > 0 else "red")
981
+ status.append(" │ ", style="dim")
982
+ status.append(f"△ {model_count} models", style="dim")
983
+ status.append(" │ ", style="dim")
984
+ status.append(f"◇ {mem_count} memories", style="dim")
985
+ if streak > 1:
986
+ status.append(" │ ", style="dim")
987
+ status.append(f"▸ {streak} sessions", style="dim")
959
988
  console.print(Panel(status, border_style="bright_black", padding=0, width=w))
960
989
 
961
- # Suggestion chips
962
- console.print(Panel(
963
- _build_suggestions(),
964
- title="[dim]Suggestions (type a number)[/]",
965
- title_align="left",
966
- border_style="bright_black",
967
- padding=(1, 2),
968
- width=w,
969
- ))
990
+ # Suggestion chips — fewer on compact
991
+ if compact:
992
+ console.print(Panel(
993
+ _build_suggestions(max_items=3),
994
+ title="[dim]Try[/]",
995
+ title_align="left",
996
+ border_style="bright_black",
997
+ padding=(0, 1),
998
+ width=w,
999
+ ))
1000
+ else:
1001
+ console.print(Panel(
1002
+ _build_suggestions(),
1003
+ title="[dim]Suggestions (type a number)[/]",
1004
+ title_align="left",
1005
+ border_style="bright_black",
1006
+ padding=(1, 2),
1007
+ width=w,
1008
+ ))
970
1009
 
971
1010
  # Tip of the day
972
1011
  tip = random.choice(TIPS)
973
- console.print(Align.center(Text(f"Tip: {tip}", style="dim italic")), width=w)
1012
+ console.print(Text(f" Tip: {tip}", style="dim italic"))
974
1013
  console.print()
975
1014
 
976
1015
 
977
- def _build_suggestions():
1016
+ def _build_suggestions(max_items=None):
978
1017
  text = Text()
979
- for i, s in enumerate(SUGGESTIONS, 1):
980
- text.append(f" [{i}]", style="bright_cyan bold")
981
- text.append(f" {s}\n", style="white")
1018
+ items = SUGGESTIONS[:max_items] if max_items else SUGGESTIONS
1019
+ for i, s in enumerate(items, 1):
1020
+ if is_compact():
1021
+ text.append(f" {i}.", style="bright_cyan bold")
1022
+ text.append(f" {s[:30]}\n", style="white")
1023
+ else:
1024
+ text.append(f" [{i}]", style="bright_cyan bold")
1025
+ text.append(f" {s}\n", style="white")
982
1026
  return text
983
1027
 
984
1028
 
985
1029
  def print_user_msg(text):
1030
+ pad = (0, 1) if is_compact() else (0, 2)
986
1031
  console.print(Panel(
987
1032
  Text(text, style="white"),
988
1033
  title="[bold bright_cyan]You[/]",
989
1034
  title_align="left",
990
1035
  border_style="bright_cyan",
991
- padding=(0, 2),
1036
+ padding=pad,
992
1037
  width=tw(),
993
1038
  ))
994
1039
 
995
1040
 
996
1041
  def print_ai_msg(text, stats=""):
1042
+ pad = (0, 1) if is_compact() else (0, 2)
1043
+ compact = is_compact()
997
1044
  panel = Panel(
998
1045
  Markdown(text),
999
1046
  title="[bold bright_green]AI[/]",
1000
1047
  title_align="left",
1001
1048
  border_style="bright_green",
1002
- subtitle=stats,
1049
+ subtitle=stats if not compact else "",
1003
1050
  subtitle_align="right",
1004
- padding=(0, 2),
1051
+ padding=pad,
1005
1052
  width=tw(),
1006
1053
  )
1007
1054
  print_with_sidebar(panel)
1008
1055
 
1009
1056
 
1010
1057
  def print_sys(text):
1011
- console.print(Panel(
1012
- Text(text, style="dim italic"),
1013
- border_style="bright_black",
1014
- padding=(0, 1),
1015
- width=tw(),
1016
- ))
1058
+ if is_compact():
1059
+ console.print(Text(f" {text}", style="dim italic"))
1060
+ else:
1061
+ console.print(Panel(
1062
+ Text(text, style="dim italic"),
1063
+ border_style="bright_black",
1064
+ padding=(0, 1),
1065
+ width=tw(),
1066
+ ))
1017
1067
 
1018
1068
 
1019
1069
  def print_err(text):
@@ -3934,6 +3984,8 @@ def _bottom_toolbar():
3934
3984
  mins = elapsed // 60
3935
3985
  msgs = session_stats["messages"]
3936
3986
  tok = session_stats["tokens_out"]
3987
+ if is_compact():
3988
+ return [("class:bottom-toolbar", f" {msgs}msg {tok}tok {mins}m │ / cmds ")]
3937
3989
  return [("class:bottom-toolbar",
3938
3990
  f" {msgs} msgs │ {tok} tok │ {mins}m │ / for commands │ Ctrl+C to exit ")]
3939
3991
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "codegpt-ai",
3
- "version": "1.2.0",
3
+ "version": "1.4.0",
4
4
  "description": "Local AI Assistant Hub — 80+ commands, 29 tools, 8 agents, training, security",
5
5
  "author": "ArukuX",
6
6
  "license": "MIT",