codegpt-ai 1.28.2 → 2.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,2 +1,2 @@
1
1
  """CodeGPT — Local AI Assistant Hub."""
2
- __version__ = "1.0.0"
2
+ __version__ = "2.0.0"
package/chat.py CHANGED
@@ -473,31 +473,76 @@ try:
473
473
  input_history = FileHistory(str(_hist_path))
474
474
  except Exception:
475
475
  input_history = InMemoryHistory()
476
+ # Command categories for autocomplete display
477
+ CMD_CATEGORIES = {}
478
+ _cat_map = {
479
+ "Chat": ["/new", "/save", "/load", "/delete", "/copy", "/regen", "/edit", "/history", "/clear", "/quit"],
480
+ "Model": ["/model", "/modelinfo", "/params", "/temp", "/think", "/tokens", "/compact", "/system"],
481
+ "AI Agents": ["/agent", "/agents", "/all", "/vote", "/swarm", "/team", "/room", "/spectate", "/dm", "/chat-link"],
482
+ "AI Lab": ["/lab", "/chain", "/race", "/prompts", "/compare"],
483
+ "Tools": ["/tools", "/bg", "/split", "/splitv", "/grid", "/running", "/killall"],
484
+ "Connect": ["/connect", "/disconnect", "/server", "/qr", "/scan"],
485
+ "Files & Code": ["/file", "/run", "/code", "/shell", "/browse", "/open", "/export"],
486
+ "Memory": ["/mem", "/train", "/pin", "/pins", "/search", "/fork", "/rate", "/tag"],
487
+ "Profile": ["/profile", "/setname", "/setbio", "/persona", "/personas", "/usage"],
488
+ "Skills": ["/skill", "/skills", "/auto", "/cron", "/crons"],
489
+ "Comms": ["/broadcast", "/inbox", "/feed", "/monitor", "/hub"],
490
+ "System": ["/github", "/weather", "/spotify", "/volume", "/bright", "/sysinfo", "/voice", "/remind", "/reminders", "/shortcuts"],
491
+ "Security": ["/pin-set", "/pin-remove", "/lock", "/audit", "/security", "/permissions"],
492
+ }
493
+ for _cat, _cmds in _cat_map.items():
494
+ for _cmd in _cmds:
495
+ CMD_CATEGORIES[_cmd] = _cat
496
+ # Tools get their own category
497
+ for _tool_name in AI_TOOLS:
498
+ CMD_CATEGORIES[f"/{_tool_name}"] = "Tool"
499
+ CMD_CATEGORIES["/claude"] = "Tool"
500
+ CMD_CATEGORIES["/openclaw"] = "Tool"
501
+ CMD_CATEGORIES["/sidebar"] = "UI"
502
+ CMD_CATEGORIES["/diff"] = "Chat"
503
+ CMD_CATEGORIES["/help"] = "Help"
504
+
505
+
476
506
  class SlashCompleter(Completer):
477
- """Show all commands when typing /"""
507
+ """Show all commands with categories when typing /"""
478
508
  def get_completions(self, document, complete_event):
479
509
  text = document.text_before_cursor.lstrip()
480
510
  if text.startswith("/"):
481
511
  typed = text.lower()
482
512
  on_termux = os.path.exists("/data/data/com.termux")
483
513
 
484
- # Main commands — hide unsupported tool commands on Termux
514
+ # Custom skills first
515
+ skills = load_skills()
516
+ for skill_name in skills:
517
+ cmd = f"/{skill_name}"
518
+ if cmd.startswith(typed):
519
+ yield Completion(
520
+ cmd,
521
+ start_position=-len(text),
522
+ display=f"{cmd}",
523
+ display_meta=f"skill: {skills[skill_name].get('desc', '')[:30]}",
524
+ )
525
+
526
+ # Main commands with categories
485
527
  for cmd, desc in COMMANDS.items():
486
528
  if cmd.startswith(typed):
487
- # Skip tool commands that don't work on Termux
488
529
  tool_name = cmd[1:]
489
530
  if on_termux and tool_name in AI_TOOLS and not AI_TOOLS[tool_name].get("termux", True):
490
531
  continue
532
+ cat = CMD_CATEGORIES.get(cmd, "")
533
+ meta = f"[{cat}] {desc}" if cat else desc
491
534
  yield Completion(
492
535
  cmd,
493
536
  start_position=-len(text),
494
537
  display=f"{cmd}",
495
- display_meta=desc,
538
+ display_meta=meta,
496
539
  )
540
+
497
541
  # Aliases
498
542
  for alias, target in ALIASES.items():
499
543
  if alias.startswith(typed) and alias not in COMMANDS:
500
544
  desc = COMMANDS.get(target, "")
545
+ cat = CMD_CATEGORIES.get(target, "")
501
546
  yield Completion(
502
547
  alias,
503
548
  start_position=-len(text),
@@ -508,7 +553,12 @@ class SlashCompleter(Completer):
508
553
  cmd_completer = SlashCompleter()
509
554
  input_style = PtStyle.from_dict({
510
555
  "prompt": "ansicyan bold",
511
- "bottom-toolbar": "bg:#1a1a2e #ffffff",
556
+ "bottom-toolbar": "bg:#1a1a2e #888888",
557
+ "completion-menu": "bg:#1a1a2e #ffffff",
558
+ "completion-menu.completion": "bg:#1a1a2e #ffffff",
559
+ "completion-menu.completion.current": "bg:#00aaff #ffffff bold",
560
+ "completion-menu.meta.completion": "bg:#1a1a2e #888888",
561
+ "completion-menu.meta.completion.current": "bg:#00aaff #ffffff",
512
562
  })
513
563
 
514
564
  session_stats = {"messages": 0, "tokens_in": 0, "tokens_out": 0, "start": time.time()}
@@ -1169,27 +1219,29 @@ HISTORY_FILE = Path.home() / ".codegpt" / "input_history"
1169
1219
 
1170
1220
  def print_header(model):
1171
1221
  clear_screen()
1172
- w = tw()
1173
1222
  compact = is_compact()
1174
- console.print()
1175
1223
 
1176
1224
  if compact:
1177
- console.print(Text.from_markup(f" [bold bright_cyan]CodeGPT[/] [dim]· {model}[/]"))
1178
- console.print(Rule(style="dim", characters="─"))
1225
+ console.print(Text.from_markup(f"\n [bold bright_cyan]CodeGPT[/] [dim]v2.0 · {model}[/]\n"))
1179
1226
  else:
1180
- console.print(Text.from_markup(LOGO_FULL))
1227
+ # Clean startup like Claude Code — no ASCII art on repeat, just info
1228
+ is_local = "localhost" in OLLAMA_URL or "127.0.0.1" in OLLAMA_URL
1229
+ server = "local" if is_local else OLLAMA_URL.split("//")[1].split("/")[0] if "//" in OLLAMA_URL else "?"
1230
+ profile = load_profile()
1231
+ name = profile.get("name", "")
1232
+ mem_count = len(load_memories())
1233
+
1234
+ console.print()
1235
+ console.print(Text.from_markup(f" [bold bright_cyan]CodeGPT[/] [dim]v2.0[/]"))
1181
1236
  console.print()
1182
- now = datetime.now().strftime("%H:%M")
1183
- elapsed = int(time.time() - session_stats["start"])
1184
1237
  console.print(Text.from_markup(
1185
- f" [bright_cyan]{model}[/]"
1186
- f" [dim]·[/] [dim]{session_stats['messages']} msgs[/]"
1187
- f" [dim]·[/] [dim]{session_stats['tokens_out']} tok[/]"
1188
- f" [dim]·[/] [dim]{elapsed // 60}m[/]"
1189
- f" [dim]·[/] [dim]{now}[/]"
1238
+ f" [dim]model[/] [bright_cyan]{model}[/]\n"
1239
+ f" [dim]server[/] [green]{server}[/]\n"
1240
+ f" [dim]user[/] {name}\n"
1241
+ f" [dim]memory[/] {mem_count} items\n"
1242
+ f" [dim]commands[/] {len(COMMANDS)}"
1190
1243
  ))
1191
- console.print(Rule(style="dim", characters="─"))
1192
- console.print()
1244
+ console.print()
1193
1245
 
1194
1246
 
1195
1247
  def print_welcome(model, available_models):
@@ -4739,7 +4791,11 @@ def main():
4739
4791
  ollama_status = "offline"
4740
4792
  tool_count = sum(1 for t in AI_TOOLS.values() if shutil.which(t["bin"]))
4741
4793
 
4742
- print(f" CodeGPT v1.0.0")
4794
+ try:
4795
+ from ai_cli import __version__ as _v
4796
+ except ImportError:
4797
+ _v = "2.0.0"
4798
+ print(f" CodeGPT v{_v}")
4743
4799
  print(f" User: {profile.get('name', 'not set')}")
4744
4800
  print(f" Model: {profile.get('model', MODEL)}")
4745
4801
  print(f" Persona: {profile.get('persona', 'default')}")
@@ -4867,53 +4923,19 @@ def main():
4867
4923
 
4868
4924
  print_header(model)
4869
4925
 
4870
- # Welcome popup — always show
4926
+ # Clean welcome — like Claude Code
4871
4927
  if not first_time:
4872
- w = tw()
4873
- compact = is_compact()
4874
- name = profile.get("name", "User")
4875
- is_local = "localhost" in OLLAMA_URL or "127.0.0.1" in OLLAMA_URL
4876
- server = "local" if is_local else OLLAMA_URL.split("//")[1].split("/")[0] if "//" in OLLAMA_URL else "unknown"
4877
- model_count = len(available_models)
4878
- sessions = profile.get("total_sessions", 0)
4879
- total_msgs = profile.get("total_messages", 0)
4880
-
4881
- hour = datetime.now().hour
4882
- greeting = "Good morning" if hour < 12 else "Good afternoon" if hour < 18 else "Good evening"
4883
-
4884
- if compact:
4885
- if offline_mode:
4886
- status_line = "[yellow]offline[/] — /connect IP"
4887
- else:
4888
- status_line = f"[green]connected[/] {model_count} models"
4889
-
4890
- console.print(Panel(
4891
- Text.from_markup(
4892
- f"[bold]{greeting}, {name}![/]\n\n"
4893
- f" Model [bright_cyan]{model}[/]\n"
4894
- f" Status {status_line}\n"
4895
- f" Session [dim]#{sessions}[/]\n"
4896
- ),
4897
- title="[bold bright_cyan]CodeGPT[/]",
4898
- border_style="bright_cyan", padding=(0, 1), width=w,
4899
- ))
4900
- else:
4901
- if offline_mode:
4902
- status_line = f" Server: [yellow]offline[/] — use [bright_cyan]/connect IP[/] to link"
4903
- else:
4904
- status_line = f" Server: [green]{server}[/] ({model_count} models)"
4928
+ name = profile.get("name", "")
4929
+ if offline_mode:
4930
+ console.print(Text.from_markup(" [yellow]offline[/] — use [bright_cyan]/connect IP[/] to link to Ollama"))
4931
+ console.print()
4905
4932
 
4906
- console.print(Panel(
4907
- Text.from_markup(
4908
- f"[bold]{greeting}, {name}![/]\n\n"
4909
- f" Model: [bright_cyan]{model}[/]\n"
4910
- f"{status_line}\n"
4911
- f" Session: [dim]#{sessions}[/] ({total_msgs} lifetime msgs)\n\n"
4912
- f" [dim]Type / for commands · /help for full list[/]"
4913
- ),
4914
- title="[bold bright_cyan]Welcome[/]",
4915
- border_style="bright_cyan", padding=(1, 2), width=w,
4916
- ))
4933
+ if name:
4934
+ hour = datetime.now().hour
4935
+ greeting = "Good morning" if hour < 12 else "Good afternoon" if hour < 18 else "Good evening"
4936
+ console.print(Text(f" {greeting}, {name}.", style="bold white"))
4937
+ console.print(Text(" Type a message to chat. Type / for commands.", style="dim"))
4938
+ console.print()
4917
4939
 
4918
4940
  print_welcome(model, available_models)
4919
4941
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "codegpt-ai",
3
- "version": "1.28.2",
3
+ "version": "2.2.0",
4
4
  "description": "Local AI Assistant Hub — 80+ commands, 29 tools, 8 agents, training, security",
5
5
  "author": "ArukuX",
6
6
  "license": "MIT",