codegpt-ai 1.28.2 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,2 +1,2 @@
1
1
  """CodeGPT — Local AI Assistant Hub."""
2
- __version__ = "1.0.0"
2
+ __version__ = "2.0.0"
package/chat.py CHANGED
@@ -1169,27 +1169,29 @@ HISTORY_FILE = Path.home() / ".codegpt" / "input_history"
1169
1169
 
1170
1170
  def print_header(model):
1171
1171
  clear_screen()
1172
- w = tw()
1173
1172
  compact = is_compact()
1174
- console.print()
1175
1173
 
1176
1174
  if compact:
1177
- console.print(Text.from_markup(f" [bold bright_cyan]CodeGPT[/] [dim]· {model}[/]"))
1178
- console.print(Rule(style="dim", characters="─"))
1175
+ console.print(Text.from_markup(f"\n [bold bright_cyan]CodeGPT[/] [dim]v2.0 · {model}[/]\n"))
1179
1176
  else:
1180
- console.print(Text.from_markup(LOGO_FULL))
1177
+ # Clean startup like Claude Code — no ASCII art on repeat, just info
1178
+ is_local = "localhost" in OLLAMA_URL or "127.0.0.1" in OLLAMA_URL
1179
+ server = "local" if is_local else OLLAMA_URL.split("//")[1].split("/")[0] if "//" in OLLAMA_URL else "?"
1180
+ profile = load_profile()
1181
+ name = profile.get("name", "")
1182
+ mem_count = len(load_memories())
1183
+
1184
+ console.print()
1185
+ console.print(Text.from_markup(f" [bold bright_cyan]CodeGPT[/] [dim]v2.0[/]"))
1181
1186
  console.print()
1182
- now = datetime.now().strftime("%H:%M")
1183
- elapsed = int(time.time() - session_stats["start"])
1184
1187
  console.print(Text.from_markup(
1185
- f" [bright_cyan]{model}[/]"
1186
- f" [dim]·[/] [dim]{session_stats['messages']} msgs[/]"
1187
- f" [dim]·[/] [dim]{session_stats['tokens_out']} tok[/]"
1188
- f" [dim]·[/] [dim]{elapsed // 60}m[/]"
1189
- f" [dim]·[/] [dim]{now}[/]"
1188
+ f" [dim]model[/] [bright_cyan]{model}[/]\n"
1189
+ f" [dim]server[/] [green]{server}[/]\n"
1190
+ f" [dim]user[/] {name}\n"
1191
+ f" [dim]memory[/] {mem_count} items\n"
1192
+ f" [dim]commands[/] {len(COMMANDS)}"
1190
1193
  ))
1191
- console.print(Rule(style="dim", characters="─"))
1192
- console.print()
1194
+ console.print()
1193
1195
 
1194
1196
 
1195
1197
  def print_welcome(model, available_models):
@@ -4739,7 +4741,11 @@ def main():
4739
4741
  ollama_status = "offline"
4740
4742
  tool_count = sum(1 for t in AI_TOOLS.values() if shutil.which(t["bin"]))
4741
4743
 
4742
- print(f" CodeGPT v1.0.0")
4744
+ try:
4745
+ from ai_cli import __version__ as _v
4746
+ except ImportError:
4747
+ _v = "2.0.0"
4748
+ print(f" CodeGPT v{_v}")
4743
4749
  print(f" User: {profile.get('name', 'not set')}")
4744
4750
  print(f" Model: {profile.get('model', MODEL)}")
4745
4751
  print(f" Persona: {profile.get('persona', 'default')}")
@@ -4867,53 +4873,19 @@ def main():
4867
4873
 
4868
4874
  print_header(model)
4869
4875
 
4870
- # Welcome popup — always show
4876
+ # Clean welcome — like Claude Code
4871
4877
  if not first_time:
4872
- w = tw()
4873
- compact = is_compact()
4874
- name = profile.get("name", "User")
4875
- is_local = "localhost" in OLLAMA_URL or "127.0.0.1" in OLLAMA_URL
4876
- server = "local" if is_local else OLLAMA_URL.split("//")[1].split("/")[0] if "//" in OLLAMA_URL else "unknown"
4877
- model_count = len(available_models)
4878
- sessions = profile.get("total_sessions", 0)
4879
- total_msgs = profile.get("total_messages", 0)
4880
-
4881
- hour = datetime.now().hour
4882
- greeting = "Good morning" if hour < 12 else "Good afternoon" if hour < 18 else "Good evening"
4883
-
4884
- if compact:
4885
- if offline_mode:
4886
- status_line = "[yellow]offline[/] — /connect IP"
4887
- else:
4888
- status_line = f"[green]connected[/] {model_count} models"
4889
-
4890
- console.print(Panel(
4891
- Text.from_markup(
4892
- f"[bold]{greeting}, {name}![/]\n\n"
4893
- f" Model [bright_cyan]{model}[/]\n"
4894
- f" Status {status_line}\n"
4895
- f" Session [dim]#{sessions}[/]\n"
4896
- ),
4897
- title="[bold bright_cyan]CodeGPT[/]",
4898
- border_style="bright_cyan", padding=(0, 1), width=w,
4899
- ))
4900
- else:
4901
- if offline_mode:
4902
- status_line = f" Server: [yellow]offline[/] — use [bright_cyan]/connect IP[/] to link"
4903
- else:
4904
- status_line = f" Server: [green]{server}[/] ({model_count} models)"
4878
+ name = profile.get("name", "")
4879
+ if offline_mode:
4880
+ console.print(Text.from_markup(" [yellow]offline[/] — use [bright_cyan]/connect IP[/] to link to Ollama"))
4881
+ console.print()
4905
4882
 
4906
- console.print(Panel(
4907
- Text.from_markup(
4908
- f"[bold]{greeting}, {name}![/]\n\n"
4909
- f" Model: [bright_cyan]{model}[/]\n"
4910
- f"{status_line}\n"
4911
- f" Session: [dim]#{sessions}[/] ({total_msgs} lifetime msgs)\n\n"
4912
- f" [dim]Type / for commands · /help for full list[/]"
4913
- ),
4914
- title="[bold bright_cyan]Welcome[/]",
4915
- border_style="bright_cyan", padding=(1, 2), width=w,
4916
- ))
4883
+ if name:
4884
+ hour = datetime.now().hour
4885
+ greeting = "Good morning" if hour < 12 else "Good afternoon" if hour < 18 else "Good evening"
4886
+ console.print(Text(f" {greeting}, {name}.", style="bold white"))
4887
+ console.print(Text(" Type a message to chat. Type / for commands.", style="dim"))
4888
+ console.print()
4917
4889
 
4918
4890
  print_welcome(model, available_models)
4919
4891
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "codegpt-ai",
3
- "version": "1.28.2",
3
+ "version": "2.0.0",
4
4
  "description": "Local AI Assistant Hub — 80+ commands, 29 tools, 8 agents, training, security",
5
5
  "author": "ArukuX",
6
6
  "license": "MIT",