codegpt-ai 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,7 @@
1
+ {
2
+ "permissions": {
3
+ "allow": [
4
+ "Bash(wc:*)"
5
+ ]
6
+ }
7
+ }
@@ -0,0 +1,40 @@
1
# Build & Release: on every v* tag, build a Windows exe with PyInstaller
# and attach it to a GitHub Release.
name: Build & Release

on:
  push:
    tags:
      - 'v*'   # release tags only, e.g. v1.0.0

# Required so the release step below can create a GitHub Release.
permissions:
  contents: write

jobs:
  build:
    runs-on: windows-latest   # the exe must be built on Windows
    steps:
      - uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install dependencies
        run: |
          pip install requests rich prompt-toolkit pyinstaller

      - name: Build exe
        run: |
          pyinstaller ai.spec

      # Smoke test: the frozen exe must at least answer --version.
      - name: Verify exe
        run: |
          dist\ai.exe --version

      - name: Create Release
        uses: softprops/action-gh-release@v2
        with:
          files: dist/ai.exe
          generate_release_notes: true
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
package/CLAUDE.md ADDED
@@ -0,0 +1,58 @@
1
+ # CodeGPT — Project Context
2
+
3
+ ## What This Is
4
+ CodeGPT is a full-featured local AI assistant hub. CLI chat, TUI, Telegram bot, web app, and 15+ AI tool integrations — all powered by Ollama running locally.
5
+
6
+ ## Stack
7
+ - **Language**: Python 3.13
8
+ - **AI Backend**: Ollama (localhost:11434), default model: llama3.2
9
+ - **CLI**: Rich + prompt_toolkit (chat.py)
10
+ - **TUI**: Textual (app.py)
11
+ - **Web**: Flask + HTTPS (web.py)
12
+ - **Bot**: python-telegram-bot (bot.py)
13
+ - **Server**: Flask API, Groq/Ollama (server.py)
14
+ - **Launcher**: run.py (all modes)
15
+
16
+ ## Files
17
+ - `chat.py` — Main CLI with 60+ slash commands, AI agents, AI lab, training, security, tool integrations
18
+ - `app.py` — TUI sidebar app (Textual)
19
+ - `bot.py` — Telegram bot (live streaming, profiles, 10 features)
20
+ - `web.py` — PWA web app (Flask + HTTPS)
21
+ - `server.py` — Backend API (Groq cloud + Ollama local)
22
+ - `run.py` — Launcher (cli/tui/web/bot/server/mobile)
23
+ - `mobile.py` — Flet mobile app (incomplete, disk space issue)
24
+
25
+ ## Key Architecture (chat.py)
26
+ - `COMMANDS` dict — all slash commands
27
+ - `AI_TOOLS` dict — all external tool configs (bin, install, default_args)
28
+ - `AI_AGENTS` dict — 8 specialized agents (coder, debugger, researcher, etc.)
29
+ - `PERSONAS` dict — 6 personalities (default, hacker, teacher, roast, architect, minimal)
30
+ - `PROMPT_TEMPLATES` dict — 15 reusable prompt prefixes
31
+ - `stream_response()` — streaming Ollama chat with live rendering
32
+ - `SlashCompleter` — autocomplete for / commands
33
+ - Profile system — persistent user data at ~/.codegpt/profiles/
34
+ - Memory system — persistent AI memory at ~/.codegpt/memory/
35
+ - Security — PIN lock, audit log, shell blocklist, code exec limits
36
+ - Training — collect conversations, build custom Ollama models
37
+
38
+ ## Data Locations
39
+ - `~/.codegpt/profiles/cli_profile.json` — user profile
40
+ - `~/.codegpt/memory/memories.json` — AI memories
41
+ - `~/.codegpt/security/` — PIN hash, audit log
42
+ - `~/.codegpt/training/` — training data, custom modelfiles
43
+ - `~/.codegpt/sandbox/` — sandboxed tool working dirs
44
+ - `~/.codegpt/context.json` — shared context (updated every tool launch)
45
+ - `~/.codegpt/chats/` — saved conversations
46
+ - `~/.codegpt/exports/` — exported chats
47
+ - `~/.codegpt/ratings.json` — response ratings
48
+
49
+ ## Owner
50
+ ArukuX (Ark), student dev, Southampton UK. Prefers direct/technical tone.
51
+
52
+ ## Rules
53
+ - All external AI tools are sandboxed (except coding tools that need file access)
54
+ - API keys are stripped from sandboxed tool environments
55
+ - All tool launches are audit logged
56
+ - Security PIN uses SHA-256 hashing
57
+ - Code execution limited to 20/session
58
+ - Shell commands checked against blocklist
package/ai.spec ADDED
@@ -0,0 +1,81 @@
1
# -*- mode: python ; coding: utf-8 -*-
# PyInstaller spec file for CodeGPT -> ai.exe
# Analysis/PYZ/EXE are injected by PyInstaller when it executes this spec.

import sys
from pathlib import Path

# Kept for spec compatibility; bytecode encryption is not used.
block_cipher = None

a = Analysis(
    ['ai_cli/__main__.py'],  # frozen entry point (same as `python -m ai_cli`)
    pathex=['.'],
    binaries=[],
    datas=[
        # Bundle the main CLI script and project context next to the exe.
        ('chat.py', '.'),
        ('CLAUDE.md', '.'),
    ],
    hiddenimports=[
        # Modules imported lazily at runtime that static analysis would miss.
        'requests',
        'rich',
        'rich.console',
        'rich.markdown',
        'rich.panel',
        'rich.table',
        'rich.text',
        'rich.live',
        'rich.rule',
        'rich.align',
        'prompt_toolkit',
        'prompt_toolkit.history',
        'prompt_toolkit.completion',
        'prompt_toolkit.styles',
        'ai_cli',
        'ai_cli.__main__',
        'ai_cli.updater',
        'ai_cli.doctor',
    ],
    hookspath=[],
    hooksconfig={},
    runtime_hooks=[],
    excludes=[
        # Optional front-ends/back-ends not needed by the CLI exe — keeps it small.
        'textual',
        'telegram',
        'flask',
        'groq',
        'flet',
        'tkinter',
        'unittest',
        'xmlrpc',
        'pydoc',
        'doctest',
    ],
    win_no_prefer_redirects=False,
    win_private_assemblies=False,
    cipher=block_cipher,
    noarchive=False,
)

pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher)

exe = EXE(
    pyz,
    a.scripts,
    a.binaries,
    a.zipfiles,
    a.datas,
    [],
    name='ai',                 # output: dist/ai.exe
    debug=False,
    bootloader_ignore_signals=False,
    strip=False,
    upx=True,                  # compress with UPX when available
    upx_exclude=[],
    runtime_tmpdir=None,       # one-file mode: unpack to a temp dir at launch
    console=True,              # CLI app — keep the console window
    disable_windowed_traceback=False,
    argv_emulation=False,
    target_arch=None,
    codesign_identity=None,
    entitlements_file=None,
    icon=None,
)
@@ -0,0 +1,2 @@
1
+ """CodeGPT — Local AI Assistant Hub."""
2
+ __version__ = "1.0.0"
@@ -0,0 +1,46 @@
1
+ """Entry point for `python -m ai_cli` and `ai` command."""
2
+ import sys
3
+ import os
4
+
5
+ # Fix Unicode on Windows
6
+ os.environ["PYTHONUTF8"] = "1"
7
+ if sys.platform == "win32":
8
+ sys.stdout.reconfigure(encoding="utf-8", errors="replace")
9
+ sys.stderr.reconfigure(encoding="utf-8", errors="replace")
10
+
11
+
12
def main():
    """Main entry point — wraps chat.py with an auto-update check.

    Meta-commands (--version/-v, update, doctor) are dispatched before the
    heavy CLI module is imported, so they stay fast.
    """
    from ai_cli import __version__
    from ai_cli.updater import check_for_update

    if len(sys.argv) > 1:
        cmd = sys.argv[1].lower()
        if cmd in ("--version", "-v"):
            print(f"CodeGPT v{__version__}")
            return
        if cmd == "update":
            from ai_cli.updater import force_update
            force_update()
            return
        if cmd == "doctor":
            from ai_cli.doctor import run_doctor
            run_doctor()
            return

    # Fire-and-forget background check; never blocks startup.
    check_for_update()

    # chat.py lives one directory above this package — make it importable.
    project_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    if project_dir not in sys.path:
        sys.path.insert(0, project_dir)

    # Launch the full CLI.
    from chat import main as chat_main
    chat_main()


if __name__ == "__main__":
    main()
@@ -0,0 +1,89 @@
1
+ """System diagnostics — `ai doctor`."""
2
+ import shutil
3
+ import subprocess
4
+ import sys
5
+ from pathlib import Path
6
+
7
+
8
def run_doctor():
    """Check system dependencies and configuration.

    Prints a Rich table of pass/fail checks: Python version, the Ollama
    binary and its HTTP API, CodeGPT data files under ~/.codegpt, and
    external tool binaries on PATH. Purely diagnostic — no side effects.
    """
    from rich.console import Console
    from rich.table import Table

    console = Console()
    table = Table(title="CodeGPT Doctor", border_style="bright_cyan",
                  title_style="bold cyan", show_header=True, header_style="bold")
    table.add_column("Check", style="white", width=24)
    table.add_column("Status", width=8)
    table.add_column("Details", style="dim")

    checks = []  # (name, ok, detail) triples, rendered at the end

    # Python — always reported, never fails.
    py_ver = f"{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}"
    checks.append(("Python", True, py_ver))

    # Ollama binary on PATH
    if shutil.which("ollama") is not None:
        try:
            r = subprocess.run(["ollama", "list"], capture_output=True, text=True, timeout=5)
            # First line of `ollama list` output is a header row; clamp so
            # empty output cannot report "-1 models" (fix).
            model_count = max(0, len(r.stdout.strip().splitlines()) - 1)
            checks.append(("Ollama", True, f"{model_count} models"))
        except Exception:
            # Binary exists but `ollama list` timed out or errored.
            checks.append(("Ollama", True, "installed, not running"))
    else:
        checks.append(("Ollama", False, "not installed — ollama.com"))

    # Ollama HTTP API reachability
    try:
        import requests
        r = requests.get("http://localhost:11434/api/tags", timeout=2)
        models = [m["name"] for m in r.json().get("models", [])]
        checks.append(("Ollama API", True, f"{len(models)} models loaded"))
    except Exception:
        checks.append(("Ollama API", False, "not reachable — run: ollama serve"))

    # CodeGPT data files under ~/.codegpt
    data_dir = Path.home() / ".codegpt"
    checks.append(("Data directory", data_dir.exists(), str(data_dir)))

    profile = data_dir / "profiles" / "cli_profile.json"
    checks.append(("Profile", profile.exists(),
                   "configured" if profile.exists() else "run ai to setup"))

    mem = data_dir / "memory" / "memories.json"
    if mem.exists():
        import json
        try:
            count = len(json.loads(mem.read_text()))
            checks.append(("Memory", True, f"{count} entries"))
        except Exception:
            # Present but unreadable/corrupt JSON — still counts as existing.
            checks.append(("Memory", True, "exists"))
    else:
        checks.append(("Memory", True, "empty (normal)"))

    pin = data_dir / "security" / "pin.hash"
    checks.append(("PIN lock", pin.exists(), "enabled" if pin.exists() else "disabled"))

    # External tool binaries on PATH
    for name, bin_name in [("Claude Code", "claude"), ("GitHub CLI", "gh"),
                           ("Node.js", "node"), ("npm", "npm")]:
        ok = shutil.which(bin_name) is not None
        checks.append((name, ok, "found" if ok else "not found"))

    # Render the collected results.
    for name, ok, detail in checks:
        status = "[green]OK[/]" if ok else "[red]FAIL[/]"
        table.add_row(name, status, detail)

    console.print(table)
    console.print()

    fails = sum(1 for _, ok, _ in checks if not ok)
    if fails == 0:
        console.print("[bold green]All checks passed.[/]")
    else:
        console.print(f"[yellow]{fails} issue(s) found.[/]")
@@ -0,0 +1,191 @@
1
+ """Self-updating system — checks GitHub Releases for new versions."""
2
+ import os
3
+ import sys
4
+ import json
5
+ import shutil
6
+ import tempfile
7
+ import threading
8
+ from pathlib import Path
9
+
10
+ # Configure these for your repo
11
+ GITHUB_OWNER = "ArukuX"
12
+ GITHUB_REPO = "codegpt"
13
+ RELEASES_URL = f"https://api.github.com/repos/{GITHUB_OWNER}/{GITHUB_REPO}/releases/latest"
14
+ UPDATE_CHECK_FILE = Path.home() / ".codegpt" / "last_update_check"
15
+
16
+
17
def _get_current_version():
    """Return the installed CodeGPT version string (e.g. '1.0.0')."""
    # Imported lazily to avoid a circular import at module load time.
    from ai_cli import __version__ as version
    return version
20
+
21
+
22
+ def _parse_version(v):
23
+ """Parse 'v1.2.3' or '1.2.3' into tuple."""
24
+ v = v.lstrip("v").strip()
25
+ parts = v.split(".")
26
+ return tuple(int(p) for p in parts if p.isdigit())
27
+
28
+
29
def _should_check():
    """Rate-limiter: return True when no check was recorded in the last hour."""
    import time
    try:
        stamp = float(UPDATE_CHECK_FILE.read_text().strip())
    except Exception:
        return True  # stamp missing or corrupt — allow the check
    # Fresh stamp (< 1 hour old) suppresses the check.
    return time.time() - stamp >= 3600
40
+
41
+
42
def _save_check_time():
    """Record 'now' as the last update-check time; failures are ignored."""
    import time
    stamp = str(time.time())
    try:
        UPDATE_CHECK_FILE.parent.mkdir(parents=True, exist_ok=True)
        UPDATE_CHECK_FILE.write_text(stamp)
    except Exception:
        # Best-effort persistence — a failed write just means we check again sooner.
        pass
49
+
50
+
51
def _fetch_latest():
    """Return the latest-release JSON from GitHub, or None on any failure."""
    import requests
    headers = {"Accept": "application/vnd.github.v3+json"}
    try:
        resp = requests.get(RELEASES_URL, timeout=5, headers=headers)
        if resp.status_code == 200:
            return resp.json()
    except Exception:
        # Offline / DNS failure / malformed body — caller treats None as "unknown".
        pass
    return None
61
+
62
+
63
+ def _is_frozen():
64
+ """Check if running as PyInstaller exe."""
65
+ return getattr(sys, 'frozen', False)
66
+
67
+
68
def check_for_update():
    """Non-blocking startup check: record any newer GitHub release.

    At most one network check per hour (see _should_check). Runs in a
    daemon thread so it can never delay or crash startup; a newer release
    is written to ~/.codegpt/update_available.json for later notification.
    """
    if not _should_check():
        return

    def _check():
        try:
            _save_check_time()
            release = _fetch_latest()
            if not release:
                return

            latest_tag = release.get("tag_name", "")
            if _parse_version(latest_tag) <= _parse_version(_get_current_version()):
                return

            # Store update info for the next prompt to surface.
            update_file = Path.home() / ".codegpt" / "update_available.json"
            # Fix: directory may not exist yet on a fresh install.
            update_file.parent.mkdir(parents=True, exist_ok=True)
            update_file.write_text(json.dumps({
                "version": latest_tag,
                "current": _get_current_version(),
                "url": release.get("html_url", ""),
                "assets": [
                    {"name": a["name"], "url": a["browser_download_url"]}
                    for a in release.get("assets", [])
                    if a["name"].endswith(".exe")
                ],
            }, indent=2))
        except Exception:
            # Fix: an unexpected error (missing asset keys, unwritable disk)
            # must not dump a traceback from the background thread.
            pass

    # Run in background thread — never block startup.
    t = threading.Thread(target=_check, daemon=True)
    t.start()
100
+
101
+
102
def get_pending_update():
    """Return the stored update notification dict, or None.

    Only returns data when the recorded version is still newer than the
    currently running version; any read/parse problem yields None.
    """
    update_file = Path.home() / ".codegpt" / "update_available.json"
    if not update_file.exists():
        return None
    try:
        info = json.loads(update_file.read_text())
        newer = _parse_version(info["version"]) > _parse_version(_get_current_version())
    except Exception:
        return None  # unreadable/corrupt notification — treat as absent
    return info if newer else None
115
+
116
+
117
def force_update():
    """Download and install the latest release (`ai update`).

    From a frozen exe the running binary is renamed to *.bak and replaced
    in place; from source the exe is only downloaded and its location
    reported. All network/filesystem failures are printed, never raised.
    """
    import requests
    from rich.console import Console
    from rich.panel import Panel
    from rich.text import Text

    console = Console()

    console.print(Panel(
        Text("Checking for updates...", style="bold"),
        border_style="bright_cyan",
    ))

    release = _fetch_latest()
    if not release:
        console.print("[red]Cannot reach GitHub. Check your internet.[/]")
        return

    latest_tag = release.get("tag_name", "")
    current = _get_current_version()

    if _parse_version(latest_tag) <= _parse_version(current):
        console.print(f"[green]Already up to date (v{current})[/]")
        return

    # Find the exe asset attached to the release.
    exe_assets = [a for a in release.get("assets", []) if a["name"].endswith(".exe")]
    if not exe_assets:
        console.print("[yellow]No exe found in release. Update manually.[/]")
        console.print(f" {release.get('html_url', '')}")
        return

    asset = exe_assets[0]
    console.print(f" Downloading {asset['name']} ({latest_tag})...")

    tmp_path = None  # set once the temp file exists; cleared when handed off
    try:
        resp = requests.get(asset["browser_download_url"], stream=True, timeout=60)
        resp.raise_for_status()

        # Stream the download to a temp file.
        with tempfile.NamedTemporaryFile(delete=False, suffix=".exe") as tmp:
            tmp_path = tmp.name
            for chunk in resp.iter_content(chunk_size=8192):
                tmp.write(chunk)

        if _is_frozen():
            # Replace the running exe: rename current to .bak (Windows allows
            # renaming a running exe), then move the new one into place.
            current_exe = sys.executable
            backup = current_exe + ".bak"
            if os.path.exists(backup):
                os.remove(backup)
            os.rename(current_exe, backup)
            shutil.move(tmp_path, current_exe)
            tmp_path = None  # consumed — nothing to clean up

            console.print(Panel(
                Text(f"Updated: v{current} -> {latest_tag}\nRestart to use the new version.", style="green"),
                border_style="green",
            ))
        else:
            # Running from source — just report where the exe landed.
            console.print(Panel(
                Text(f"New version available: {latest_tag}\nDownloaded to: {tmp_path}", style="yellow"),
                border_style="yellow",
            ))
            tmp_path = None  # deliberately kept for the user

        # Clear the pending-update notification.
        update_file = Path.home() / ".codegpt" / "update_available.json"
        if update_file.exists():
            update_file.unlink()

    except Exception as e:
        console.print(f"[red]Update failed: {e}[/]")
        # Fix: remove the partial download so failed updates don't leak temp files.
        if tmp_path and os.path.exists(tmp_path):
            try:
                os.remove(tmp_path)
            except OSError:
                pass