nc1709-1.15.4-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nc1709/__init__.py +13 -0
- nc1709/agent/__init__.py +36 -0
- nc1709/agent/core.py +505 -0
- nc1709/agent/mcp_bridge.py +245 -0
- nc1709/agent/permissions.py +298 -0
- nc1709/agent/tools/__init__.py +21 -0
- nc1709/agent/tools/base.py +440 -0
- nc1709/agent/tools/bash_tool.py +367 -0
- nc1709/agent/tools/file_tools.py +454 -0
- nc1709/agent/tools/notebook_tools.py +516 -0
- nc1709/agent/tools/search_tools.py +322 -0
- nc1709/agent/tools/task_tool.py +284 -0
- nc1709/agent/tools/web_tools.py +555 -0
- nc1709/agents/__init__.py +17 -0
- nc1709/agents/auto_fix.py +506 -0
- nc1709/agents/test_generator.py +507 -0
- nc1709/checkpoints.py +372 -0
- nc1709/cli.py +3380 -0
- nc1709/cli_ui.py +1080 -0
- nc1709/cognitive/__init__.py +149 -0
- nc1709/cognitive/anticipation.py +594 -0
- nc1709/cognitive/context_engine.py +1046 -0
- nc1709/cognitive/council.py +824 -0
- nc1709/cognitive/learning.py +761 -0
- nc1709/cognitive/router.py +583 -0
- nc1709/cognitive/system.py +519 -0
- nc1709/config.py +155 -0
- nc1709/custom_commands.py +300 -0
- nc1709/executor.py +333 -0
- nc1709/file_controller.py +354 -0
- nc1709/git_integration.py +308 -0
- nc1709/github_integration.py +477 -0
- nc1709/image_input.py +446 -0
- nc1709/linting.py +519 -0
- nc1709/llm_adapter.py +667 -0
- nc1709/logger.py +192 -0
- nc1709/mcp/__init__.py +18 -0
- nc1709/mcp/client.py +370 -0
- nc1709/mcp/manager.py +407 -0
- nc1709/mcp/protocol.py +210 -0
- nc1709/mcp/server.py +473 -0
- nc1709/memory/__init__.py +20 -0
- nc1709/memory/embeddings.py +325 -0
- nc1709/memory/indexer.py +474 -0
- nc1709/memory/sessions.py +432 -0
- nc1709/memory/vector_store.py +451 -0
- nc1709/models/__init__.py +86 -0
- nc1709/models/detector.py +377 -0
- nc1709/models/formats.py +315 -0
- nc1709/models/manager.py +438 -0
- nc1709/models/registry.py +497 -0
- nc1709/performance/__init__.py +343 -0
- nc1709/performance/cache.py +705 -0
- nc1709/performance/pipeline.py +611 -0
- nc1709/performance/tiering.py +543 -0
- nc1709/plan_mode.py +362 -0
- nc1709/plugins/__init__.py +17 -0
- nc1709/plugins/agents/__init__.py +18 -0
- nc1709/plugins/agents/django_agent.py +912 -0
- nc1709/plugins/agents/docker_agent.py +623 -0
- nc1709/plugins/agents/fastapi_agent.py +887 -0
- nc1709/plugins/agents/git_agent.py +731 -0
- nc1709/plugins/agents/nextjs_agent.py +867 -0
- nc1709/plugins/base.py +359 -0
- nc1709/plugins/manager.py +411 -0
- nc1709/plugins/registry.py +337 -0
- nc1709/progress.py +443 -0
- nc1709/prompts/__init__.py +22 -0
- nc1709/prompts/agent_system.py +180 -0
- nc1709/prompts/task_prompts.py +340 -0
- nc1709/prompts/unified_prompt.py +133 -0
- nc1709/reasoning_engine.py +541 -0
- nc1709/remote_client.py +266 -0
- nc1709/shell_completions.py +349 -0
- nc1709/slash_commands.py +649 -0
- nc1709/task_classifier.py +408 -0
- nc1709/version_check.py +177 -0
- nc1709/web/__init__.py +8 -0
- nc1709/web/server.py +950 -0
- nc1709/web/templates/index.html +1127 -0
- nc1709-1.15.4.dist-info/METADATA +858 -0
- nc1709-1.15.4.dist-info/RECORD +86 -0
- nc1709-1.15.4.dist-info/WHEEL +5 -0
- nc1709-1.15.4.dist-info/entry_points.txt +2 -0
- nc1709-1.15.4.dist-info/licenses/LICENSE +9 -0
- nc1709-1.15.4.dist-info/top_level.txt +1 -0
@@ -0,0 +1,858 @@
Metadata-Version: 2.4
Name: nc1709
Version: 1.15.4
Summary: NC1709 - Your AI coding partner that brings your code to life
Home-page: https://github.com/yourusername/nc1709
Author: NC1709 Team
Author-email: Lafzusa Corp <support@lafzusa.com>
License: Proprietary License

Copyright (c) 2025 Lafzusa Corp. All rights reserved.

This software is proprietary and confidential. Unauthorized copying,
distribution, modification, or use of this software, via any medium,
is strictly prohibited without express written permission from Lafzusa Corp.

For licensing inquiries, contact: support@lafzusa.com

Project-URL: Homepage, https://github.com/nc1709/nc1709
Project-URL: Documentation, https://github.com/nc1709/nc1709#readme
Project-URL: Repository, https://github.com/nc1709/nc1709
Project-URL: Issues, https://github.com/nc1709/nc1709/issues
Keywords: ai,assistant,developer,cli,local,ollama,coding,productivity,llm,agent
Classifier: Development Status :: 4 - Beta
Classifier: Environment :: Console
Classifier: Intended Audience :: Developers
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Classifier: Topic :: Software Development
Classifier: Topic :: Software Development :: Code Generators
Classifier: Topic :: Utilities
Requires-Python: >=3.9
Description-Content-Type: text/markdown
License-File: LICENSE
Requires-Dist: litellm>=1.0.0
Requires-Dist: rich>=13.0.0
Requires-Dist: prompt_toolkit>=3.0.0
Requires-Dist: ddgs>=9.0.0
Requires-Dist: packaging>=21.0
Provides-Extra: memory
Requires-Dist: chromadb>=0.4.0; extra == "memory"
Requires-Dist: sentence-transformers>=2.2.0; extra == "memory"
Provides-Extra: web
Requires-Dist: fastapi>=0.100.0; extra == "web"
Requires-Dist: uvicorn>=0.23.0; extra == "web"
Provides-Extra: search
Requires-Dist: ddgs>=9.0.0; extra == "search"
Provides-Extra: notebook
Requires-Dist: nbconvert>=7.0.0; extra == "notebook"
Requires-Dist: nbformat>=5.0.0; extra == "notebook"
Provides-Extra: screenshot
Requires-Dist: playwright>=1.40.0; extra == "screenshot"
Provides-Extra: all
Requires-Dist: chromadb>=0.4.0; extra == "all"
Requires-Dist: sentence-transformers>=2.2.0; extra == "all"
Requires-Dist: fastapi>=0.100.0; extra == "all"
Requires-Dist: uvicorn>=0.23.0; extra == "all"
Requires-Dist: watchdog>=3.0.0; extra == "all"
Requires-Dist: ddgs>=9.0.0; extra == "all"
Requires-Dist: nbconvert>=7.0.0; extra == "all"
Requires-Dist: nbformat>=5.0.0; extra == "all"
Requires-Dist: playwright>=1.40.0; extra == "all"
Provides-Extra: dev
Requires-Dist: pytest>=7.0.0; extra == "dev"
Requires-Dist: pytest-asyncio>=0.21.0; extra == "dev"
Requires-Dist: httpx>=0.24.0; extra == "dev"
Dynamic: author
Dynamic: home-page
Dynamic: license-file
Dynamic: requires-python

# NC1709 - A Local-First AI Developer Assistant

<p align="center">
  <strong>Tools run locally. Intelligence from the cloud.</strong>
</p>

<p align="center">
  <a href="#installation">Installation</a> •
  <a href="#quick-start">Quick Start</a> •
  <a href="#features">Features</a> •
  <a href="#architecture">Architecture</a> •
  <a href="#remote-mode">Remote Mode</a> •
  <a href="#extensions">Extensions</a>
</p>

---

NC1709 is a powerful AI developer assistant with a **Claude Code-like architecture**. Tools execute locally on your machine while LLM inference happens on a remote server. Your files stay on your computer - only prompts and responses travel to the server.

## What's New in v1.8.0

- **New Architecture** - Tools execute locally, LLM runs remotely (like Claude Code)
- **Auto-Connect** - CLI automatically connects to the `nc1709.lafzusa.com` server
- **Server-Side Vector DB** - Code automatically indexed for smarter responses
- **Session Memory** - Conversation history persisted locally and sent for context
- **Local Tool Execution** - File ops, bash, search all run on YOUR machine
- **17 Built-in Tools** - Read, Write, Edit, Bash, Glob, Grep, WebSearch, and more

## Installation

### Quick Install (All Platforms)

```bash
pip install nc1709
```

### Platform-Specific Installation

<details>
<summary><b>🍎 macOS</b></summary>

#### 1. Install Python 3.9+
```bash
# Using Homebrew (recommended)
brew install python@3.11

# Or download from python.org
# https://www.python.org/downloads/macos/
```

#### 2. Install NC1709
```bash
# Basic installation
pip3 install nc1709

# With all features
pip3 install nc1709[all]

# Or with specific features
pip3 install nc1709[search,notebook,screenshot]
```

#### 3. Install Ollama
```bash
# Using Homebrew
brew install ollama

# Or download directly
curl -fsSL https://ollama.com/install.sh | sh
```

#### 4. Download Models
```bash
ollama pull qwen2.5-coder:32b
ollama pull qwen2.5:32b
```

#### 5. Enable Shell Completions (Optional)
```bash
# For Zsh (default on macOS)
echo 'eval "$(nc1709 --completion zsh)"' >> ~/.zshrc
source ~/.zshrc

# For Bash
echo 'eval "$(nc1709 --completion bash)"' >> ~/.bash_profile
source ~/.bash_profile
```

</details>

<details>
<summary><b>🐧 Linux (Ubuntu/Debian)</b></summary>

#### 1. Install Python 3.9+
```bash
sudo apt update
sudo apt install python3 python3-pip python3-venv
```

#### 2. Install NC1709
```bash
# Basic installation
pip3 install nc1709

# With all features
pip3 install nc1709[all]

# If you get permission errors, use --user
pip3 install --user nc1709[all]

# Or create a virtual environment (recommended)
python3 -m venv ~/.nc1709-venv
source ~/.nc1709-venv/bin/activate
pip install nc1709[all]
```

#### 3. Install Ollama
```bash
curl -fsSL https://ollama.com/install.sh | sh
```

#### 4. Start Ollama Service
```bash
# Start as a service (systemd)
sudo systemctl enable ollama
sudo systemctl start ollama

# Or run manually
ollama serve
```

#### 5. Download Models
```bash
ollama pull qwen2.5-coder:32b
ollama pull qwen2.5:32b
```

#### 6. Enable Shell Completions (Optional)
```bash
# For Bash
echo 'eval "$(nc1709 --completion bash)"' >> ~/.bashrc
source ~/.bashrc

# For Zsh
echo 'eval "$(nc1709 --completion zsh)"' >> ~/.zshrc
source ~/.zshrc

# For Fish
nc1709 --completion fish > ~/.config/fish/completions/nc1709.fish
```

</details>

<details>
<summary><b>🐧 Linux (Fedora/RHEL/CentOS)</b></summary>

#### 1. Install Python 3.9+
```bash
sudo dnf install python3 python3-pip
```

#### 2. Install NC1709
```bash
pip3 install --user nc1709[all]

# Add to PATH if needed
echo 'export PATH="$HOME/.local/bin:$PATH"' >> ~/.bashrc
source ~/.bashrc
```

#### 3. Install Ollama
```bash
curl -fsSL https://ollama.com/install.sh | sh
```

#### 4. Start Ollama Service
```bash
sudo systemctl enable ollama
sudo systemctl start ollama
```

#### 5. Download Models
```bash
ollama pull qwen2.5-coder:32b
ollama pull qwen2.5:32b
```

</details>

<details>
<summary><b>🐧 Linux (Arch)</b></summary>

#### 1. Install Python and Dependencies
```bash
sudo pacman -S python python-pip
```

#### 2. Install NC1709
```bash
pip install --user nc1709[all]
```

#### 3. Install Ollama
```bash
# From AUR
yay -S ollama

# Or official installer
curl -fsSL https://ollama.com/install.sh | sh
```

#### 4. Start Ollama
```bash
sudo systemctl enable ollama
sudo systemctl start ollama
```

#### 5. Download Models
```bash
ollama pull qwen2.5-coder:32b
ollama pull qwen2.5:32b
```

</details>

<details>
<summary><b>🪟 Windows</b></summary>

#### Option A: Native Windows (Recommended for beginners)

##### 1. Install Python 3.9+
- Download from [python.org](https://www.python.org/downloads/windows/)
- **Important**: Check "Add Python to PATH" during installation

##### 2. Install NC1709
Open Command Prompt or PowerShell:
```powershell
pip install nc1709

# With all features
pip install nc1709[all]
```

##### 3. Install Ollama
- Download from [ollama.com/download/windows](https://ollama.com/download/windows)
- Run the installer
- Ollama will start automatically

##### 4. Download Models
Open Command Prompt:
```powershell
ollama pull qwen2.5-coder:32b
ollama pull qwen2.5:32b
```

##### 5. Run NC1709
```powershell
nc1709
```

#### Option B: WSL2 (Recommended for advanced users)

##### 1. Enable WSL2
Open PowerShell as Administrator:
```powershell
wsl --install
```
Restart your computer.

##### 2. Install Ubuntu
```powershell
wsl --install -d Ubuntu
```

##### 3. Follow Linux (Ubuntu/Debian) instructions above

**Note**: WSL2 provides better performance and full Linux compatibility.

</details>

<details>
<summary><b>🐳 Docker</b></summary>

#### Using Docker (Any Platform)

```bash
# Pull and run (CPU only)
docker run -it --rm \
  -v $(pwd):/workspace \
  -w /workspace \
  python:3.11-slim \
  bash -c "pip install nc1709 && nc1709"

# With GPU support (NVIDIA)
docker run -it --rm --gpus all \
  -v $(pwd):/workspace \
  -w /workspace \
  python:3.11-slim \
  bash -c "pip install nc1709 && nc1709"
```

**Note**: You'll still need Ollama running on the host machine. Set `OLLAMA_HOST` to connect:
```bash
docker run -it --rm \
  -e OLLAMA_HOST=host.docker.internal:11434 \
  -v $(pwd):/workspace \
  python:3.11-slim \
  bash -c "pip install nc1709 && nc1709"
```

</details>

### Installation Options

```bash
# Basic - core functionality
pip install nc1709

# With web dashboard
pip install nc1709[web]

# With memory features (semantic search, ChromaDB)
pip install nc1709[memory]

# With web search (DuckDuckGo, Brave)
pip install nc1709[search]

# With Jupyter notebook support
pip install nc1709[notebook]

# With web screenshots (Playwright)
pip install nc1709[screenshot]

# Everything included
pip install nc1709[all]

# Development dependencies
pip install nc1709[dev]
```

### Verify Installation

```bash
# Check version
nc1709 --version

# Check Ollama connection
nc1709 --config

# Start interactive mode
nc1709
```

### Prerequisites Summary

**For Remote Users (connecting to a server):**

| Component | Required | Purpose |
|-----------|----------|---------|
| Python 3.9+ | ✅ Yes | Runtime |
| pip | ✅ Yes | Package installer |
| Internet | ✅ Yes | Connect to server |

**For Self-Hosted / Local Mode:**

| Component | Required | Purpose |
|-----------|----------|---------|
| Python 3.9+ | ✅ Yes | Runtime |
| Ollama | ✅ Yes | Local LLM server |
| pip | ✅ Yes | Package installer |
| NVIDIA GPU | ❌ Optional | Faster inference |
| 16GB+ RAM | ✅ Recommended | Model loading |

## Quick Start

```bash
# Interactive shell mode
nc1709

# Direct command
nc1709 "create a Python script to fetch JSON from an API"

# Start web dashboard
nc1709 --web

# Auto-fix errors in a file
nc1709 --fix main.py

# Generate tests for a file
nc1709 --generate-tests utils.py
```

## Features

### Core Capabilities
- **Chat Interface** - Conversational AI for coding help
- **File Operations** - Read, write, edit files safely with auto-backup
- **Command Execution** - Run shell commands in a sandboxed environment
- **Multi-Step Reasoning** - Complex tasks broken into manageable steps
- **Smart Task Classification** - Automatic model selection based on task type

### Memory & Context
- **Semantic Code Search** - Find code by meaning, not just keywords
- **Project Indexing** - Index your codebase for intelligent search
- **Session Persistence** - Save and resume conversations

```bash
# Index your project
nc1709 --index

# Semantic search
nc1709 --search "authentication logic"

# Resume a session
nc1709 --sessions
nc1709 --resume <session-id>
```

### AI Agents

**Auto-Fix Agent** - Automatically detects and fixes code errors:
```bash
nc1709 --fix src/main.py          # Analyze and suggest fixes
nc1709 --fix src/main.py --apply  # Auto-apply fixes
```

**Test Generator** - Generates unit tests for your code:
```bash
nc1709 --generate-tests utils.py  # Generate tests
nc1709 --generate-tests utils.py --output tests/test_utils.py
```

### Plugins & Agents
- **Git Agent** - Commits, branches, diffs, and more
- **Docker Agent** - Container and image management
- **Framework Agents** - FastAPI, Next.js, Django scaffolding
- **MCP Support** - Model Context Protocol integration

```bash
# In shell mode
git status
git diff
docker ps
docker compose up

# Or via CLI
nc1709 --plugin git:status
nc1709 --plugin docker:ps
```

### Web Dashboard
A full browser-based interface for NC1709:

```bash
nc1709 --web
# Open http://localhost:8709
```

Features:
- Chat with syntax highlighting
- Session management
- Semantic code search UI
- Plugin management
- MCP tools browser
- Configuration editor

## Architecture

NC1709 v1.8.0 uses a **split architecture** similar to Claude Code:

```
┌─────────────────────────────────────┐     ┌──────────────────────────────────┐
│ Your Machine (CLI)                  │     │ nc1709.lafzusa.com (Server)      │
│                                     │     │                                  │
│ ✅ Tools execute HERE               │     │ ✅ LLM inference HERE            │
│ • Read/Write/Edit files             │◀───▶│ • Ollama models                  │
│ • Run bash commands                 │     │ • Reasoning engine               │
│ • Search code (grep/glob)           │     │ • Vector DB (code indexing)      │
│ • Web search/fetch                  │     │                                  │
│                                     │     │                                  │
│ 📁 Your files STAY HERE             │     │ 🧠 Only "thinking" happens here  │
└─────────────────────────────────────┘     └──────────────────────────────────┘
```

### How It Works

1. You run `nc1709` → Auto-connects to server
2. You type a prompt → Sent to server for LLM processing
3. Server returns tool instructions → CLI executes locally
4. Results sent back → LLM continues until task complete

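The same loop, sketched in Python. The `server.send()` transport and the JSON tool-call shape below are hypothetical stand-ins for the package's internal protocol (which lives in `nc1709/agent/` and `nc1709/remote_client.py`), kept only to make the four steps concrete; note that steps 3 and 4 repeat until the server stops requesting tools.

```python
# Minimal sketch of the client-side agent loop. The `server` object and
# the JSON field names are illustrative assumptions, not the real protocol.
import subprocess

def run_tool(call: dict) -> str:
    """Execute one server-requested tool on the local machine."""
    if call["tool"] == "read_file":
        with open(call["path"], encoding="utf-8") as f:
            return f.read()
    if call["tool"] == "bash":
        proc = subprocess.run(call["command"], shell=True,
                              capture_output=True, text=True)
        return proc.stdout + proc.stderr
    raise ValueError(f"unsupported tool: {call['tool']}")

def agent_loop(server, prompt: str) -> str:
    """Steps 2-4: send the prompt, run requested tools locally, repeat."""
    reply = server.send({"prompt": prompt})
    while reply.get("tool_calls"):                      # server wants tool runs
        results = [run_tool(c) for c in reply["tool_calls"]]
        reply = server.send({"tool_results": results})  # only results leave
    return reply["text"]                                # final answer
```
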
### Data Flow

- **To Server**: Your prompts, tool execution results
- **Stays Local**: Your files, bash commands, environment
- **Auto-Indexed**: Files you work with get indexed on the server for better context

## Remote Mode

### Default (Recommended)

NC1709 automatically connects to the hosted server:

```bash
# Just install and run - no configuration needed!
pip install nc1709
nc1709
```

The CLI connects to `https://nc1709.lafzusa.com` by default.

### Self-Hosted Server

Want to run your own server?

#### Server Setup
```bash
# On your server
pip install nc1709[all,memory]

# Install Ollama and models
ollama pull qwen2.5-coder:32b

# Start server with remote access enabled
nc1709 --web --serve --port 8709

# Set API key in config (~/.nc1709/config.json)
{
  "remote": {
    "api_key": "your-secret-key"
  }
}
```

#### Expose to Internet
```bash
# Using Cloudflare Tunnel (recommended)
cloudflared tunnel --url http://localhost:8709

# Or ngrok
ngrok http 8709
```

#### Client Setup
```bash
# Users install nc1709
pip install nc1709

# Point to your server
export NC1709_API_URL="https://your-server.com"
export NC1709_API_KEY="your-secret-key"

# Use normally
nc1709 "explain this code"
```
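
For illustration, a request authenticated with those two variables might look like the sketch below; the `/api/chat` path, bearer-token scheme, and response shape are assumptions for the sketch, not the documented NC1709 API:

```python
# Hypothetical client request using NC1709_API_URL and NC1709_API_KEY.
# Endpoint path, auth scheme, and payload shape are illustrative guesses.
import json
import os
import urllib.request

url = os.environ["NC1709_API_URL"] + "/api/chat"  # assumed endpoint
req = urllib.request.Request(
    url,
    data=json.dumps({"prompt": "explain this code"}).encode("utf-8"),
    headers={
        "Authorization": f"Bearer {os.environ['NC1709_API_KEY']}",
        "Content-Type": "application/json",
    },
)
with urllib.request.urlopen(req) as resp:
    print(json.load(resp))  # response shape depends on the server
```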

### Local-Only Mode

Force local mode (requires Ollama installed):

```bash
nc1709 --local
```

## Extensions

### VS Code Extension
Full IDE integration with:
- Chat sidebar panel
- Inline code completions (like GitHub Copilot)
- Right-click code actions (explain, refactor, test, fix)
- Keyboard shortcuts

```bash
cd vscode-extension
npm install && npm run package
# Install the .vsix file in VS Code
```

### Desktop App
Native Electron app with:
- System tray integration
- Automatic server management
- Dark/light mode support

```bash
cd desktop-app
npm install && npm start
```

## Shell Commands

In interactive mode:

```
help          Show available commands
exit          Exit the shell
clear         Clear conversation history
sessions      List saved sessions
search <q>    Semantic code search
index         Index current project
plugins       List available plugins
git <cmd>     Git operations
docker <cmd>  Docker operations
mcp           MCP status and tools
fix <file>    Auto-fix errors
test <file>   Generate tests
```

## Configuration

Config file: `~/.nc1709/config.json`

```bash
nc1709 --config  # View configuration
```

```json
{
  "models": {
    "reasoning": "deepseek-r1:latest",
    "coding": "qwen2.5-coder:32b",
    "general": "qwen2.5:32b",
    "fast": "qwen2.5-coder:7b"
  },
  "safety": {
    "confirm_writes": true,
    "auto_backup": true
  },
  "remote": {
    "api_key": "your-secret-key"
  }
}
```
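
These model roles feed the smart task classification mentioned under Core Capabilities: a classified task type presumably selects one of the configured models. A minimal sketch of that lookup, assuming the config shape above (`pick_model` is illustrative, not a function the package exports):

```python
# Illustrative only: resolve a model name from ~/.nc1709/config.json by
# task type. NC1709's real classifier lives in nc1709/task_classifier.py.
import json
from pathlib import Path

CONFIG_PATH = Path.home() / ".nc1709" / "config.json"

def pick_model(task_type: str) -> str:
    models = json.loads(CONFIG_PATH.read_text())["models"]
    # Unknown task types fall back to the general-purpose model.
    return models.get(task_type, models["general"])

print(pick_model("coding"))     # -> "qwen2.5-coder:32b"
print(pick_model("reasoning"))  # -> "deepseek-r1:latest"
```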

## CLI Reference

```bash
nc1709 [prompt]                 # Direct command or start shell
nc1709 --shell                  # Interactive shell mode
nc1709 --web                    # Start web dashboard
nc1709 --web --port 9000        # Custom port
nc1709 --web --serve            # Enable remote access

# AI Agents
nc1709 --fix <file>             # Auto-fix code errors
nc1709 --generate-tests <file>  # Generate unit tests

# Memory features
nc1709 --index                  # Index project
nc1709 --search "query"         # Semantic search
nc1709 --sessions               # List sessions
nc1709 --resume <id>            # Resume session

# Plugins
nc1709 --plugins                # List plugins
nc1709 --plugin git:status

# Remote mode
nc1709 --remote <url>           # Connect to remote server
nc1709 --api-key <key>          # API key for remote

# Shell completions
nc1709 --completion bash        # Generate bash completions
nc1709 --completion zsh         # Generate zsh completions
nc1709 --completion fish        # Generate fish completions

# Info
nc1709 --version                # Show version
nc1709 --config                 # Show configuration
nc1709 --help                   # Show help
```

## Installation Options

```bash
# Basic installation
pip install nc1709

# With memory features (ChromaDB, embeddings)
pip install nc1709[memory]

# With web dashboard
pip install nc1709[web]

# Everything
pip install nc1709[all]

# Development
pip install nc1709[dev]
```

## System Requirements

- **Python**: 3.9+
- **RAM**: 16GB minimum, 32GB recommended
- **GPU**: NVIDIA with 12GB+ VRAM (optional, CPU works but slower)
- **Storage**: ~50GB for models
- **OS**: macOS, Linux, Windows (WSL2)

## Project Structure

```
nc1709/
├── nc1709/
│   ├── cli.py                 # Main CLI
│   ├── config.py              # Configuration
│   ├── llm_adapter.py         # LLM integration
│   ├── reasoning_engine.py    # Multi-step reasoning
│   ├── task_classifier.py     # Smart task classification
│   ├── progress.py            # Progress indicators
│   ├── shell_completions.py   # Shell completions
│   ├── file_controller.py     # File operations
│   ├── executor.py            # Command execution
│   ├── remote_client.py       # Remote mode client
│   ├── memory/                # Vector DB, sessions, indexing
│   ├── plugins/               # Plugin system & agents
│   ├── agents/                # AI agents (auto-fix, test-gen)
│   ├── mcp/                   # Model Context Protocol
│   └── web/                   # Web dashboard
├── vscode-extension/          # VS Code extension
├── desktop-app/               # Electron desktop app
├── docs/                      # Documentation
├── tests/                     # Test suite
├── pyproject.toml             # Package config
└── README.md
```

## Privacy & Security

- **Tools Run Locally**: All file operations happen on YOUR machine
- **Files Stay Local**: Your code never leaves your computer
- **Server-Side**: Only prompts and tool results are sent to the server
- **Auto-Backup**: Files backed up before modification (see the sketch after this list)
- **Sandboxed Execution**: Commands validated before running
- **Confirmation Prompts**: Ask before destructive operations
- **API Key Auth**: Secure remote access with authentication
- **Session Memory**: Stored locally at `~/.nc1709/sessions/`
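
As a concrete example of the auto-backup guarantee, writing a file safely amounts to copying it aside first. A minimal sketch of the pattern, with an assumed `.bak` naming scheme (NC1709's actual backup location and naming may differ):

```python
# Illustrative backup-before-write, mirroring the documented auto_backup
# behavior. The ".bak" sibling file is an assumption for this sketch.
import shutil
from pathlib import Path

def safe_write(path: str, new_content: str) -> None:
    target = Path(path)
    if target.exists():
        # Keep a copy of the old version before overwriting.
        shutil.copy2(target, target.with_name(target.name + ".bak"))
    target.write_text(new_content)

safe_write("main.py", "print('hello')\n")  # old version kept as main.py.bak
```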

## Troubleshooting

### "Cannot connect to Ollama"
```bash
ollama serve  # Start Ollama
```

### "Model not found"
```bash
ollama pull qwen2.5-coder:32b
```

### Slow performance
- Use a GPU: run `nvidia-smi` to verify one is detected
- Try a smaller model, such as `qwen2.5-coder:7b`
- Clear conversation history with `clear` in shell mode

### Shell completions not working
```bash
# Bash
echo 'eval "$(nc1709 --completion bash)"' >> ~/.bashrc
source ~/.bashrc

# Zsh
echo 'eval "$(nc1709 --completion zsh)"' >> ~/.zshrc
source ~/.zshrc

# Fish
nc1709 --completion fish > ~/.config/fish/completions/nc1709.fish
```

## Contributing

Contributions welcome! See [CONTRIBUTING.md](CONTRIBUTING.md).

## License

MIT License - See [LICENSE](LICENSE) file.

## Acknowledgments

Built with [Ollama](https://ollama.com/), [LiteLLM](https://github.com/BerriAI/litellm), and open-source models from DeepSeek and Qwen.

---

**Built for developers who value privacy and control.**