ngpt 1.1.1-py3-none-any.whl → 1.1.3-py3-none-any.whl

This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
ngpt/cli.py CHANGED
@@ -35,11 +35,10 @@ def show_config_help():
      print(" 3. Or set environment variables:")
      print(" - OPENAI_API_KEY")
      print(" - OPENAI_BASE_URL")
-     print(" - OPENAI_PROVIDER")
      print(" - OPENAI_MODEL")

      print(" 4. Or provide command line arguments:")
-     print(" ngpt --api-key your-key --base-url https://api.example.com \"Your prompt\"")
+     print(" ngpt --api-key your-key --base-url https://api.example.com --model your-model \"Your prompt\"")

      print(" 5. Use --config-index to specify which configuration to use:")
      print(" ngpt --config-index 1 \"Your prompt\"")
@@ -171,8 +170,12 @@ def main():
          # Handle modes
          if args.shell:
              if args.prompt is None:
-                 print("Enter shell command description: ", end='')
-                 prompt = input()
+                 try:
+                     print("Enter shell command description: ", end='')
+                     prompt = input()
+                 except KeyboardInterrupt:
+                     print("\nInput cancelled by user. Exiting gracefully.")
+                     sys.exit(130)
              else:
                  prompt = args.prompt

@@ -182,20 +185,33 @@ def main():

              print(f"\nGenerated command: {command}")

-             print("Do you want to execute this command? [y/N] ", end='')
-             response = input().lower()
+             try:
+                 print("Do you want to execute this command? [y/N] ", end='')
+                 response = input().lower()
+             except KeyboardInterrupt:
+                 print("\nCommand execution cancelled by user.")
+                 return
+
              if response == 'y' or response == 'yes':
                  import subprocess
                  try:
-                     result = subprocess.run(command, shell=True, check=True, capture_output=True, text=True)
-                     print(f"\nOutput:\n{result.stdout}")
+                     try:
+                         print("\nExecuting command... (Press Ctrl+C to cancel)")
+                         result = subprocess.run(command, shell=True, check=True, capture_output=True, text=True)
+                         print(f"\nOutput:\n{result.stdout}")
+                     except KeyboardInterrupt:
+                         print("\nCommand execution cancelled by user.")
                  except subprocess.CalledProcessError as e:
                      print(f"\nError:\n{e.stderr}")

          elif args.code:
              if args.prompt is None:
-                 print("Enter code description: ", end='')
-                 prompt = input()
+                 try:
+                     print("Enter code description: ", end='')
+                     prompt = input()
+                 except KeyboardInterrupt:
+                     print("\nInput cancelled by user. Exiting gracefully.")
+                     sys.exit(130)
              else:
                  prompt = args.prompt

@@ -206,16 +222,23 @@ def main():
          else:
              # Default to chat mode
              if args.prompt is None:
-                 print("Enter your prompt: ", end='')
-                 prompt = input()
+                 try:
+                     print("Enter your prompt: ", end='')
+                     prompt = input()
+                 except KeyboardInterrupt:
+                     print("\nInput cancelled by user. Exiting gracefully.")
+                     sys.exit(130)
              else:
                  prompt = args.prompt
              client.chat(prompt, web_search=args.web_search)

      except KeyboardInterrupt:
-         print("\nOperation cancelled by user.")
+         print("\nOperation cancelled by user. Exiting gracefully.")
+         # Make sure we exit with a non-zero status code to indicate the operation was cancelled
+         sys.exit(130)  # 130 is the standard exit code for SIGINT (Ctrl+C)
      except Exception as e:
          print(f"Error: {e}")
+         sys.exit(1)  # Exit with error code

  if __name__ == "__main__":
      main()
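
The cli.py changes above all apply one pattern: every blocking `input()` call and the confirmed command execution are wrapped in `try`/`except KeyboardInterrupt`, printing a notice and then either returning or exiting with status 130 (128 + SIGINT). A minimal standalone sketch of that pattern follows; the function names and prompt strings here are illustrative, not taken from ngpt:

```python
import subprocess
import sys

def read_prompt(message: str) -> str:
    """Read interactive input; exit with 130 (the SIGINT convention) on Ctrl+C."""
    try:
        return input(message)
    except KeyboardInterrupt:
        print("\nInput cancelled by user. Exiting gracefully.")
        sys.exit(130)  # 128 + SIGINT(2)

def confirm_and_run(command: str) -> None:
    """Ask for confirmation, then run the command; Ctrl+C only cancels this step."""
    try:
        answer = input(f"Execute '{command}'? [y/N] ").strip().lower()
    except KeyboardInterrupt:
        print("\nCommand execution cancelled by user.")
        return
    if answer in ("y", "yes"):
        try:
            result = subprocess.run(command, shell=True, check=True,
                                    capture_output=True, text=True)
            print(result.stdout)
        except KeyboardInterrupt:
            print("\nCommand execution cancelled by user.")
        except subprocess.CalledProcessError as exc:
            print(f"Error:\n{exc.stderr}")

if __name__ == "__main__":
    confirm_and_run(read_prompt("Shell command to run: "))
```

Exiting with 130 lets scripts that call the CLI distinguish a user-initiated Ctrl+C from a normal exit (0) or an error (1).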
ngpt/client.py CHANGED
@@ -82,43 +82,51 @@ class NGPTClient:
          try:
              if not stream:
                  # Regular request
-                 response = requests.post(url, headers=self.headers, json=payload)
-                 response.raise_for_status()  # Raise exception for HTTP errors
-                 result = response.json()
-
-                 # Extract content from response
-                 if "choices" in result and len(result["choices"]) > 0:
-                     return result["choices"][0]["message"]["content"]
-                 return ""
+                 try:
+                     response = requests.post(url, headers=self.headers, json=payload)
+                     response.raise_for_status()  # Raise exception for HTTP errors
+                     result = response.json()
+
+                     # Extract content from response
+                     if "choices" in result and len(result["choices"]) > 0:
+                         return result["choices"][0]["message"]["content"]
+                     return ""
+                 except KeyboardInterrupt:
+                     print("\nRequest cancelled by user.")
+                     return ""
              else:
                  # Streaming request
                  collected_content = ""
                  with requests.post(url, headers=self.headers, json=payload, stream=True) as response:
                      response.raise_for_status()  # Raise exception for HTTP errors

-                     for line in response.iter_lines():
-                         if not line:
-                             continue
-
-                         # Handle SSE format
-                         line = line.decode('utf-8')
-                         if line.startswith('data: '):
-                             line = line[6:]  # Remove 'data: ' prefix
-
-                         # Skip keep-alive lines
-                         if line == "[DONE]":
-                             break
+                     try:
+                         for line in response.iter_lines():
+                             if not line:
+                                 continue
+
+                             # Handle SSE format
+                             line = line.decode('utf-8')
+                             if line.startswith('data: '):
+                                 line = line[6:]  # Remove 'data: ' prefix

-                         try:
-                             chunk = json.loads(line)
-                             if "choices" in chunk and len(chunk["choices"]) > 0:
-                                 delta = chunk["choices"][0].get("delta", {})
-                                 content = delta.get("content", "")
-                                 if content:
-                                     print(content, end="", flush=True)
-                                     collected_content += content
-                         except json.JSONDecodeError:
-                             pass  # Skip invalid JSON
+                             # Skip keep-alive lines
+                             if line == "[DONE]":
+                                 break
+
+                             try:
+                                 chunk = json.loads(line)
+                                 if "choices" in chunk and len(chunk["choices"]) > 0:
+                                     delta = chunk["choices"][0].get("delta", {})
+                                     content = delta.get("content", "")
+                                     if content:
+                                         print(content, end="", flush=True)
+                                         collected_content += content
+                             except json.JSONDecodeError:
+                                 pass  # Skip invalid JSON
+                     except KeyboardInterrupt:
+                         print("\nGeneration cancelled by user.")
+                         return collected_content

                  print()  # Add a final newline
                  return collected_content
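
In client.py, the notable detail is that a Ctrl+C during streaming does not discard output: the `KeyboardInterrupt` handler prints a notice and returns whatever text has already been collected. A rough sketch of that behaviour against a generic OpenAI-style SSE stream is below; the URL, headers, and payload are placeholders rather than ngpt's actual request construction:

```python
import json
import requests

def stream_completion(url: str, headers: dict, payload: dict) -> str:
    """Stream an OpenAI-style chat completion; return partial text if interrupted."""
    collected = ""
    with requests.post(url, headers=headers, json=payload, stream=True) as response:
        response.raise_for_status()
        try:
            for raw in response.iter_lines():
                if not raw:
                    continue
                line = raw.decode("utf-8")
                if line.startswith("data: "):
                    line = line[6:]        # strip the SSE 'data: ' prefix
                if line == "[DONE]":       # end-of-stream sentinel
                    break
                try:
                    chunk = json.loads(line)
                except json.JSONDecodeError:
                    continue               # skip keep-alives / malformed lines
                choices = chunk.get("choices") or []
                if choices:
                    content = choices[0].get("delta", {}).get("content", "")
                    if content:
                        print(content, end="", flush=True)
                        collected += content
        except KeyboardInterrupt:
            print("\nGeneration cancelled by user.")
            return collected               # keep whatever was already streamed
    print()
    return collected
```

The non-streaming branch gets the same `except KeyboardInterrupt` treatment, but since there is no partial content to salvage it simply returns an empty string.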
ngpt/config.py CHANGED
@@ -56,22 +56,26 @@ def add_config_entry(config_path: Path, config_index: Optional[int] = None) -> N

      # Interactive configuration
      print("Enter configuration details (press Enter to use default values):")
-     new_entry["api_key"] = input(f"API Key: ") or new_entry["api_key"]
-     new_entry["base_url"] = input(f"Base URL [{new_entry['base_url']}]: ") or new_entry["base_url"]
-     new_entry["provider"] = input(f"Provider [{new_entry['provider']}]: ") or new_entry["provider"]
-     new_entry["model"] = input(f"Model [{new_entry['model']}]: ") or new_entry["model"]
-
-     # Add or update the entry
-     if config_index is not None and config_index < len(configs):
-         configs[config_index] = new_entry
-         print(f"Updated configuration at index {config_index}")
-     else:
-         configs.append(new_entry)
-         print(f"Added new configuration at index {len(configs)-1}")
-
-     # Save the updated configs
-     with open(config_path, "w") as f:
-         json.dump(configs, f, indent=2)
+     try:
+         new_entry["api_key"] = input(f"API Key: ") or new_entry["api_key"]
+         new_entry["base_url"] = input(f"Base URL [{new_entry['base_url']}]: ") or new_entry["base_url"]
+         new_entry["provider"] = input(f"Provider [{new_entry['provider']}]: ") or new_entry["provider"]
+         new_entry["model"] = input(f"Model [{new_entry['model']}]: ") or new_entry["model"]
+
+         # Add or update the entry
+         if config_index is not None and config_index < len(configs):
+             configs[config_index] = new_entry
+             print(f"Updated configuration at index {config_index}")
+         else:
+             configs.append(new_entry)
+             print(f"Added new configuration at index {len(configs)-1}")
+
+         # Save the updated configs
+         with open(config_path, "w") as f:
+             json.dump(configs, f, indent=2)
+     except KeyboardInterrupt:
+         print("\nConfiguration cancelled by user. Exiting.")
+         sys.exit(130)  # Exit with standard keyboard interrupt code

  def load_configs(custom_path: Optional[str] = None) -> List[Dict[str, Any]]:
      """
ngpt-1.1.1.dist-info/METADATA → ngpt-1.1.3.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ngpt
- Version: 1.1.1
+ Version: 1.1.3
  Summary: A lightweight Python CLI and library for interacting with OpenAI-compatible APIs, supporting both official and self-hosted LLM endpoints.
  Project-URL: Homepage, https://github.com/nazdridoy/ngpt
  Project-URL: Repository, https://github.com/nazdridoy/ngpt
@@ -33,19 +33,51 @@ Description-Content-Type: text/markdown

  # nGPT

+ [![PyPI version](https://img.shields.io/pypi/v/ngpt.svg)](https://pypi.org/project/ngpt/)
+ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
+ [![Python Versions](https://img.shields.io/pypi/pyversions/ngpt.svg)](https://pypi.org/project/ngpt/)
+
  A lightweight Python CLI and library for interacting with OpenAI-compatible APIs, supporting both official and self-hosted LLM endpoints.

+ ## Table of Contents
+ - [Quick Start](#quick-start)
+ - [Features](#features)
+ - [Installation](#installation)
+ - [Usage](#usage)
+   - [CLI Tool](#as-a-cli-tool)
+   - [Python Library](#as-a-library)
+ - [Configuration](#configuration)
+   - [Command Line Options](#command-line-options)
+   - [Configuration File](#configuration-file)
+   - [Configuration Priority](#configuration-priority)
+ - [License](#license)
+
+ ## Quick Start
+
+ ```bash
+ # Install
+ pip install ngpt
+
+ # Chat with default settings
+ ngpt "Tell me about quantum computing"
+
+ # Generate code
+ ngpt --code "function to calculate the Fibonacci sequence"
+
+ # Generate and execute shell commands
+ ngpt --shell "list all files in the current directory"
+ ```
+
  ## Features

- - Dual mode: Use as a CLI tool or import as a library
- - Minimal dependencies
- - Customizable API endpoints and providers
- - Streaming responses
- - Web search capability (supported by compatible API endpoints)
- - Cross-platform configuration system
- - Experimental features:
-   - Shell command generation and execution (OS-aware)
-   - Code generation with clean output
+ - ✅ **Dual Mode**: Use as a CLI tool or import as a Python library
+ - 🪶 **Lightweight**: Minimal dependencies (just `requests`)
+ - 🔄 **API Flexibility**: Works with OpenAI, Ollama, Groq, and any compatible endpoint
+ - 📊 **Streaming Responses**: Real-time output for better user experience
+ - 🔍 **Web Search**: Integrated with compatible API endpoints
+ - ⚙️ **Multiple Configurations**: Cross-platform config system supporting different profiles
+ - 💻 **Shell Command Generation**: OS-aware command execution
+ - 🧩 **Clean Code Generation**: Output code without markdown or explanations

  ## Installation

@@ -53,6 +85,8 @@ A lightweight Python CLI and library for interacting with OpenAI-compatible APIs
  pip install ngpt
  ```

+ Requires Python 3.8 or newer.
+
  ## Usage

  ### As a CLI Tool
@@ -71,15 +105,19 @@ ngpt --show-config
  ngpt --show-config --all

  # With custom options
- ngpt --api-key your-key --base-url http://your-endpoint "Hello"
+ ngpt --api-key your-key --base-url http://your-endpoint --model your-model "Hello"

  # Enable web search (if your API endpoint supports it)
  ngpt --web-search "What's the latest news about AI?"

  # Generate and execute shell commands (using -s or --shell flag)
+ # OS-aware: generates appropriate commands for Windows, macOS, or Linux
  ngpt -s "list all files in current directory"
+ # On Windows generates: dir
+ # On Linux/macOS generates: ls -la

- # Generate code (using -c or --code flag)
+ # Generate clean code (using -c or --code flag)
+ # Returns only code without markdown formatting or explanations
  ngpt -c "create a python function that calculates fibonacci numbers"
  ```

@@ -115,20 +153,51 @@ command = client.generate_shell_command("list all files")
  code = client.generate_code("create a python function that calculates fibonacci numbers")
  ```

+ #### Advanced Library Usage
+
+ ```python
+ # Stream responses
+ for chunk in client.chat("Write a poem about Python", stream=True):
+     print(chunk, end="", flush=True)
+
+ # Customize system prompt
+ response = client.chat(
+     "Explain quantum computing",
+     system_prompt="You are a quantum physics professor. Explain complex concepts simply."
+ )
+
+ # OS-aware shell commands
+ # Automatically generates appropriate commands for the current OS
+ command = client.generate_shell_command("find large files")
+ import subprocess
+ result = subprocess.run(command, shell=True, capture_output=True, text=True)
+ print(result.stdout)
+
+ # Clean code generation
+ # Returns only code without markdown or explanations
+ code = client.generate_code("function that converts Celsius to Fahrenheit")
+ print(code)
+ ```
+
  ## Configuration

  ### Command Line Options

  You can configure the client using the following options:

- - `--api-key`: API key for the service
- - `--base-url`: Base URL for the API
- - `--model`: Model to use
- - `--web-search`: Enable web search capability (Note: Your API endpoint must support this feature)
- - `--config`: Path to a custom configuration file
- - `--config-index`: Index of the configuration to use from the config file (default: 0)
- - `--show-config`: Show configuration details and exit.
- - `--all`: Used with `--show-config` to display details for all configurations.
+ | Option | Description |
+ |--------|-------------|
+ | `--api-key` | API key for the service |
+ | `--base-url` | Base URL for the API |
+ | `--model` | Model to use |
+ | `--web-search` | Enable web search capability |
+ | `--config` | Path to a custom configuration file |
+ | `--config-index` | Index of the configuration to use (default: 0) |
+ | `--show-config` | Show configuration details and exit |
+ | `--all` | Used with `--show-config` to display all configurations |
+ | `-s, --shell` | Generate and execute shell commands |
+ | `-c, --code` | Generate clean code output |
+ | `-v, --version` | Show version information |

  ### Configuration File

@@ -173,20 +242,6 @@ nGPT determines configuration values in the following order (highest priority fi
  3. Configuration file (selected by `--config-index`, defaults to index 0)
  4. Default values

- ## Special Features
-
- ### OS-Aware Shell Commands
-
- Shell command generation is OS-aware, providing appropriate commands for your operating system (Windows, macOS, or Linux) and shell type (bash, powershell, etc.).
-
- ### Clean Code Generation
-
- Code generation uses an improved prompt that ensures only clean code is returned, without markdown formatting or unnecessary explanations.
-
- ## Implementation Notes
-
- This library uses direct HTTP requests instead of the OpenAI client library, allowing it to work with custom API endpoints that support additional parameters like `provider` and `web_search`. All parameters are sent directly in the request body, similar to the format shown in the curl example.
-
  ## License

  This project is licensed under the MIT License. See the [LICENSE](LICENSE) file for details.
ngpt-1.1.3.dist-info/RECORD ADDED
@@ -0,0 +1,9 @@
+ ngpt/__init__.py,sha256=ehInP9w0MZlS1vZ1g6Cm4YE1ftmgF72CnEddQ3Le9n4,368
+ ngpt/cli.py,sha256=_nu7eY76_y4KQ59cKC91VijVvzSASAaSJI1FFfJ9l04,10655
+ ngpt/client.py,sha256=j7UCX_nkFRQJ_15ynxdu0Tj3HxxsI7Ll4__HdTbD7zE,10400
+ ngpt/config.py,sha256=JWCEp1aMq96i8owi4z_poKigaA_s2UTfzY0fjBM5MoQ,5295
+ ngpt-1.1.3.dist-info/METADATA,sha256=QTx1AssxeYEUlGOUpDdvssIVlvh2Xl8tqp9-a4wXwWs,8002
+ ngpt-1.1.3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ ngpt-1.1.3.dist-info/entry_points.txt,sha256=1cnAMujyy34DlOahrJg19lePSnb08bLbkUs_kVerqdk,39
+ ngpt-1.1.3.dist-info/licenses/LICENSE,sha256=mQkpWoADxbHqE0HRefYLJdm7OpdrXBr3vNv5bZ8w72M,1065
+ ngpt-1.1.3.dist-info/RECORD,,
ngpt-1.1.1.dist-info/RECORD REMOVED
@@ -1,9 +0,0 @@
- ngpt/__init__.py,sha256=ehInP9w0MZlS1vZ1g6Cm4YE1ftmgF72CnEddQ3Le9n4,368
- ngpt/cli.py,sha256=HA85u1-ajsOBO7Zp-Hngyr4NbYxdktn-uM1VuCvy4CU,9495
- ngpt/client.py,sha256=DfOjE2qQQZq3JF6wNa4YxEfUALq4B0ycP_3v9ZKw2ds,9940
- ngpt/config.py,sha256=qkOd4pNk8pW191u4EzHORecPpRDX2yVPrMQkJ35UsNw,5063
- ngpt-1.1.1.dist-info/METADATA,sha256=g7JncpCt4beu2lsc3-8x7LBnQxROCcD9hMvH5IYWk8w,6299
- ngpt-1.1.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- ngpt-1.1.1.dist-info/entry_points.txt,sha256=1cnAMujyy34DlOahrJg19lePSnb08bLbkUs_kVerqdk,39
- ngpt-1.1.1.dist-info/licenses/LICENSE,sha256=mQkpWoADxbHqE0HRefYLJdm7OpdrXBr3vNv5bZ8w72M,1065
- ngpt-1.1.1.dist-info/RECORD,,