ngpt 1.0.0__tar.gz → 1.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ngpt-1.0.0 → ngpt-1.1.0}/PKG-INFO +40 -28
- {ngpt-1.0.0 → ngpt-1.1.0}/README.md +37 -25
- {ngpt-1.0.0 → ngpt-1.1.0}/ngpt/__init__.py +2 -6
- ngpt-1.1.0/ngpt/cli.py +221 -0
- {ngpt-1.0.0 → ngpt-1.1.0}/ngpt/client.py +5 -4
- ngpt-1.1.0/ngpt/config.py +136 -0
- {ngpt-1.0.0 → ngpt-1.1.0}/pyproject.toml +3 -6
- {ngpt-1.0.0 → ngpt-1.1.0}/uv.lock +2 -5
- ngpt-1.0.0/ngpt/cli.py +0 -167
- ngpt-1.0.0/ngpt/config.py +0 -81
- {ngpt-1.0.0 → ngpt-1.1.0}/.gitignore +0 -0
- {ngpt-1.0.0 → ngpt-1.1.0}/.python-version +0 -0
- {ngpt-1.0.0 → ngpt-1.1.0}/LICENSE +0 -0
@@ -1,7 +1,7 @@
|
|
1
1
|
Metadata-Version: 2.4
|
2
2
|
Name: ngpt
|
3
|
-
Version: 1.
|
4
|
-
Summary: A Python CLI and library for interacting with
|
3
|
+
Version: 1.1.0
|
4
|
+
Summary: A lightweight Python CLI and library for interacting with OpenAI-compatible APIs, supporting both official and self-hosted LLM endpoints.
|
5
5
|
Project-URL: Homepage, https://github.com/nazdridoy/ngpt
|
6
6
|
Project-URL: Repository, https://github.com/nazdridoy/ngpt
|
7
7
|
Project-URL: Bug Tracker, https://github.com/nazdridoy/ngpt/issues
|
@@ -28,12 +28,12 @@ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
|
|
28
28
|
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
29
29
|
Classifier: Topic :: Utilities
|
30
30
|
Requires-Python: >=3.8
|
31
|
-
Requires-Dist: requests>=2.
|
31
|
+
Requires-Dist: requests>=2.31.0
|
32
32
|
Description-Content-Type: text/markdown
|
33
33
|
|
34
34
|
# nGPT
|
35
35
|
|
36
|
-
A lightweight Python CLI and library for interacting with
|
36
|
+
A lightweight Python CLI and library for interacting with OpenAI-compatible APIs, supporting both official and self-hosted LLM endpoints.
|
37
37
|
|
38
38
|
## Features
|
39
39
|
|
@@ -64,6 +64,12 @@ ngpt "Hello, how are you?"
|
|
64
64
|
# Show version information
|
65
65
|
ngpt -v
|
66
66
|
|
67
|
+
# Show active configuration
|
68
|
+
ngpt --show-config
|
69
|
+
|
70
|
+
# Show all configurations
|
71
|
+
ngpt --show-config --all
|
72
|
+
|
67
73
|
# With custom options
|
68
74
|
ngpt --api-key your-key --base-url http://your-endpoint "Hello"
|
69
75
|
|
@@ -82,8 +88,8 @@ ngpt -c "create a python function that calculates fibonacci numbers"
|
|
82
88
|
```python
|
83
89
|
from ngpt import NGPTClient, load_config
|
84
90
|
|
85
|
-
# Load from config file
|
86
|
-
config = load_config()
|
91
|
+
# Load the first configuration (index 0) from config file
|
92
|
+
config = load_config(config_index=0)
|
87
93
|
|
88
94
|
# Initialize the client with config
|
89
95
|
client = NGPTClient(**config)
|
@@ -117,10 +123,12 @@ You can configure the client using the following options:
|
|
117
123
|
|
118
124
|
- `--api-key`: API key for the service
|
119
125
|
- `--base-url`: Base URL for the API
|
120
|
-
- `--provider`: Provider name
|
121
126
|
- `--model`: Model to use
|
122
127
|
- `--web-search`: Enable web search capability (Note: Your API endpoint must support this feature)
|
123
128
|
- `--config`: Path to a custom configuration file
|
129
|
+
- `--config-index`: Index of the configuration to use from the config file (default: 0)
|
130
|
+
- `--show-config`: Show configuration details and exit.
|
131
|
+
- `--all`: Used with `--show-config` to display details for all configurations.
|
124
132
|
|
125
133
|
### Configuration File
|
126
134
|
|
@@ -130,35 +138,39 @@ nGPT uses a configuration file stored in the standard user config directory for
|
|
130
138
|
- **macOS**: `~/Library/Application Support/ngpt/ngpt.conf`
|
131
139
|
- **Windows**: `%APPDATA%\ngpt\ngpt.conf`
|
132
140
|
|
133
|
-
The configuration file uses JSON format
|
134
|
-
|
135
|
-
#### OpenAI API Example
|
136
|
-
```json
|
137
|
-
{
|
138
|
-
"api_key": "your_openai_api_key_here",
|
139
|
-
"base_url": "https://api.openai.com/v1/",
|
140
|
-
"provider": "OpenAI",
|
141
|
-
"model": "gpt-3.5-turbo"
|
142
|
-
}
|
143
|
-
```
|
141
|
+
The configuration file uses a JSON list format, allowing you to store multiple configurations. You can select which configuration to use with the `--config-index` argument (or by default, index 0 is used).
|
144
142
|
|
145
|
-
####
|
143
|
+
#### Multiple Configurations Example (`ngpt.conf`)
|
146
144
|
```json
|
147
|
-
|
148
|
-
|
149
|
-
|
150
|
-
|
151
|
-
|
152
|
-
|
145
|
+
[
|
146
|
+
{
|
147
|
+
"api_key": "your-openai-api-key-here",
|
148
|
+
"base_url": "https://api.openai.com/v1/",
|
149
|
+
"provider": "OpenAI",
|
150
|
+
"model": "gpt-4o"
|
151
|
+
},
|
152
|
+
{
|
153
|
+
"api_key": "your-groq-api-key-here",
|
154
|
+
"base_url": "https://api.groq.com/openai/v1/",
|
155
|
+
"provider": "Groq",
|
156
|
+
"model": "llama3-70b-8192"
|
157
|
+
},
|
158
|
+
{
|
159
|
+
"api_key": "your-ollama-key-if-needed",
|
160
|
+
"base_url": "http://localhost:11434/v1/",
|
161
|
+
"provider": "Ollama-Local",
|
162
|
+
"model": "llama3"
|
163
|
+
}
|
164
|
+
]
|
153
165
|
```
|
154
166
|
|
155
167
|
### Configuration Priority
|
156
168
|
|
157
169
|
nGPT determines configuration values in the following order (highest priority first):
|
158
170
|
|
159
|
-
1. Command line arguments
|
160
|
-
2. Environment variables (`OPENAI_API_KEY`, `OPENAI_BASE_URL`, `
|
161
|
-
3. Configuration file
|
171
|
+
1. Command line arguments (`--api-key`, `--base-url`, `--model`)
|
172
|
+
2. Environment variables (`OPENAI_API_KEY`, `OPENAI_BASE_URL`, `OPENAI_MODEL`)
|
173
|
+
3. Configuration file (selected by `--config-index`, defaults to index 0)
|
162
174
|
4. Default values
|
163
175
|
|
164
176
|
## Special Features
|
@@ -1,6 +1,6 @@
|
|
1
1
|
# nGPT
|
2
2
|
|
3
|
-
A lightweight Python CLI and library for interacting with
|
3
|
+
A lightweight Python CLI and library for interacting with OpenAI-compatible APIs, supporting both official and self-hosted LLM endpoints.
|
4
4
|
|
5
5
|
## Features
|
6
6
|
|
@@ -31,6 +31,12 @@ ngpt "Hello, how are you?"
|
|
31
31
|
# Show version information
|
32
32
|
ngpt -v
|
33
33
|
|
34
|
+
# Show active configuration
|
35
|
+
ngpt --show-config
|
36
|
+
|
37
|
+
# Show all configurations
|
38
|
+
ngpt --show-config --all
|
39
|
+
|
34
40
|
# With custom options
|
35
41
|
ngpt --api-key your-key --base-url http://your-endpoint "Hello"
|
36
42
|
|
@@ -49,8 +55,8 @@ ngpt -c "create a python function that calculates fibonacci numbers"
|
|
49
55
|
```python
|
50
56
|
from ngpt import NGPTClient, load_config
|
51
57
|
|
52
|
-
# Load from config file
|
53
|
-
config = load_config()
|
58
|
+
# Load the first configuration (index 0) from config file
|
59
|
+
config = load_config(config_index=0)
|
54
60
|
|
55
61
|
# Initialize the client with config
|
56
62
|
client = NGPTClient(**config)
|
@@ -84,10 +90,12 @@ You can configure the client using the following options:
|
|
84
90
|
|
85
91
|
- `--api-key`: API key for the service
|
86
92
|
- `--base-url`: Base URL for the API
|
87
|
-
- `--provider`: Provider name
|
88
93
|
- `--model`: Model to use
|
89
94
|
- `--web-search`: Enable web search capability (Note: Your API endpoint must support this feature)
|
90
95
|
- `--config`: Path to a custom configuration file
|
96
|
+
- `--config-index`: Index of the configuration to use from the config file (default: 0)
|
97
|
+
- `--show-config`: Show configuration details and exit.
|
98
|
+
- `--all`: Used with `--show-config` to display details for all configurations.
|
91
99
|
|
92
100
|
### Configuration File
|
93
101
|
|
@@ -97,35 +105,39 @@ nGPT uses a configuration file stored in the standard user config directory for
|
|
97
105
|
- **macOS**: `~/Library/Application Support/ngpt/ngpt.conf`
|
98
106
|
- **Windows**: `%APPDATA%\ngpt\ngpt.conf`
|
99
107
|
|
100
|
-
The configuration file uses JSON format
|
101
|
-
|
102
|
-
#### OpenAI API Example
|
103
|
-
```json
|
104
|
-
{
|
105
|
-
"api_key": "your_openai_api_key_here",
|
106
|
-
"base_url": "https://api.openai.com/v1/",
|
107
|
-
"provider": "OpenAI",
|
108
|
-
"model": "gpt-3.5-turbo"
|
109
|
-
}
|
110
|
-
```
|
108
|
+
The configuration file uses a JSON list format, allowing you to store multiple configurations. You can select which configuration to use with the `--config-index` argument (or by default, index 0 is used).
|
111
109
|
|
112
|
-
####
|
110
|
+
#### Multiple Configurations Example (`ngpt.conf`)
|
113
111
|
```json
|
114
|
-
|
115
|
-
|
116
|
-
|
117
|
-
|
118
|
-
|
119
|
-
|
112
|
+
[
|
113
|
+
{
|
114
|
+
"api_key": "your-openai-api-key-here",
|
115
|
+
"base_url": "https://api.openai.com/v1/",
|
116
|
+
"provider": "OpenAI",
|
117
|
+
"model": "gpt-4o"
|
118
|
+
},
|
119
|
+
{
|
120
|
+
"api_key": "your-groq-api-key-here",
|
121
|
+
"base_url": "https://api.groq.com/openai/v1/",
|
122
|
+
"provider": "Groq",
|
123
|
+
"model": "llama3-70b-8192"
|
124
|
+
},
|
125
|
+
{
|
126
|
+
"api_key": "your-ollama-key-if-needed",
|
127
|
+
"base_url": "http://localhost:11434/v1/",
|
128
|
+
"provider": "Ollama-Local",
|
129
|
+
"model": "llama3"
|
130
|
+
}
|
131
|
+
]
|
120
132
|
```
|
121
133
|
|
122
134
|
### Configuration Priority
|
123
135
|
|
124
136
|
nGPT determines configuration values in the following order (highest priority first):
|
125
137
|
|
126
|
-
1. Command line arguments
|
127
|
-
2. Environment variables (`OPENAI_API_KEY`, `OPENAI_BASE_URL`, `
|
128
|
-
3. Configuration file
|
138
|
+
1. Command line arguments (`--api-key`, `--base-url`, `--model`)
|
139
|
+
2. Environment variables (`OPENAI_API_KEY`, `OPENAI_BASE_URL`, `OPENAI_MODEL`)
|
140
|
+
3. Configuration file (selected by `--config-index`, defaults to index 0)
|
129
141
|
4. Default values
|
130
142
|
|
131
143
|
## Special Features
|
@@ -1,9 +1,5 @@
|
|
1
|
-
|
2
|
-
|
3
|
-
__version__ = get_version("ngpt")
|
4
|
-
except ImportError:
|
5
|
-
# For Python < 3.8 or package not installed
|
6
|
-
__version__ = "1.0.0" # fallback version
|
1
|
+
from importlib.metadata import version as get_version
|
2
|
+
__version__ = get_version("ngpt")
|
7
3
|
|
8
4
|
from .client import NGPTClient
|
9
5
|
from .config import load_config, get_config_path, get_config_dir
|
ngpt-1.1.0/ngpt/cli.py
ADDED
@@ -0,0 +1,221 @@
|
|
1
|
+
import argparse
|
2
|
+
import sys
|
3
|
+
import os
|
4
|
+
from .client import NGPTClient
|
5
|
+
from .config import load_config, get_config_path, load_configs, add_config_entry
|
6
|
+
from . import __version__
|
7
|
+
|
8
|
+
def show_config_help():
    """Print step-by-step guidance on configuring nGPT.

    Covers, in precedence-friendly order: the per-platform config file
    location, the JSON list file format, environment-variable overrides,
    command-line overrides, and the --config-index / interactive --config
    workflows. Output goes to stdout; the function returns None.
    """
    print("\nConfiguration Help:")
    print("  1. Create a config file at one of these locations:")
    # Platform-specific location — must stay in sync with
    # config.get_config_dir(), which resolves the same paths.
    if sys.platform == "win32":
        print(f"    - %APPDATA%\\ngpt\\ngpt.conf")
    elif sys.platform == "darwin":
        print(f"    - ~/Library/Application Support/ngpt/ngpt.conf")
    else:
        print(f"    - ~/.config/ngpt/ngpt.conf")

    # The file is a JSON *list* of config entries (1.1.0 format); a bare
    # object (legacy 1.0.0 format) is still accepted by load_configs.
    print("  2. Format your config file as JSON:")
    print("""    [
      {
        "api_key": "your-api-key-here",
        "base_url": "https://api.openai.com/v1/",
        "provider": "OpenAI",
        "model": "gpt-3.5-turbo"
      },
      {
        "api_key": "your-second-api-key",
        "base_url": "http://localhost:1337/v1/",
        "provider": "Another Provider",
        "model": "different-model"
      }
    ]""")

    print("  3. Or set environment variables:")
    print("    - OPENAI_API_KEY")
    print("    - OPENAI_BASE_URL")
    print("    - OPENAI_PROVIDER")
    print("    - OPENAI_MODEL")

    print("  4. Or provide command line arguments:")
    print("    ngpt --api-key your-key --base-url https://api.example.com \"Your prompt\"")

    print("  5. Use --config-index to specify which configuration to use:")
    print("    ngpt --config-index 1 \"Your prompt\"")

    print("  6. Use --config without arguments to add or edit a configuration:")
    print("    ngpt --config --config-index 1")
|
49
|
+
|
50
|
+
def check_config(config):
    """Validate a configuration dict before use.

    Returns False (after printing help) when no API key is present;
    otherwise returns True, emitting a warning for a base URL that is
    missing an http(s) scheme.
    """
    if not config.get("api_key"):
        print("Error: API key is not set.")
        show_config_help()
        return False

    # A missing scheme is a common typo; warn but do not block the request.
    base_url = config.get("base_url", "")
    has_scheme = base_url.startswith(("http://", "https://"))
    if base_url and not has_scheme:
        print(f"Warning: Base URL '{base_url}' doesn't start with http:// or https://")

    return True
|
63
|
+
|
64
|
+
def main():
    """CLI entry point: parse arguments, resolve configuration, dispatch mode.

    Modes (mutually exclusive): shell-command generation (-s), code
    generation (-c), or default chat. --show-config and bare --config act
    as implicit modes that exit before any API call is made.
    """
    parser = argparse.ArgumentParser(description="nGPT - A CLI tool for interacting with custom OpenAI API endpoints")

    # Version flag
    parser.add_argument('-v', '--version', action='version', version=f'nGPT {__version__}', help='Show version information and exit')

    # Config options.  --config uses nargs='?' with const=True so that a bare
    # `--config` (no value) triggers interactive configuration mode, while
    # `--config PATH` selects a custom config file.
    config_group = parser.add_argument_group('Configuration Options')
    config_group.add_argument('--config', nargs='?', const=True, help='Path to a custom config file or, if no value provided, enter interactive configuration mode')
    config_group.add_argument('--config-index', type=int, default=0, help='Index of the configuration to use (default: 0)')
    config_group.add_argument('--show-config', action='store_true', help='Show the current configuration(s) and exit')
    config_group.add_argument('--all', action='store_true', help='Show details for all configurations (requires --show-config)')

    # Global options
    global_group = parser.add_argument_group('Global Options')
    global_group.add_argument('--api-key', help='API key for the service')
    global_group.add_argument('--base-url', help='Base URL for the API')
    global_group.add_argument('--model', help='Model to use')
    global_group.add_argument('--web-search', action='store_true',
                              help='Enable web search capability (Note: Your API endpoint must support this feature)')

    # Mode flags (mutually exclusive)
    mode_group = parser.add_argument_group('Modes (mutually exclusive)')
    mode_exclusive_group = mode_group.add_mutually_exclusive_group()
    mode_exclusive_group.add_argument('-s', '--shell', action='store_true', help='Generate and execute shell commands')
    mode_exclusive_group.add_argument('-c', '--code', action='store_true', help='Generate code')
    # Note: --show-config is handled separately and implicitly acts as a mode

    # Language option for code mode
    parser.add_argument('--language', default="python", help='Programming language to generate code in (for code mode)')

    # Prompt argument
    parser.add_argument('prompt', nargs='?', default=None, help='The prompt to send')

    args = parser.parse_args()

    # Validate --all usage
    if args.all and not args.show_config:
        parser.error("--all can only be used with --show-config")

    # Handle interactive configuration mode
    if args.config is True:  # --config was used without a value
        config_path = get_config_path()
        add_config_entry(config_path, args.config_index)
        return

    # Load configuration using the specified index (needed for active config display)
    active_config = load_config(args.config, args.config_index)

    # Command-line arguments override config settings for active config display
    # This part is kept to ensure the active config display reflects potential overrides,
    # even though the overrides don't affect the stored configurations displayed with --all.
    if args.api_key:
        active_config["api_key"] = args.api_key
    if args.base_url:
        active_config["base_url"] = args.base_url
    if args.model:
        active_config["model"] = args.model

    # Show config if requested (exits without contacting the API)
    if args.show_config:
        config_path = get_config_path(args.config)
        configs = load_configs(args.config)

        print(f"Configuration file: {config_path}")
        print(f"Total configurations: {len(configs)}")
        print(f"Active configuration index: {args.config_index}")

        if args.all:
            # Show details for all configurations
            print("\nAll configuration details:")
            for i, cfg in enumerate(configs):
                active_str = '(Active)' if i == args.config_index else ''
                print(f"\n--- Configuration Index {i} {active_str} ---")
                # API keys are never echoed, only their presence.
                print(f"  API Key: {'[Set]' if cfg.get('api_key') else '[Not Set]'}")
                print(f"  Base URL: {cfg.get('base_url', 'N/A')}")
                print(f"  Provider: {cfg.get('provider', 'N/A')}")
                print(f"  Model: {cfg.get('model', 'N/A')}")
        else:
            # Show active config details and summary list
            print("\nActive configuration details:")
            print(f"  API Key: {'[Set]' if active_config.get('api_key') else '[Not Set]'}")
            print(f"  Base URL: {active_config.get('base_url', 'N/A')}")
            print(f"  Provider: {active_config.get('provider', 'N/A')}")
            print(f"  Model: {active_config.get('model', 'N/A')}")

            if len(configs) > 1:
                print("\nAvailable configurations:")
                for i, cfg in enumerate(configs):
                    active_marker = "*" if i == args.config_index else " "
                    print(f"[{i}]{active_marker} {cfg.get('provider', 'N/A')} - {cfg.get('model', 'N/A')} ({'[API Key Set]' if cfg.get('api_key') else '[API Key Not Set]'})")

        return

    # Check if prompt is required but not provided.  Shell/code modes may
    # prompt interactively below, so only bail out for plain chat mode.
    if not args.prompt and not (args.shell or args.code):
        parser.print_help()
        return

    # Check configuration (using the potentially overridden active_config)
    if not check_config(active_config):
        return

    # Initialize client using the potentially overridden active_config
    client = NGPTClient(**active_config)

    try:
        # Handle modes
        if args.shell:
            if args.prompt is None:
                print("Enter shell command description: ", end='')
                prompt = input()
            else:
                prompt = args.prompt

            command = client.generate_shell_command(prompt, web_search=args.web_search)
            if not command:
                return  # Error already printed by client

            print(f"\nGenerated command: {command}")

            # Explicit confirmation before executing anything — the command
            # comes from the model and is run through the shell.
            print("Do you want to execute this command? [y/N] ", end='')
            response = input().lower()
            if response == 'y' or response == 'yes':
                import subprocess
                try:
                    result = subprocess.run(command, shell=True, check=True, capture_output=True, text=True)
                    print(f"\nOutput:\n{result.stdout}")
                except subprocess.CalledProcessError as e:
                    print(f"\nError:\n{e.stderr}")

        elif args.code:
            if args.prompt is None:
                print("Enter code description: ", end='')
                prompt = input()
            else:
                prompt = args.prompt

            generated_code = client.generate_code(prompt, args.language, web_search=args.web_search)
            if generated_code:
                print(f"\nGenerated code:\n{generated_code}")

        else:
            # Default to chat mode
            if args.prompt is None:
                print("Enter your prompt: ", end='')
                prompt = input()
            else:
                prompt = args.prompt
            # client.chat streams/prints its own output.
            client.chat(prompt, web_search=args.web_search)

    except KeyboardInterrupt:
        print("\nOperation cancelled by user.")
    except Exception as e:
        # Top-level boundary: report and exit cleanly rather than traceback.
        print(f"Error: {e}")

if __name__ == "__main__":
    main()
|
@@ -10,13 +10,12 @@ class NGPTClient:
|
|
10
10
|
self,
|
11
11
|
api_key: str = "",
|
12
12
|
base_url: str = "https://api.openai.com/v1/",
|
13
|
-
provider: str = "OpenAI",
|
13
|
+
provider: str = "OpenAI", # Provider is now just a label, kept for potential future use/logging
|
14
14
|
model: str = "gpt-3.5-turbo"
|
15
15
|
):
|
16
16
|
self.api_key = api_key
|
17
17
|
# Ensure base_url ends with /
|
18
18
|
self.base_url = base_url if base_url.endswith('/') else base_url + '/'
|
19
|
-
self.provider = provider
|
20
19
|
self.model = model
|
21
20
|
|
22
21
|
# Default headers
|
@@ -63,10 +62,12 @@ class NGPTClient:
|
|
63
62
|
"messages": messages,
|
64
63
|
"stream": stream,
|
65
64
|
"temperature": temperature,
|
66
|
-
"provider": self.provider,
|
67
|
-
"web_search": web_search
|
68
65
|
}
|
69
66
|
|
67
|
+
# Conditionally add web_search
|
68
|
+
if web_search:
|
69
|
+
payload["web_search"] = True
|
70
|
+
|
70
71
|
# Add max_tokens if provided
|
71
72
|
if max_tokens is not None:
|
72
73
|
payload["max_tokens"] = max_tokens
|
@@ -0,0 +1,136 @@
|
|
1
|
+
import os
|
2
|
+
import sys
|
3
|
+
import json
|
4
|
+
from pathlib import Path
|
5
|
+
from typing import Dict, Optional, Any, List
|
6
|
+
|
7
|
+
# Default configuration entry: used as the template for new interactive
# entries and as the fallback when no config file can be read.
DEFAULT_CONFIG_ENTRY = {
    "api_key": "",
    "base_url": "https://api.openai.com/v1/",
    "provider": "OpenAI",
    "model": "gpt-3.5-turbo"
}

# Default configurations list — the config file stores a JSON *list* of
# entries (the 1.1.0 multi-config format).
DEFAULT_CONFIG = [DEFAULT_CONFIG_ENTRY]
|
17
|
+
|
18
|
+
def get_config_dir() -> Path:
    """Return the per-OS ngpt config directory, creating it if absent.

    Windows uses %APPDATA%, macOS uses ~/Library/Application Support, and
    other Unix-likes honour $XDG_CONFIG_HOME with a ~/.config fallback.
    """
    if sys.platform == "win32":
        # Windows
        base = Path(os.environ.get("APPDATA", ""))
        config_dir = base / "ngpt"
    elif sys.platform == "darwin":
        # macOS
        config_dir = Path.home() / "Library" / "Application Support" / "ngpt"
    else:
        # Linux and other Unix-like systems
        xdg_root = os.environ.get("XDG_CONFIG_HOME")
        base = Path(xdg_root) if xdg_root else Path.home() / ".config"
        config_dir = base / "ngpt"

    # Ensure the directory exists
    config_dir.mkdir(parents=True, exist_ok=True)
    return config_dir
|
37
|
+
|
38
|
+
def get_config_path(custom_path: Optional[str] = None) -> Path:
    """Resolve the config file path.

    A non-empty custom_path wins; otherwise the standard per-OS config
    directory with the default file name 'ngpt.conf' is used.
    """
    return Path(custom_path) if custom_path else get_config_dir() / "ngpt.conf"
|
43
|
+
|
44
|
+
def create_default_config(config_path: Path) -> None:
    """Write a fresh config file holding only the default entry list."""
    with open(config_path, "w") as handle:
        json.dump(DEFAULT_CONFIG, handle, indent=2)
    print(f"Created default configuration file at {config_path}")
|
49
|
+
|
50
|
+
def add_config_entry(config_path: Path, config_index: Optional[int] = None) -> None:
    """Interactively add a configuration entry, or replace an existing one.

    Prompts on stdin for each field (empty input keeps the default), then
    rewrites the full configuration list to config_path.

    Args:
        config_path: Config file to read from and write back to.
        config_index: If a valid index into the existing list, that entry is
            replaced; otherwise (None, negative, or out of range) the new
            entry is appended.
    """
    configs = load_configs(custom_path=str(config_path))

    # Create a new entry based on the default template
    new_entry = DEFAULT_CONFIG_ENTRY.copy()

    # Interactive configuration — blank input keeps the shown default.
    print("Enter configuration details (press Enter to use default values):")
    new_entry["api_key"] = input("API Key: ") or new_entry["api_key"]
    new_entry["base_url"] = input(f"Base URL [{new_entry['base_url']}]: ") or new_entry["base_url"]
    new_entry["provider"] = input(f"Provider [{new_entry['provider']}]: ") or new_entry["provider"]
    new_entry["model"] = input(f"Model [{new_entry['model']}]: ") or new_entry["model"]

    # Update only for a valid non-negative index.  The previous check
    # (config_index < len(configs)) let negative indexes slip through and
    # silently overwrite entries counted from the end of the list.
    if config_index is not None and 0 <= config_index < len(configs):
        configs[config_index] = new_entry
        print(f"Updated configuration at index {config_index}")
    else:
        configs.append(new_entry)
        print(f"Added new configuration at index {len(configs)-1}")

    # Save the updated configs
    with open(config_path, "w") as f:
        json.dump(configs, f, indent=2)
|
75
|
+
|
76
|
+
def load_configs(custom_path: Optional[str] = None) -> List[Dict[str, Any]]:
    """
    Load all configurations from the config file.

    Returns a list of configuration dictionaries. A missing file is created
    with the default entry; an unreadable or malformed file falls back to a
    copy of the defaults with a warning on stderr.

    Args:
        custom_path: Optional path to a config file; defaults to the
            standard per-OS location.
    """
    config_path = get_config_path(custom_path)

    # Copy each default entry so callers can mutate the result without
    # contaminating the module-level DEFAULT_CONFIG template (the previous
    # DEFAULT_CONFIG.copy() was shallow and aliased DEFAULT_CONFIG_ENTRY).
    configs = [entry.copy() for entry in DEFAULT_CONFIG]

    # Load from config file if it exists
    if config_path.exists():
        try:
            with open(config_path, "r") as f:
                file_configs = json.load(f)
            # Handle both old format (single dict) and new format (list of dicts)
            if isinstance(file_configs, dict):
                # Convert old format to new format
                configs = [file_configs]
            elif isinstance(file_configs, list):
                configs = file_configs
            else:
                # Scalar or other JSON value: keep the defaults rather than
                # returning something the callers cannot index into.
                print(f"Warning: Unexpected config format in {config_path}; using defaults.", file=sys.stderr)
        except (json.JSONDecodeError, IOError) as e:
            print(f"Warning: Could not read config file: {e}", file=sys.stderr)
    else:
        # Create default config file if it doesn't exist
        create_default_config(config_path)

    return configs
|
104
|
+
|
105
|
+
def load_config(custom_path: Optional[str] = None, config_index: int = 0) -> Dict[str, Any]:
    """
    Load a specific configuration by index and apply environment variables.

    Environment variables (OPENAI_API_KEY, OPENAI_BASE_URL, OPENAI_PROVIDER,
    OPENAI_MODEL) take precedence over values from the config file.

    Args:
        custom_path: Optional path to a config file.
        config_index: Which entry of the config list to use; out-of-range
            values fall back to index 0 with a warning.
    """
    configs = load_configs(custom_path)

    # If config_index is out of range, use the first config
    if config_index < 0 or config_index >= len(configs):
        if configs:
            # Warn BEFORE resetting the index — the previous code reassigned
            # config_index first, so the message always claimed index 0 was
            # out of range instead of the index the user actually requested.
            print(f"Warning: Config index {config_index} is out of range. Using index 0 instead.")
            config_index = 0
        else:
            # Defensive only: load_configs always returns at least the defaults.
            return DEFAULT_CONFIG_ENTRY.copy()

    # Copy the selected entry so the env-var overrides below cannot mutate
    # the shared list entry (which may alias DEFAULT_CONFIG_ENTRY).
    config = configs[config_index].copy()

    # Override with environment variables if they exist (and are non-empty)
    env_mapping = {
        "OPENAI_API_KEY": "api_key",
        "OPENAI_BASE_URL": "base_url",
        "OPENAI_PROVIDER": "provider",
        "OPENAI_MODEL": "model"
    }
    for env_var, config_key in env_mapping.items():
        value = os.environ.get(env_var)
        if value:
            config[config_key] = value

    return config
|
@@ -1,12 +1,12 @@
|
|
1
1
|
[project]
|
2
2
|
name = "ngpt"
|
3
|
-
version = "1.
|
4
|
-
description = "A Python CLI and library for interacting with
|
3
|
+
version = "1.1.0"
|
4
|
+
description = "A lightweight Python CLI and library for interacting with OpenAI-compatible APIs, supporting both official and self-hosted LLM endpoints."
|
5
5
|
authors = [
|
6
6
|
{name = "nazDridoy", email = "nazdridoy399@gmail.com"},
|
7
7
|
]
|
8
8
|
dependencies = [
|
9
|
-
"requests>=2.
|
9
|
+
"requests>=2.31.0",
|
10
10
|
]
|
11
11
|
requires-python = ">=3.8"
|
12
12
|
readme = "README.md"
|
@@ -44,6 +44,3 @@ ngpt = "ngpt.cli:main"
|
|
44
44
|
[build-system]
|
45
45
|
requires = ["hatchling"]
|
46
46
|
build-backend = "hatchling.build"
|
47
|
-
|
48
|
-
[dependency-groups]
|
49
|
-
dev = []
|
@@ -113,17 +113,14 @@ wheels = [
|
|
113
113
|
|
114
114
|
[[package]]
|
115
115
|
name = "ngpt"
|
116
|
-
version = "1.
|
116
|
+
version = "1.1.0"
|
117
117
|
source = { editable = "." }
|
118
118
|
dependencies = [
|
119
119
|
{ name = "requests" },
|
120
120
|
]
|
121
121
|
|
122
122
|
[package.metadata]
|
123
|
-
requires-dist = [{ name = "requests", specifier = ">=2.
|
124
|
-
|
125
|
-
[package.metadata.requires-dev]
|
126
|
-
dev = []
|
123
|
+
requires-dist = [{ name = "requests", specifier = ">=2.31.0" }]
|
127
124
|
|
128
125
|
[[package]]
|
129
126
|
name = "requests"
|
ngpt-1.0.0/ngpt/cli.py
DELETED
@@ -1,167 +0,0 @@
|
|
1
|
-
import argparse
|
2
|
-
import sys
|
3
|
-
import os
|
4
|
-
from .client import NGPTClient
|
5
|
-
from .config import load_config, get_config_path
|
6
|
-
from . import __version__
|
7
|
-
|
8
|
-
def show_config_help():
|
9
|
-
"""Display help information about configuration."""
|
10
|
-
print("\nConfiguration Help:")
|
11
|
-
print(" 1. Create a config file at one of these locations:")
|
12
|
-
if sys.platform == "win32":
|
13
|
-
print(f" - %APPDATA%\\ngpt\\ngpt.conf")
|
14
|
-
elif sys.platform == "darwin":
|
15
|
-
print(f" - ~/Library/Application Support/ngpt/ngpt.conf")
|
16
|
-
else:
|
17
|
-
print(f" - ~/.config/ngpt/ngpt.conf")
|
18
|
-
|
19
|
-
print(" 2. Format your config file as JSON:")
|
20
|
-
print(""" {
|
21
|
-
"api_key": "your-api-key-here",
|
22
|
-
"base_url": "https://api.openai.com/v1/",
|
23
|
-
"provider": "OpenAI",
|
24
|
-
"model": "gpt-3.5-turbo"
|
25
|
-
}""")
|
26
|
-
|
27
|
-
print(" 3. Or set environment variables:")
|
28
|
-
print(" - OPENAI_API_KEY")
|
29
|
-
print(" - OPENAI_BASE_URL")
|
30
|
-
print(" - OPENAI_PROVIDER")
|
31
|
-
print(" - OPENAI_MODEL")
|
32
|
-
|
33
|
-
print(" 4. Or provide command line arguments:")
|
34
|
-
print(" ngpt --api-key your-key --base-url https://api.example.com \"Your prompt\"")
|
35
|
-
|
36
|
-
def check_config(config):
|
37
|
-
"""Check config for common issues and provide guidance."""
|
38
|
-
if not config.get("api_key"):
|
39
|
-
print("Error: API key is not set.")
|
40
|
-
show_config_help()
|
41
|
-
return False
|
42
|
-
|
43
|
-
# Check for common URL mistakes
|
44
|
-
base_url = config.get("base_url", "")
|
45
|
-
if base_url and not (base_url.startswith("http://") or base_url.startswith("https://")):
|
46
|
-
print(f"Warning: Base URL '{base_url}' doesn't start with http:// or https://")
|
47
|
-
|
48
|
-
return True
|
49
|
-
|
50
|
-
def main():
    """Command-line entry point.

    Parses arguments, merges them over the loaded configuration, and
    dispatches to one of the mutually exclusive modes: shell-command
    generation (-s), code generation (-c), config display
    (--show-config), or the default chat mode.
    """
    parser = argparse.ArgumentParser(description="nGPT - A CLI tool for interacting with custom OpenAI API endpoints")

    # Version flag
    parser.add_argument('-v', '--version', action='version', version=f'nGPT {__version__}', help='Show version information and exit')

    # Config option
    parser.add_argument('--config', help='Path to a custom configuration file')

    # Global options (these override config-file and environment values below)
    parser.add_argument('--api-key', help='API key for the service')
    parser.add_argument('--base-url', help='Base URL for the API')
    parser.add_argument('--provider', help='Provider name')
    parser.add_argument('--model', help='Model to use')
    parser.add_argument('--web-search', action='store_true',
                        help='Enable web search capability (Note: Your API endpoint must support this feature)')

    # Mode flags (mutually exclusive)
    mode_group = parser.add_mutually_exclusive_group()
    mode_group.add_argument('-s', '--shell', action='store_true', help='Generate and execute shell commands')
    mode_group.add_argument('-c', '--code', action='store_true', help='Generate code')
    mode_group.add_argument('--show-config', action='store_true', help='Show the current configuration and exit')

    # Language option for code mode
    parser.add_argument('--language', default="python", help='Programming language to generate code in (for code mode)')

    # Prompt argument (optional: shell/code modes ask interactively when absent)
    parser.add_argument('prompt', nargs='?', default=None, help='The prompt to send')

    args = parser.parse_args()

    # Load configuration (defaults -> config file -> environment variables)
    config = load_config(args.config)

    # Command-line arguments override config settings
    if args.api_key:
        config["api_key"] = args.api_key
    if args.base_url:
        config["base_url"] = args.base_url
    if args.provider:
        config["provider"] = args.provider
    if args.model:
        config["model"] = args.model

    # Show config if requested (the API key value itself is never printed)
    if args.show_config:
        config_path = get_config_path(args.config)
        print(f"Configuration file: {config_path}")
        print(f"API Key: {'[Set]' if config['api_key'] else '[Not Set]'}")
        print(f"Base URL: {config['base_url']}")
        print(f"Provider: {config['provider']}")
        print(f"Model: {config['model']}")
        return

    # Check if prompt is required but not provided.
    # NOTE(review): this makes the interactive "Enter your prompt" fallback
    # in the default chat branch below unreachable.
    if not args.prompt and not (args.shell or args.code):
        parser.print_help()
        return

    # Check configuration (bails out early when the API key is missing)
    if not check_config(config):
        return

    # Initialize client
    client = NGPTClient(**config)

    try:
        # Handle modes
        if args.shell:
            if args.prompt is None:
                print("Enter shell command description: ", end='')
                prompt = input()
            else:
                prompt = args.prompt

            command = client.generate_shell_command(prompt, web_search=args.web_search)
            if not command:
                return  # Error already printed by client

            print(f"\nGenerated command: {command}")

            # Explicit confirmation before executing anything the model produced
            print("Do you want to execute this command? [y/N] ", end='')
            response = input().lower()
            if response == 'y' or response == 'yes':
                import subprocess
                try:
                    # NOTE(review): capture_output means interactive commands
                    # will not stream; output is only shown after completion.
                    result = subprocess.run(command, shell=True, check=True, capture_output=True, text=True)
                    print(f"\nOutput:\n{result.stdout}")
                except subprocess.CalledProcessError as e:
                    print(f"\nError:\n{e.stderr}")

        elif args.code:
            if args.prompt is None:
                print("Enter code description: ", end='')
                prompt = input()
            else:
                prompt = args.prompt

            generated_code = client.generate_code(prompt, args.language, web_search=args.web_search)
            if generated_code:
                print(f"\nGenerated code:\n{generated_code}")

        else:
            # Default to chat mode
            if args.prompt is None:
                print("Enter your prompt: ", end='')
                prompt = input()
            else:
                prompt = args.prompt
            client.chat(prompt, web_search=args.web_search)

    except KeyboardInterrupt:
        print("\nOperation cancelled by user.")
    except Exception as e:
        # Top-level boundary: report API/client failures without a traceback.
        print(f"Error: {e}")

if __name__ == "__main__":
    main()
|
ngpt-1.0.0/ngpt/config.py
DELETED
@@ -1,81 +0,0 @@
|
|
1
|
-
import os
|
2
|
-
import sys
|
3
|
-
import json
|
4
|
-
from pathlib import Path
|
5
|
-
from typing import Dict, Optional, Any
|
6
|
-
|
7
|
-
# Default configuration: the baseline settings, overridden first by the
# JSON config file and then by OPENAI_* environment variables (this
# precedence is applied in load_config below).
DEFAULT_CONFIG = {
    "api_key": "",
    "base_url": "https://api.openai.com/v1/",
    "provider": "OpenAI",
    "model": "gpt-3.5-turbo"
}
|
14
|
-
|
15
|
-
def get_config_dir() -> Path:
    """Return the platform-appropriate config directory, creating it if needed."""
    platform = sys.platform
    if platform == "win32":
        # Windows: keep settings under %APPDATA%.
        target = Path(os.environ.get("APPDATA", "")) / "ngpt"
    elif platform == "darwin":
        # macOS application-support convention.
        target = Path.home() / "Library" / "Application Support" / "ngpt"
    else:
        # Linux / other Unix: honour XDG_CONFIG_HOME, fall back to ~/.config.
        xdg = os.environ.get("XDG_CONFIG_HOME")
        target = (Path(xdg) if xdg else Path.home() / ".config") / "ngpt"

    # Ensure the directory exists before callers try to read/write in it.
    target.mkdir(parents=True, exist_ok=True)
    return target
|
34
|
-
|
35
|
-
def get_config_path(custom_path: Optional[str] = None) -> Path:
    """Resolve the config file path, honouring an explicit override."""
    # An explicit path wins; otherwise use the default file in the config dir.
    return Path(custom_path) if custom_path else get_config_dir() / "ngpt.conf"
|
40
|
-
|
41
|
-
def create_default_config(config_path: Path) -> None:
    """Seed *config_path* with DEFAULT_CONFIG serialized as indented JSON."""
    payload = json.dumps(DEFAULT_CONFIG, indent=2)
    with open(config_path, "w") as handle:
        handle.write(payload)
    print(f"Created default configuration file at {config_path}")
|
46
|
-
|
47
|
-
def load_config(custom_path: Optional[str] = None) -> Dict[str, Any]:
    """Build the effective configuration.

    Precedence (lowest to highest): built-in defaults, the JSON config
    file, then OPENAI_* environment variables. A missing config file is
    created with defaults; an unreadable one only produces a warning.
    """
    config_path = get_config_path(custom_path)

    # Start from the defaults and layer everything else on top.
    config = dict(DEFAULT_CONFIG)

    if not config_path.exists():
        # First run: materialise a default config file for the user to edit.
        create_default_config(config_path)
    else:
        try:
            with open(config_path, "r") as handle:
                config.update(json.load(handle))
        except (json.JSONDecodeError, IOError) as exc:
            # Keep running on the defaults rather than aborting the CLI.
            print(f"Warning: Could not read config file: {exc}", file=sys.stderr)

    # Environment variables trump everything loaded so far; empty values
    # are ignored, matching a plain truthiness check.
    for env_var, key in (
        ("OPENAI_API_KEY", "api_key"),
        ("OPENAI_BASE_URL", "base_url"),
        ("OPENAI_PROVIDER", "provider"),
        ("OPENAI_MODEL", "model"),
    ):
        value = os.environ.get(env_var)
        if value:
            config[key] = value

    return config
|
File without changes
|
File without changes
|
File without changes
|