ngpt 1.5.1__py3-none-any.whl → 1.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ngpt/cli.py +207 -138
- ngpt/client.py +45 -1
- {ngpt-1.5.1.dist-info → ngpt-1.7.0.dist-info}/METADATA +13 -1
- ngpt-1.7.0.dist-info/RECORD +9 -0
- ngpt-1.5.1.dist-info/RECORD +0 -9
- {ngpt-1.5.1.dist-info → ngpt-1.7.0.dist-info}/WHEEL +0 -0
- {ngpt-1.5.1.dist-info → ngpt-1.7.0.dist-info}/entry_points.txt +0 -0
- {ngpt-1.5.1.dist-info → ngpt-1.7.0.dist-info}/licenses/LICENSE +0 -0
ngpt/cli.py
CHANGED
@@ -18,6 +18,7 @@ try:
     from prompt_toolkit.widgets import TextArea
     from prompt_toolkit.layout.margins import ScrollbarMargin
     from prompt_toolkit.filters import to_filter
+    from prompt_toolkit.history import InMemoryHistory
     import shutil
     HAS_PROMPT_TOOLKIT = True
 except ImportError:
@@ -67,6 +68,8 @@ def show_config_help():
     print(" ngpt --config --config-index 1")
     print(" 7. Remove a configuration at a specific index:")
     print(" ngpt --config --remove --config-index 1")
+    print(" 8. List available models for the current configuration:")
+    print(" ngpt --list-models")

 def check_config(config):
     """Check config for common issues and provide guidance."""
@@ -82,6 +85,183 @@ def check_config(config):

     return True

+def interactive_chat_session(client, web_search=False, no_stream=False):
+    """Run an interactive chat session with conversation history."""
+    # Define ANSI color codes for terminal output
+    COLORS = {
+        "reset": "\033[0m",
+        "bold": "\033[1m",
+        "cyan": "\033[36m",
+        "green": "\033[32m",
+        "yellow": "\033[33m",
+        "blue": "\033[34m",
+        "magenta": "\033[35m",
+        "gray": "\033[90m",
+        "bg_blue": "\033[44m",
+        "bg_cyan": "\033[46m"
+    }
+
+    # Get terminal width for better formatting
+    try:
+        term_width = shutil.get_terminal_size().columns
+    except:
+        term_width = 80  # Default fallback
+
+    # Improved visual header with better layout
+    header = f"{COLORS['cyan']}{COLORS['bold']}🤖 nGPT Interactive Chat Session 🤖{COLORS['reset']}"
+    print(f"\n{header}")
+
+    # Create a separator line - use a consistent separator length for all lines
+    separator_length = min(40, term_width - 10)
+    separator = f"{COLORS['gray']}{'─' * separator_length}{COLORS['reset']}"
+    print(separator)
+
+    # Group commands into categories with better formatting
+    print(f"\n{COLORS['cyan']}Navigation:{COLORS['reset']}")
+    print(f" {COLORS['yellow']}↑/↓{COLORS['reset']} : Browse input history")
+
+    print(f"\n{COLORS['cyan']}Session Commands:{COLORS['reset']}")
+    print(f" {COLORS['yellow']}history{COLORS['reset']} : Show conversation history")
+    print(f" {COLORS['yellow']}clear{COLORS['reset']} : Reset conversation")
+    print(f" {COLORS['yellow']}exit{COLORS['reset']} : End session")
+
+    print(f"\n{separator}\n")
+
+    # Custom separator - use the same length for consistency
+    def print_separator():
+        print(f"\n{separator}\n")
+
+    # Initialize conversation history
+    system_prompt = "You are a helpful assistant."
+    conversation = []
+    system_message = {"role": "system", "content": system_prompt}
+    conversation.append(system_message)
+
+    # Initialize prompt_toolkit history
+    prompt_history = InMemoryHistory() if HAS_PROMPT_TOOLKIT else None
+
+    # Decorative chat headers with rounded corners
+    def user_header():
+        return f"{COLORS['cyan']}{COLORS['bold']}╭─ 👤 You {COLORS['reset']}"
+
+    def ngpt_header():
+        return f"{COLORS['green']}{COLORS['bold']}╭─ 🤖 nGPT {COLORS['reset']}"
+
+    # Function to display conversation history
+    def display_history():
+        if len(conversation) <= 1:  # Only system message
+            print(f"\n{COLORS['yellow']}No conversation history yet.{COLORS['reset']}")
+            return
+
+        print(f"\n{COLORS['cyan']}{COLORS['bold']}Conversation History:{COLORS['reset']}")
+        print(separator)
+
+        # Skip system message
+        message_count = 0
+        for i, msg in enumerate(conversation):
+            if msg["role"] == "system":
+                continue
+
+            if msg["role"] == "user":
+                message_count += 1
+                print(f"\n{user_header()}")
+                print(f"{COLORS['cyan']}│ [{message_count}] {COLORS['reset']}{msg['content']}")
+            elif msg["role"] == "assistant":
+                print(f"\n{ngpt_header()}")
+                print(f"{COLORS['green']}│ {COLORS['reset']}{msg['content']}")
+
+        print(f"\n{separator}")  # Consistent separator at the end
+
+    # Function to clear conversation history
+    def clear_history():
+        nonlocal conversation
+        conversation = [{"role": "system", "content": system_prompt}]
+        print(f"\n{COLORS['yellow']}Conversation history cleared.{COLORS['reset']}")
+        print(separator)  # Add separator for consistency
+
+    try:
+        while True:
+            # Get user input
+            if HAS_PROMPT_TOOLKIT:
+                # Custom styling for prompt_toolkit
+                style = Style.from_dict({
+                    'prompt': 'ansicyan bold',
+                    'input': 'ansiwhite',
+                })
+
+                # Create key bindings for Ctrl+C handling
+                kb = KeyBindings()
+                @kb.add('c-c')
+                def _(event):
+                    event.app.exit(result=None)
+                    raise KeyboardInterrupt()
+
+                # Get user input with styled prompt - using proper HTML formatting
+                user_input = pt_prompt(
+                    HTML("<ansicyan><b>╭─ 👤 You:</b></ansicyan> "),
+                    style=style,
+                    key_bindings=kb,
+                    history=prompt_history
+                )
+            else:
+                user_input = input(f"{user_header()}: {COLORS['reset']}")
+
+            # Check for exit commands
+            if user_input.lower() in ('exit', 'quit', 'bye'):
+                print(f"\n{COLORS['green']}Ending chat session. Goodbye!{COLORS['reset']}")
+                break
+
+            # Check for special commands
+            if user_input.lower() == 'history':
+                display_history()
+                continue
+
+            if user_input.lower() == 'clear':
+                clear_history()
+                continue
+
+            # Skip empty messages but don't raise an error
+            if not user_input.strip():
+                continue
+
+            # Add user message to conversation
+            user_message = {"role": "user", "content": user_input}
+            conversation.append(user_message)
+
+            # Print assistant indicator with formatting
+            if not no_stream:
+                print(f"\n{ngpt_header()}: {COLORS['reset']}", end="", flush=True)
+            else:
+                print(f"\n{ngpt_header()}: {COLORS['reset']}", flush=True)
+
+            # Get AI response with conversation history
+            response = client.chat(
+                prompt=user_input,
+                messages=conversation,
+                stream=not no_stream,
+                web_search=web_search
+            )
+
+            # Add AI response to conversation history
+            if response:
+                assistant_message = {"role": "assistant", "content": response}
+                conversation.append(assistant_message)
+
+                # Print response if not streamed
+                if no_stream:
+                    print(response)
+
+            # Print separator between exchanges
+            print_separator()
+
+    except KeyboardInterrupt:
+        print(f"\n\n{COLORS['green']}Chat session ended by user. Goodbye!{COLORS['reset']}")
+    except Exception as e:
+        print(f"\n{COLORS['yellow']}Error during chat session: {str(e)}{COLORS['reset']}")
+        # Print traceback for debugging if it's a serious error
+        import traceback
+        traceback.print_exc()
+
 def main():
     parser = argparse.ArgumentParser(description="nGPT - A CLI tool for interacting with custom OpenAI API endpoints")

@@ -95,6 +275,7 @@ def main():
     config_group.add_argument('--remove', action='store_true', help='Remove the configuration at the specified index (requires --config and --config-index)')
     config_group.add_argument('--show-config', action='store_true', help='Show the current configuration(s) and exit')
     config_group.add_argument('--all', action='store_true', help='Show details for all configurations (requires --show-config)')
+    config_group.add_argument('--list-models', action='store_true', help='List all available models for the current configuration and exit')

     # Global options
     global_group = parser.add_argument_group('Global Options')
@@ -109,10 +290,11 @@ def main():
     # Mode flags (mutually exclusive)
     mode_group = parser.add_argument_group('Modes (mutually exclusive)')
     mode_exclusive_group = mode_group.add_mutually_exclusive_group()
+    mode_exclusive_group.add_argument('-i', '--interactive', action='store_true', help='Start an interactive chat session')
     mode_exclusive_group.add_argument('-s', '--shell', action='store_true', help='Generate and execute shell commands')
     mode_exclusive_group.add_argument('-c', '--code', action='store_true', help='Generate code')
     mode_exclusive_group.add_argument('-t', '--text', action='store_true', help='Enter multi-line text input (submit with Ctrl+D)')
-
+    # Note: --show-config is handled separately and implicitly acts as a mode

     # Language option for code mode
     parser.add_argument('--language', default="python", help='Programming language to generate code in (for code mode)')
@@ -229,21 +411,41 @@ def main():

         return

-    #
-    if not args.prompt and not (args.shell or args.code or args.text or args.interactive):
+    # For interactive mode, we'll allow continuing without a specific prompt
+    if not args.prompt and not (args.shell or args.code or args.text or args.interactive or args.show_config or args.list_models):
         parser.print_help()
         return

     # Check configuration (using the potentially overridden active_config)
-    if not check_config(active_config):
+    if not args.show_config and not args.list_models and not check_config(active_config):
         return

     # Initialize client using the potentially overridden active_config
     client = NGPTClient(**active_config)

     try:
+        # Handle listing models
+        if args.list_models:
+            print("Retrieving available models...")
+            models = client.list_models()
+            if models:
+                print(f"\nAvailable models for {active_config.get('provider', 'API')}:")
+                print("-" * 50)
+                for model in models:
+                    if "id" in model:
+                        owned_by = f" ({model.get('owned_by', 'Unknown')})" if "owned_by" in model else ""
+                        current = " [active]" if model["id"] == active_config["model"] else ""
+                        print(f"- {model['id']}{owned_by}{current}")
+                print("\nUse --model MODEL_NAME to select a specific model")
+            else:
+                print("No models available or could not retrieve models.")
+            return
+
         # Handle modes
-        if args.shell:
+        if args.interactive:
+            # Interactive chat mode
+            interactive_chat_session(client, web_search=args.web_search, no_stream=args.no_stream)
+        elif args.shell:
             if args.prompt is None:
                 try:
                     print("Enter shell command description: ", end='')
@@ -279,139 +481,6 @@ def main():
                 except subprocess.CalledProcessError as e:
                     print(f"\nError:\n{e.stderr}")

-        elif args.interactive:
-            # Interactive chat mode
-            conversation_history = []
-            print("\033[94m\033[1m" + "Interactive Chat Mode" + "\033[0m")
-            print("Type your messages and press Ctrl+D to send. Type 'exit', 'quit', or use Ctrl+C to exit.")
-            print("Type 'clear' to start a new conversation.")
-            if HAS_PROMPT_TOOLKIT:
-                print("Use arrow keys to navigate, Enter for new line in multi-line mode.")
-            print()
-
-            try:
-                while True:
-                    try:
-                        # Get user input with prompt_toolkit if available
-                        if HAS_PROMPT_TOOLKIT:
-                            # Create key bindings
-                            kb = KeyBindings()
-
-                            # Explicitly bind Ctrl+C to exit
-                            @kb.add('c-c')
-                            def _(event):
-                                event.app.exit(result=None)
-                                print("\nExiting interactive chat mode.")
-                                sys.exit(130)
-
-                            # Explicitly bind Ctrl+D to submit
-                            @kb.add('c-d')
-                            def _(event):
-                                event.app.exit(result=event.app.current_buffer.text)
-
-                            # Get terminal dimensions
-                            term_width, term_height = shutil.get_terminal_size()
-
-                            # Create a styled TextArea
-                            text_area = TextArea(
-                                style="class:input-area",
-                                multiline=True,
-                                wrap_lines=True,
-                                width=term_width - 4,
-                                height=min(10, term_height - 8),
-                                prompt=HTML("<ansiblue>>>> </ansiblue>"),
-                                scrollbar=True,
-                                focus_on_click=True,
-                                lexer=None,
-                            )
-                            text_area.window.right_margins = [ScrollbarMargin(display_arrows=True)]
-
-                            # Create a title bar
-                            title_bar = FormattedTextControl(
-                                HTML("<style bg='ansiblue' fg='ansiwhite'><b> NGPT Interactive Chat </b></style>")
-                            )
-
-                            # Create a status bar with key bindings and commands info
-                            status_bar = FormattedTextControl(
-                                HTML("<ansiblue><b>Ctrl+D</b></ansiblue>: Submit | <ansiblue><b>Ctrl+C</b></ansiblue>: Exit | Type <ansiblue><b>clear</b></ansiblue> to start new conversation")
-                            )
-
-                            # Create the layout
-                            layout = Layout(
-                                HSplit([
-                                    Window(title_bar, height=1),
-                                    Window(height=1, char="-", style="class:separator"),
-                                    text_area,
-                                    Window(height=1, char="-", style="class:separator"),
-                                    Window(status_bar, height=1),
-                                ])
-                            )
-
-                            # Create a style
-                            style = Style.from_dict({
-                                "separator": "ansigray",
-                                "input-area": "bg:ansiblack fg:ansiwhite",
-                                "cursor": "bg:ansiwhite fg:ansiblack",
-                            })
-
-                            # Create and run the application
-                            app = Application(
-                                layout=layout,
-                                full_screen=False,
-                                key_bindings=kb,
-                                style=style,
-                                mouse_support=True,
-                            )
-
-                            user_input = app.run()
-                            if user_input is None:
-                                break
-                        else:
-                            # Fallback to standard input
-                            user_input = input("\033[1m\033[94m>>> \033[0m")
-
-                        # Handle special commands
-                        if user_input is None:
-                            break
-                        elif user_input.lower() in ['exit', 'quit', 'q']:
-                            print("Exiting interactive chat mode.")
-                            break
-                        elif user_input.lower() == 'clear':
-                            print("Starting a new conversation.")
-                            conversation_history = []
-                            continue
-                        elif not user_input.strip():
-                            continue
-
-                        # Add user message to conversation history
-                        conversation_history.append({"role": "user", "content": user_input})
-
-                        # Get response from the model
-                        print("\033[90m" + "AI is thinking..." + "\033[0m")
-                        response = client.chat(
-                            prompt=user_input,
-                            stream=not args.no_stream,
-                            messages=conversation_history,
-                            web_search=args.web_search
-                        )
-
-                        # Add assistant message to conversation history
-                        conversation_history.append({"role": "assistant", "content": response})
-
-                        # If no streaming, print the response
-                        if args.no_stream and response:
-                            print("\033[92m" + response + "\033[0m")
-
-                        print()  # Add spacing between exchanges
-
-                    except KeyboardInterrupt:
-                        print("\nExiting interactive chat mode.")
-                        break
-
-            except Exception as e:
-                print(f"\nError in interactive mode: {e}")
-                return
-
         elif args.code:
             if args.prompt is None:
                 try:
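At a glance, the net effect of the cli.py changes is that interactive mode now keeps one running `conversation` list of `{"role", "content"}` dicts (system, user, assistant) and resends it to `client.chat()` on every turn, replacing the old full-screen prompt_toolkit Application. A minimal sketch of that conversation-memory pattern, assuming only that `client` is an already-configured NGPTClient and using the `chat()` keyword arguments visible in the hunks above:

# Minimal sketch of the conversation-memory loop introduced in 1.7.0.
# Assumption: `client` is an already-configured NGPTClient instance; the
# chat() keyword arguments mirror those used in interactive_chat_session().
def tiny_chat_loop(client, no_stream=False, web_search=False):
    conversation = [{"role": "system", "content": "You are a helpful assistant."}]
    while True:
        user_input = input("You: ")
        if user_input.lower() in ("exit", "quit", "bye"):
            break
        if not user_input.strip():
            continue
        conversation.append({"role": "user", "content": user_input})
        # The whole history is sent each turn so the model keeps context.
        response = client.chat(
            prompt=user_input,
            messages=conversation,
            stream=not no_stream,
            web_search=web_search,
        )
        if response:
            conversation.append({"role": "assistant", "content": response})
            if no_stream:
                print(response)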
ngpt/client.py
CHANGED
@@ -259,4 +259,48 @@ Code:"""
             )
         except Exception as e:
             print(f"Error generating code: {e}")
-            return ""
+            return ""
+
+    def list_models(self) -> list:
+        """
+        Retrieve the list of available models from the API.
+
+        Returns:
+            List of available model objects or empty list if failed
+        """
+        if not self.api_key:
+            print("Error: API key is not set. Please configure your API key in the config file or provide it with --api-key.")
+            return []
+
+        # Endpoint for models
+        url = f"{self.base_url}models"
+
+        try:
+            response = requests.get(url, headers=self.headers)
+            response.raise_for_status()  # Raise exception for HTTP errors
+            result = response.json()
+
+            if "data" in result:
+                return result["data"]
+            else:
+                print("Error: Unexpected response format when retrieving models.")
+                return []
+
+        except requests.exceptions.HTTPError as e:
+            if e.response.status_code == 401:
+                print("Error: Authentication failed. Please check your API key.")
+            elif e.response.status_code == 404:
+                print(f"Error: Models endpoint not found at {url}")
+            elif e.response.status_code == 429:
+                print("Error: Rate limit exceeded. Please try again later.")
+            else:
+                print(f"HTTP Error: {e}")
+            return []
+
+        except requests.exceptions.ConnectionError:
+            print(f"Error: Could not connect to {self.base_url}. Please check your internet connection and base URL.")
+            return []
+
+        except Exception as e:
+            print(f"Error: An unexpected error occurred while retrieving models: {e}")
+            return []
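The new `list_models()` targets an OpenAI-compatible `GET {base_url}models` endpoint and returns the `data` array from the JSON response. A standalone sketch of the same request and parsing follows; the base URL and Authorization header here are placeholder assumptions for illustration, while the endpoint path and `data` handling mirror the method above (the real client reuses its configured `base_url` and `headers`):

# Standalone sketch of the request that NGPTClient.list_models() performs.
# BASE_URL and API_KEY are placeholders (assumptions for illustration only).
import requests

BASE_URL = "https://api.openai.com/v1/"  # client's base_url, ending in a slash
API_KEY = "sk-..."                       # client's configured API key

resp = requests.get(f"{BASE_URL}models", headers={"Authorization": f"Bearer {API_KEY}"})
resp.raise_for_status()
payload = resp.json()

# list_models() returns payload["data"]: a list of model objects such as
# {"id": "...", "owned_by": "..."}; the CLI's --list-models prints the ids.
for model in payload.get("data", []):
    print(model.get("id"), model.get("owned_by", "Unknown"))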
{ngpt-1.5.1.dist-info → ngpt-1.7.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ngpt
-Version: 1.5.1
+Version: 1.7.0
 Summary: A lightweight Python CLI and library for interacting with OpenAI-compatible APIs, supporting both official and self-hosted LLM endpoints.
 Project-URL: Homepage, https://github.com/nazdridoy/ngpt
 Project-URL: Repository, https://github.com/nazdridoy/ngpt
@@ -64,6 +64,9 @@ pip install ngpt
 # Chat with default settings
 ngpt "Tell me about quantum computing"

+# Start an interactive chat session with conversation memory
+ngpt -i
+
 # Return response without streaming
 ngpt -n "Tell me about quantum computing"

@@ -82,6 +85,7 @@ ngpt --text
 - ✅ **Dual Mode**: Use as a CLI tool or import as a Python library
 - 🪶 **Lightweight**: Minimal dependencies (just `requests`)
 - 🔄 **API Flexibility**: Works with OpenAI, Ollama, Groq, and any compatible endpoint
+- 💬 **Interactive Chat**: Continuous conversation with memory in modern UI
 - 📊 **Streaming Responses**: Real-time output for better user experience
 - 🔍 **Web Search**: Integrated with compatible API endpoints
 - ⚙️ **Multiple Configurations**: Cross-platform config system supporting different profiles
@@ -105,6 +109,9 @@ Requires Python 3.8 or newer.
 # Basic chat (default mode)
 ngpt "Hello, how are you?"

+# Interactive chat session with conversation history
+ngpt -i
+
 # Show version information
 ngpt -v

@@ -114,6 +121,9 @@ ngpt --show-config
 # Show all configurations
 ngpt --show-config --all

+# List available models for the current configuration
+ngpt --list-models
+
 # With custom options
 ngpt --api-key your-key --base-url http://your-endpoint --model your-model "Hello"

@@ -204,6 +214,7 @@ You can configure the client using the following options:
 | `--api-key` | API key for the service |
 | `--base-url` | Base URL for the API |
 | `--model` | Model to use |
+| `--list-models` | List all available models for the current configuration |
 | `--web-search` | Enable web search capability |
 | `-n, --no-stream` | Return the whole response without streaming |
 | `--config` | Path to a custom configuration file or, when used without a value, enters interactive configuration mode |
@@ -211,6 +222,7 @@ You can configure the client using the following options:
 | `--remove` | Remove the configuration at the specified index (requires --config and --config-index) |
 | `--show-config` | Show configuration details and exit |
 | `--all` | Used with `--show-config` to display all configurations |
+| `-i, --interactive` | Start an interactive chat session with stylish UI, conversation history, and special commands |
 | `-s, --shell` | Generate and execute shell commands |
 | `-c, --code` | Generate clean code output |
 | `-t, --text` | Open interactive multiline editor for complex prompts |
ngpt-1.7.0.dist-info/RECORD
ADDED
@@ -0,0 +1,9 @@
+ngpt/__init__.py,sha256=ehInP9w0MZlS1vZ1g6Cm4YE1ftmgF72CnEddQ3Le9n4,368
+ngpt/cli.py,sha256=AyIraZFq7icPot0moqPVJer72iqbtJxKBhy6VH-dwAA,28746
+ngpt/client.py,sha256=ygtY2xuu-PAFPrz1CUJxcj3hyWw7q2kRG85ClDGClCw,12089
+ngpt/config.py,sha256=BF0G3QeiPma8l7EQyc37bR7LWZog7FHJQNe7uj9cr4w,6896
+ngpt-1.7.0.dist-info/METADATA,sha256=N0MgHiXxicUgkDTNS6dfCw32CHDmgpnX0N4HpFKm2nE,10568
+ngpt-1.7.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ngpt-1.7.0.dist-info/entry_points.txt,sha256=1cnAMujyy34DlOahrJg19lePSnb08bLbkUs_kVerqdk,39
+ngpt-1.7.0.dist-info/licenses/LICENSE,sha256=mQkpWoADxbHqE0HRefYLJdm7OpdrXBr3vNv5bZ8w72M,1065
+ngpt-1.7.0.dist-info/RECORD,,
ngpt-1.5.1.dist-info/RECORD
DELETED
@@ -1,9 +0,0 @@
-ngpt/__init__.py,sha256=ehInP9w0MZlS1vZ1g6Cm4YE1ftmgF72CnEddQ3Le9n4,368
-ngpt/cli.py,sha256=ZMBmNJfKiKAO88MecT7CDHH1uyqpupeCqGSX4-5LC_g,27117
-ngpt/client.py,sha256=O0dPYeQCJlpWZWBBsroo-5UxeyBVwqC6o3Pm8lRnDiY,10329
-ngpt/config.py,sha256=BF0G3QeiPma8l7EQyc37bR7LWZog7FHJQNe7uj9cr4w,6896
-ngpt-1.5.1.dist-info/METADATA,sha256=eHORtsw7QD5HwfIhqRzCojOTNtfBqZ6kXCFT_8VBRq8,10086
-ngpt-1.5.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-ngpt-1.5.1.dist-info/entry_points.txt,sha256=1cnAMujyy34DlOahrJg19lePSnb08bLbkUs_kVerqdk,39
-ngpt-1.5.1.dist-info/licenses/LICENSE,sha256=mQkpWoADxbHqE0HRefYLJdm7OpdrXBr3vNv5bZ8w72M,1065
-ngpt-1.5.1.dist-info/RECORD,,
{ngpt-1.5.1.dist-info → ngpt-1.7.0.dist-info}/WHEEL
File without changes
{ngpt-1.5.1.dist-info → ngpt-1.7.0.dist-info}/entry_points.txt
File without changes
{ngpt-1.5.1.dist-info → ngpt-1.7.0.dist-info}/licenses/LICENSE
File without changes