ngpt 2.3.2__py3-none-any.whl → 2.3.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ngpt/cli.py +14 -10
- ngpt/client.py +6 -6
- {ngpt-2.3.2.dist-info → ngpt-2.3.4.dist-info}/METADATA +2 -2
- ngpt-2.3.4.dist-info/RECORD +9 -0
- ngpt-2.3.2.dist-info/RECORD +0 -9
- {ngpt-2.3.2.dist-info → ngpt-2.3.4.dist-info}/WHEEL +0 -0
- {ngpt-2.3.2.dist-info → ngpt-2.3.4.dist-info}/entry_points.txt +0 -0
- {ngpt-2.3.2.dist-info → ngpt-2.3.4.dist-info}/licenses/LICENSE +0 -0
ngpt/cli.py
CHANGED
@@ -325,7 +325,7 @@ def check_config(config):
|
|
325
325
|
|
326
326
|
return True
|
327
327
|
|
328
|
-
def interactive_chat_session(client, web_search=False, no_stream=False, temperature=0.7, top_p=1.0,
|
328
|
+
def interactive_chat_session(client, web_search=False, no_stream=False, temperature=0.7, top_p=1.0, max_tokens=None, log_file=None, preprompt=None):
|
329
329
|
"""Run an interactive chat session with conversation history."""
|
330
330
|
# Get terminal width for better formatting
|
331
331
|
try:
|
@@ -492,7 +492,7 @@ def interactive_chat_session(client, web_search=False, no_stream=False, temperat
|
|
492
492
|
web_search=web_search,
|
493
493
|
temperature=temperature,
|
494
494
|
top_p=top_p,
|
495
|
-
|
495
|
+
max_tokens=max_tokens
|
496
496
|
)
|
497
497
|
|
498
498
|
# Add AI response to conversation history
|
@@ -528,7 +528,11 @@ def interactive_chat_session(client, web_search=False, no_stream=False, temperat
|
|
528
528
|
def main():
|
529
529
|
# Colorize description - use a shorter description to avoid line wrapping issues
|
530
530
|
description = f"{COLORS['cyan']}{COLORS['bold']}nGPT{COLORS['reset']} - Interact with AI language models via OpenAI-compatible APIs"
|
531
|
-
|
531
|
+
|
532
|
+
# Minimalist, clean epilog design
|
533
|
+
epilog = f"\n{COLORS['yellow']}nGPT {COLORS['bold']}v{__version__}{COLORS['reset']} • {COLORS['green']}Docs: {COLORS['bold']}https://nazdridoy.github.io/ngpt/usage/cli_usage.html{COLORS['reset']}"
|
534
|
+
|
535
|
+
parser = argparse.ArgumentParser(description=description, formatter_class=ColoredHelpFormatter, epilog=epilog)
|
532
536
|
|
533
537
|
# Add custom error method with color
|
534
538
|
original_error = parser.error
|
@@ -568,12 +572,12 @@ def main():
|
|
568
572
|
help='Set temperature (controls randomness, default: 0.7)')
|
569
573
|
global_group.add_argument('--top_p', type=float, default=1.0,
|
570
574
|
help='Set top_p (controls diversity, default: 1.0)')
|
571
|
-
global_group.add_argument('--
|
575
|
+
global_group.add_argument('--max_tokens', type=int,
|
572
576
|
help='Set max response length in tokens')
|
573
577
|
global_group.add_argument('--log', metavar='FILE',
|
574
578
|
help='Set filepath to log conversation to (For interactive modes)')
|
575
579
|
global_group.add_argument('--preprompt',
|
576
|
-
help='Set
|
580
|
+
help='Set custom system prompt to control AI behavior')
|
577
581
|
|
578
582
|
# Mode flags (mutually exclusive)
|
579
583
|
mode_group = parser.add_argument_group('Modes (mutually exclusive)')
|
@@ -734,7 +738,7 @@ def main():
|
|
734
738
|
# Interactive chat mode
|
735
739
|
interactive_chat_session(client, web_search=args.web_search, no_stream=args.no_stream,
|
736
740
|
temperature=args.temperature, top_p=args.top_p,
|
737
|
-
|
741
|
+
max_tokens=args.max_tokens, log_file=args.log, preprompt=args.preprompt)
|
738
742
|
elif args.shell:
|
739
743
|
if args.prompt is None:
|
740
744
|
try:
|
@@ -748,7 +752,7 @@ def main():
|
|
748
752
|
|
749
753
|
command = client.generate_shell_command(prompt, web_search=args.web_search,
|
750
754
|
temperature=args.temperature, top_p=args.top_p,
|
751
|
-
|
755
|
+
max_tokens=args.max_tokens)
|
752
756
|
if not command:
|
753
757
|
return # Error already printed by client
|
754
758
|
|
@@ -786,7 +790,7 @@ def main():
|
|
786
790
|
|
787
791
|
generated_code = client.generate_code(prompt, args.language, web_search=args.web_search,
|
788
792
|
temperature=args.temperature, top_p=args.top_p,
|
789
|
-
|
793
|
+
max_tokens=args.max_tokens)
|
790
794
|
if generated_code:
|
791
795
|
print(f"\nGenerated code:\n{generated_code}")
|
792
796
|
|
@@ -909,7 +913,7 @@ def main():
|
|
909
913
|
|
910
914
|
response = client.chat(prompt, stream=not args.no_stream, web_search=args.web_search,
|
911
915
|
temperature=args.temperature, top_p=args.top_p,
|
912
|
-
max_tokens=args.max_tokens)
|
916
|
+
max_tokens=args.max_tokens, messages=messages)
|
913
917
|
if args.no_stream and response:
|
914
918
|
print(response)
|
915
919
|
|
@@ -935,7 +939,7 @@ def main():
|
|
935
939
|
|
936
940
|
response = client.chat(prompt, stream=not args.no_stream, web_search=args.web_search,
|
937
941
|
temperature=args.temperature, top_p=args.top_p,
|
938
|
-
max_tokens=args.max_tokens)
|
942
|
+
max_tokens=args.max_tokens, messages=messages)
|
939
943
|
if args.no_stream and response:
|
940
944
|
print(response)
|
941
945
|
|
ngpt/client.py
CHANGED
@@ -167,7 +167,7 @@ class NGPTClient:
|
|
167
167
|
web_search: bool = False,
|
168
168
|
temperature: float = 0.4,
|
169
169
|
top_p: float = 0.95,
|
170
|
-
|
170
|
+
max_tokens: Optional[int] = None
|
171
171
|
) -> str:
|
172
172
|
"""
|
173
173
|
Generate a shell command based on the prompt.
|
@@ -177,7 +177,7 @@ class NGPTClient:
|
|
177
177
|
web_search: Whether to enable web search capability
|
178
178
|
temperature: Controls randomness in the response
|
179
179
|
top_p: Controls diversity via nucleus sampling
|
180
|
-
|
180
|
+
max_tokens: Maximum number of tokens to generate
|
181
181
|
|
182
182
|
Returns:
|
183
183
|
The generated shell command
|
@@ -228,7 +228,7 @@ Command:"""
|
|
228
228
|
web_search=web_search,
|
229
229
|
temperature=temperature,
|
230
230
|
top_p=top_p,
|
231
|
-
max_tokens=
|
231
|
+
max_tokens=max_tokens
|
232
232
|
)
|
233
233
|
except Exception as e:
|
234
234
|
print(f"Error generating shell command: {e}")
|
@@ -241,7 +241,7 @@ Command:"""
|
|
241
241
|
web_search: bool = False,
|
242
242
|
temperature: float = 0.4,
|
243
243
|
top_p: float = 0.95,
|
244
|
-
|
244
|
+
max_tokens: Optional[int] = None
|
245
245
|
) -> str:
|
246
246
|
"""
|
247
247
|
Generate code based on the prompt.
|
@@ -252,7 +252,7 @@ Command:"""
|
|
252
252
|
web_search: Whether to enable web search capability
|
253
253
|
temperature: Controls randomness in the response
|
254
254
|
top_p: Controls diversity via nucleus sampling
|
255
|
-
|
255
|
+
max_tokens: Maximum number of tokens to generate
|
256
256
|
|
257
257
|
Returns:
|
258
258
|
The generated code
|
@@ -285,7 +285,7 @@ Code:"""
|
|
285
285
|
web_search=web_search,
|
286
286
|
temperature=temperature,
|
287
287
|
top_p=top_p,
|
288
|
-
max_tokens=
|
288
|
+
max_tokens=max_tokens
|
289
289
|
)
|
290
290
|
except Exception as e:
|
291
291
|
print(f"Error generating code: {e}")
|
@@ -1,6 +1,6 @@
|
|
1
1
|
Metadata-Version: 2.4
|
2
2
|
Name: ngpt
|
3
|
-
Version: 2.3.2
|
3
|
+
Version: 2.3.4
|
4
4
|
Summary: A lightweight Python CLI and library for interacting with OpenAI-compatible APIs, supporting both official and self-hosted LLM endpoints.
|
5
5
|
Project-URL: Homepage, https://github.com/nazdridoy/ngpt
|
6
6
|
Project-URL: Repository, https://github.com/nazdridoy/ngpt
|
@@ -264,7 +264,7 @@ You can configure the client using the following options:
|
|
264
264
|
| `-n, --no-stream` | Return the whole response without streaming |
|
265
265
|
| `--temperature` | Set temperature (controls randomness, default: 0.7) |
|
266
266
|
| `--top_p` | Set top_p (controls diversity, default: 1.0) |
|
267
|
-
| `--
|
267
|
+
| `--max_tokens` | Set maximum response length in tokens |
|
268
268
|
| `--preprompt` | Set custom system prompt to control AI behavior |
|
269
269
|
| `--log` | Set filepath to log conversation to (for interactive modes) |
|
270
270
|
| `--config` | Path to a custom configuration file or, when used without a value, enters interactive configuration mode |
|
@@ -0,0 +1,9 @@
|
|
1
|
+
ngpt/__init__.py,sha256=ehInP9w0MZlS1vZ1g6Cm4YE1ftmgF72CnEddQ3Le9n4,368
|
2
|
+
ngpt/cli.py,sha256=pmgUK-vrMAAsALKnTxVAcoFSGZ4DM89d43bXKuiLbN0,43532
|
3
|
+
ngpt/client.py,sha256=lJfyLONeBU7YqMVJJys6zPay7gcJTq108_rJ4wvOtok,13067
|
4
|
+
ngpt/config.py,sha256=BF0G3QeiPma8l7EQyc37bR7LWZog7FHJQNe7uj9cr4w,6896
|
5
|
+
ngpt-2.3.4.dist-info/METADATA,sha256=H78mhW758iTL9wnB2lgMGYUzivmHzDQU7IdEXqC4X4Y,13535
|
6
|
+
ngpt-2.3.4.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
7
|
+
ngpt-2.3.4.dist-info/entry_points.txt,sha256=1cnAMujyy34DlOahrJg19lePSnb08bLbkUs_kVerqdk,39
|
8
|
+
ngpt-2.3.4.dist-info/licenses/LICENSE,sha256=mQkpWoADxbHqE0HRefYLJdm7OpdrXBr3vNv5bZ8w72M,1065
|
9
|
+
ngpt-2.3.4.dist-info/RECORD,,
|
ngpt-2.3.2.dist-info/RECORD
DELETED
@@ -1,9 +0,0 @@
|
|
1
|
-
ngpt/__init__.py,sha256=ehInP9w0MZlS1vZ1g6Cm4YE1ftmgF72CnEddQ3Le9n4,368
|
2
|
-
ngpt/cli.py,sha256=Or59XajZRf1Gl4zExygLIeIbwsJTkT_YLK_23ViwW2k,43230
|
3
|
-
ngpt/client.py,sha256=75xmzO7e9wQ7y_LzZCacg3mkZdheewcBxB6moPftqYw,13067
|
4
|
-
ngpt/config.py,sha256=BF0G3QeiPma8l7EQyc37bR7LWZog7FHJQNe7uj9cr4w,6896
|
5
|
-
ngpt-2.3.2.dist-info/METADATA,sha256=PnZr050walUIcVAWQz_xVd_yEK8ZA7UlKHYZEX5k9hI,13535
|
6
|
-
ngpt-2.3.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
7
|
-
ngpt-2.3.2.dist-info/entry_points.txt,sha256=1cnAMujyy34DlOahrJg19lePSnb08bLbkUs_kVerqdk,39
|
8
|
-
ngpt-2.3.2.dist-info/licenses/LICENSE,sha256=mQkpWoADxbHqE0HRefYLJdm7OpdrXBr3vNv5bZ8w72M,1065
|
9
|
-
ngpt-2.3.2.dist-info/RECORD,,
|
File without changes
|
File without changes
|
File without changes
|