ngpt 2.0.0__py3-none-any.whl → 2.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ngpt/cli.py CHANGED
@@ -85,7 +85,7 @@ def check_config(config):
 
     return True
 
-def interactive_chat_session(client, web_search=False, no_stream=False):
+def interactive_chat_session(client, web_search=False, no_stream=False, temperature=0.7, top_p=1.0, max_length=None):
     """Run an interactive chat session with conversation history."""
     # Define ANSI color codes for terminal output
     COLORS = {
@@ -239,7 +239,10 @@ def interactive_chat_session(client, web_search=False, no_stream=False):
                 prompt=user_input,
                 messages=conversation,
                 stream=not no_stream,
-                web_search=web_search
+                web_search=web_search,
+                temperature=temperature,
+                top_p=top_p,
+                max_length=max_length
             )
 
             # Add AI response to conversation history
@@ -286,6 +289,12 @@ def main():
                               help='Enable web search capability (Note: Your API endpoint must support this feature)')
     global_group.add_argument('-n', '--no-stream', action='store_true',
                               help='Return the whole response without streaming')
+    global_group.add_argument('--temperature', type=float, default=0.7,
+                              help='Set temperature (controls randomness, default: 0.7)')
+    global_group.add_argument('--top_p', type=float, default=1.0,
+                              help='Set top_p (controls diversity, default: 1.0)')
+    global_group.add_argument('--max_length', type=int,
+                              help='Set max response length in tokens')
 
     # Mode flags (mutually exclusive)
     mode_group = parser.add_argument_group('Modes (mutually exclusive)')
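
The hunk above shows that the three new options are plain argparse arguments. As a quick aside, here is a minimal, self-contained sketch of how they parse; the parser name and the hard-coded argv are purely illustrative, not the actual ngpt wiring:

```python
# Minimal sketch mirroring the flag definitions added in the hunk above.
import argparse

parser = argparse.ArgumentParser(prog="ngpt-sketch")  # hypothetical prog name
parser.add_argument('--temperature', type=float, default=0.7,
                    help='Set temperature (controls randomness, default: 0.7)')
parser.add_argument('--top_p', type=float, default=1.0,
                    help='Set top_p (controls diversity, default: 1.0)')
parser.add_argument('--max_length', type=int,
                    help='Set max response length in tokens')

args = parser.parse_args(['--temperature', '0.9', '--max_length', '256'])
print(args.temperature, args.top_p, args.max_length)  # -> 0.9 1.0 256
```

Note that `--max_length` has no default, so it stays `None` unless supplied; the hunks below show it being forwarded to the client as `max_length` (helper methods) or `max_tokens` (direct chat calls).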
@@ -444,7 +453,9 @@ def main():
     # Handle modes
     if args.interactive:
         # Interactive chat mode
-        interactive_chat_session(client, web_search=args.web_search, no_stream=args.no_stream)
+        interactive_chat_session(client, web_search=args.web_search, no_stream=args.no_stream,
+                                 temperature=args.temperature, top_p=args.top_p,
+                                 max_length=args.max_length)
     elif args.shell:
         if args.prompt is None:
             try:
@@ -456,7 +467,9 @@ def main():
         else:
             prompt = args.prompt
 
-        command = client.generate_shell_command(prompt, web_search=args.web_search)
+        command = client.generate_shell_command(prompt, web_search=args.web_search,
+                                                temperature=args.temperature, top_p=args.top_p,
+                                                max_length=args.max_length)
         if not command:
             return # Error already printed by client
 
@@ -492,7 +505,9 @@ def main():
         else:
             prompt = args.prompt
 
-        generated_code = client.generate_code(prompt, args.language, web_search=args.web_search)
+        generated_code = client.generate_code(prompt, args.language, web_search=args.web_search,
+                                              temperature=args.temperature, top_p=args.top_p,
+                                              max_length=args.max_length)
         if generated_code:
             print(f"\nGenerated code:\n{generated_code}")
 
@@ -605,7 +620,9 @@ def main():
             sys.exit(130)
 
         print("\nSubmission successful. Waiting for response...")
-        response = client.chat(prompt, stream=not args.no_stream, web_search=args.web_search)
+        response = client.chat(prompt, stream=not args.no_stream, web_search=args.web_search,
+                               temperature=args.temperature, top_p=args.top_p,
+                               max_tokens=args.max_length)
         if args.no_stream and response:
             print(response)
 
@@ -620,7 +637,9 @@ def main():
                 sys.exit(130)
         else:
             prompt = args.prompt
-        response = client.chat(prompt, stream=not args.no_stream, web_search=args.web_search)
+        response = client.chat(prompt, stream=not args.no_stream, web_search=args.web_search,
+                               temperature=args.temperature, top_p=args.top_p,
+                               max_tokens=args.max_length)
         if args.no_stream and response:
             print(response)
 
ngpt/client.py CHANGED
@@ -30,6 +30,7 @@ class NGPTClient:
         stream: bool = True,
         temperature: float = 0.7,
         max_tokens: Optional[int] = None,
+        top_p: float = 1.0,
         messages: Optional[List[Dict[str, str]]] = None,
         web_search: bool = False,
         **kwargs
@@ -42,6 +43,7 @@ class NGPTClient:
             stream: Whether to stream the response
             temperature: Controls randomness in the response
             max_tokens: Maximum number of tokens to generate
+            top_p: Controls diversity via nucleus sampling
             messages: Optional list of message objects to override default behavior
             web_search: Whether to enable web search capability
             **kwargs: Additional arguments to pass to the API
@@ -62,6 +64,7 @@ class NGPTClient:
             "messages": messages,
             "stream": stream,
             "temperature": temperature,
+            "top_p": top_p,
         }
 
         # Conditionally add web_search
@@ -158,13 +161,23 @@ class NGPTClient:
             print(f"Error: An unexpected error occurred: {e}")
             return ""
 
-    def generate_shell_command(self, prompt: str, web_search: bool = False) -> str:
+    def generate_shell_command(
+        self,
+        prompt: str,
+        web_search: bool = False,
+        temperature: float = 0.4,
+        top_p: float = 0.95,
+        max_length: Optional[int] = None
+    ) -> str:
         """
         Generate a shell command based on the prompt.
 
         Args:
             prompt: Description of the command to generate
             web_search: Whether to enable web search capability
+            temperature: Controls randomness in the response
+            top_p: Controls diversity via nucleus sampling
+            max_length: Maximum number of tokens to generate
 
         Returns:
             The generated shell command
@@ -212,13 +225,24 @@ Command:"""
                 prompt=prompt,
                 stream=False,
                 messages=messages,
-                web_search=web_search
+                web_search=web_search,
+                temperature=temperature,
+                top_p=top_p,
+                max_tokens=max_length
             )
         except Exception as e:
             print(f"Error generating shell command: {e}")
             return ""
 
-    def generate_code(self, prompt: str, language: str = "python", web_search: bool = False) -> str:
+    def generate_code(
+        self,
+        prompt: str,
+        language: str = "python",
+        web_search: bool = False,
+        temperature: float = 0.4,
+        top_p: float = 0.95,
+        max_length: Optional[int] = None
+    ) -> str:
         """
         Generate code based on the prompt.
 
@@ -226,6 +250,9 @@ Command:"""
             prompt: Description of the code to generate
             language: Programming language to generate code in
             web_search: Whether to enable web search capability
+            temperature: Controls randomness in the response
+            top_p: Controls diversity via nucleus sampling
+            max_length: Maximum number of tokens to generate
 
         Returns:
             The generated code
@@ -255,7 +282,10 @@ Code:"""
                 prompt=prompt,
                 stream=False,
                 messages=messages,
-                web_search=web_search
+                web_search=web_search,
+                temperature=temperature,
+                top_p=top_p,
+                max_tokens=max_length
             )
         except Exception as e:
             print(f"Error generating code: {e}")
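
Taken together, the client changes mean all three generation helpers now accept sampling controls. Below is a rough usage sketch against the 2.1.0 signatures shown above; the import path and an already-constructed client are assumptions here, since constructor arguments are not part of this diff:

```python
# Sketch of the 2.1.0 call signatures; assumes an NGPTClient instance has been
# constructed elsewhere (its constructor arguments are outside this diff).
from ngpt import NGPTClient  # import path assumed from the package layout

def demo(client: NGPTClient) -> None:
    # chat() now takes top_p alongside temperature and max_tokens
    reply = client.chat("Explain nucleus sampling in one line",
                        stream=False, temperature=0.7, top_p=0.9, max_tokens=120)

    # generate_shell_command() defaults to temperature=0.4, top_p=0.95;
    # max_length is forwarded internally as max_tokens
    cmd = client.generate_shell_command("find files larger than 100MB",
                                        temperature=0.4, top_p=0.95, max_length=100)

    # generate_code() gains the same three parameters
    code = client.generate_code("a function that reverses a string",
                                language="python",
                                temperature=0.4, top_p=0.95, max_length=300)

    print(reply, cmd, code, sep="\n")
```

The lower defaults on the shell and code helpers (0.4 / 0.95) bias them toward more deterministic output than the general-purpose chat defaults (0.7 / 1.0).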
{ngpt-2.0.0.dist-info → ngpt-2.1.0.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ngpt
-Version: 2.0.0
+Version: 2.1.0
 Summary: A lightweight Python CLI and library for interacting with OpenAI-compatible APIs, supporting both official and self-hosted LLM endpoints.
 Project-URL: Homepage, https://github.com/nazdridoy/ngpt
 Project-URL: Repository, https://github.com/nazdridoy/ngpt
@@ -37,14 +37,17 @@ Description-Content-Type: text/markdown
 [![PyPI version](https://img.shields.io/pypi/v/ngpt.svg)](https://pypi.org/project/ngpt/)
 [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
 [![Python Versions](https://img.shields.io/pypi/pyversions/ngpt.svg)](https://pypi.org/project/ngpt/)
+[![Documentation](https://img.shields.io/badge/docs-available-brightgreen.svg)](https://nazdridoy.github.io/ngpt/)
 
 A lightweight Python CLI and library for interacting with OpenAI-compatible APIs, supporting both official and self-hosted LLM endpoints.
 
 ## Table of Contents
 - [Quick Start](#quick-start)
 - [Features](#features)
+- [Documentation](#documentation)
 - [Installation](#installation)
 - [Usage](#usage)
+  - [Documentation](https://nazdridoy.github.io/ngpt/)
   - [CLI Tool](#as-a-cli-tool)
   - [Python Library](#as-a-library)
 - [Configuration](#configuration)
@@ -93,6 +96,12 @@ ngpt --text
 - 🧩 **Clean Code Generation**: Output code without markdown or explanations
 - 📝 **Rich Multiline Editor**: Interactive multiline text input with syntax highlighting and intuitive controls
 
+## Documentation
+
+Comprehensive documentation, including API reference, usage guides, and examples, is available at:
+
+**[https://nazdridoy.github.io/ngpt/](https://nazdridoy.github.io/ngpt/)**
+
 ## Installation
 
 ```bash
@@ -220,6 +229,9 @@ You can configure the client using the following options:
 | `--list-models` | List all available models for the selected configuration (can be combined with --config-index) |
 | `--web-search` | Enable web search capability |
 | `-n, --no-stream` | Return the whole response without streaming |
+| `--temperature` | Set temperature (controls randomness, default: 0.7) |
+| `--top_p` | Set top_p (controls diversity, default: 1.0) |
+| `--max_length` | Set maximum response length in tokens |
 | `--config` | Path to a custom configuration file or, when used without a value, enters interactive configuration mode |
 | `--config-index` | Index of the configuration to use (default: 0) |
 | `--remove` | Remove the configuration at the specified index (requires --config and --config-index) |
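
For completeness, the new table rows map directly onto command-line flags. A hypothetical invocation driven from Python follows; it assumes the `ngpt` entry point is installed and accepts a positional prompt:

```python
# Hypothetical CLI invocation exercising the new flags from the table above.
import subprocess

subprocess.run(
    ["ngpt", "--no-stream",
     "--temperature", "0.9", "--top_p", "0.95", "--max_length", "256",
     "Summarize nucleus sampling in one sentence"],
    check=False,  # don't raise if the CLI exits non-zero
)
```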
ngpt-2.1.0.dist-info/RECORD ADDED
@@ -0,0 +1,9 @@
+ngpt/__init__.py,sha256=ehInP9w0MZlS1vZ1g6Cm4YE1ftmgF72CnEddQ3Le9n4,368
+ngpt/cli.py,sha256=wNjY-qUuvzODdzffbYqyydJiLfhIsEXXUs9qalqcpvs,30082
+ngpt/client.py,sha256=75xmzO7e9wQ7y_LzZCacg3mkZdheewcBxB6moPftqYw,13067
+ngpt/config.py,sha256=BF0G3QeiPma8l7EQyc37bR7LWZog7FHJQNe7uj9cr4w,6896
+ngpt-2.1.0.dist-info/METADATA,sha256=47qdylepQUHyR-TsJTtB8ezMnRLnZie0uSI1WVbHUNg,11278
+ngpt-2.1.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ngpt-2.1.0.dist-info/entry_points.txt,sha256=1cnAMujyy34DlOahrJg19lePSnb08bLbkUs_kVerqdk,39
+ngpt-2.1.0.dist-info/licenses/LICENSE,sha256=mQkpWoADxbHqE0HRefYLJdm7OpdrXBr3vNv5bZ8w72M,1065
+ngpt-2.1.0.dist-info/RECORD,,
ngpt-2.0.0.dist-info/RECORD DELETED
@@ -1,9 +0,0 @@
-ngpt/__init__.py,sha256=ehInP9w0MZlS1vZ1g6Cm4YE1ftmgF72CnEddQ3Le9n4,368
-ngpt/cli.py,sha256=AyIraZFq7icPot0moqPVJer72iqbtJxKBhy6VH-dwAA,28746
-ngpt/client.py,sha256=ygtY2xuu-PAFPrz1CUJxcj3hyWw7q2kRG85ClDGClCw,12089
-ngpt/config.py,sha256=BF0G3QeiPma8l7EQyc37bR7LWZog7FHJQNe7uj9cr4w,6896
-ngpt-2.0.0.dist-info/METADATA,sha256=goeEJKkrJeTSrluucsXpuyinzJXbpyTSB5_s6fb4gV8,10686
-ngpt-2.0.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-ngpt-2.0.0.dist-info/entry_points.txt,sha256=1cnAMujyy34DlOahrJg19lePSnb08bLbkUs_kVerqdk,39
-ngpt-2.0.0.dist-info/licenses/LICENSE,sha256=mQkpWoADxbHqE0HRefYLJdm7OpdrXBr3vNv5bZ8w72M,1065
-ngpt-2.0.0.dist-info/RECORD,,
File without changes