ngpt 3.4.2-py3-none-any.whl → 3.4.4-py3-none-any.whl
This diff compares publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- ngpt/cli/main.py +1 -1
- ngpt/cli/modes/__init__.py +2 -1
- ngpt/cli/modes/code.py +129 -11
- ngpt/cli/{interactive.py → modes/interactive.py} +3 -3
- ngpt/cli/modes/shell.py +94 -3
- ngpt/client.py +0 -142
- {ngpt-3.4.2.dist-info → ngpt-3.4.4.dist-info}/METADATA +1 -1
- {ngpt-3.4.2.dist-info → ngpt-3.4.4.dist-info}/RECORD +11 -11
- {ngpt-3.4.2.dist-info → ngpt-3.4.4.dist-info}/WHEEL +0 -0
- {ngpt-3.4.2.dist-info → ngpt-3.4.4.dist-info}/entry_points.txt +0 -0
- {ngpt-3.4.2.dist-info → ngpt-3.4.4.dist-info}/licenses/LICENSE +0 -0
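The list above covers every file that differs between the two wheels. For readers who want to reproduce this file-level comparison locally, here is a minimal standard-library sketch; the wheel filenames are assumptions based on the versions in the title and should be adjusted to the files you actually downloaded.

```python
# Minimal sketch: compare the file listings of the two wheels locally.
# Wheel filenames are assumptions (ngpt 3.4.2 and 3.4.4); adjust paths as needed.
import zipfile

old_names = set(zipfile.ZipFile("ngpt-3.4.2-py3-none-any.whl").namelist())
new_names = set(zipfile.ZipFile("ngpt-3.4.4-py3-none-any.whl").namelist())

print("removed:", sorted(old_names - new_names))  # includes ngpt/cli/interactive.py
print("added:  ", sorted(new_names - old_names))  # includes ngpt/cli/modes/interactive.py
```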
ngpt/cli/main.py
CHANGED
@@ -18,7 +18,7 @@ from .. import __version__
 from .formatters import COLORS
 from .renderers import show_available_renderers
 from .config_manager import check_config
-from .interactive import interactive_chat_session
+from .modes.interactive import interactive_chat_session
 from .modes.chat import chat_mode
 from .modes.code import code_mode
 from .modes.shell import shell_mode
ngpt/cli/modes/__init__.py
CHANGED
@@ -4,5 +4,6 @@ from .shell import shell_mode
 from .text import text_mode
 from .rewrite import rewrite_mode
 from .gitcommsg import gitcommsg_mode
+from .interactive import interactive_chat_session

-__all__ = ['chat_mode', 'code_mode', 'shell_mode', 'text_mode', 'rewrite_mode', 'gitcommsg_mode']
+__all__ = ['chat_mode', 'code_mode', 'shell_mode', 'text_mode', 'rewrite_mode', 'gitcommsg_mode', 'interactive_chat_session']
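Together with the main.py change above, the interactive session is now a regular mode: it lives in ngpt.cli.modes.interactive and is re-exported from the modes package. A minimal sketch of the two equivalent import paths, assuming ngpt 3.4.4 is installed:

```python
# Minimal sketch (assumes ngpt 3.4.4 is installed): the interactive entry point
# is now a mode module and is also re-exported by ngpt.cli.modes.__init__.
from ngpt.cli.modes.interactive import interactive_chat_session
from ngpt.cli.modes import interactive_chat_session as reexported

assert interactive_chat_session is reexported  # both names resolve to the same function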
ngpt/cli/modes/code.py
CHANGED
@@ -5,6 +5,80 @@ from ...utils import enhance_prompt_with_web_search
 import sys
 import threading

+# System prompt for code generation with markdown formatting
+CODE_SYSTEM_PROMPT_MARKDOWN = """Your Role: Provide only code as output without any description with proper markdown formatting.
+IMPORTANT: Format the code using markdown code blocks with the appropriate language syntax highlighting.
+IMPORTANT: You must use markdown code blocks. with ```{language}
+If there is a lack of details, provide most logical solution. You are not allowed to ask for more details.
+Ignore any potential risk of errors or confusion.
+
+Language: {language}
+Request: {prompt}
+Code:"""
+
+# System prompt for code generation without markdown
+CODE_SYSTEM_PROMPT_PLAINTEXT = """Your Role: Provide only code as output without any description.
+IMPORTANT: Provide only plain text without Markdown formatting.
+IMPORTANT: Do not include markdown formatting.
+If there is a lack of details, provide most logical solution. You are not allowed to ask for more details.
+Ignore any potential risk of errors or confusion.
+
+Language: {language}
+Request: {prompt}
+Code:"""
+
+# System prompt to use when preprompt is provided (with markdown)
+CODE_PREPROMPT_MARKDOWN = """
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+!!! CRITICAL USER PREPROMPT !!!
+!!! THIS OVERRIDES ALL OTHER INSTRUCTIONS IN THIS PROMPT !!!
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+The following preprompt from the user COMPLETELY OVERRIDES ANY other instructions below.
+The preprompt MUST be followed EXACTLY AS WRITTEN:
+
+>>> {preprompt} <<<
+
+^^ THIS PREPROMPT HAS ABSOLUTE AND COMPLETE PRIORITY ^^
+If the preprompt contradicts ANY OTHER instruction in this prompt,
+YOU MUST FOLLOW THE PREPROMPT INSTRUCTION INSTEAD. NO EXCEPTIONS.
+
+Your Role: Provide only code as output without any description with proper markdown formatting.
+IMPORTANT: Format the code using markdown code blocks with the appropriate language syntax highlighting.
+IMPORTANT: You must use markdown code blocks. with ```{language}
+If there is a lack of details, provide most logical solution. You are not allowed to ask for more details.
+Ignore any potential risk of errors or confusion.
+
+Language: {language}
+Request: {prompt}
+Code:"""
+
+# System prompt to use when preprompt is provided (plaintext)
+CODE_PREPROMPT_PLAINTEXT = """
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+!!! CRITICAL USER PREPROMPT !!!
+!!! THIS OVERRIDES ALL OTHER INSTRUCTIONS IN THIS PROMPT !!!
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+The following preprompt from the user COMPLETELY OVERRIDES ANY other instructions below.
+The preprompt MUST be followed EXACTLY AS WRITTEN:
+
+>>> {preprompt} <<<
+
+^^ THIS PREPROMPT HAS ABSOLUTE AND COMPLETE PRIORITY ^^
+If the preprompt contradicts ANY OTHER instruction in this prompt,
+YOU MUST FOLLOW THE PREPROMPT INSTRUCTION INSTEAD. NO EXCEPTIONS.
+
+Your Role: Provide only code as output without any description.
+IMPORTANT: Provide only plain text without Markdown formatting.
+IMPORTANT: Do not include markdown formatting.
+If there is a lack of details, provide most logical solution. You are not allowed to ask for more details.
+Ignore any potential risk of errors or confusion.
+
+Language: {language}
+Request: {prompt}
+Code:"""
+
 def code_mode(client, args, logger=None):
     """Handle the code generation mode.

@@ -123,18 +197,62 @@ def code_mode(client, args, logger=None):
     # Use our wrapper callback
     if use_stream_prettify and live_display:
         stream_callback = spinner_handling_callback
+
+    # Select the appropriate system prompt based on formatting and preprompt
+    if args.preprompt:
+        # Log the preprompt if logging is enabled
+        if logger:
+            logger.log("system", f"Preprompt: {args.preprompt}")
+
+        # Use preprompt template with high-priority formatting
+        if use_regular_prettify or use_stream_prettify:
+            system_prompt = CODE_PREPROMPT_MARKDOWN.format(
+                preprompt=args.preprompt,
+                language=args.language,
+                prompt=prompt
+            )
+        else:
+            system_prompt = CODE_PREPROMPT_PLAINTEXT.format(
+                preprompt=args.preprompt,
+                language=args.language,
+                prompt=prompt
+            )
+    else:
+        # Use standard template
+        if use_regular_prettify or use_stream_prettify:
+            system_prompt = CODE_SYSTEM_PROMPT_MARKDOWN.format(
+                language=args.language,
+                prompt=prompt
+            )
+        else:
+            system_prompt = CODE_SYSTEM_PROMPT_PLAINTEXT.format(
+                language=args.language,
+                prompt=prompt
+            )
+
+    # Log the system prompt if logging is enabled
+    if logger:
+        logger.log("system", system_prompt)
+
+    # Prepare messages for the chat API
+    messages = [
+        {"role": "system", "content": system_prompt},
+        {"role": "user", "content": prompt}
+    ]

-
-
-
-
-
-
-
-
-
-
-
+    try:
+        generated_code = client.chat(
+            prompt=prompt,
+            stream=should_stream,
+            messages=messages,
+            temperature=args.temperature,
+            top_p=args.top_p,
+            max_tokens=args.max_tokens,
+            stream_callback=stream_callback
+        )
+    except Exception as e:
+        print(f"Error generating code: {e}")
+        generated_code = ""

     # Ensure spinner is stopped if no content was received
     if stop_spinner_event and not first_content_received:
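The branch added to code_mode() above picks one of the four new module-level templates and fills it with str.format. A standalone sketch of that selection logic is shown below; the helper name build_code_system_prompt and its markdown flag are illustrative (the flag stands in for the use_regular_prettify/use_stream_prettify checks), and the constants are assumed to import cleanly from the installed 3.4.4 wheel.

```python
# Sketch of the template-selection logic introduced above (not the packaged function).
# Constants are assumed importable from ngpt.cli.modes.code in ngpt 3.4.4.
from ngpt.cli.modes.code import (
    CODE_SYSTEM_PROMPT_MARKDOWN,
    CODE_SYSTEM_PROMPT_PLAINTEXT,
    CODE_PREPROMPT_MARKDOWN,
    CODE_PREPROMPT_PLAINTEXT,
)

def build_code_system_prompt(prompt, language="python", preprompt=None, markdown=False):
    """Mirror of the branch added to code_mode(): preprompt templates win,
    markdown templates are used when either prettify path is active."""
    if preprompt:
        template = CODE_PREPROMPT_MARKDOWN if markdown else CODE_PREPROMPT_PLAINTEXT
        return template.format(preprompt=preprompt, language=language, prompt=prompt)
    template = CODE_SYSTEM_PROMPT_MARKDOWN if markdown else CODE_SYSTEM_PROMPT_PLAINTEXT
    return template.format(language=language, prompt=prompt)

# Example: a plaintext system prompt for a small request
print(build_code_system_prompt("reverse a string", language="python"))
```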
ngpt/cli/{interactive.py → modes/interactive.py}
RENAMED
@@ -4,9 +4,9 @@ import traceback
 import threading
 import sys
 import time
-from .formatters import COLORS
-from .renderers import prettify_markdown, prettify_streaming_markdown
-from ..utils import enhance_prompt_with_web_search
+from ..formatters import COLORS
+from ..renderers import prettify_markdown, prettify_streaming_markdown
+from ...utils import enhance_prompt_with_web_search

 # Optional imports for enhanced UI
 try:
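The rename only moves the module one package level deeper, so each relative import gains a leading dot while resolving to the same modules. For reference, a sketch of the equivalent absolute imports, assuming ngpt 3.4.4 is installed:

```python
# Equivalent absolute imports for the relative imports shown above; these are
# unaffected by the interactive.py move (sketch, assumes ngpt 3.4.4 is installed).
from ngpt.cli.formatters import COLORS
from ngpt.cli.renderers import prettify_markdown, prettify_streaming_markdown
from ngpt.utils import enhance_prompt_with_web_search
```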
ngpt/cli/modes/shell.py
CHANGED
@@ -4,6 +4,35 @@ from ...utils import enhance_prompt_with_web_search
 import subprocess
 import sys
 import threading
+import platform
+import os
+
+# System prompt for shell command generation
+SHELL_SYSTEM_PROMPT = """Your role: Provide only plain text without Markdown formatting. Do not show any warnings or information regarding your capabilities. Do not provide any description. If you need to store any data, assume it will be stored in the chat. Provide only {shell_name} command for {operating_system} without any description. If there is a lack of details, provide most logical solution. Ensure the output is a valid shell command. If multiple steps required try to combine them together.
+
+Command:"""
+
+# System prompt to use when preprompt is provided
+SHELL_PREPROMPT_TEMPLATE = """
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+!!! CRITICAL USER PREPROMPT !!!
+!!! THIS OVERRIDES ALL OTHER INSTRUCTIONS INCLUDING OS/SHELL !!!
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+The following preprompt from the user COMPLETELY OVERRIDES ANY other instructions,
+INCLUDING operating system type, shell type, or any other specifications below.
+The preprompt MUST be followed EXACTLY AS WRITTEN:
+
+>>> {preprompt} <<<
+
+^^ THIS PREPROMPT HAS ABSOLUTE AND COMPLETE PRIORITY ^^
+If the preprompt contradicts ANY OTHER instruction in this prompt,
+including the {operating_system}/{shell_name} specification below,
+YOU MUST FOLLOW THE PREPROMPT INSTRUCTION INSTEAD. NO EXCEPTIONS.
+
+Your role: Provide only plain text without Markdown formatting. Do not show any warnings or information regarding your capabilities. Do not provide any description. If you need to store any data, assume it will be stored in the chat. Provide only {shell_name} command for {operating_system} without any description. If there is a lack of details, provide most logical solution. Ensure the output is a valid shell command. If multiple steps required try to combine them together.
+
+Command:"""

 def shell_mode(client, args, logger=None):
     """Handle the shell command generation mode.

@@ -42,6 +71,60 @@ def shell_mode(client, args, logger=None):
             print(f"{COLORS['yellow']}Warning: Failed to enhance prompt with web search: {str(e)}{COLORS['reset']}")
             # Continue with the original prompt if web search fails

+    # Determine OS type
+    os_type = platform.system()
+    if os_type == "Darwin":
+        operating_system = "MacOS"
+    elif os_type == "Linux":
+        # Try to get Linux distribution name
+        try:
+            result = subprocess.run(["lsb_release", "-si"], capture_output=True, text=True)
+            distro = result.stdout.strip()
+            operating_system = f"Linux/{distro}" if distro else "Linux"
+        except:
+            operating_system = "Linux"
+    elif os_type == "Windows":
+        operating_system = "Windows"
+    else:
+        operating_system = os_type
+
+    # Determine shell type
+    if os_type == "Windows":
+        shell_name = "powershell.exe" if os.environ.get("PSModulePath") else "cmd.exe"
+    else:
+        shell_name = os.environ.get("SHELL", "/bin/bash")
+        shell_name = os.path.basename(shell_name)
+
+    # Format the system prompt based on whether preprompt is provided
+    if args.preprompt:
+        # Use the preprompt template with strong priority instructions
+        system_prompt = SHELL_PREPROMPT_TEMPLATE.format(
+            preprompt=args.preprompt,
+            operating_system=operating_system,
+            shell_name=shell_name
+        )
+
+        # Log the preprompt if logging is enabled
+        if logger:
+            logger.log("system", f"Preprompt: {args.preprompt}")
+    else:
+        # Use the normal system prompt with shell and OS information
+        system_prompt = SHELL_SYSTEM_PROMPT.format(
+            shell_name=shell_name,
+            operating_system=operating_system,
+            prompt=prompt
+        )
+
+    # Prepare messages for the chat API
+    messages = [
+        {"role": "system", "content": system_prompt},
+        {"role": "user", "content": prompt}
+    ]
+
+    # Log the system prompt if logging is enabled
+    if logger:
+        logger.log("system", system_prompt)
+
     # Start spinner while waiting for command generation
     stop_spinner = threading.Event()
     spinner_thread = threading.Thread(

@@ -53,9 +136,17 @@ def shell_mode(client, args, logger=None):
     spinner_thread.start()

     try:
-        command = client.
-
-
+        command = client.chat(
+            prompt=prompt,
+            stream=False,
+            messages=messages,
+            temperature=args.temperature,
+            top_p=args.top_p,
+            max_tokens=args.max_tokens
+        )
+    except Exception as e:
+        print(f"Error generating shell command: {e}")
+        command = ""
     finally:
         # Stop the spinner
         stop_spinner.set()
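The OS and shell detection that used to live in the client now runs inside shell_mode itself, as shown above. A standalone sketch of that detection is given below; the helper name detect_environment is illustrative, while the individual probes (platform.system, lsb_release, the SHELL and PSModulePath environment variables) mirror the diff.

```python
# Standalone sketch of the OS/shell detection added to shell_mode() above.
# Helper name and return shape are illustrative; the probes mirror the diff.
import os
import platform
import subprocess

def detect_environment():
    os_type = platform.system()
    if os_type == "Darwin":
        operating_system = "MacOS"
    elif os_type == "Linux":
        try:
            # lsb_release -si prints the distribution name, e.g. "Ubuntu"
            result = subprocess.run(["lsb_release", "-si"], capture_output=True, text=True)
            distro = result.stdout.strip()
            operating_system = f"Linux/{distro}" if distro else "Linux"
        except Exception:
            operating_system = "Linux"
    else:
        operating_system = os_type  # e.g. "Windows"

    if os_type == "Windows":
        # The diff keys on PSModulePath to prefer PowerShell over cmd.exe
        shell_name = "powershell.exe" if os.environ.get("PSModulePath") else "cmd.exe"
    else:
        shell_name = os.path.basename(os.environ.get("SHELL", "/bin/bash"))
    return operating_system, shell_name

print(detect_environment())  # e.g. ("Linux/Ubuntu", "bash")
```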
ngpt/client.py
CHANGED
@@ -171,148 +171,6 @@ class NGPTClient:
             print(f"Error: An unexpected error occurred: {e}")
             return ""

-    def generate_shell_command(
-        self,
-        prompt: str,
-        temperature: float = 0.4,
-        top_p: float = 0.95,
-        max_tokens: Optional[int] = None
-    ) -> str:
-        """
-        Generate a shell command based on the prompt.
-
-        Args:
-            prompt: Description of the command to generate
-            temperature: Controls randomness in the response
-            top_p: Controls diversity via nucleus sampling
-            max_tokens: Maximum number of tokens to generate
-
-        Returns:
-            The generated shell command
-        """
-        # Check for API key first
-        if not self.api_key:
-            print("Error: API key is not set. Please configure your API key in the config file or provide it with --api-key.")
-            return ""
-
-        # Determine OS type
-        os_type = platform.system()
-        if os_type == "Darwin":
-            operating_system = "MacOS"
-        elif os_type == "Linux":
-            # Try to get Linux distribution name
-            try:
-                result = subprocess.run(["lsb_release", "-si"], capture_output=True, text=True)
-                distro = result.stdout.strip()
-                operating_system = f"Linux/{distro}" if distro else "Linux"
-            except:
-                operating_system = "Linux"
-        elif os_type == "Windows":
-            operating_system = "Windows"
-        else:
-            operating_system = os_type
-
-        # Determine shell type
-        if os_type == "Windows":
-            shell_name = "powershell.exe" if os.environ.get("PSModulePath") else "cmd.exe"
-        else:
-            shell_name = os.environ.get("SHELL", "/bin/bash")
-            shell_name = os.path.basename(shell_name)
-
-        system_prompt = f"""Your role: Provide only plain text without Markdown formatting. Do not show any warnings or information regarding your capabilities. Do not provide any description. If you need to store any data, assume it will be stored in the chat. Provide only {shell_name} command for {operating_system} without any description. If there is a lack of details, provide most logical solution. Ensure the output is a valid shell command. If multiple steps required try to combine them together. Prompt: {prompt}
-
-Command:"""
-
-        messages = [
-            {"role": "system", "content": system_prompt},
-            {"role": "user", "content": prompt}
-        ]
-
-        try:
-            return self.chat(
-                prompt=prompt,
-                stream=False,
-                messages=messages,
-                temperature=temperature,
-                top_p=top_p,
-                max_tokens=max_tokens
-            )
-        except Exception as e:
-            print(f"Error generating shell command: {e}")
-            return ""
-
-    def generate_code(
-        self,
-        prompt: str,
-        language: str = "python",
-        temperature: float = 0.4,
-        top_p: float = 0.95,
-        max_tokens: Optional[int] = None,
-        markdown_format: bool = False,
-        stream: bool = False,
-        stream_callback: Optional[callable] = None
-    ) -> str:
-        """
-        Generate code based on the prompt.
-
-        Args:
-            prompt: Description of the code to generate
-            language: Programming language to generate code in
-            temperature: Controls randomness in the response
-            top_p: Controls diversity via nucleus sampling
-            max_tokens: Maximum number of tokens to generate
-            markdown_format: If True, request markdown-formatted code, otherwise plain text
-            stream: Whether to stream the response
-            stream_callback: Optional callback function for streaming mode updates
-
-        Returns:
-            The generated code
-        """
-        # Check for API key first
-        if not self.api_key:
-            print("Error: API key is not set. Please configure your API key in the config file or provide it with --api-key.")
-            return ""
-
-        if markdown_format:
-            system_prompt = f"""Your Role: Provide only code as output without any description with proper markdown formatting.
-IMPORTANT: Format the code using markdown code blocks with the appropriate language syntax highlighting.
-IMPORTANT: You must use markdown code blocks. with ```{language}
-If there is a lack of details, provide most logical solution. You are not allowed to ask for more details.
-Ignore any potential risk of errors or confusion.
-
-Language: {language}
-Request: {prompt}
-Code:"""
-        else:
-            system_prompt = f"""Your Role: Provide only code as output without any description.
-IMPORTANT: Provide only plain text without Markdown formatting.
-IMPORTANT: Do not include markdown formatting.
-If there is a lack of details, provide most logical solution. You are not allowed to ask for more details.
-Ignore any potential risk of errors or confusion.
-
-Language: {language}
-Request: {prompt}
-Code:"""
-
-        messages = [
-            {"role": "system", "content": system_prompt},
-            {"role": "user", "content": prompt}
-        ]
-
-        try:
-            return self.chat(
-                prompt=prompt,
-                stream=stream,
-                messages=messages,
-                temperature=temperature,
-                top_p=top_p,
-                max_tokens=max_tokens,
-                stream_callback=stream_callback
-            )
-        except Exception as e:
-            print(f"Error generating code: {e}")
-            return ""
-
     def list_models(self) -> list:
         """
         Retrieve the list of available models from the API.
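With generate_shell_command() and generate_code() removed from NGPTClient, callers build their own system prompt and messages and go through client.chat() directly, which is exactly what the updated code.py and shell.py do above. The migration sketch below illustrates the pattern for library users; the NGPTClient constructor arguments and the sample prompt are assumptions for illustration, while the chat() keyword arguments mirror the calls in the diff.

```python
# Migration sketch, not part of the package: build the system prompt and messages
# yourself, then call client.chat() (constructor arguments below are assumptions).
from ngpt.client import NGPTClient

client = NGPTClient(api_key="sk-...", base_url="http://localhost:11434/v1", model="llama3")

system_prompt = "Provide only code as output without any description.\nLanguage: python"
messages = [
    {"role": "system", "content": system_prompt},
    {"role": "user", "content": "reverse a string"},
]

code = client.chat(
    prompt="reverse a string",
    stream=False,
    messages=messages,
    temperature=0.4,
    top_p=0.95,
)
print(code)
```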
{ngpt-3.4.2.dist-info → ngpt-3.4.4.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ngpt
-Version: 3.4.2
+Version: 3.4.4
 Summary: Swiss army knife for LLMs: powerful CLI, interactive chatbot, and flexible Python library. Works with OpenAI, Ollama, Groq, Claude, Gemini, and any OpenAI-compatible API.
 Project-URL: Homepage, https://github.com/nazdridoy/ngpt
 Project-URL: Repository, https://github.com/nazdridoy/ngpt
{ngpt-3.4.2.dist-info → ngpt-3.4.4.dist-info}/RECORD
RENAMED
@@ -1,28 +1,28 @@
 ngpt/__init__.py,sha256=kpKhViLakwMdHZkuLht2vWcjt0uD_5gR33gvMhfXr6w,664
 ngpt/__main__.py,sha256=j3eFYPOtCCFBOGh7NK5IWEnADnTMMSEB9GLyIDoW724,66
-ngpt/client.py,sha256=
+ngpt/client.py,sha256=XjpA2UnvrRvzk6_DzVEddUTzoPlF8koQ-cZURpHoT7c,9041
 ngpt/cli/__init__.py,sha256=hebbDSMGiOd43YNnQP67uzr67Ue6rZPwm2czynr5iZY,43
 ngpt/cli/args.py,sha256=4Yeik1kAb2nEOjiGYauf9Rg7wQ5NHFJWAS350D6a_zo,12411
 ngpt/cli/config_manager.py,sha256=NQQcWnjUppAAd0s0p9YAf8EyKS1ex5-0EB4DvKdB4dk,3662
 ngpt/cli/formatters.py,sha256=HBYGlx_7eoAKyzfy0Vq5L0yn8yVKjngqYBukMmXCcz0,9401
-ngpt/cli/interactive.py,sha256=
-ngpt/cli/main.py,sha256=9um40RplKHSW5UHcUUO2cwMNqkGUhfQwikI1CHHFbnk,28926
+ngpt/cli/main.py,sha256=oKX7ryTIrsvQRJHVnH2a763pGyNZthq81wkrRILwHLw,28932
 ngpt/cli/renderers.py,sha256=m71BeUXKynpKKGXFzwRSW1XngvyKiZ_xEsdujUbU0MA,16597
 ngpt/cli/ui.py,sha256=HoHDFpLiwMBP5wtMb8YYo244FMiqiPFRoBNcNGp6N0A,7310
-ngpt/cli/modes/__init__.py,sha256=
+ngpt/cli/modes/__init__.py,sha256=KP7VR6Xw9k1p5Jcu0F38RDxSFvFIzH3j1ThDLNwznUI,363
 ngpt/cli/modes/chat.py,sha256=UlnZWqvxL_CSyVIDZKGVXNlG-KufDfo05E8-8xTcM_Y,6713
-ngpt/cli/modes/code.py,sha256=
+ngpt/cli/modes/code.py,sha256=ounayP8exu8Bb8QFL4TcyF2d3QI_QlsG5tf6v1FktxE,11470
 ngpt/cli/modes/gitcommsg.py,sha256=rsfMoeOupmNp-5p5fsMSPAf18BbzXWq-4PF2HjEz6SY,46991
+ngpt/cli/modes/interactive.py,sha256=COJABbSgfyVC98lvFD1K2yqKPqyH20vbVrYYh8oiPUk,16395
 ngpt/cli/modes/rewrite.py,sha256=ftD-6M9iQ7g4rLdlKyyLTRiJWYtbz64LIG4PIByxmOk,11472
-ngpt/cli/modes/shell.py,sha256=
+ngpt/cli/modes/shell.py,sha256=9oaOvzKc0VZ0Hjervbzo_kryMlYVZH0IXhc0MaBTYVk,8008
 ngpt/cli/modes/text.py,sha256=gdn4opioZ6G3nvfrTkp-dpoD-Of_ZvjVVRggVd6edkg,5528
 ngpt/utils/__init__.py,sha256=qu_66I1Vtav2f1LDiPn5J3DUsbK7o1CSScMcTkYqxoM,1179
 ngpt/utils/cli_config.py,sha256=Ug8cECBTIuzOwkBWidLTfs-OAdOsCMJ2bNa70pOADfw,11195
 ngpt/utils/config.py,sha256=wsArA4osnh8fKqOvtsPqqBxAz3DpdjtaWUFaRtnUdyc,10452
 ngpt/utils/log.py,sha256=f1jg2iFo35PAmsarH8FVL_62plq4VXH0Mu2QiP6RJGw,15934
 ngpt/utils/web_search.py,sha256=yvCUDNhwcIcKZ_hWESFQQ-vB-LKsDDCDT17YFzFcGR4,12598
-ngpt-3.4.2.dist-info/METADATA,sha256=
-ngpt-3.4.2.dist-info/WHEEL,sha256=
-ngpt-3.4.2.dist-info/entry_points.txt,sha256=
-ngpt-3.4.2.dist-info/licenses/LICENSE,sha256=
-ngpt-3.4.2.dist-info/RECORD,,
+ngpt-3.4.4.dist-info/METADATA,sha256=JD6g13Horisv-gmE-Dnr0vbeAUY7zq8J9JwJ1DZYsMg,29100
+ngpt-3.4.4.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ngpt-3.4.4.dist-info/entry_points.txt,sha256=SqAAvLhMrsEpkIr4YFRdUeyuXQ9o0IBCeYgE6AVojoI,44
+ngpt-3.4.4.dist-info/licenses/LICENSE,sha256=mQkpWoADxbHqE0HRefYLJdm7OpdrXBr3vNv5bZ8w72M,1065
+ngpt-3.4.4.dist-info/RECORD,,
{ngpt-3.4.2.dist-info → ngpt-3.4.4.dist-info}/WHEEL
File without changes
{ngpt-3.4.2.dist-info → ngpt-3.4.4.dist-info}/entry_points.txt
File without changes
{ngpt-3.4.2.dist-info → ngpt-3.4.4.dist-info}/licenses/LICENSE
File without changes
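Each RECORD entry above has the form path,sha256=<digest>,<size>, where the digest is the URL-safe base64 encoding of the file's SHA-256 hash with padding stripped and the size is in bytes. A small sketch for recomputing an entry from an unpacked wheel follows; the sample path is illustrative.

```python
# Sketch: recompute a RECORD-style entry ("path,sha256=<digest>,<size>") for a file
# from an unpacked wheel; the path below is illustrative.
import base64
import hashlib
import pathlib

def record_entry(path):
    data = pathlib.Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode()
    return f"{path},sha256={digest},{len(data)}"

print(record_entry("ngpt/client.py"))  # compare with the RECORD line shown above
```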