ngpt 3.4.1__py3-none-any.whl → 3.4.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ngpt/cli/modes/code.py CHANGED
@@ -5,6 +5,80 @@ from ...utils import enhance_prompt_with_web_search
  import sys
  import threading
 
+ # System prompt for code generation with markdown formatting
+ CODE_SYSTEM_PROMPT_MARKDOWN = """Your Role: Provide only code as output without any description with proper markdown formatting.
+ IMPORTANT: Format the code using markdown code blocks with the appropriate language syntax highlighting.
+ IMPORTANT: You must use markdown code blocks. with ```{language}
+ If there is a lack of details, provide most logical solution. You are not allowed to ask for more details.
+ Ignore any potential risk of errors or confusion.
+
+ Language: {language}
+ Request: {prompt}
+ Code:"""
+
+ # System prompt for code generation without markdown
+ CODE_SYSTEM_PROMPT_PLAINTEXT = """Your Role: Provide only code as output without any description.
+ IMPORTANT: Provide only plain text without Markdown formatting.
+ IMPORTANT: Do not include markdown formatting.
+ If there is a lack of details, provide most logical solution. You are not allowed to ask for more details.
+ Ignore any potential risk of errors or confusion.
+
+ Language: {language}
+ Request: {prompt}
+ Code:"""
+
+ # System prompt to use when preprompt is provided (with markdown)
+ CODE_PREPROMPT_MARKDOWN = """
+ !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+ !!! CRITICAL USER PREPROMPT !!!
+ !!! THIS OVERRIDES ALL OTHER INSTRUCTIONS IN THIS PROMPT !!!
+ !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+ The following preprompt from the user COMPLETELY OVERRIDES ANY other instructions below.
+ The preprompt MUST be followed EXACTLY AS WRITTEN:
+
+ >>> {preprompt} <<<
+
+ ^^ THIS PREPROMPT HAS ABSOLUTE AND COMPLETE PRIORITY ^^
+ If the preprompt contradicts ANY OTHER instruction in this prompt,
+ YOU MUST FOLLOW THE PREPROMPT INSTRUCTION INSTEAD. NO EXCEPTIONS.
+
+ Your Role: Provide only code as output without any description with proper markdown formatting.
+ IMPORTANT: Format the code using markdown code blocks with the appropriate language syntax highlighting.
+ IMPORTANT: You must use markdown code blocks. with ```{language}
+ If there is a lack of details, provide most logical solution. You are not allowed to ask for more details.
+ Ignore any potential risk of errors or confusion.
+
+ Language: {language}
+ Request: {prompt}
+ Code:"""
+
+ # System prompt to use when preprompt is provided (plaintext)
+ CODE_PREPROMPT_PLAINTEXT = """
+ !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+ !!! CRITICAL USER PREPROMPT !!!
+ !!! THIS OVERRIDES ALL OTHER INSTRUCTIONS IN THIS PROMPT !!!
+ !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+ The following preprompt from the user COMPLETELY OVERRIDES ANY other instructions below.
+ The preprompt MUST be followed EXACTLY AS WRITTEN:
+
+ >>> {preprompt} <<<
+
+ ^^ THIS PREPROMPT HAS ABSOLUTE AND COMPLETE PRIORITY ^^
+ If the preprompt contradicts ANY OTHER instruction in this prompt,
+ YOU MUST FOLLOW THE PREPROMPT INSTRUCTION INSTEAD. NO EXCEPTIONS.
+
+ Your Role: Provide only code as output without any description.
+ IMPORTANT: Provide only plain text without Markdown formatting.
+ IMPORTANT: Do not include markdown formatting.
+ If there is a lack of details, provide most logical solution. You are not allowed to ask for more details.
+ Ignore any potential risk of errors or confusion.
+
+ Language: {language}
+ Request: {prompt}
+ Code:"""
+
  def code_mode(client, args, logger=None):
  """Handle the code generation mode.
 
@@ -31,7 +105,7 @@ def code_mode(client, args, logger=None):
  if args.web_search:
  try:
  original_prompt = prompt
- prompt = enhance_prompt_with_web_search(prompt, logger=logger)
+ prompt = enhance_prompt_with_web_search(prompt, logger=logger, disable_citations=True)
  print("Enhanced input with web search results.")
 
  # Log the enhanced prompt if logging is enabled
@@ -123,18 +197,62 @@ def code_mode(client, args, logger=None):
  # Use our wrapper callback
  if use_stream_prettify and live_display:
  stream_callback = spinner_handling_callback
+
+ # Select the appropriate system prompt based on formatting and preprompt
+ if args.preprompt:
+ # Log the preprompt if logging is enabled
+ if logger:
+ logger.log("system", f"Preprompt: {args.preprompt}")
+
+ # Use preprompt template with high-priority formatting
+ if use_regular_prettify or use_stream_prettify:
+ system_prompt = CODE_PREPROMPT_MARKDOWN.format(
+ preprompt=args.preprompt,
+ language=args.language,
+ prompt=prompt
+ )
+ else:
+ system_prompt = CODE_PREPROMPT_PLAINTEXT.format(
+ preprompt=args.preprompt,
+ language=args.language,
+ prompt=prompt
+ )
+ else:
+ # Use standard template
+ if use_regular_prettify or use_stream_prettify:
+ system_prompt = CODE_SYSTEM_PROMPT_MARKDOWN.format(
+ language=args.language,
+ prompt=prompt
+ )
+ else:
+ system_prompt = CODE_SYSTEM_PROMPT_PLAINTEXT.format(
+ language=args.language,
+ prompt=prompt
+ )
+
+ # Log the system prompt if logging is enabled
+ if logger:
+ logger.log("system", system_prompt)
+
+ # Prepare messages for the chat API
+ messages = [
+ {"role": "system", "content": system_prompt},
+ {"role": "user", "content": prompt}
+ ]
 
- generated_code = client.generate_code(
- prompt=prompt,
- language=args.language,
- temperature=args.temperature,
- top_p=args.top_p,
- max_tokens=args.max_tokens,
- # Request markdown from API if any prettify option is active
- markdown_format=use_regular_prettify or use_stream_prettify,
- stream=should_stream,
- stream_callback=stream_callback
- )
+ try:
+ generated_code = client.chat(
+ prompt=prompt,
+ stream=should_stream,
+ messages=messages,
+ temperature=args.temperature,
+ top_p=args.top_p,
+ max_tokens=args.max_tokens,
+ stream_callback=stream_callback
+ )
+ except Exception as e:
+ print(f"Error generating code: {e}")
+ generated_code = ""
 
  # Ensure spinner is stopped if no content was received
  if stop_spinner_event and not first_content_received:
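
Note on the change above: code mode no longer calls client.generate_code (that helper is removed from NGPTClient in the ngpt/client.py diff below); it now formats one of the module-level prompt templates itself and passes explicit messages to client.chat. The following is a minimal sketch of the equivalent call for library users; the prompt constant and the chat() keyword arguments come from this diff, while the NGPTClient constructor arguments are illustrative assumptions, not taken from the package.

    # Minimal sketch, not package code: reproduces the new messages-based flow.
    from ngpt.client import NGPTClient
    from ngpt.cli.modes.code import CODE_SYSTEM_PROMPT_MARKDOWN

    client = NGPTClient(api_key="YOUR_KEY")  # constructor arguments assumed, not shown in this diff

    prompt = "write a binary search function"
    system_prompt = CODE_SYSTEM_PROMPT_MARKDOWN.format(language="python", prompt=prompt)

    generated_code = client.chat(
        prompt=prompt,
        stream=False,
        messages=[
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": prompt},
        ],
        temperature=0.4,  # defaults previously used by the removed generate_code helper
        top_p=0.95,
    )
    print(generated_code)
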
ngpt/cli/modes/shell.py CHANGED
@@ -4,6 +4,35 @@ from ...utils import enhance_prompt_with_web_search
  import subprocess
  import sys
  import threading
+ import platform
+ import os
+
+ # System prompt for shell command generation
+ SHELL_SYSTEM_PROMPT = """Your role: Provide only plain text without Markdown formatting. Do not show any warnings or information regarding your capabilities. Do not provide any description. If you need to store any data, assume it will be stored in the chat. Provide only {shell_name} command for {operating_system} without any description. If there is a lack of details, provide most logical solution. Ensure the output is a valid shell command. If multiple steps required try to combine them together.
+
+ Command:"""
+
+ # System prompt to use when preprompt is provided
+ SHELL_PREPROMPT_TEMPLATE = """
+ !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+ !!! CRITICAL USER PREPROMPT !!!
+ !!! THIS OVERRIDES ALL OTHER INSTRUCTIONS INCLUDING OS/SHELL !!!
+ !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+ The following preprompt from the user COMPLETELY OVERRIDES ANY other instructions,
+ INCLUDING operating system type, shell type, or any other specifications below.
+ The preprompt MUST be followed EXACTLY AS WRITTEN:
+
+ >>> {preprompt} <<<
+
+ ^^ THIS PREPROMPT HAS ABSOLUTE AND COMPLETE PRIORITY ^^
+ If the preprompt contradicts ANY OTHER instruction in this prompt,
+ including the {operating_system}/{shell_name} specification below,
+ YOU MUST FOLLOW THE PREPROMPT INSTRUCTION INSTEAD. NO EXCEPTIONS.
+
+ Your role: Provide only plain text without Markdown formatting. Do not show any warnings or information regarding your capabilities. Do not provide any description. If you need to store any data, assume it will be stored in the chat. Provide only {shell_name} command for {operating_system} without any description. If there is a lack of details, provide most logical solution. Ensure the output is a valid shell command. If multiple steps required try to combine them together.
+
+ Command:"""
 
  def shell_mode(client, args, logger=None):
  """Handle the shell command generation mode.
@@ -31,7 +60,7 @@ def shell_mode(client, args, logger=None):
  if args.web_search:
  try:
  original_prompt = prompt
- prompt = enhance_prompt_with_web_search(prompt, logger=logger)
+ prompt = enhance_prompt_with_web_search(prompt, logger=logger, disable_citations=True)
  print("Enhanced input with web search results.")
 
  # Log the enhanced prompt if logging is enabled
@@ -42,6 +71,60 @@ def shell_mode(client, args, logger=None):
  print(f"{COLORS['yellow']}Warning: Failed to enhance prompt with web search: {str(e)}{COLORS['reset']}")
  # Continue with the original prompt if web search fails
 
+ # Determine OS type
+ os_type = platform.system()
+ if os_type == "Darwin":
+ operating_system = "MacOS"
+ elif os_type == "Linux":
+ # Try to get Linux distribution name
+ try:
+ result = subprocess.run(["lsb_release", "-si"], capture_output=True, text=True)
+ distro = result.stdout.strip()
+ operating_system = f"Linux/{distro}" if distro else "Linux"
+ except:
+ operating_system = "Linux"
+ elif os_type == "Windows":
+ operating_system = "Windows"
+ else:
+ operating_system = os_type
+
+ # Determine shell type
+ if os_type == "Windows":
+ shell_name = "powershell.exe" if os.environ.get("PSModulePath") else "cmd.exe"
+ else:
+ shell_name = os.environ.get("SHELL", "/bin/bash")
+ shell_name = os.path.basename(shell_name)
+
+ # Format the system prompt based on whether preprompt is provided
+ if args.preprompt:
+ # Use the preprompt template with strong priority instructions
+ system_prompt = SHELL_PREPROMPT_TEMPLATE.format(
+ preprompt=args.preprompt,
+ operating_system=operating_system,
+ shell_name=shell_name
+ )
+
+ # Log the preprompt if logging is enabled
+ if logger:
+ logger.log("system", f"Preprompt: {args.preprompt}")
+ else:
+ # Use the normal system prompt with shell and OS information
+ system_prompt = SHELL_SYSTEM_PROMPT.format(
+ shell_name=shell_name,
+ operating_system=operating_system,
+ prompt=prompt
+ )
+
+ # Prepare messages for the chat API
+ messages = [
+ {"role": "system", "content": system_prompt},
+ {"role": "user", "content": prompt}
+ ]
+
+ # Log the system prompt if logging is enabled
+ if logger:
+ logger.log("system", system_prompt)
+
  # Start spinner while waiting for command generation
  stop_spinner = threading.Event()
  spinner_thread = threading.Thread(
@@ -53,9 +136,17 @@ def shell_mode(client, args, logger=None):
  spinner_thread.start()
 
  try:
- command = client.generate_shell_command(prompt,
- temperature=args.temperature, top_p=args.top_p,
- max_tokens=args.max_tokens)
+ command = client.chat(
+ prompt=prompt,
+ stream=False,
+ messages=messages,
+ temperature=args.temperature,
+ top_p=args.top_p,
+ max_tokens=args.max_tokens
+ )
+ except Exception as e:
+ print(f"Error generating shell command: {e}")
+ command = ""
  finally:
  # Stop the spinner
  stop_spinner.set()
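
The OS and shell detection that previously lived inside NGPTClient.generate_shell_command now runs in shell mode itself and feeds SHELL_SYSTEM_PROMPT. Below is a standalone sketch of that detection; detect_environment is a hypothetical helper name, but the branches mirror the added lines above.

    # Sketch only: mirrors the detection added above; the helper name is hypothetical.
    import os
    import platform
    import subprocess

    def detect_environment():
        os_type = platform.system()
        if os_type == "Darwin":
            operating_system = "MacOS"
        elif os_type == "Linux":
            # lsb_release may be absent; fall back to plain "Linux"
            try:
                result = subprocess.run(["lsb_release", "-si"], capture_output=True, text=True)
                distro = result.stdout.strip()
                operating_system = f"Linux/{distro}" if distro else "Linux"
            except Exception:
                operating_system = "Linux"
        else:
            operating_system = os_type  # "Windows" and anything else pass through unchanged

        if os_type == "Windows":
            # PowerShell sessions define PSModulePath; plain cmd.exe does not
            shell_name = "powershell.exe" if os.environ.get("PSModulePath") else "cmd.exe"
        else:
            shell_name = os.path.basename(os.environ.get("SHELL", "/bin/bash"))
        return operating_system, shell_name

    # e.g. ("Linux/Ubuntu", "zsh"); these values fill the {operating_system} and
    # {shell_name} placeholders in SHELL_SYSTEM_PROMPT.
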
ngpt/client.py CHANGED
@@ -171,148 +171,6 @@ class NGPTClient:
  print(f"Error: An unexpected error occurred: {e}")
  return ""
 
- def generate_shell_command(
- self,
- prompt: str,
- temperature: float = 0.4,
- top_p: float = 0.95,
- max_tokens: Optional[int] = None
- ) -> str:
- """
- Generate a shell command based on the prompt.
-
- Args:
- prompt: Description of the command to generate
- temperature: Controls randomness in the response
- top_p: Controls diversity via nucleus sampling
- max_tokens: Maximum number of tokens to generate
-
- Returns:
- The generated shell command
- """
- # Check for API key first
- if not self.api_key:
- print("Error: API key is not set. Please configure your API key in the config file or provide it with --api-key.")
- return ""
-
- # Determine OS type
- os_type = platform.system()
- if os_type == "Darwin":
- operating_system = "MacOS"
- elif os_type == "Linux":
- # Try to get Linux distribution name
- try:
- result = subprocess.run(["lsb_release", "-si"], capture_output=True, text=True)
- distro = result.stdout.strip()
- operating_system = f"Linux/{distro}" if distro else "Linux"
- except:
- operating_system = "Linux"
- elif os_type == "Windows":
- operating_system = "Windows"
- else:
- operating_system = os_type
-
- # Determine shell type
- if os_type == "Windows":
- shell_name = "powershell.exe" if os.environ.get("PSModulePath") else "cmd.exe"
- else:
- shell_name = os.environ.get("SHELL", "/bin/bash")
- shell_name = os.path.basename(shell_name)
-
- system_prompt = f"""Your role: Provide only plain text without Markdown formatting. Do not show any warnings or information regarding your capabilities. Do not provide any description. If you need to store any data, assume it will be stored in the chat. Provide only {shell_name} command for {operating_system} without any description. If there is a lack of details, provide most logical solution. Ensure the output is a valid shell command. If multiple steps required try to combine them together. Prompt: {prompt}
-
- Command:"""
-
- messages = [
- {"role": "system", "content": system_prompt},
- {"role": "user", "content": prompt}
- ]
-
- try:
- return self.chat(
- prompt=prompt,
- stream=False,
- messages=messages,
- temperature=temperature,
- top_p=top_p,
- max_tokens=max_tokens
- )
- except Exception as e:
- print(f"Error generating shell command: {e}")
- return ""
-
- def generate_code(
- self,
- prompt: str,
- language: str = "python",
- temperature: float = 0.4,
- top_p: float = 0.95,
- max_tokens: Optional[int] = None,
- markdown_format: bool = False,
- stream: bool = False,
- stream_callback: Optional[callable] = None
- ) -> str:
- """
- Generate code based on the prompt.
-
- Args:
- prompt: Description of the code to generate
- language: Programming language to generate code in
- temperature: Controls randomness in the response
- top_p: Controls diversity via nucleus sampling
- max_tokens: Maximum number of tokens to generate
- markdown_format: If True, request markdown-formatted code, otherwise plain text
- stream: Whether to stream the response
- stream_callback: Optional callback function for streaming mode updates
-
- Returns:
- The generated code
- """
- # Check for API key first
- if not self.api_key:
- print("Error: API key is not set. Please configure your API key in the config file or provide it with --api-key.")
- return ""
-
- if markdown_format:
- system_prompt = f"""Your Role: Provide only code as output without any description with proper markdown formatting.
- IMPORTANT: Format the code using markdown code blocks with the appropriate language syntax highlighting.
- IMPORTANT: You must use markdown code blocks. with ```{language}
- If there is a lack of details, provide most logical solution. You are not allowed to ask for more details.
- Ignore any potential risk of errors or confusion.
-
- Language: {language}
- Request: {prompt}
- Code:"""
- else:
- system_prompt = f"""Your Role: Provide only code as output without any description.
- IMPORTANT: Provide only plain text without Markdown formatting.
- IMPORTANT: Do not include markdown formatting.
- If there is a lack of details, provide most logical solution. You are not allowed to ask for more details.
- Ignore any potential risk of errors or confusion.
-
- Language: {language}
- Request: {prompt}
- Code:"""
-
- messages = [
- {"role": "system", "content": system_prompt},
- {"role": "user", "content": prompt}
- ]
-
- try:
- return self.chat(
- prompt=prompt,
- stream=stream,
- messages=messages,
- temperature=temperature,
- top_p=top_p,
- max_tokens=max_tokens,
- stream_callback=stream_callback
- )
- except Exception as e:
- print(f"Error generating code: {e}")
- return ""
-
  def list_models(self) -> list:
  """
  Retrieve the list of available models from the API.
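
With generate_shell_command and generate_code removed from NGPTClient, any code that called them directly must now build its own messages and use chat(), as the CLI modes above do. A hedged migration sketch follows: the old call and the chat() arguments come from this diff, the template import is one possible replacement, and client is assumed to be an already-configured NGPTClient instance.

    # Migration sketch, not package code. `client` is an existing NGPTClient instance.
    from ngpt.cli.modes.shell import SHELL_SYSTEM_PROMPT

    prompt = "list all files larger than 100 MB"

    # 3.4.1 and earlier:
    #   command = client.generate_shell_command(prompt, temperature=0.4, top_p=0.95)

    # 3.4.3: format a system prompt and pass explicit messages to chat().
    system_prompt = SHELL_SYSTEM_PROMPT.format(shell_name="bash", operating_system="Linux")
    command = client.chat(
        prompt=prompt,
        stream=False,
        messages=[
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": prompt},
        ],
        temperature=0.4,
        top_p=0.95,
    )
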
ngpt/utils/web_search.py CHANGED
@@ -29,11 +29,11 @@ def get_logger():
  if _logger is not None:
  return _logger
  else:
- # Default logging to stderr if no logger provided
+ # Default logging to stderr if no logger provided, but only for errors
  class DefaultLogger:
- def info(self, msg): print(f"INFO: {msg}", file=sys.stderr)
+ def info(self, msg): pass # Suppress INFO messages
  def error(self, msg): print(f"ERROR: {msg}", file=sys.stderr)
- def warning(self, msg): print(f"WARNING: {msg}", file=sys.stderr)
+ def warning(self, msg): pass # Suppress WARNING messages
  def debug(self, msg): pass
  return DefaultLogger()
 
@@ -256,15 +256,15 @@ def format_web_search_results_for_prompt(search_results: Dict[str, Any]) -> str:
  formatted_text += "Example citation format in text:\n"
  formatted_text += "Today is Thursday [1] and it's expected to rain tomorrow [2].\n\n"
  formatted_text += "Example reference format (YOU MUST FOLLOW THIS EXACT FORMAT WITH EMPTY LINES BETWEEN REFERENCES):\n"
- formatted_text += "> [0] https://example.com/date\n"
+ formatted_text += "> [1] https://example.com/date\n"
  formatted_text += ">\n"
- formatted_text += "> [1] https://weather.com/forecast\n"
+ formatted_text += "> [2] https://weather.com/forecast\n"
  formatted_text += ">\n"
- formatted_text += "> [2] https://www.timeanddate.com\n\n"
+ formatted_text += "> [3] https://www.timeanddate.com\n\n"
 
  return formatted_text
 
- def enhance_prompt_with_web_search(prompt: str, max_results: int = 5, logger=None) -> str:
+ def enhance_prompt_with_web_search(prompt: str, max_results: int = 5, logger=None, disable_citations: bool = False) -> str:
  """
  Enhance a prompt with web search results.
 
@@ -272,6 +272,7 @@ def enhance_prompt_with_web_search(prompt: str, max_results: int = 5, logger=Non
  prompt: The original user prompt
  max_results: Maximum number of search results to include
  logger: Optional logger to use
+ disable_citations: If True, disables citation instructions (used for code and shell modes)
 
  Returns:
  Enhanced prompt with web search results prepended
@@ -282,10 +283,28 @@ def enhance_prompt_with_web_search(prompt: str, max_results: int = 5, logger=Non
 
  logger = get_logger()
  search_results = get_web_search_results(prompt, max_results)
- formatted_results = format_web_search_results_for_prompt(search_results)
+
+ if disable_citations:
+ # Modified version without citation instructions for code/shell modes
+ query = search_results['query']
+ results = search_results['results']
+ timestamp = search_results['timestamp']
+
+ formatted_text = f"[Web Search Results for: {query} (searched at {timestamp})]\n\n"
+
+ for i, result in enumerate(results, 1):
+ formatted_text += f"RESULT {i}: {result['title']}\n"
+ formatted_text += f"URL: {result['url']}\n"
+ formatted_text += f"CONTENT:\n{result['content']}\n\n"
+
+ formatted_text += f"[End of Web Search Results]\n\n"
+ formatted_text += "Use the above web search information to help you, but do not include citations or references in your response.\n\n"
+ else:
+ # Standard version with citation instructions
+ formatted_text = format_web_search_results_for_prompt(search_results)
 
  # Combine results with original prompt
- enhanced_prompt = formatted_results + prompt
+ enhanced_prompt = formatted_text + prompt
 
  logger.info("Enhanced input with web search results")
  return enhanced_prompt
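
enhance_prompt_with_web_search now accepts a disable_citations flag, which code and shell modes pass so that search context is injected without the citation and reference formatting instructions. A short usage sketch follows; the signature and import path are taken from this diff, and the call performs a live web search, so treat the snippet as illustrative.

    # Usage sketch of the new flag; import path matches the CLI modes above.
    from ngpt.utils import enhance_prompt_with_web_search

    prompt = "write a python script that reports the current Bitcoin price"

    # Chat-style modes keep the citation instructions, so answers cite sources as [1], [2], ...
    chat_prompt = enhance_prompt_with_web_search(prompt, max_results=5)

    # Code and shell modes disable them, so the model's output stays pure code or a bare command.
    code_prompt = enhance_prompt_with_web_search(prompt, max_results=5, disable_citations=True)
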
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ngpt
- Version: 3.4.1
+ Version: 3.4.3
  Summary: Swiss army knife for LLMs: powerful CLI, interactive chatbot, and flexible Python library. Works with OpenAI, Ollama, Groq, Claude, Gemini, and any OpenAI-compatible API.
  Project-URL: Homepage, https://github.com/nazdridoy/ngpt
  Project-URL: Repository, https://github.com/nazdridoy/ngpt
@@ -1,6 +1,6 @@
  ngpt/__init__.py,sha256=kpKhViLakwMdHZkuLht2vWcjt0uD_5gR33gvMhfXr6w,664
  ngpt/__main__.py,sha256=j3eFYPOtCCFBOGh7NK5IWEnADnTMMSEB9GLyIDoW724,66
- ngpt/client.py,sha256=1kn-kVQ2ZYhOlQ5OPM9c_btVKajfk1qb52QbMyGdYtU,14645
+ ngpt/client.py,sha256=XjpA2UnvrRvzk6_DzVEddUTzoPlF8koQ-cZURpHoT7c,9041
  ngpt/cli/__init__.py,sha256=hebbDSMGiOd43YNnQP67uzr67Ue6rZPwm2czynr5iZY,43
  ngpt/cli/args.py,sha256=4Yeik1kAb2nEOjiGYauf9Rg7wQ5NHFJWAS350D6a_zo,12411
  ngpt/cli/config_manager.py,sha256=NQQcWnjUppAAd0s0p9YAf8EyKS1ex5-0EB4DvKdB4dk,3662
@@ -11,18 +11,18 @@ ngpt/cli/renderers.py,sha256=m71BeUXKynpKKGXFzwRSW1XngvyKiZ_xEsdujUbU0MA,16597
  ngpt/cli/ui.py,sha256=HoHDFpLiwMBP5wtMb8YYo244FMiqiPFRoBNcNGp6N0A,7310
  ngpt/cli/modes/__init__.py,sha256=R3aO662RIzWEOvr3moTrEI8Tpg0zDDyMGGh1-OxiRgM,285
  ngpt/cli/modes/chat.py,sha256=UlnZWqvxL_CSyVIDZKGVXNlG-KufDfo05E8-8xTcM_Y,6713
- ngpt/cli/modes/code.py,sha256=4TpIV-EkyXb8d3XJKrsLs9Wxq5P9jC82z94PI2Zck_g,6730
+ ngpt/cli/modes/code.py,sha256=ounayP8exu8Bb8QFL4TcyF2d3QI_QlsG5tf6v1FktxE,11470
  ngpt/cli/modes/gitcommsg.py,sha256=rsfMoeOupmNp-5p5fsMSPAf18BbzXWq-4PF2HjEz6SY,46991
  ngpt/cli/modes/rewrite.py,sha256=ftD-6M9iQ7g4rLdlKyyLTRiJWYtbz64LIG4PIByxmOk,11472
- ngpt/cli/modes/shell.py,sha256=fxE9LEEo4arSn5-q_6zxdnUH7RlqifWmk-_kcA76OhM,4070
+ ngpt/cli/modes/shell.py,sha256=9oaOvzKc0VZ0Hjervbzo_kryMlYVZH0IXhc0MaBTYVk,8008
  ngpt/cli/modes/text.py,sha256=gdn4opioZ6G3nvfrTkp-dpoD-Of_ZvjVVRggVd6edkg,5528
  ngpt/utils/__init__.py,sha256=qu_66I1Vtav2f1LDiPn5J3DUsbK7o1CSScMcTkYqxoM,1179
  ngpt/utils/cli_config.py,sha256=Ug8cECBTIuzOwkBWidLTfs-OAdOsCMJ2bNa70pOADfw,11195
  ngpt/utils/config.py,sha256=wsArA4osnh8fKqOvtsPqqBxAz3DpdjtaWUFaRtnUdyc,10452
  ngpt/utils/log.py,sha256=f1jg2iFo35PAmsarH8FVL_62plq4VXH0Mu2QiP6RJGw,15934
- ngpt/utils/web_search.py,sha256=xHfAymkVkx5b7rOITee2smxpH7waRUpaX8aqTf5WBeA,11599
- ngpt-3.4.1.dist-info/METADATA,sha256=z2jPQJa1kWQZmTsYgoEbln4TREkW3YPbwdrU0gqTx40,29100
- ngpt-3.4.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- ngpt-3.4.1.dist-info/entry_points.txt,sha256=SqAAvLhMrsEpkIr4YFRdUeyuXQ9o0IBCeYgE6AVojoI,44
- ngpt-3.4.1.dist-info/licenses/LICENSE,sha256=mQkpWoADxbHqE0HRefYLJdm7OpdrXBr3vNv5bZ8w72M,1065
- ngpt-3.4.1.dist-info/RECORD,,
+ ngpt/utils/web_search.py,sha256=yvCUDNhwcIcKZ_hWESFQQ-vB-LKsDDCDT17YFzFcGR4,12598
+ ngpt-3.4.3.dist-info/METADATA,sha256=oM_uL8vOKuK4Ou4F2yHt1vBnoIFrTuJaSrGKECcMwkM,29100
+ ngpt-3.4.3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ ngpt-3.4.3.dist-info/entry_points.txt,sha256=SqAAvLhMrsEpkIr4YFRdUeyuXQ9o0IBCeYgE6AVojoI,44
+ ngpt-3.4.3.dist-info/licenses/LICENSE,sha256=mQkpWoADxbHqE0HRefYLJdm7OpdrXBr3vNv5bZ8w72M,1065
+ ngpt-3.4.3.dist-info/RECORD,,