ngpt 3.12.2__tar.gz → 4.0.0__tar.gz

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the versions exactly as they appear in the public registry.
Files changed (67)
  1. {ngpt-3.12.2 → ngpt-4.0.0}/PKG-INFO +22 -37
  2. {ngpt-3.12.2 → ngpt-4.0.0}/README.md +21 -36
  3. {ngpt-3.12.2 → ngpt-4.0.0}/docs/configuration.md +21 -35
  4. {ngpt-3.12.2 → ngpt-4.0.0}/docs/usage/cli_config.md +16 -0
  5. {ngpt-3.12.2 → ngpt-4.0.0}/docs/usage/cli_usage.md +55 -33
  6. {ngpt-3.12.2 → ngpt-4.0.0}/ngpt/cli/args.py +33 -36
  7. {ngpt-3.12.2 → ngpt-4.0.0}/ngpt/cli/main.py +1 -2
  8. {ngpt-3.12.2 → ngpt-4.0.0}/ngpt/cli/modes/interactive.py +1 -1
  9. {ngpt-3.12.2 → ngpt-4.0.0}/ngpt/utils/cli_config.py +0 -6
  10. {ngpt-3.12.2 → ngpt-4.0.0}/pyproject.toml +1 -1
  11. {ngpt-3.12.2 → ngpt-4.0.0}/uv.lock +1 -1
  12. {ngpt-3.12.2 → ngpt-4.0.0}/.github/banner.svg +0 -0
  13. {ngpt-3.12.2 → ngpt-4.0.0}/.github/workflows/aur-publish.yml +0 -0
  14. {ngpt-3.12.2 → ngpt-4.0.0}/.github/workflows/python-publish.yml +0 -0
  15. {ngpt-3.12.2 → ngpt-4.0.0}/.github/workflows/repo-mirror.yml +0 -0
  16. {ngpt-3.12.2 → ngpt-4.0.0}/.gitignore +0 -0
  17. {ngpt-3.12.2 → ngpt-4.0.0}/.python-version +0 -0
  18. {ngpt-3.12.2 → ngpt-4.0.0}/COMMIT_GUIDELINES.md +0 -0
  19. {ngpt-3.12.2 → ngpt-4.0.0}/CONTRIBUTING.md +0 -0
  20. {ngpt-3.12.2 → ngpt-4.0.0}/LICENSE +0 -0
  21. {ngpt-3.12.2 → ngpt-4.0.0}/PKGBUILD +0 -0
  22. {ngpt-3.12.2 → ngpt-4.0.0}/docs/CONTRIBUTING.md +0 -0
  23. {ngpt-3.12.2 → ngpt-4.0.0}/docs/LICENSE.md +0 -0
  24. {ngpt-3.12.2 → ngpt-4.0.0}/docs/_config.yml +0 -0
  25. {ngpt-3.12.2 → ngpt-4.0.0}/docs/_sass/custom/custom.scss +0 -0
  26. {ngpt-3.12.2 → ngpt-4.0.0}/docs/examples/advanced.md +0 -0
  27. {ngpt-3.12.2 → ngpt-4.0.0}/docs/examples/basic.md +0 -0
  28. {ngpt-3.12.2 → ngpt-4.0.0}/docs/examples/role_gallery.md +0 -0
  29. {ngpt-3.12.2 → ngpt-4.0.0}/docs/examples/specialized_tools.md +0 -0
  30. {ngpt-3.12.2 → ngpt-4.0.0}/docs/examples.md +0 -0
  31. {ngpt-3.12.2 → ngpt-4.0.0}/docs/index.md +0 -0
  32. {ngpt-3.12.2 → ngpt-4.0.0}/docs/installation.md +0 -0
  33. {ngpt-3.12.2 → ngpt-4.0.0}/docs/overview.md +0 -0
  34. {ngpt-3.12.2 → ngpt-4.0.0}/docs/usage/gitcommsg.md +0 -0
  35. {ngpt-3.12.2 → ngpt-4.0.0}/docs/usage/roles.md +0 -0
  36. {ngpt-3.12.2 → ngpt-4.0.0}/docs/usage/web_search.md +0 -0
  37. {ngpt-3.12.2 → ngpt-4.0.0}/docs/usage.md +0 -0
  38. {ngpt-3.12.2 → ngpt-4.0.0}/ngpt/__init__.py +0 -0
  39. {ngpt-3.12.2 → ngpt-4.0.0}/ngpt/__main__.py +0 -0
  40. {ngpt-3.12.2 → ngpt-4.0.0}/ngpt/cli/__init__.py +0 -0
  41. {ngpt-3.12.2 → ngpt-4.0.0}/ngpt/cli/config_manager.py +0 -0
  42. {ngpt-3.12.2 → ngpt-4.0.0}/ngpt/cli/formatters.py +0 -0
  43. {ngpt-3.12.2 → ngpt-4.0.0}/ngpt/cli/modes/__init__.py +0 -0
  44. {ngpt-3.12.2 → ngpt-4.0.0}/ngpt/cli/modes/chat.py +0 -0
  45. {ngpt-3.12.2 → ngpt-4.0.0}/ngpt/cli/modes/code.py +0 -0
  46. {ngpt-3.12.2 → ngpt-4.0.0}/ngpt/cli/modes/gitcommsg.py +0 -0
  47. {ngpt-3.12.2 → ngpt-4.0.0}/ngpt/cli/modes/rewrite.py +0 -0
  48. {ngpt-3.12.2 → ngpt-4.0.0}/ngpt/cli/modes/shell.py +0 -0
  49. {ngpt-3.12.2 → ngpt-4.0.0}/ngpt/cli/modes/text.py +0 -0
  50. {ngpt-3.12.2 → ngpt-4.0.0}/ngpt/cli/renderers.py +0 -0
  51. {ngpt-3.12.2 → ngpt-4.0.0}/ngpt/cli/roles.py +0 -0
  52. {ngpt-3.12.2 → ngpt-4.0.0}/ngpt/cli/ui.py +0 -0
  53. {ngpt-3.12.2 → ngpt-4.0.0}/ngpt/client.py +0 -0
  54. {ngpt-3.12.2 → ngpt-4.0.0}/ngpt/utils/__init__.py +0 -0
  55. {ngpt-3.12.2 → ngpt-4.0.0}/ngpt/utils/config.py +0 -0
  56. {ngpt-3.12.2 → ngpt-4.0.0}/ngpt/utils/log.py +0 -0
  57. {ngpt-3.12.2 → ngpt-4.0.0}/ngpt/utils/pipe.py +0 -0
  58. {ngpt-3.12.2 → ngpt-4.0.0}/ngpt/utils/web_search.py +0 -0
  59. {ngpt-3.12.2 → ngpt-4.0.0}/previews/icon.png +0 -0
  60. {ngpt-3.12.2 → ngpt-4.0.0}/previews/ngpt-g.png +0 -0
  61. {ngpt-3.12.2 → ngpt-4.0.0}/previews/ngpt-i.png +0 -0
  62. {ngpt-3.12.2 → ngpt-4.0.0}/previews/ngpt-s-c.png +0 -0
  63. {ngpt-3.12.2 → ngpt-4.0.0}/previews/ngpt-sh-c-a.png +0 -0
  64. {ngpt-3.12.2 → ngpt-4.0.0}/previews/ngpt-w-self.png +0 -0
  65. {ngpt-3.12.2 → ngpt-4.0.0}/previews/ngpt-w.png +0 -0
  66. {ngpt-3.12.2 → ngpt-4.0.0}/previews/social-preview.png +0 -0
  67. {ngpt-3.12.2 → ngpt-4.0.0}/wiki.md +0 -0
{ngpt-3.12.2 → ngpt-4.0.0}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ngpt
- Version: 3.12.2
+ Version: 4.0.0
  Summary: A Swiss army knife for LLMs: A fast, lightweight CLI and interactive chat tool that brings the power of any OpenAI-compatible LLM (OpenAI, Ollama, Groq, Claude, Gemini, etc.) straight to your terminal. rewrite texts or refine code, craft git commit messages, generate and run OS-aware shell commands.
  Project-URL: Homepage, https://github.com/nazdridoy/ngpt
  Project-URL: Repository, https://github.com/nazdridoy/ngpt
@@ -281,44 +281,21 @@ For more examples and detailed usage, visit the [CLI Usage Guide](https://nazdri
  ```console
  ❯ ngpt -h

- usage: ngpt [-h] [-v] [--config [CONFIG]] [--config-index CONFIG_INDEX] [--provider PROVIDER] [--remove] [--show-config]
- [--all] [--list-models] [--list-renderers] [--cli-config [COMMAND ...]] [--role-config [ACTION ...]]
- [--api-key API_KEY] [--base-url BASE_URL] [--model MODEL] [--web-search] [--pipe]
+ usage: ngpt [-h] [-v] [--api-key API_KEY] [--base-url BASE_URL] [--model MODEL] [--web-search] [--pipe]
  [--temperature TEMPERATURE] [--top_p TOP_P] [--max_tokens MAX_TOKENS] [--log [FILE]]
- [--preprompt PREPROMPT | --role ROLE] [--no-stream | --prettify | --stream-prettify]
+ [--preprompt PREPROMPT | --role ROLE] [--config [CONFIG]] [--config-index CONFIG_INDEX]
+ [--provider PROVIDER] [--remove] [--show-config] [--all] [--list-models] [--list-renderers]
+ [--cli-config [COMMAND ...]] [--role-config [ACTION ...]] [--no-stream | --prettify | --stream-prettify]
  [--renderer {auto,rich,glow}] [--language LANGUAGE] [--rec-chunk] [--diff [FILE]] [--chunk-size CHUNK_SIZE]
  [--analyses-chunk-size ANALYSES_CHUNK_SIZE] [--max-msg-lines MAX_MSG_LINES]
- [--max-recursion-depth MAX_RECURSION_DEPTH] [--humanize] [--multiline] [-i | -s | -c | -t | -r | -g]
- [prompt]
+ [--max-recursion-depth MAX_RECURSION_DEPTH] [--humanize] [-i | -s | -c | -t | -r | -g]

  nGPT - Interact with AI language models via OpenAI-compatible APIs

- positional arguments::
-
- [PROMPT] The prompt to send
-
- options::
+ Global Options::

  -h, --help show this help message and exit
  -v, --version Show version information and exit
-
- Configuration Options::
-
- --config [CONFIG] Path to a custom config file or, if no value provided, enter interactive
- configuration mode to create a new config
- --config-index CONFIG_INDEX Index of the configuration to use or edit (default: 0)
- --provider PROVIDER Provider name to identify the configuration to use
- --remove Remove the configuration at the specified index (requires --config and
- --config-index or --provider)
- --show-config Show the current configuration(s) and exit
- --all Show details for all configurations (requires --show-config)
- --list-models List all available models for the current configuration and exit
- --list-renderers Show available markdown renderers for use with --prettify
- --cli-config [COMMAND ...] Manage CLI configuration (set, get, unset, list, help)
- --role-config [ACTION ...] Manage custom roles (help, create, show, edit, list, remove) [role_name]
-
- Global Options::
-
  --api-key API_KEY API key for the service
  --base-url BASE_URL Base URL for the API
  --model MODEL Model to use
@@ -338,6 +315,21 @@ Global Options::
  --renderer {auto,rich,glow} Select which markdown renderer to use with --prettify or --stream-prettify
  (auto, rich, or glow)

+ Configuration Options::
+
+ --config [CONFIG] Path to a custom config file or, if no value provided, enter interactive
+ configuration mode to create a new config
+ --config-index CONFIG_INDEX Index of the configuration to use or edit (default: 0)
+ --provider PROVIDER Provider name to identify the configuration to use
+ --remove Remove the configuration at the specified index (requires --config and
+ --config-index or --provider)
+ --show-config Show the current configuration(s) and exit
+ --all Show details for all configurations (requires --show-config)
+ --list-models List all available models for the current configuration and exit
+ --list-renderers Show available markdown renderers for use with --prettify
+ --cli-config [COMMAND ...] Manage CLI configuration (set, get, unset, list, help)
+ --role-config [ACTION ...] Manage custom roles (help, create, show, edit, list, remove) [role_name]
+
  Output Display Options (mutually exclusive)::

  --no-stream Return the whole response without streaming or formatting
@@ -365,10 +357,6 @@ Rewrite Mode Options::
  --humanize Transform AI-generated text into human-like content that passes AI detection
  tools

- Interactive Mode Options::
-
- --multiline Enable multiline text input with the "ml" command in interactive mode
-
  Modes (mutually exclusive)::

  -i, --interactive Start an interactive chat session
@@ -475,9 +463,6 @@ CLI Configuration Help:
  Code mode options (-c/--code):
  language - Type: str (default: python)

- Interactive mode options (-i/--interactive):
- interactive-multiline - Type: bool (default: False)
-
  Git commit message options (-g/--gitcommsg):
  analyses-chunk-size - Type: int (default: 200)
  chunk-size - Type: int (default: 200)
{ngpt-3.12.2 → ngpt-4.0.0}/README.md
@@ -244,44 +244,21 @@ For more examples and detailed usage, visit the [CLI Usage Guide](https://nazdri
  ```console
  ❯ ngpt -h

- usage: ngpt [-h] [-v] [--config [CONFIG]] [--config-index CONFIG_INDEX] [--provider PROVIDER] [--remove] [--show-config]
- [--all] [--list-models] [--list-renderers] [--cli-config [COMMAND ...]] [--role-config [ACTION ...]]
- [--api-key API_KEY] [--base-url BASE_URL] [--model MODEL] [--web-search] [--pipe]
+ usage: ngpt [-h] [-v] [--api-key API_KEY] [--base-url BASE_URL] [--model MODEL] [--web-search] [--pipe]
  [--temperature TEMPERATURE] [--top_p TOP_P] [--max_tokens MAX_TOKENS] [--log [FILE]]
- [--preprompt PREPROMPT | --role ROLE] [--no-stream | --prettify | --stream-prettify]
+ [--preprompt PREPROMPT | --role ROLE] [--config [CONFIG]] [--config-index CONFIG_INDEX]
+ [--provider PROVIDER] [--remove] [--show-config] [--all] [--list-models] [--list-renderers]
+ [--cli-config [COMMAND ...]] [--role-config [ACTION ...]] [--no-stream | --prettify | --stream-prettify]
  [--renderer {auto,rich,glow}] [--language LANGUAGE] [--rec-chunk] [--diff [FILE]] [--chunk-size CHUNK_SIZE]
  [--analyses-chunk-size ANALYSES_CHUNK_SIZE] [--max-msg-lines MAX_MSG_LINES]
- [--max-recursion-depth MAX_RECURSION_DEPTH] [--humanize] [--multiline] [-i | -s | -c | -t | -r | -g]
- [prompt]
+ [--max-recursion-depth MAX_RECURSION_DEPTH] [--humanize] [-i | -s | -c | -t | -r | -g]

  nGPT - Interact with AI language models via OpenAI-compatible APIs

- positional arguments::
-
- [PROMPT] The prompt to send
-
- options::
+ Global Options::

  -h, --help show this help message and exit
  -v, --version Show version information and exit
-
- Configuration Options::
-
- --config [CONFIG] Path to a custom config file or, if no value provided, enter interactive
- configuration mode to create a new config
- --config-index CONFIG_INDEX Index of the configuration to use or edit (default: 0)
- --provider PROVIDER Provider name to identify the configuration to use
- --remove Remove the configuration at the specified index (requires --config and
- --config-index or --provider)
- --show-config Show the current configuration(s) and exit
- --all Show details for all configurations (requires --show-config)
- --list-models List all available models for the current configuration and exit
- --list-renderers Show available markdown renderers for use with --prettify
- --cli-config [COMMAND ...] Manage CLI configuration (set, get, unset, list, help)
- --role-config [ACTION ...] Manage custom roles (help, create, show, edit, list, remove) [role_name]
-
- Global Options::
-
  --api-key API_KEY API key for the service
  --base-url BASE_URL Base URL for the API
  --model MODEL Model to use
@@ -301,6 +278,21 @@ Global Options::
  --renderer {auto,rich,glow} Select which markdown renderer to use with --prettify or --stream-prettify
  (auto, rich, or glow)

+ Configuration Options::
+
+ --config [CONFIG] Path to a custom config file or, if no value provided, enter interactive
+ configuration mode to create a new config
+ --config-index CONFIG_INDEX Index of the configuration to use or edit (default: 0)
+ --provider PROVIDER Provider name to identify the configuration to use
+ --remove Remove the configuration at the specified index (requires --config and
+ --config-index or --provider)
+ --show-config Show the current configuration(s) and exit
+ --all Show details for all configurations (requires --show-config)
+ --list-models List all available models for the current configuration and exit
+ --list-renderers Show available markdown renderers for use with --prettify
+ --cli-config [COMMAND ...] Manage CLI configuration (set, get, unset, list, help)
+ --role-config [ACTION ...] Manage custom roles (help, create, show, edit, list, remove) [role_name]
+
  Output Display Options (mutually exclusive)::

  --no-stream Return the whole response without streaming or formatting
@@ -328,10 +320,6 @@ Rewrite Mode Options::
  --humanize Transform AI-generated text into human-like content that passes AI detection
  tools

- Interactive Mode Options::
-
- --multiline Enable multiline text input with the "ml" command in interactive mode
-
  Modes (mutually exclusive)::

  -i, --interactive Start an interactive chat session
@@ -438,9 +426,6 @@ CLI Configuration Help:
  Code mode options (-c/--code):
  language - Type: str (default: python)

- Interactive mode options (-i/--interactive):
- interactive-multiline - Type: bool (default: False)
-
  Git commit message options (-g/--gitcommsg):
  analyses-chunk-size - Type: int (default: 200)
  chunk-size - Type: int (default: 200)
{ngpt-3.12.2 → ngpt-4.0.0}/docs/configuration.md
@@ -146,46 +146,21 @@ You can set configuration options directly via command-line arguments:


  ```console
- ngpt -h
-
- usage: ngpt [-h] [-v] [--config [CONFIG]] [--config-index CONFIG_INDEX] [--provider PROVIDER] [--remove] [--show-config]
- [--all] [--list-models] [--list-renderers] [--cli-config [COMMAND ...]] [--role-config [ACTION ...]]
- [--api-key API_KEY] [--base-url BASE_URL] [--model MODEL] [--web-search] [--pipe]
+ usage: ngpt [-h] [-v] [--api-key API_KEY] [--base-url BASE_URL] [--model MODEL] [--web-search] [--pipe]
  [--temperature TEMPERATURE] [--top_p TOP_P] [--max_tokens MAX_TOKENS] [--log [FILE]]
- [--preprompt PREPROMPT | --role ROLE] [--no-stream | --prettify | --stream-prettify]
+ [--preprompt PREPROMPT | --role ROLE] [--config [CONFIG]] [--config-index CONFIG_INDEX]
+ [--provider PROVIDER] [--remove] [--show-config] [--all] [--list-models] [--list-renderers]
+ [--cli-config [COMMAND ...]] [--role-config [ACTION ...]] [--no-stream | --prettify | --stream-prettify]
  [--renderer {auto,rich,glow}] [--language LANGUAGE] [--rec-chunk] [--diff [FILE]] [--chunk-size CHUNK_SIZE]
  [--analyses-chunk-size ANALYSES_CHUNK_SIZE] [--max-msg-lines MAX_MSG_LINES]
- [--max-recursion-depth MAX_RECURSION_DEPTH] [--humanize] [--multiline] [-i | -s | -c | -t | -r | -g]
- [prompt]
+ [--max-recursion-depth MAX_RECURSION_DEPTH] [--humanize] [-i | -s | -c | -t | -r | -g]

  nGPT - Interact with AI language models via OpenAI-compatible APIs

- positional arguments::
-
- [PROMPT] The prompt to send
-
- options::
+ Global Options::

  -h, --help show this help message and exit
  -v, --version Show version information and exit
-
- Configuration Options::
-
- --config [CONFIG] Path to a custom config file or, if no value provided, enter interactive
- configuration mode to create a new config
- --config-index CONFIG_INDEX Index of the configuration to use or edit (default: 0)
- --provider PROVIDER Provider name to identify the configuration to use
- --remove Remove the configuration at the specified index (requires --config and
- --config-index or --provider)
- --show-config Show the current configuration(s) and exit
- --all Show details for all configurations (requires --show-config)
- --list-models List all available models for the current configuration and exit
- --list-renderers Show available markdown renderers for use with --prettify
- --cli-config [COMMAND ...] Manage CLI configuration (set, get, unset, list, help)
- --role-config [ACTION ...] Manage custom roles (help, create, show, edit, list, remove) [role_name]
-
- Global Options::
-
  --api-key API_KEY API key for the service
  --base-url BASE_URL Base URL for the API
  --model MODEL Model to use
@@ -205,6 +180,21 @@ Global Options::
  --renderer {auto,rich,glow} Select which markdown renderer to use with --prettify or --stream-prettify
  (auto, rich, or glow)

+ Configuration Options::
+
+ --config [CONFIG] Path to a custom config file or, if no value provided, enter interactive
+ configuration mode to create a new config
+ --config-index CONFIG_INDEX Index of the configuration to use or edit (default: 0)
+ --provider PROVIDER Provider name to identify the configuration to use
+ --remove Remove the configuration at the specified index (requires --config and
+ --config-index or --provider)
+ --show-config Show the current configuration(s) and exit
+ --all Show details for all configurations (requires --show-config)
+ --list-models List all available models for the current configuration and exit
+ --list-renderers Show available markdown renderers for use with --prettify
+ --cli-config [COMMAND ...] Manage CLI configuration (set, get, unset, list, help)
+ --role-config [ACTION ...] Manage custom roles (help, create, show, edit, list, remove) [role_name]
+
  Output Display Options (mutually exclusive)::

  --no-stream Return the whole response without streaming or formatting
@@ -232,10 +222,6 @@ Rewrite Mode Options::
  --humanize Transform AI-generated text into human-like content that passes AI detection
  tools

- Interactive Mode Options::
-
- --multiline Enable multiline text input with the "ml" command in interactive mode
-
  Modes (mutually exclusive)::

  -i, --interactive Start an interactive chat session
{ngpt-3.12.2 → ngpt-4.0.0}/docs/usage/cli_config.md
@@ -171,6 +171,12 @@ The CLI configuration system enforces these rules to prevent incompatible combin
  |--------|------|---------|-------------|
  | `language` | string | python | Programming language for code generation |

+ #### Interactive Mode
+
+ | Option | Type | Default | Description |
+ |--------|------|---------|-------------|
+ | `interactive-multiline` | bool | false | Enable multiline text input with the "ml" command in interactive mode |
+
  #### Git Commit Message Mode

  | Option | Type | Default | Description |
@@ -197,6 +203,16 @@ ngpt --cli-config set prettify true
  ngpt --cli-config set temperature 0.3
  ```

+ ### Setting Up for Interactive Chat
+
+ ```bash
+ # Enable multiline input in interactive mode by default
+ ngpt --cli-config set interactive-multiline true
+
+ # Set a custom system prompt for interactive sessions
+ ngpt --cli-config set preprompt "You are a helpful coding assistant specializing in Python"
+ ```
+
  ### Setting Up a Creative Writing Environment

  ```bash
{ngpt-3.12.2 → ngpt-4.0.0}/docs/usage/cli_usage.md
@@ -39,44 +39,21 @@ You can set configuration options directly via command-line arguments:
  ```console
  ❯ ngpt -h

- usage: ngpt [-h] [-v] [--config [CONFIG]] [--config-index CONFIG_INDEX] [--provider PROVIDER] [--remove] [--show-config]
- [--all] [--list-models] [--list-renderers] [--cli-config [COMMAND ...]] [--role-config [ACTION ...]]
- [--api-key API_KEY] [--base-url BASE_URL] [--model MODEL] [--web-search] [--pipe]
+ usage: ngpt [-h] [-v] [--api-key API_KEY] [--base-url BASE_URL] [--model MODEL] [--web-search] [--pipe]
  [--temperature TEMPERATURE] [--top_p TOP_P] [--max_tokens MAX_TOKENS] [--log [FILE]]
- [--preprompt PREPROMPT | --role ROLE] [--no-stream | --prettify | --stream-prettify]
+ [--preprompt PREPROMPT | --role ROLE] [--config [CONFIG]] [--config-index CONFIG_INDEX]
+ [--provider PROVIDER] [--remove] [--show-config] [--all] [--list-models] [--list-renderers]
+ [--cli-config [COMMAND ...]] [--role-config [ACTION ...]] [--no-stream | --prettify | --stream-prettify]
  [--renderer {auto,rich,glow}] [--language LANGUAGE] [--rec-chunk] [--diff [FILE]] [--chunk-size CHUNK_SIZE]
  [--analyses-chunk-size ANALYSES_CHUNK_SIZE] [--max-msg-lines MAX_MSG_LINES]
- [--max-recursion-depth MAX_RECURSION_DEPTH] [--humanize] [--multiline] [-i | -s | -c | -t | -r | -g]
- [prompt]
+ [--max-recursion-depth MAX_RECURSION_DEPTH] [--humanize] [-i | -s | -c | -t | -r | -g]

  nGPT - Interact with AI language models via OpenAI-compatible APIs

- positional arguments::
-
- [PROMPT] The prompt to send
-
- options::
+ Global Options::

  -h, --help show this help message and exit
  -v, --version Show version information and exit
-
- Configuration Options::
-
- --config [CONFIG] Path to a custom config file or, if no value provided, enter interactive
- configuration mode to create a new config
- --config-index CONFIG_INDEX Index of the configuration to use or edit (default: 0)
- --provider PROVIDER Provider name to identify the configuration to use
- --remove Remove the configuration at the specified index (requires --config and
- --config-index or --provider)
- --show-config Show the current configuration(s) and exit
- --all Show details for all configurations (requires --show-config)
- --list-models List all available models for the current configuration and exit
- --list-renderers Show available markdown renderers for use with --prettify
- --cli-config [COMMAND ...] Manage CLI configuration (set, get, unset, list, help)
- --role-config [ACTION ...] Manage custom roles (help, create, show, edit, list, remove) [role_name]
-
- Global Options::
-
  --api-key API_KEY API key for the service
  --base-url BASE_URL Base URL for the API
  --model MODEL Model to use
@@ -96,6 +73,21 @@ Global Options::
  --renderer {auto,rich,glow} Select which markdown renderer to use with --prettify or --stream-prettify
  (auto, rich, or glow)

+ Configuration Options::
+
+ --config [CONFIG] Path to a custom config file or, if no value provided, enter interactive
+ configuration mode to create a new config
+ --config-index CONFIG_INDEX Index of the configuration to use or edit (default: 0)
+ --provider PROVIDER Provider name to identify the configuration to use
+ --remove Remove the configuration at the specified index (requires --config and
+ --config-index or --provider)
+ --show-config Show the current configuration(s) and exit
+ --all Show details for all configurations (requires --show-config)
+ --list-models List all available models for the current configuration and exit
+ --list-renderers Show available markdown renderers for use with --prettify
+ --cli-config [COMMAND ...] Manage CLI configuration (set, get, unset, list, help)
+ --role-config [ACTION ...] Manage custom roles (help, create, show, edit, list, remove) [role_name]
+
  Output Display Options (mutually exclusive)::

  --no-stream Return the whole response without streaming or formatting
@@ -123,10 +115,6 @@ Rewrite Mode Options::
  --humanize Transform AI-generated text into human-like content that passes AI detection
  tools

- Interactive Mode Options::
-
- --multiline Enable multiline text input with the "ml" command in interactive mode
-
  Modes (mutually exclusive)::

  -i, --interactive Start an interactive chat session
@@ -195,6 +183,40 @@ ngpt --code --log "function to calculate prime numbers"
  ngpt --shell --log "find large files in current directory"
  ```

+ #### Multiline Text Input in Interactive Mode
+
+ Enable multiline input in interactive chat mode:
+
+ ```bash
+ ngpt -i
+ ```
+
+ In interective mode you can:
+ - Use the "ml" command to enter multiline text mode
+ - Type or paste complex, multi-paragraph prompts
+ - Press Ctrl+D (or Ctrl+Z on Windows) to submit the multiline input
+ - Exit multiline mode anytime by typing ".exit" on a new line
+
+ This is especially useful when:
+ - Providing code samples for the AI to analyze
+ - Entering complex contexts or scenarios
+ - Pasting error logs or output for debugging help
+ - Composing detailed questions with multiple parts
+
+ Example usage:
+ ```
+ > ml
+ (multiline mode - press Ctrl+D to submit)
+ Here's the error I'm getting:
+
+ TypeError: cannot convert 'NoneType' object to int
+ File "app.py", line 45, in process_data
+ result = data['count'] + 5
+
+ Can you help me understand what's wrong?
+ ^D
+ ```
+
  #### Combining with Other Options

  Interactive mode can be combined with other options for enhanced functionality:
{ngpt-3.12.2 → ngpt-4.0.0}/ngpt/cli/args.py
@@ -12,7 +12,8 @@ def setup_argument_parser():
  # Minimalist, clean epilog design
  epilog = f"\n{COLORS['yellow']}nGPT {COLORS['bold']}v{__version__}{COLORS['reset']} • {COLORS['green']}Docs: {COLORS['bold']}https://nazdridoy.github.io/ngpt/usage/cli_usage{COLORS['reset']}"

- parser = argparse.ArgumentParser(description=description, formatter_class=ColoredHelpFormatter, epilog=epilog)
+ parser = argparse.ArgumentParser(description=description, formatter_class=ColoredHelpFormatter,
+ epilog=epilog, add_help=False)

  # Add custom error method with color
  original_error = parser.error
@@ -27,41 +28,16 @@ def setup_argument_parser():
  print(f"{COLORS['green']}{COLORS['bold']}nGPT{COLORS['reset']} version {COLORS['yellow']}{__version__}{COLORS['reset']}")
  parser.exit()

- # Version flag
- parser.add_argument('-v', '--version', action=ColoredVersionAction, nargs=0,
- help='Show version information and exit')
-
- # Prompt argument
- parser.add_argument('prompt', nargs='?', default=None,
- help='The prompt to send')
-
- # Config options
- config_group = parser.add_argument_group('Configuration Options')
- config_group.add_argument('--config', nargs='?', const=True,
- help='Path to a custom config file or, if no value provided, enter interactive configuration mode to create a new config')
- config_group.add_argument('--config-index', type=int, default=0,
- help='Index of the configuration to use or edit (default: 0)')
- config_group.add_argument('--provider',
- help='Provider name to identify the configuration to use')
- config_group.add_argument('--remove', action='store_true',
- help='Remove the configuration at the specified index (requires --config and --config-index or --provider)')
- config_group.add_argument('--show-config', action='store_true',
- help='Show the current configuration(s) and exit')
- config_group.add_argument('--all', action='store_true',
- help='Show details for all configurations (requires --show-config)')
- config_group.add_argument('--list-models', action='store_true',
- help='List all available models for the current configuration and exit')
- config_group.add_argument('--list-renderers', action='store_true',
- help='Show available markdown renderers for use with --prettify')
- config_group.add_argument('--cli-config', nargs='*', metavar='COMMAND',
- help='Manage CLI configuration (set, get, unset, list, help)')
-
- # Role configuration options
- config_group.add_argument('--role-config', nargs='*', metavar='ACTION',
- help='Manage custom roles (help, create, show, edit, list, remove) [role_name]')
-
  # Global options
  global_group = parser.add_argument_group('Global Options')
+
+ # Add help and version to the global group
+ global_group.add_argument('-h', '--help', action='help',
+ help='show this help message and exit')
+ global_group.add_argument('-v', '--version', action=ColoredVersionAction, nargs=0,
+ help='Show version information and exit')
+
+ # Then add the other global options
  global_group.add_argument('--api-key',
  help='API key for the service')
  global_group.add_argument('--base-url',
@@ -88,6 +64,29 @@ def setup_argument_parser():
  prompt_exclusive_group.add_argument('--role',
  help='Use a predefined role to set system prompt (mutually exclusive with --preprompt)')

+ # Config options
+ config_group = parser.add_argument_group('Configuration Options')
+ config_group.add_argument('--config', nargs='?', const=True,
+ help='Path to a custom config file or, if no value provided, enter interactive configuration mode to create a new config')
+ config_group.add_argument('--config-index', type=int, default=0,
+ help='Index of the configuration to use or edit (default: 0)')
+ config_group.add_argument('--provider',
+ help='Provider name to identify the configuration to use')
+ config_group.add_argument('--remove', action='store_true',
+ help='Remove the configuration at the specified index (requires --config and --config-index or --provider)')
+ config_group.add_argument('--show-config', action='store_true',
+ help='Show the current configuration(s) and exit')
+ config_group.add_argument('--all', action='store_true',
+ help='Show details for all configurations (requires --show-config)')
+ config_group.add_argument('--list-models', action='store_true',
+ help='List all available models for the current configuration and exit')
+ config_group.add_argument('--list-renderers', action='store_true',
+ help='Show available markdown renderers for use with --prettify')
+ config_group.add_argument('--cli-config', nargs='*', metavar='COMMAND',
+ help='Manage CLI configuration (set, get, unset, list, help)')
+ config_group.add_argument('--role-config', nargs='*', metavar='ACTION',
+ help='Manage custom roles (help, create, show, edit, list, remove) [role_name]')
+
  # Output display options (mutually exclusive group)
  output_group = parser.add_argument_group('Output Display Options (mutually exclusive)')
  output_exclusive_group = output_group.add_mutually_exclusive_group()
@@ -128,8 +127,6 @@ def setup_argument_parser():

  # Interactive mode options
  interactive_group = parser.add_argument_group('Interactive Mode Options')
- interactive_group.add_argument('--multiline', action='store_true',
- help='Enable multiline text input with the "ml" command in interactive mode')

  # Mode flags (mutually exclusive)
  mode_group = parser.add_argument_group('Modes (mutually exclusive)')
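The reordered --help output seen in the documentation diffs above follows directly from these two changes: the parser is created with add_help=False, and -h/--help plus -v/--version are re-registered inside the "Global Options" group, while the "Configuration Options" group is now added after the prompt options. Since argparse renders argument groups in the order they are created, the group order in the source dictates the help layout. A minimal standalone sketch of the pattern (the names below are illustrative, not ngpt's actual code):

```python
import argparse

# Suppress argparse's built-in -h/--help so it can be re-registered inside a named group.
parser = argparse.ArgumentParser(prog="demo", add_help=False)

# Groups are printed in --help in the order they are added to the parser.
global_group = parser.add_argument_group("Global Options")
global_group.add_argument("-h", "--help", action="help",
                          help="show this help message and exit")
global_group.add_argument("--api-key", help="API key for the service")

config_group = parser.add_argument_group("Configuration Options")
config_group.add_argument("--show-config", action="store_true",
                          help="Show the current configuration(s) and exit")

if __name__ == "__main__":
    parser.parse_args()
```

Running the sketch with --help lists "Global Options" before "Configuration Options", mirroring the new ngpt help layout.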
{ngpt-3.12.2 → ngpt-4.0.0}/ngpt/cli/main.py
@@ -578,8 +578,7 @@ def main():
  prettify=args.prettify,
  renderer=args.renderer,
  stream_prettify=args.stream_prettify,
- logger=logger,
- multiline_enabled=args.multiline
+ logger=logger
  )
  elif args.shell:
  # Apply CLI config for shell mode
{ngpt-3.12.2 → ngpt-4.0.0}/ngpt/cli/modes/interactive.py
@@ -20,7 +20,7 @@ try:
  except ImportError:
  HAS_PROMPT_TOOLKIT = False

- def interactive_chat_session(client, web_search=False, no_stream=False, temperature=0.7, top_p=1.0, max_tokens=None, preprompt=None, prettify=False, renderer='auto', stream_prettify=False, logger=None, multiline_enabled=False):
+ def interactive_chat_session(client, web_search=False, no_stream=False, temperature=0.7, top_p=1.0, max_tokens=None, preprompt=None, prettify=False, renderer='auto', stream_prettify=False, logger=None, multiline_enabled=True):
  """Start an interactive chat session with the AI.

  Args:
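With multiline_enabled now defaulting to True (and no longer passed from main()), the "ml" command documented in cli_usage.md is always available in interactive sessions rather than gated behind --multiline. ngpt's actual input handling builds on prompt_toolkit, as the import guard above suggests; purely as a rough, dependency-free illustration of the documented behaviour (collect lines until Ctrl+D / Ctrl+Z submits, ".exit" leaves multiline mode), a helper could look like the sketch below. read_multiline is a hypothetical function, not ngpt code:

```python
import sys

def read_multiline() -> str:
    """Collect a multi-line prompt until EOF (Ctrl+D on Unix, Ctrl+Z then Enter on Windows)."""
    print("(multiline mode - press Ctrl+D to submit)")
    lines = []
    for line in sys.stdin:                 # iteration ends when the stream reaches EOF
        if line.rstrip("\n") == ".exit":   # assumed cancel behaviour for the documented ".exit" command
            return ""
        lines.append(line)
    return "".join(lines)

if __name__ == "__main__":
    text = read_multiline()
    print(f"Submitted {len(text.splitlines())} line(s)")
```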
{ngpt-3.12.2 → ngpt-4.0.0}/ngpt/utils/cli_config.py
@@ -19,8 +19,6 @@ CLI_CONFIG_OPTIONS = {
  "renderer": {"type": "str", "default": "auto", "context": ["all"]},
  "config-index": {"type": "int", "default": 0, "context": ["all"], "exclusive": ["provider"]},
  "web-search": {"type": "bool", "default": False, "context": ["all"]},
- # Interactive mode options
- "interactive-multiline": {"type": "bool", "default": False, "context": ["interactive"]},
  # GitCommit message options
  "rec-chunk": {"type": "bool", "default": False, "context": ["gitcommsg"]},
  "diff": {"type": "str", "default": None, "context": ["gitcommsg"]},
@@ -245,10 +243,6 @@ def apply_cli_config(args: Any, mode: str) -> Any:
  # Convert dashes to underscores for argparse compatibility
  arg_name = option.replace("-", "_")

- # Special case for interactive-multiline which maps to multiline argument
- if option == "interactive-multiline":
- arg_name = "multiline"
-
  # Skip if explicitly set via command line
  cli_option = f"--{option}"
  if cli_option in explicit_args:
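The deleted special case was only needed while the stored interactive-multiline key had to land on the --multiline argparse attribute; with that flag gone, every remaining CLI-config key follows the generic dash-to-underscore mapping visible in the surrounding context, and explicit command-line flags still take priority. A simplified sketch of that mapping rule (a standalone illustration under those assumptions, not the real apply_cli_config body):

```python
import argparse

def apply_stored_option(args, option: str, value, explicit_args: set) -> None:
    """Apply one stored CLI-config value unless the same option was given on the command line."""
    arg_name = option.replace("-", "_")    # e.g. "config-index" -> "config_index"
    if f"--{option}" in explicit_args:     # options typed explicitly on the CLI win over stored config
        return
    if hasattr(args, arg_name):
        setattr(args, arg_name, value)

# Example: the stored value is applied because --config-index was not passed explicitly.
ns = argparse.Namespace(config_index=0, provider=None)
apply_stored_option(ns, "config-index", 2, explicit_args=set())
print(ns.config_index)  # 2
```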
{ngpt-3.12.2 → ngpt-4.0.0}/pyproject.toml
@@ -1,6 +1,6 @@
  [project]
  name = "ngpt"
- version = "3.12.2"
+ version = "4.0.0"
  description = "A Swiss army knife for LLMs: A fast, lightweight CLI and interactive chat tool that brings the power of any OpenAI-compatible LLM (OpenAI, Ollama, Groq, Claude, Gemini, etc.) straight to your terminal. rewrite texts or refine code, craft git commit messages, generate and run OS-aware shell commands."
  authors = [
  {name = "nazDridoy", email = "nazdridoy399@gmail.com"},
{ngpt-3.12.2 → ngpt-4.0.0}/uv.lock
@@ -148,7 +148,7 @@ wheels = [

  [[package]]
  name = "ngpt"
- version = "3.12.2"
+ version = "4.0.0"
  source = { editable = "." }
  dependencies = [
  { name = "beautifulsoup4" },