ngpt 2.10.0__tar.gz → 2.11.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. {ngpt-2.10.0 → ngpt-2.11.0}/PKG-INFO +5 -2
  2. {ngpt-2.10.0 → ngpt-2.11.0}/README.md +4 -1
  3. {ngpt-2.10.0 → ngpt-2.11.0}/docs/api/cli.md +2 -2
  4. {ngpt-2.10.0 → ngpt-2.11.0}/docs/configuration.md +5 -2
  5. {ngpt-2.10.0 → ngpt-2.11.0}/docs/examples/advanced.md +7 -1
  6. {ngpt-2.10.0 → ngpt-2.11.0}/docs/overview.md +5 -0
  7. {ngpt-2.10.0 → ngpt-2.11.0}/docs/usage/cli_usage.md +26 -3
  8. {ngpt-2.10.0 → ngpt-2.11.0}/ngpt/cli/args.py +2 -2
  9. {ngpt-2.10.0 → ngpt-2.11.0}/ngpt/cli/interactive.py +18 -33
  10. {ngpt-2.10.0 → ngpt-2.11.0}/ngpt/cli/main.py +58 -9
  11. {ngpt-2.10.0 → ngpt-2.11.0}/ngpt/cli/modes/chat.py +15 -1
  12. {ngpt-2.10.0 → ngpt-2.11.0}/ngpt/cli/modes/code.py +11 -1
  13. {ngpt-2.10.0 → ngpt-2.11.0}/ngpt/cli/modes/shell.py +33 -3
  14. {ngpt-2.10.0 → ngpt-2.11.0}/ngpt/cli/modes/text.py +15 -1
  15. {ngpt-2.10.0 → ngpt-2.11.0}/ngpt/cli_config.py +1 -1
  16. ngpt-2.11.0/ngpt/log.py +180 -0
  17. {ngpt-2.10.0 → ngpt-2.11.0}/pyproject.toml +1 -1
  18. {ngpt-2.10.0 → ngpt-2.11.0}/uv.lock +1 -1
  19. {ngpt-2.10.0 → ngpt-2.11.0}/.github/workflows/python-publish.yml +0 -0
  20. {ngpt-2.10.0 → ngpt-2.11.0}/.gitignore +0 -0
  21. {ngpt-2.10.0 → ngpt-2.11.0}/.python-version +0 -0
  22. {ngpt-2.10.0 → ngpt-2.11.0}/COMMIT_GUIDELINES.md +0 -0
  23. {ngpt-2.10.0 → ngpt-2.11.0}/CONTRIBUTING.md +0 -0
  24. {ngpt-2.10.0 → ngpt-2.11.0}/LICENSE +0 -0
  25. {ngpt-2.10.0 → ngpt-2.11.0}/docs/CONTRIBUTING.md +0 -0
  26. {ngpt-2.10.0 → ngpt-2.11.0}/docs/LICENSE.md +0 -0
  27. {ngpt-2.10.0 → ngpt-2.11.0}/docs/README.md +0 -0
  28. {ngpt-2.10.0 → ngpt-2.11.0}/docs/_config.yml +0 -0
  29. {ngpt-2.10.0 → ngpt-2.11.0}/docs/api/README.md +0 -0
  30. {ngpt-2.10.0 → ngpt-2.11.0}/docs/api/client.md +0 -0
  31. {ngpt-2.10.0 → ngpt-2.11.0}/docs/api/config.md +0 -0
  32. {ngpt-2.10.0 → ngpt-2.11.0}/docs/assets/css/style.scss +0 -0
  33. {ngpt-2.10.0 → ngpt-2.11.0}/docs/examples/README.md +0 -0
  34. {ngpt-2.10.0 → ngpt-2.11.0}/docs/examples/basic.md +0 -0
  35. {ngpt-2.10.0 → ngpt-2.11.0}/docs/examples/cli_components.md +0 -0
  36. {ngpt-2.10.0 → ngpt-2.11.0}/docs/examples/integrations.md +0 -0
  37. {ngpt-2.10.0 → ngpt-2.11.0}/docs/installation.md +0 -0
  38. {ngpt-2.10.0 → ngpt-2.11.0}/docs/usage/README.md +0 -0
  39. {ngpt-2.10.0 → ngpt-2.11.0}/docs/usage/cli_config.md +0 -0
  40. {ngpt-2.10.0 → ngpt-2.11.0}/docs/usage/cli_framework.md +0 -0
  41. {ngpt-2.10.0 → ngpt-2.11.0}/docs/usage/library_usage.md +0 -0
  42. {ngpt-2.10.0 → ngpt-2.11.0}/ngpt/__init__.py +0 -0
  43. {ngpt-2.10.0 → ngpt-2.11.0}/ngpt/cli/__init__.py +0 -0
  44. {ngpt-2.10.0 → ngpt-2.11.0}/ngpt/cli/config_manager.py +0 -0
  45. {ngpt-2.10.0 → ngpt-2.11.0}/ngpt/cli/formatters.py +0 -0
  46. {ngpt-2.10.0 → ngpt-2.11.0}/ngpt/cli/modes/__init__.py +0 -0
  47. {ngpt-2.10.0 → ngpt-2.11.0}/ngpt/cli/renderers.py +0 -0
  48. {ngpt-2.10.0 → ngpt-2.11.0}/ngpt/cli/ui.py +0 -0
  49. {ngpt-2.10.0 → ngpt-2.11.0}/ngpt/cli.py +0 -0
  50. {ngpt-2.10.0 → ngpt-2.11.0}/ngpt/client.py +0 -0
  51. {ngpt-2.10.0 → ngpt-2.11.0}/ngpt/config.py +0 -0
  52. {ngpt-2.10.0 → ngpt-2.11.0}/ngpt/utils/__init__.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: ngpt
3
- Version: 2.10.0
3
+ Version: 2.11.0
4
4
  Summary: A lightweight Python CLI and library for interacting with OpenAI-compatible APIs, supporting both official and self-hosted LLM endpoints.
5
5
  Project-URL: Homepage, https://github.com/nazdridoy/ngpt
6
6
  Project-URL: Repository, https://github.com/nazdridoy/ngpt
@@ -114,6 +114,9 @@ ngpt --preprompt "You are a Linux expert" "How do I find large files?"
114
114
 
115
115
  # Log your conversation to a file
116
116
  ngpt --interactive --log conversation.log
117
+
118
+ # Create a temporary log file automatically
119
+ ngpt --log "Tell me about quantum computing"
117
120
  ```
118
121
 
119
122
  For more examples and detailed usage, visit the [CLI Usage Guide](https://nazdridoy.github.io/ngpt/usage/cli_usage.html).
@@ -349,7 +352,7 @@ You can configure the client using the following options:
349
352
  | `--top_p` | Set top_p (controls diversity, default: 1.0) |
350
353
  | `--max_tokens` | Set maximum response length in tokens |
351
354
  | `--preprompt` | Set custom system prompt to control AI behavior |
352
- | `--log` | Set filepath to log conversation to (for interactive modes) |
355
+ | `--log` | Enable logging: use `--log` to create a temporary log file, or `--log PATH` for a specific location |
353
356
  | `--prettify` | Render markdown responses and code with syntax highlighting |
354
357
  | `--stream-prettify` | Enable real-time markdown rendering with syntax highlighting while streaming |
355
358
  | `--renderer` | Select which markdown renderer to use with --prettify (auto, rich, or glow) |
@@ -79,6 +79,9 @@ ngpt --preprompt "You are a Linux expert" "How do I find large files?"
79
79
 
80
80
  # Log your conversation to a file
81
81
  ngpt --interactive --log conversation.log
82
+
83
+ # Create a temporary log file automatically
84
+ ngpt --log "Tell me about quantum computing"
82
85
  ```
83
86
 
84
87
  For more examples and detailed usage, visit the [CLI Usage Guide](https://nazdridoy.github.io/ngpt/usage/cli_usage.html).
@@ -314,7 +317,7 @@ You can configure the client using the following options:
314
317
  | `--top_p` | Set top_p (controls diversity, default: 1.0) |
315
318
  | `--max_tokens` | Set maximum response length in tokens |
316
319
  | `--preprompt` | Set custom system prompt to control AI behavior |
317
- | `--log` | Set filepath to log conversation to (for interactive modes) |
320
+ | `--log` | Enable logging: use `--log` to create a temporary log file, or `--log PATH` for a specific location |
318
321
  | `--prettify` | Render markdown responses and code with syntax highlighting |
319
322
  | `--stream-prettify` | Enable real-time markdown rendering with syntax highlighting while streaming |
320
323
  | `--renderer` | Select which markdown renderer to use with --prettify (auto, rich, or glow) |
@@ -18,7 +18,7 @@ def interactive_chat_session(
18
18
  temperature=0.7,
19
19
  top_p=1.0,
20
20
  max_tokens=None,
21
- log_file=None,
21
+ logger=None,
22
22
  preprompt=None,
23
23
  prettify=False,
24
24
  renderer='auto',
@@ -35,7 +35,7 @@ Creates an interactive chat session with the specified AI client.
35
35
  - `temperature` (float): Temperature for generation (0.0-1.0)
36
36
  - `top_p` (float): Top-p sampling value (0.0-1.0)
37
37
  - `max_tokens` (int, optional): Maximum number of tokens to generate
38
- - `log_file` (str, optional): Path to file for logging the conversation
38
+ - `logger` (object, optional): A logger instance with `log(role, message)` and `get_log_path()` methods for logging the conversation.
39
39
  - `preprompt` (str, optional): System prompt to use for the chat
40
40
  - `prettify` (bool): Whether to prettify markdown in responses
41
41
  - `renderer` (str): Markdown renderer to use ('auto', 'rich', 'glow')
@@ -131,7 +131,7 @@ You can also set configuration options directly via command-line arguments:
131
131
  - `--stream-prettify`: Enable real-time formatted output while streaming (uses Rich).
132
132
  - `--web-search`: Enable web search capability (if supported by the API).
133
133
  - `--preprompt <text>`: Set a custom system prompt.
134
- - `--log <file>`: Log the conversation to a file (in interactive modes).
134
+ - `--log [file]`: Enable logging: use `--log` to create a temporary log file, or `--log PATH` for a specific location.
135
135
  - `--temperature <value>`: Set the generation temperature (0.0-2.0).
136
136
  - `--top_p <value>`: Set the nucleus sampling top_p value (0.0-1.0).
137
137
  - `--max_tokens <number>`: Set the maximum number of tokens for the response.
@@ -157,8 +157,11 @@ ngpt --temperature 0.8 --top_p 0.95 --max_tokens 300 "Write a creative story"
157
157
  # Set a custom system prompt (preprompt)
158
158
  ngpt --preprompt "You are a Linux command line expert. Focus on efficient solutions." "How do I find the largest files in a directory?"
159
159
 
160
- # Save conversation to a log file (for interactive modes)
160
+ # Log conversation to a specific file
161
161
  ngpt --interactive --log conversation.log
162
+
163
+ # Create a temporary log file automatically
164
+ ngpt --log "Tell me about quantum computing"
162
165
  ```
163
166
 
164
167
  ## Environment Variables
@@ -81,9 +81,12 @@ ngpt --interactive --preprompt "You are a Python programming tutor. Explain conc
81
81
  Save your conversation history to a file for reference:
82
82
 
83
83
  ```bash
84
- # Basic interactive session with logging
84
+ # Basic interactive session with logging to a specific file
85
85
  ngpt --interactive --log python_tutoring.log
86
86
 
87
+ # Create an automatic temporary log file
88
+ ngpt --interactive --log
89
+
87
90
  # Combine logging with custom system prompt
88
91
  ngpt --interactive \
89
92
  --preprompt "You are a data science expert helping analyze experimental results." \
@@ -93,6 +96,9 @@ ngpt --interactive \
93
96
  ngpt --interactive \
94
97
  --preprompt "You are helping plan the architecture for a microservices application." \
95
98
  --log architecture_planning.log
99
+
100
+ # Log non-interactive sessions
101
+ ngpt --log "Explain quantum computing"
96
102
  ```
97
103
 
98
104
  The log file contains the complete conversation transcript, including:
@@ -28,6 +28,11 @@ nGPT is a lightweight Python library and command-line interface (CLI) tool desig
28
28
 
29
29
  - **Clean Code Generation**: Output code without markdown formatting or explanations.
30
30
 
31
+ - 📝 **Rich Multiline Editor**: Interactive multiline text input with syntax highlighting and intuitive controls
32
+ - 🎭 **System Prompts**: Customize model behavior with custom system prompts
33
+ - 📃 **Conversation Logging**: Automatically or manually log conversations with timestamps and role information
34
+ - 🧰 **CLI Components**: Reusable components for building custom AI-powered command-line tools
35
+
31
36
  ## Architecture
32
37
 
33
38
  nGPT is built around a few core components:
@@ -90,7 +90,7 @@ Below is a comprehensive list of all available command-line options, organized b
90
90
 
91
91
  | Option | Description |
92
92
  |--------|-------------|
93
- | `--log <file>` | Set filepath to log conversation to (for interactive modes) |
93
+ | `--log [file]` | Enable logging: use `--log` to create a temporary log file, or `--log PATH` for a specific location |
94
94
  | `-v, --version` | Show version information and exit |
95
95
  | `-h, --help` | Show help message and exit |
96
96
 
@@ -120,13 +120,36 @@ This opens a continuous chat session where the AI remembers previous exchanges.
120
120
  - Use arrow keys to navigate message history
121
121
  - Press Ctrl+C to exit the session
122
122
 
123
- You can log your conversation to a file for later reference:
123
+ #### Conversation Logging
124
+
125
+ You can log your conversation in several ways:
124
126
 
125
127
  ```bash
128
+ # Log to a specific file
126
129
  ngpt -i --log conversation.log
130
+
131
+ # Automatically create a temporary log file
132
+ ngpt -i --log
127
133
  ```
128
134
 
129
- This saves the entire conversation, including both user inputs and AI responses, to the specified file.
135
+ When using `--log` without a path, nGPT creates a temporary log file with a timestamp in the name:
136
+ - On Linux/macOS: `/tmp/ngpt-YYYYMMDD-HHMMSS.log`
137
+ - On Windows: `%TEMP%\ngpt-YYYYMMDD-HHMMSS.log`
138
+
139
+ The log file contains timestamps, roles, and the full content of all messages exchanged, making it easy to reference conversations later.
140
+
141
+ Logging works in all modes (not just interactive):
142
+
143
+ ```bash
144
+ # Log in standard chat mode
145
+ ngpt --log "Tell me about quantum computing"
146
+
147
+ # Log in code generation mode
148
+ ngpt --code --log "function to calculate prime numbers"
149
+
150
+ # Log in shell command mode
151
+ ngpt --shell --log "find large files in current directory"
152
+ ```
130
153
 
131
154
  #### Combining with Other Options
132
155
 
@@ -56,8 +56,8 @@ def setup_argument_parser():
56
56
  help='Set top_p (controls diversity, default: 1.0)')
57
57
  global_group.add_argument('--max_tokens', type=int,
58
58
  help='Set max response length in tokens')
59
- global_group.add_argument('--log', metavar='FILE',
60
- help='Set filepath to log conversation to (For interactive modes)')
59
+ global_group.add_argument('--log', metavar='FILE', nargs='?', const=True,
60
+ help='Set filepath to log conversation to, or create a temporary log file if no path provided')
61
61
  global_group.add_argument('--preprompt',
62
62
  help='Set custom system prompt to control AI behavior')
63
63
  global_group.add_argument('--prettify', action='store_const', const='auto',
@@ -1,9 +1,11 @@
1
1
  import sys
2
+ import os
2
3
  import shutil
3
4
  import datetime
4
5
  import traceback
5
6
  from .formatters import COLORS
6
7
  from .renderers import prettify_markdown, prettify_streaming_markdown
8
+ from ..log import create_logger
7
9
 
8
10
  # Optional imports for enhanced UI
9
11
  try:
@@ -16,7 +18,7 @@ try:
16
18
  except ImportError:
17
19
  HAS_PROMPT_TOOLKIT = False
18
20
 
19
- def interactive_chat_session(client, web_search=False, no_stream=False, temperature=0.7, top_p=1.0, max_tokens=None, log_file=None, preprompt=None, prettify=False, renderer='auto', stream_prettify=False):
21
+ def interactive_chat_session(client, web_search=False, no_stream=False, temperature=0.7, top_p=1.0, max_tokens=None, preprompt=None, prettify=False, renderer='auto', stream_prettify=False, logger=None):
20
22
  """Start an interactive chat session with the AI.
21
23
 
22
24
  Args:
@@ -26,11 +28,11 @@ def interactive_chat_session(client, web_search=False, no_stream=False, temperat
26
28
  temperature: Controls randomness in the response
27
29
  top_p: Controls diversity via nucleus sampling
28
30
  max_tokens: Maximum number of tokens to generate in each response
29
- log_file: Optional filepath to log conversation to
30
31
  preprompt: Custom system prompt to control AI behavior
31
32
  prettify: Whether to enable markdown rendering
32
33
  renderer: Which markdown renderer to use
33
34
  stream_prettify: Whether to enable streaming with prettify
35
+ logger: Logger instance for logging the conversation
34
36
  """
35
37
  # Get terminal width for better formatting
36
38
  try:
@@ -58,18 +60,9 @@ def interactive_chat_session(client, web_search=False, no_stream=False, temperat
58
60
 
59
61
  print(f"\n{separator}\n")
60
62
 
61
- # Initialize log file if provided
62
- log_handle = None
63
- if log_file:
64
- try:
65
- timestamp = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
66
- log_handle = open(log_file, 'a', encoding='utf-8')
67
- log_handle.write(f"\n--- nGPT Session Log: {sys.argv} ---\n")
68
- log_handle.write(f"Started at: {timestamp}\n\n")
69
- print(f"{COLORS['green']}Logging conversation to: {log_file}{COLORS['reset']}")
70
- except Exception as e:
71
- print(f"{COLORS['yellow']}Warning: Could not open log file: {str(e)}{COLORS['reset']}")
72
- log_handle = None
63
+ # Show logging info if logger is available
64
+ if logger:
65
+ print(f"{COLORS['green']}Logging conversation to: {logger.get_log_path()}{COLORS['reset']}")
73
66
 
74
67
  # Custom separator - use the same length for consistency
75
68
  def print_separator():
@@ -90,9 +83,8 @@ def interactive_chat_session(client, web_search=False, no_stream=False, temperat
90
83
  conversation.append(system_message)
91
84
 
92
85
  # Log system prompt if logging is enabled
93
- if log_handle and preprompt:
94
- log_handle.write(f"System: {system_prompt}\n\n")
95
- log_handle.flush()
86
+ if logger and preprompt:
87
+ logger.log("system", system_prompt)
96
88
 
97
89
  # Initialize prompt_toolkit history
98
90
  prompt_history = InMemoryHistory() if HAS_PROMPT_TOOLKIT else None
@@ -187,9 +179,8 @@ def interactive_chat_session(client, web_search=False, no_stream=False, temperat
187
179
  conversation.append(user_message)
188
180
 
189
181
  # Log user message if logging is enabled
190
- if log_handle:
191
- log_handle.write(f"User: {user_input}\n")
192
- log_handle.flush()
182
+ if logger:
183
+ logger.log("user", user_input)
193
184
 
194
185
  # Print assistant indicator with formatting
195
186
  if not no_stream and not stream_prettify:
@@ -254,22 +245,16 @@ def interactive_chat_session(client, web_search=False, no_stream=False, temperat
254
245
  else:
255
246
  print(response)
256
247
 
257
- # Log assistant response if logging is enabled
258
- if log_handle:
259
- log_handle.write(f"Assistant: {response}\n\n")
260
- log_handle.flush()
248
+ # Log AI response if logging is enabled
249
+ if logger:
250
+ logger.log("assistant", response)
261
251
 
262
252
  # Print separator between exchanges
263
253
  print_separator()
264
254
 
265
255
  except KeyboardInterrupt:
266
- print(f"\n\n{COLORS['green']}Chat session ended by user. Goodbye!{COLORS['reset']}")
256
+ print(f"\n\n{COLORS['yellow']}Chat session interrupted by user.{COLORS['reset']}")
267
257
  except Exception as e:
268
- print(f"\n{COLORS['yellow']}Error during chat session: {str(e)}{COLORS['reset']}")
269
- # Print traceback for debugging if it's a serious error
270
- traceback.print_exc()
271
- finally:
272
- # Close log file if it was opened
273
- if log_handle:
274
- log_handle.write(f"\n--- End of Session ---\n")
275
- log_handle.close()
258
+ print(f"\n{COLORS['yellow']}Error in chat session: {str(e)}{COLORS['reset']}")
259
+ if os.environ.get("NGPT_DEBUG"):
260
+ traceback.print_exc()
@@ -12,6 +12,7 @@ from ..cli_config import (
12
12
  CLI_CONFIG_OPTIONS,
13
13
  load_cli_config
14
14
  )
15
+ from ..log import create_logger
15
16
  from .. import __version__
16
17
 
17
18
  from .formatters import COLORS, ColoredHelpFormatter
@@ -195,6 +196,46 @@ def main():
195
196
  # Load CLI configuration early
196
197
  cli_config = load_cli_config()
197
198
 
199
+ # Initialize logger if --log is specified
200
+ logger = None
201
+ if args.log is not None:
202
+ # Check if the log value is a string that looks like a prompt (incorrectly parsed)
203
+ likely_prompt = False
204
+ likely_path = False
205
+
206
+ if isinstance(args.log, str) and args.prompt is None:
207
+ # Check if string looks like a path
208
+ if args.log.startswith('/') or args.log.startswith('./') or args.log.startswith('../') or args.log.startswith('~'):
209
+ likely_path = True
210
+ # Check if string has a file extension
211
+ elif '.' in os.path.basename(args.log):
212
+ likely_path = True
213
+ # Check if parent directory exists
214
+ elif os.path.exists(os.path.dirname(args.log)) and os.path.dirname(args.log) != '':
215
+ likely_path = True
216
+ # Check if string ends with a question mark (very likely a prompt)
217
+ elif args.log.strip().endswith('?'):
218
+ likely_prompt = True
219
+ # As a last resort, if it has spaces and doesn't look like a path, assume it's a prompt
220
+ elif ' ' in args.log and not likely_path:
221
+ likely_prompt = True
222
+
223
+ if likely_prompt and not likely_path:
224
+ # This is likely a prompt, not a log path
225
+ args.prompt = args.log
226
+ # Change log to True to create a temp file
227
+ args.log = True
228
+
229
+ # If --log is True, it means it was used without a path value
230
+ log_path = None if args.log is True else args.log
231
+ logger = create_logger(log_path)
232
+ if logger:
233
+ logger.open()
234
+ print(f"{COLORS['green']}Logging session to: {logger.get_log_path()}{COLORS['reset']}")
235
+ # If it's a temporary log file, inform the user
236
+ if logger.is_temporary():
237
+ print(f"{COLORS['green']}Created temporary log file.{COLORS['reset']}")
238
+
198
239
  # Priority order for config selection:
199
240
  # 1. Command-line arguments (args.provider, args.config_index)
200
241
  # 2. CLI configuration (cli_config["provider"], cli_config["config-index"])
@@ -426,7 +467,12 @@ def main():
426
467
  show_available_renderers()
427
468
 
428
469
  # Initialize client using the potentially overridden active_config
429
- client = NGPTClient(**active_config)
470
+ client = NGPTClient(
471
+ api_key=active_config.get("api_key", args.api_key),
472
+ base_url=active_config.get("base_url", args.base_url),
473
+ provider=active_config.get("provider"),
474
+ model=active_config.get("model", args.model)
475
+ )
430
476
 
431
477
  try:
432
478
  # Handle listing models
@@ -459,32 +505,32 @@ def main():
459
505
  temperature=args.temperature,
460
506
  top_p=args.top_p,
461
507
  max_tokens=args.max_tokens,
462
- log_file=args.log,
463
508
  preprompt=args.preprompt,
464
509
  prettify=args.prettify,
465
510
  renderer=args.renderer,
466
- stream_prettify=args.stream_prettify
511
+ stream_prettify=args.stream_prettify,
512
+ logger=logger
467
513
  )
468
514
  elif args.shell:
469
515
  # Apply CLI config for shell mode
470
516
  args = apply_cli_config(args, "shell")
471
517
 
472
518
  # Shell command generation mode
473
- shell_mode(client, args)
519
+ shell_mode(client, args, logger=logger)
474
520
 
475
521
  elif args.code:
476
522
  # Apply CLI config for code mode
477
523
  args = apply_cli_config(args, "code")
478
524
 
479
525
  # Code generation mode
480
- code_mode(client, args)
526
+ code_mode(client, args, logger=logger)
481
527
 
482
528
  elif args.text:
483
529
  # Apply CLI config for text mode
484
530
  args = apply_cli_config(args, "text")
485
531
 
486
532
  # Text mode (multiline input)
487
- text_mode(client, args)
533
+ text_mode(client, args, logger=logger)
488
534
 
489
535
  else:
490
536
  # Default to chat mode
@@ -492,12 +538,15 @@ def main():
492
538
  args = apply_cli_config(args, "all")
493
539
 
494
540
  # Standard chat mode
495
- chat_mode(client, args)
496
-
541
+ chat_mode(client, args, logger=logger)
497
542
  except KeyboardInterrupt:
498
543
  print("\nOperation cancelled by user. Exiting gracefully.")
499
544
  # Make sure we exit with a non-zero status code to indicate the operation was cancelled
500
545
  sys.exit(130) # 130 is the standard exit code for SIGINT (Ctrl+C)
501
546
  except Exception as e:
502
547
  print(f"Error: {e}")
503
- sys.exit(1) # Exit with error code
548
+ sys.exit(1) # Exit with error code
549
+ finally:
550
+ # Close the logger if it exists
551
+ if logger:
552
+ logger.close()
@@ -1,13 +1,15 @@
1
1
  from ..formatters import COLORS
2
2
  from ..renderers import prettify_markdown, prettify_streaming_markdown
3
+ from ...log import create_logger
3
4
  import sys
4
5
 
5
- def chat_mode(client, args):
6
+ def chat_mode(client, args, logger=None):
6
7
  """Handle the standard chat mode with a single prompt.
7
8
 
8
9
  Args:
9
10
  client: The NGPTClient instance
10
11
  args: The parsed command-line arguments
12
+ logger: Optional logger instance
11
13
  """
12
14
  # Get the prompt
13
15
  if args.prompt is None:
@@ -19,10 +21,18 @@ def chat_mode(client, args):
19
21
  sys.exit(130)
20
22
  else:
21
23
  prompt = args.prompt
24
+
25
+ # Log the user message if logging is enabled
26
+ if logger:
27
+ logger.log("user", prompt)
22
28
 
23
29
  # Create messages array with preprompt if available
24
30
  messages = None
25
31
  if args.preprompt:
32
+ # Log the system message if logging is enabled
33
+ if logger:
34
+ logger.log("system", args.preprompt)
35
+
26
36
  messages = [
27
37
  {"role": "system", "content": args.preprompt},
28
38
  {"role": "user", "content": prompt}
@@ -63,6 +73,10 @@ def chat_mode(client, args):
63
73
  # Stop live display if using stream-prettify
64
74
  if args.stream_prettify and live_display:
65
75
  live_display.stop()
76
+
77
+ # Log the AI response if logging is enabled
78
+ if logger and response:
79
+ logger.log("assistant", response)
66
80
 
67
81
  # Handle non-stream response or regular prettify
68
82
  if (args.no_stream or args.prettify) and response:
@@ -1,13 +1,15 @@
1
1
  from ..formatters import COLORS
2
2
  from ..renderers import prettify_markdown, prettify_streaming_markdown, has_markdown_renderer, show_available_renderers
3
+ from ...log import create_logger
3
4
  import sys
4
5
 
5
- def code_mode(client, args):
6
+ def code_mode(client, args, logger=None):
6
7
  """Handle the code generation mode.
7
8
 
8
9
  Args:
9
10
  client: The NGPTClient instance
10
11
  args: The parsed command-line arguments
12
+ logger: Optional logger instance
11
13
  """
12
14
  if args.prompt is None:
13
15
  try:
@@ -18,6 +20,10 @@ def code_mode(client, args):
18
20
  sys.exit(130)
19
21
  else:
20
22
  prompt = args.prompt
23
+
24
+ # Log the user prompt if logging is enabled
25
+ if logger:
26
+ logger.log("user", prompt)
21
27
 
22
28
  # Setup for streaming and prettify logic
23
29
  stream_callback = None
@@ -86,6 +92,10 @@ def code_mode(client, args):
86
92
  # Stop live display if using stream-prettify
87
93
  if use_stream_prettify and live_display:
88
94
  live_display.stop()
95
+
96
+ # Log the generated code if logging is enabled
97
+ if logger and generated_code:
98
+ logger.log("assistant", generated_code)
89
99
 
90
100
  # Print non-streamed output if needed
91
101
  if generated_code and not should_stream:
@@ -1,13 +1,15 @@
1
1
  from ..formatters import COLORS
2
+ from ...log import create_logger
2
3
  import subprocess
3
4
  import sys
4
5
 
5
- def shell_mode(client, args):
6
+ def shell_mode(client, args, logger=None):
6
7
  """Handle the shell command generation mode.
7
8
 
8
9
  Args:
9
10
  client: The NGPTClient instance
10
11
  args: The parsed command-line arguments
12
+ logger: Optional logger instance
11
13
  """
12
14
  if args.prompt is None:
13
15
  try:
@@ -18,12 +20,20 @@ def shell_mode(client, args):
18
20
  sys.exit(130)
19
21
  else:
20
22
  prompt = args.prompt
23
+
24
+ # Log the user prompt if logging is enabled
25
+ if logger:
26
+ logger.log("user", prompt)
21
27
 
22
28
  command = client.generate_shell_command(prompt, web_search=args.web_search,
23
29
  temperature=args.temperature, top_p=args.top_p,
24
30
  max_tokens=args.max_tokens)
25
31
  if not command:
26
32
  return # Error already printed by client
33
+
34
+ # Log the generated command if logging is enabled
35
+ if logger:
36
+ logger.log("assistant", command)
27
37
 
28
38
  print(f"\nGenerated command: {command}")
29
39
 
@@ -35,12 +45,32 @@ def shell_mode(client, args):
35
45
  return
36
46
 
37
47
  if response == 'y' or response == 'yes':
48
+ # Log the execution if logging is enabled
49
+ if logger:
50
+ logger.log("system", f"Executing command: {command}")
51
+
38
52
  try:
39
53
  try:
40
54
  print("\nExecuting command... (Press Ctrl+C to cancel)")
41
55
  result = subprocess.run(command, shell=True, check=True, capture_output=True, text=True)
42
- print(f"\nOutput:\n{result.stdout}")
56
+ output = result.stdout
57
+
58
+ # Log the command output if logging is enabled
59
+ if logger:
60
+ logger.log("system", f"Command output: {output}")
61
+
62
+ print(f"\nOutput:\n{output}")
43
63
  except KeyboardInterrupt:
44
64
  print("\nCommand execution cancelled by user.")
65
+
66
+ # Log the cancellation if logging is enabled
67
+ if logger:
68
+ logger.log("system", "Command execution cancelled by user")
45
69
  except subprocess.CalledProcessError as e:
46
- print(f"\nError:\n{e.stderr}")
70
+ error = e.stderr
71
+
72
+ # Log the error if logging is enabled
73
+ if logger:
74
+ logger.log("system", f"Command error: {error}")
75
+
76
+ print(f"\nError:\n{error}")
@@ -1,13 +1,15 @@
1
1
  from ..formatters import COLORS
2
2
  from ..renderers import prettify_markdown, prettify_streaming_markdown
3
3
  from ..ui import get_multiline_input
4
+ from ...log import create_logger
4
5
 
5
- def text_mode(client, args):
6
+ def text_mode(client, args, logger=None):
6
7
  """Handle the multi-line text input mode.
7
8
 
8
9
  Args:
9
10
  client: The NGPTClient instance
10
11
  args: The parsed command-line arguments
12
+ logger: Optional logger instance
11
13
  """
12
14
  if args.prompt is not None:
13
15
  prompt = args.prompt
@@ -20,9 +22,17 @@ def text_mode(client, args):
20
22
 
21
23
  print("\nSubmission successful. Waiting for response...")
22
24
 
25
+ # Log the user message if logging is enabled
26
+ if logger:
27
+ logger.log("user", prompt)
28
+
23
29
  # Create messages array with preprompt if available
24
30
  messages = None
25
31
  if args.preprompt:
32
+ # Log the system message if logging is enabled
33
+ if logger:
34
+ logger.log("system", args.preprompt)
35
+
26
36
  messages = [
27
37
  {"role": "system", "content": args.preprompt},
28
38
  {"role": "user", "content": prompt}
@@ -64,6 +74,10 @@ def text_mode(client, args):
64
74
  if args.stream_prettify and live_display:
65
75
  live_display.stop()
66
76
 
77
+ # Log the AI response if logging is enabled
78
+ if logger and response:
79
+ logger.log("assistant", response)
80
+
67
81
  # Handle non-stream response or regular prettify
68
82
  if (args.no_stream or args.prettify) and response:
69
83
  if args.prettify:
@@ -11,7 +11,7 @@ CLI_CONFIG_OPTIONS = {
11
11
  "temperature": {"type": "float", "default": 0.7, "context": ["all"]},
12
12
  "top_p": {"type": "float", "default": 1.0, "context": ["all"]},
13
13
  "max_tokens": {"type": "int", "default": None, "context": ["all"]},
14
- "log": {"type": "str", "default": None, "context": ["interactive", "text"]},
14
+ "log": {"type": "str", "default": None, "context": ["all"]},
15
15
  "preprompt": {"type": "str", "default": None, "context": ["all"]},
16
16
  "no-stream": {"type": "bool", "default": False, "context": ["all"], "exclusive": ["prettify", "stream-prettify"]},
17
17
  "prettify": {"type": "bool", "default": False, "context": ["all"], "exclusive": ["no-stream", "stream-prettify"]},
@@ -0,0 +1,180 @@
1
+ import os
2
+ import sys
3
+ import datetime
4
+ import tempfile
5
+ from pathlib import Path
6
+ from typing import Optional, TextIO, Dict, Any
7
+
8
# Minimal ANSI escape codes, used only for the stderr fallback notice.
COLORS = dict(
    green="\033[32m",
    yellow="\033[33m",
    reset="\033[0m",
)
14
+
15
class Logger:
    """Handles logging functionality for ngpt.

    Writes a plain-text session log: a header (start time, command line,
    log path), one timestamped line per message, and a session-end footer.
    If no path is given, or the given path cannot be opened, the logger
    falls back to a timestamped file in the platform temp directory.
    """

    def __init__(self, log_path: Optional[str] = None):
        """
        Initialize the logger.

        Args:
            log_path: Optional path to the log file. If None, a temporary file will be created.
        """
        self.log_path = log_path
        self.log_file: Optional[TextIO] = None
        self.is_temp = False
        # Captured so the session header can record how ngpt was invoked.
        self.command_args = sys.argv

        if self.log_path is None:
            self.log_path = self._make_temp_path()
            self.is_temp = True

    @staticmethod
    def _make_temp_path() -> str:
        """Return a timestamped log path inside the platform temp directory.

        Uses tempfile.gettempdir() so TMPDIR/TEMP/TMP overrides are honored
        on every platform. (The previous hard-coded "/tmp" and bare TEMP
        lookup ignored those overrides and could resolve to the current
        directory when TEMP was unset on Windows.)
        """
        timestamp = datetime.datetime.now().strftime('%Y%m%d-%H%M%S')
        return os.path.join(tempfile.gettempdir(), f"ngpt-{timestamp}.log")

    def _fall_back_to_temp(self) -> None:
        """Repoint log_path at a fresh temporary file and mark it temporary."""
        self.log_path = self._make_temp_path()
        self.is_temp = True

    def _write_header(self) -> None:
        """Write the session header to the (already open) log file."""
        timestamp = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        self.log_file.write(f"\n--- nGPT Session Log ---\n")
        self.log_file.write(f"Started at: {timestamp}\n")
        self.log_file.write(f"Command: {' '.join(self.command_args)}\n")
        self.log_file.write(f"Log file: {self.log_path}\n\n")
        self.log_file.flush()

    def __enter__(self):
        """Context manager entry"""
        self.open()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit"""
        self.close()

    def open(self) -> bool:
        """
        Open the log file for writing.

        On failure (unwritable directory, permission error, ...) a warning is
        printed to stderr and a temporary file is tried instead, so logging is
        best-effort and never raises to the caller.

        Returns:
            bool: True if successful, False otherwise.
        """
        try:
            # Expand ~ to home directory if present (no-op otherwise).
            self.log_path = os.path.expanduser(self.log_path)

            # Make sure the directory exists
            log_dir = os.path.dirname(self.log_path)
            if log_dir and not os.path.exists(log_dir):
                try:
                    os.makedirs(log_dir, exist_ok=True)
                except (PermissionError, OSError) as e:
                    print(f"Warning: Could not create log directory: {str(e)}", file=sys.stderr)
                    # Fall back to temp directory
                    self._fall_back_to_temp()

            self.log_file = open(self.log_path, 'a', encoding='utf-8')
            self._write_header()
            return True
        except Exception as e:
            print(f"Warning: Could not open log file: {str(e)}", file=sys.stderr)

            # Fall back to temp file and retry once.
            self._fall_back_to_temp()
            try:
                self.log_file = open(self.log_path, 'a', encoding='utf-8')
                self._write_header()
                print(f"{COLORS['green']}Falling back to temporary log file: {self.log_path}{COLORS['reset']}", file=sys.stderr)
                return True
            except Exception as e2:
                print(f"Warning: Could not open temporary log file: {str(e2)}", file=sys.stderr)
                self.log_file = None
                return False

    def close(self):
        """Close the log file if it's open, appending a session-end footer."""
        if self.log_file:
            try:
                timestamp = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
                self.log_file.write(f"\n--- Session ended at {timestamp} ---\n")
                self.log_file.close()
            except Exception:
                # Best-effort: never let logging teardown crash the app.
                pass
            self.log_file = None

    def log(self, role: str, content: str):
        """
        Log a message.

        Args:
            role: Role of the message (e.g., 'system', 'user', 'assistant')
            content: Content of the message
        """
        if not self.log_file:
            return

        try:
            timestamp = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
            self.log_file.write(f"{timestamp}: {role}: {content}\n")
            self.log_file.flush()
        except Exception:
            # Silently fail if logging fails
            pass

    def get_log_path(self) -> str:
        """
        Get the path to the log file.

        Returns:
            str: Path to the log file
        """
        return self.log_path

    def is_temporary(self) -> bool:
        """
        Check if the log file is temporary.

        Returns:
            bool: True if the log file is temporary
        """
        return self.is_temp
168
+
169
+
170
def create_logger(log_path: Optional[str] = None) -> Logger:
    """Convenience factory for :class:`Logger`.

    Args:
        log_path: Optional path to the log file; when None the Logger
            chooses a temporary file itself.

    Returns:
        Logger: A new (not yet opened) Logger instance.
    """
    return Logger(log_path=log_path)
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "ngpt"
3
- version = "2.10.0"
3
+ version = "2.11.0"
4
4
  description = "A lightweight Python CLI and library for interacting with OpenAI-compatible APIs, supporting both official and self-hosted LLM endpoints."
5
5
  authors = [
6
6
  {name = "nazDridoy", email = "nazdridoy399@gmail.com"},
@@ -134,7 +134,7 @@ wheels = [
134
134
 
135
135
  [[package]]
136
136
  name = "ngpt"
137
- version = "2.10.0"
137
+ version = "2.11.0"
138
138
  source = { editable = "." }
139
139
  dependencies = [
140
140
  { name = "prompt-toolkit" },
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes