ngpt-2.5.1-py3-none-any.whl → ngpt-2.7.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ngpt/cli.py +253 -35
- ngpt/client.py +21 -6
- {ngpt-2.5.1.dist-info → ngpt-2.7.1.dist-info}/METADATA +23 -4
- ngpt-2.7.1.dist-info/RECORD +9 -0
- ngpt-2.5.1.dist-info/RECORD +0 -9
- {ngpt-2.5.1.dist-info → ngpt-2.7.1.dist-info}/WHEEL +0 -0
- {ngpt-2.5.1.dist-info → ngpt-2.7.1.dist-info}/entry_points.txt +0 -0
- {ngpt-2.5.1.dist-info → ngpt-2.7.1.dist-info}/licenses/LICENSE +0 -0
ngpt/cli.py
CHANGED
@@ -115,10 +115,11 @@ def show_available_renderers():
      if HAS_RICH:
          print(f" {COLORS['green']}✓ Rich{COLORS['reset']} - Python library for terminal formatting (Recommended)")
      else:
-         print(f" {COLORS['yellow']}✗ Rich{COLORS['reset']} - Not installed (pip install rich)")
+         print(f" {COLORS['yellow']}✗ Rich{COLORS['reset']} - Not installed (pip install \"ngpt[full]\" or pip install rich)")

      if not HAS_GLOW and not HAS_RICH:
          print(f"\n{COLORS['yellow']}To enable prettified markdown output, install one of the above renderers.{COLORS['reset']}")
+         print(f"{COLORS['yellow']}For Rich: pip install \"ngpt[full]\" or pip install rich{COLORS['reset']}")
      else:
          renderers = []
          if HAS_RICH:
@@ -148,11 +149,11 @@ def warn_if_no_markdown_renderer(renderer='auto'):

      if renderer == 'auto':
          print(f"{COLORS['yellow']}Warning: No markdown rendering library available.{COLORS['reset']}")
-         print(f"{COLORS['yellow']}Install
+         print(f"{COLORS['yellow']}Install with: pip install \"ngpt[full]\"{COLORS['reset']}")
          print(f"{COLORS['yellow']}Or install 'glow' from https://github.com/charmbracelet/glow{COLORS['reset']}")
      elif renderer == 'rich':
          print(f"{COLORS['yellow']}Warning: Rich is not available.{COLORS['reset']}")
-         print(f"{COLORS['yellow']}Install with: pip install rich{COLORS['reset']}")
+         print(f"{COLORS['yellow']}Install with: pip install \"ngpt[full]\" or pip install rich{COLORS['reset']}")
      elif renderer == 'glow':
          print(f"{COLORS['yellow']}Warning: Glow is not available.{COLORS['reset']}")
          print(f"{COLORS['yellow']}Install from https://github.com/charmbracelet/glow{COLORS['reset']}")
@@ -219,7 +220,8 @@ def prettify_markdown(text, renderer='auto'):
      # Use rich for rendering
      elif renderer == 'rich':
          if not HAS_RICH:
-             print(f"{COLORS['yellow']}Warning: Rich is not available.
+             print(f"{COLORS['yellow']}Warning: Rich is not available.{COLORS['reset']}")
+             print(f"{COLORS['yellow']}Install with: pip install \"ngpt[full]\" or pip install rich{COLORS['reset']}")
              # Fall back to glow if available
              if HAS_GLOW:
                  print(f"{COLORS['yellow']}Falling back to Glow renderer.{COLORS['reset']}")
@@ -505,8 +507,22 @@ def check_config(config):

      return True

- def interactive_chat_session(client, web_search=False, no_stream=False, temperature=0.7, top_p=1.0, max_tokens=None, log_file=None, preprompt=None, prettify=False, renderer='auto'):
- """
+ def interactive_chat_session(client, web_search=False, no_stream=False, temperature=0.7, top_p=1.0, max_tokens=None, log_file=None, preprompt=None, prettify=False, renderer='auto', stream_prettify=False):
+     """Start an interactive chat session with the AI.
+
+     Args:
+         client: The NGPTClient instance
+         web_search: Whether to enable web search capability
+         no_stream: Whether to disable streaming
+         temperature: Controls randomness in the response
+         top_p: Controls diversity via nucleus sampling
+         max_tokens: Maximum number of tokens to generate in each response
+         log_file: Optional filepath to log conversation to
+         preprompt: Custom system prompt to control AI behavior
+         prettify: Whether to enable markdown rendering
+         renderer: Which markdown renderer to use
+         stream_prettify: Whether to enable streaming with prettify
+     """
      # Get terminal width for better formatting
      try:
          term_width = shutil.get_terminal_size().columns
@@ -668,18 +684,38 @@ def interactive_chat_session(client, web_search=False, no_stream=False, temperat
          log_handle.flush()

      # Print assistant indicator with formatting
-     if not no_stream:
+     if not no_stream and not stream_prettify:
          print(f"\n{ngpt_header()}: {COLORS['reset']}", end="", flush=True)
-     else:
+     elif not stream_prettify:
          print(f"\n{ngpt_header()}: {COLORS['reset']}", flush=True)

-     # If prettify is enabled
-
-
-     # If prettify is enabled with streaming, inform the user
-     if prettify and not no_stream:
+     # If prettify is enabled with regular streaming
+     if prettify and not no_stream and not stream_prettify:
          print(f"\n{COLORS['yellow']}Note: Streaming disabled to enable markdown rendering.{COLORS['reset']}")
          print(f"\n{ngpt_header()}: {COLORS['reset']}", flush=True)
+         should_stream = False
+     else:
+         # Regular behavior with stream-prettify taking precedence
+         should_stream = not no_stream
+
+     # Setup for stream-prettify
+     stream_callback = None
+     live_display = None
+
+     if stream_prettify and should_stream:
+         # Get the correct header for interactive mode
+         header = ngpt_header()
+         live_display, stream_callback = prettify_streaming_markdown(renderer, is_interactive=True, header_text=header)
+         if not live_display:
+             # Fallback to normal prettify if live display setup failed
+             prettify = True
+             stream_prettify = False
+             should_stream = False
+             print(f"{COLORS['yellow']}Falling back to regular prettify mode.{COLORS['reset']}")
+
+     # Start live display if using stream-prettify
+     if stream_prettify and live_display:
+         live_display.start()

      # Get AI response with conversation history
      response = client.chat(
@@ -690,9 +726,14 @@ def interactive_chat_session(client, web_search=False, no_stream=False, temperat
          temperature=temperature,
          top_p=top_p,
          max_tokens=max_tokens,
-         markdown_format=prettify
+         markdown_format=prettify or stream_prettify,
+         stream_callback=stream_callback
      )

+     # Stop live display if using stream-prettify
+     if stream_prettify and live_display:
+         live_display.stop()
+
      # Add AI response to conversation history
      if response:
          assistant_message = {"role": "assistant", "content": response}
@@ -726,6 +767,83 @@ def interactive_chat_session(client, web_search=False, no_stream=False, temperat
          log_handle.write(f"\n--- End of Session ---\n")
          log_handle.close()

+ def prettify_streaming_markdown(renderer='rich', is_interactive=False, header_text=None):
+     """Set up streaming markdown rendering.
+
+     This function creates a live display context for rendering markdown
+     that can be updated in real-time as streaming content arrives.
+
+     Args:
+         renderer (str): Which renderer to use (currently only 'rich' is supported for streaming)
+         is_interactive (bool): Whether this is being used in interactive mode
+         header_text (str): Header text to include at the top (for interactive mode)
+
+     Returns:
+         tuple: (live_display, update_function) if successful, (None, None) otherwise
+     """
+     # Only warn if explicitly specifying a renderer other than 'rich' or 'auto'
+     if renderer != 'rich' and renderer != 'auto':
+         print(f"{COLORS['yellow']}Warning: Streaming prettify only supports 'rich' renderer currently.{COLORS['reset']}")
+         print(f"{COLORS['yellow']}Falling back to Rich renderer.{COLORS['reset']}")
+
+     # Always use rich for streaming prettify
+     renderer = 'rich'
+
+     if not HAS_RICH:
+         print(f"{COLORS['yellow']}Warning: Rich is not available for streaming prettify.{COLORS['reset']}")
+         print(f"{COLORS['yellow']}Install with: pip install \"ngpt[full]\" or pip install rich{COLORS['reset']}")
+         return None, None
+
+     try:
+         from rich.live import Live
+         from rich.markdown import Markdown
+         from rich.console import Console
+         from rich.text import Text
+         from rich.panel import Panel
+         import rich.box
+
+         console = Console()
+
+         # Create an empty markdown object to start with
+         if is_interactive and header_text:
+             # For interactive mode, include header in a panel
+             # Clean up the header text to avoid duplication - use just "🤖 nGPT" instead of "╭─ 🤖 nGPT"
+             clean_header = "🤖 nGPT"
+             panel_title = Text(clean_header, style="cyan bold")
+
+             # Create a nicer, more compact panel
+             padding = (1, 1) # Less horizontal padding (left, right)
+             md_obj = Panel(
+                 Markdown(""),
+                 title=panel_title,
+                 title_align="left",
+                 border_style="cyan",
+                 padding=padding,
+                 width=console.width - 4, # Make panel slightly narrower than console
+                 box=rich.box.ROUNDED
+             )
+         else:
+             md_obj = Markdown("")
+
+         # Initialize the Live display with an empty markdown
+         live = Live(md_obj, console=console, refresh_per_second=10)
+
+         # Define an update function that will be called with new content
+         def update_content(content):
+             nonlocal md_obj
+             if is_interactive and header_text:
+                 # Update the panel content
+                 md_obj.renderable = Markdown(content)
+                 live.update(md_obj)
+             else:
+                 md_obj = Markdown(content)
+                 live.update(md_obj)
+
+         return live, update_content
+     except Exception as e:
+         print(f"{COLORS['yellow']}Error setting up Rich streaming display: {str(e)}{COLORS['reset']}")
+         return None, None
+
  def main():
      # Colorize description - use a shorter description to avoid line wrapping issues
      description = f"{COLORS['cyan']}{COLORS['bold']}nGPT{COLORS['reset']} - Interact with AI language models via OpenAI-compatible APIs"
@@ -783,6 +901,8 @@ def main():
          help='Set custom system prompt to control AI behavior')
      global_group.add_argument('--prettify', action='store_const', const='auto',
          help='Render markdown responses and code with syntax highlighting and formatting')
+     global_group.add_argument('--stream-prettify', action='store_true',
+         help='Enable streaming with markdown rendering (automatically uses Rich renderer)')
      global_group.add_argument('--renderer', choices=['auto', 'rich', 'glow'], default='auto',
          help='Select which markdown renderer to use with --prettify (auto, rich, or glow)')

@@ -1022,6 +1142,14 @@ def main():
          show_available_renderers()
          args.prettify = False

+     # Check if --prettify is used with --stream-prettify (conflict)
+     if args.prettify and args.stream_prettify:
+         parser.error("--prettify and --stream-prettify cannot be used together. Choose one option.")
+
+     # Check if --stream-prettify is used but Rich is not available
+     if args.stream_prettify and not has_markdown_renderer('rich'):
+         parser.error("--stream-prettify requires Rich to be installed. Install with: pip install \"ngpt[full]\" or pip install rich")
+
      # Initialize client using the potentially overridden active_config
      client = NGPTClient(**active_config)

@@ -1046,9 +1174,19 @@ def main():
      # Handle modes
      if args.interactive:
          # Interactive chat mode
-         interactive_chat_session(
-
-
+         interactive_chat_session(
+             client,
+             web_search=args.web_search,
+             no_stream=args.no_stream,
+             temperature=args.temperature,
+             top_p=args.top_p,
+             max_tokens=args.max_tokens,
+             log_file=args.log,
+             preprompt=args.preprompt,
+             prettify=args.prettify,
+             renderer=args.renderer,
+             stream_prettify=args.stream_prettify
+         )
      elif args.shell:
          if args.prompt is None:
              try:
@@ -1097,12 +1235,50 @@ def main():
                  sys.exit(130)
          else:
              prompt = args.prompt
+
+         # Setup for stream-prettify with code generation
+         stream_callback = None
+         live_display = None
+         should_stream = False
+
+         if args.stream_prettify:
+             should_stream = True # Enable streaming
+             # This is the code generation mode, not interactive
+             live_display, stream_callback = prettify_streaming_markdown(args.renderer)
+             if not live_display:
+                 # Fallback to normal prettify if live display setup failed
+                 args.prettify = True
+                 args.stream_prettify = False
+                 should_stream = False
+                 print(f"{COLORS['yellow']}Falling back to regular prettify mode.{COLORS['reset']}")
+
+         # If regular prettify is enabled with streaming, inform the user
+         if args.prettify and not args.no_stream:
+             print(f"{COLORS['yellow']}Note: Streaming disabled to enable markdown rendering.{COLORS['reset']}")
+
+         print("\nGenerating code...")
+
+         # Start live display if using stream-prettify
+         if args.stream_prettify and live_display:
+             live_display.start()
+
+         generated_code = client.generate_code(
+             prompt=prompt,
+             language=args.language,
+             web_search=args.web_search,
+             temperature=args.temperature,
+             top_p=args.top_p,
+             max_tokens=args.max_tokens,
+             markdown_format=args.prettify or args.stream_prettify,
+             stream=should_stream,
+             stream_callback=stream_callback
+         )
+
+         # Stop live display if using stream-prettify
+         if args.stream_prettify and live_display:
+             live_display.stop()

-         generated_code
-             temperature=args.temperature, top_p=args.top_p,
-             max_tokens=args.max_tokens,
-             markdown_format=args.prettify)
-         if generated_code:
+         if generated_code and not args.stream_prettify:
              if args.prettify:
                  print("\nGenerated code:")
                  prettify_markdown(generated_code, args.renderer)
@@ -1226,25 +1402,46 @@ def main():
              {"role": "user", "content": prompt}
          ]

-         # If prettify is enabled
-
+         # If stream-prettify is enabled
+         stream_callback = None
+         live_display = None

-
+         if args.stream_prettify:
+             should_stream = True # Enable streaming
+             # This is the standard mode, not interactive
+             live_display, stream_callback = prettify_streaming_markdown(args.renderer)
+             if not live_display:
+                 # Fallback to normal prettify if live display setup failed
+                 args.prettify = True
+                 args.stream_prettify = False
+                 should_stream = False
+                 print(f"{COLORS['yellow']}Falling back to regular prettify mode.{COLORS['reset']}")
+
+         # If regular prettify is enabled with streaming, inform the user
          if args.prettify and not args.no_stream:
              print(f"{COLORS['yellow']}Note: Streaming disabled to enable markdown rendering.{COLORS['reset']}")
-
+
+         # Start live display if using stream-prettify
+         if args.stream_prettify and live_display:
+             live_display.start()
+
          response = client.chat(prompt, stream=should_stream, web_search=args.web_search,
                                 temperature=args.temperature, top_p=args.top_p,
                                 max_tokens=args.max_tokens, messages=messages,
-                                markdown_format=args.prettify
+                                markdown_format=args.prettify or args.stream_prettify,
+                                stream_callback=stream_callback)

-         #
+         # Stop live display if using stream-prettify
+         if args.stream_prettify and live_display:
+             live_display.stop()
+
+         # Handle non-stream response or regular prettify
          if (args.no_stream or args.prettify) and response:
              if args.prettify:
                  prettify_markdown(response, args.renderer)
              else:
                  print(response)
-
+
      else:
          # Default to chat mode
          if args.prompt is None:
@@ -1265,19 +1462,40 @@ def main():
              {"role": "user", "content": prompt}
          ]

-         # If prettify is enabled
-
+         # If stream-prettify is enabled
+         stream_callback = None
+         live_display = None

-
+         if args.stream_prettify:
+             should_stream = True # Enable streaming
+             # This is the standard mode, not interactive
+             live_display, stream_callback = prettify_streaming_markdown(args.renderer)
+             if not live_display:
+                 # Fallback to normal prettify if live display setup failed
+                 args.prettify = True
+                 args.stream_prettify = False
+                 should_stream = False
+                 print(f"{COLORS['yellow']}Falling back to regular prettify mode.{COLORS['reset']}")
+
+         # If regular prettify is enabled with streaming, inform the user
          if args.prettify and not args.no_stream:
              print(f"{COLORS['yellow']}Note: Streaming disabled to enable markdown rendering.{COLORS['reset']}")
-
+
+         # Start live display if using stream-prettify
+         if args.stream_prettify and live_display:
+             live_display.start()
+
          response = client.chat(prompt, stream=should_stream, web_search=args.web_search,
                                 temperature=args.temperature, top_p=args.top_p,
                                 max_tokens=args.max_tokens, messages=messages,
-                                markdown_format=args.prettify
+                                markdown_format=args.prettify or args.stream_prettify,
+                                stream_callback=stream_callback)

-         #
+         # Stop live display if using stream-prettify
+         if args.stream_prettify and live_display:
+             live_display.stop()
+
+         # Handle non-stream response or regular prettify
          if (args.no_stream or args.prettify) and response:
              if args.prettify:
                  prettify_markdown(response, args.renderer)
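The `--stream-prettify` path added above pairs Rich's `Live` display with a callback that receives the full response accumulated so far and re-renders it as markdown on every update. The snippet below is a minimal standalone sketch of that same pattern, not code from the package: it only assumes `rich` is installed and simulates the streamed chunks instead of calling an API.

```python
import time
from rich.console import Console
from rich.live import Live
from rich.markdown import Markdown

console = Console()

# Stand-in for chunks arriving from an OpenAI-compatible streaming API.
chunks = ["# Streaming demo\n\n", "Markdown arrives ", "in pieces and is ", "**re-rendered** live."]

accumulated = ""
with Live(Markdown(""), console=console, refresh_per_second=10) as live:
    for chunk in chunks:
        accumulated += chunk                # keep the full text collected so far
        live.update(Markdown(accumulated))  # parse and redraw the whole document
        time.sleep(0.3)                     # simulated network delay
```

Re-parsing the entire accumulated text on each update is what lets partially received markdown (an unclosed code fence, half a table) settle into its final rendering as more chunks arrive.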
ngpt/client.py
CHANGED
@@ -34,6 +34,7 @@ class NGPTClient:
          messages: Optional[List[Dict[str, str]]] = None,
          web_search: bool = False,
          markdown_format: bool = False,
+         stream_callback: Optional[callable] = None,
          **kwargs
      ) -> str:
          """
@@ -48,6 +49,7 @@ class NGPTClient:
              messages: Optional list of message objects to override default behavior
              web_search: Whether to enable web search capability
              markdown_format: If True, allow markdown-formatted responses, otherwise plain text
+             stream_callback: Optional callback function for streaming mode updates
              **kwargs: Additional arguments to pass to the API

          Returns:
@@ -129,15 +131,23 @@ class NGPTClient:
                      delta = chunk["choices"][0].get("delta", {})
                      content = delta.get("content", "")
                      if content:
-
-
+                         if stream_callback:
+                             # If we have a callback, use it and don't print here
+                             collected_content += content
+                             stream_callback(collected_content)
+                         else:
+                             # Default behavior: print to console
+                             print(content, end="", flush=True)
+                             collected_content += content
                  except json.JSONDecodeError:
                      pass # Skip invalid JSON
          except KeyboardInterrupt:
              print("\nGeneration cancelled by user.")
              return collected_content

-
+         # Only print a newline if we're not using a callback
+         if not stream_callback:
+             print() # Add a final newline
          return collected_content

      except requests.exceptions.HTTPError as e:
@@ -248,7 +258,9 @@ Command:"""
          temperature: float = 0.4,
          top_p: float = 0.95,
          max_tokens: Optional[int] = None,
-         markdown_format: bool = False
+         markdown_format: bool = False,
+         stream: bool = False,
+         stream_callback: Optional[callable] = None
      ) -> str:
          """
          Generate code based on the prompt.
@@ -261,6 +273,8 @@ Command:"""
              top_p: Controls diversity via nucleus sampling
              max_tokens: Maximum number of tokens to generate
              markdown_format: If True, request markdown-formatted code, otherwise plain text
+             stream: Whether to stream the response
+             stream_callback: Optional callback function for streaming mode updates

          Returns:
              The generated code
@@ -299,12 +313,13 @@ Code:"""
          try:
              return self.chat(
                  prompt=prompt,
-                 stream=
+                 stream=stream,
                  messages=messages,
                  web_search=web_search,
                  temperature=temperature,
                  top_p=top_p,
-                 max_tokens=max_tokens
+                 max_tokens=max_tokens,
+                 stream_callback=stream_callback
              )
          except Exception as e:
              print(f"Error generating code: {e}")
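With `stream_callback` now threaded through both `chat()` and `generate_code()`, a library caller can route streamed output into its own renderer instead of stdout; the client then skips its default printing and still returns the fully collected text. A hedged usage sketch follows — the constructor arguments and model name are illustrative assumptions, not values taken from this diff:

```python
from ngpt.client import NGPTClient

# Illustrative configuration; the CLI builds the client as NGPTClient(**active_config).
client = NGPTClient(api_key="sk-...", base_url="https://api.openai.com/v1/", model="gpt-4o-mini")

def on_update(accumulated: str) -> None:
    # Called on every chunk with the full response collected so far,
    # which is exactly what a live markdown renderer needs to redraw.
    print(f"\r{len(accumulated)} chars received", end="", flush=True)

response = client.chat(
    "Explain nucleus sampling in two sentences",
    stream=True,
    markdown_format=True,
    stream_callback=on_update,  # suppresses the client's own console printing
)
print("\n" + response)  # the collected text is still returned at the end
```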
{ngpt-2.5.1.dist-info → ngpt-2.7.1.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ngpt
- Version: 2.5.1
+ Version: 2.7.1
  Summary: A lightweight Python CLI and library for interacting with OpenAI-compatible APIs, supporting both official and self-hosted LLM endpoints.
  Project-URL: Homepage, https://github.com/nazdridoy/ngpt
  Project-URL: Repository, https://github.com/nazdridoy/ngpt
@@ -28,10 +28,10 @@ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
  Classifier: Topic :: Software Development :: Libraries :: Python Modules
  Classifier: Topic :: Utilities
  Requires-Python: >=3.8
- Requires-Dist: prompt-toolkit>=3.0.0
  Requires-Dist: requests>=2.31.0
- Provides-Extra:
- Requires-Dist:
+ Provides-Extra: full
+ Requires-Dist: prompt-toolkit>=3.0.0; extra == 'full'
+ Requires-Dist: rich>=10.0.0; extra == 'full'
  Description-Content-Type: text/markdown

  # nGPT
@@ -66,6 +66,9 @@ A lightweight Python CLI and library for interacting with OpenAI-compatible APIs
  # Install
  pip install ngpt

+ # Install with additional features
+ pip install "ngpt[full]"
+
  # Chat with default settings
  ngpt "Tell me about quantum computing"

@@ -81,12 +84,18 @@ ngpt --code "function to calculate the Fibonacci sequence"
  # Generate code with syntax highlighting
  ngpt --code --prettify "function to calculate the Fibonacci sequence"

+ # Generate code with real-time syntax highlighting
+ ngpt --code --stream-prettify "function to calculate the Fibonacci sequence"
+
  # Generate and execute shell commands
  ngpt --shell "list all files in the current directory"

  # Display markdown responses with beautiful formatting
  ngpt --prettify "Explain markdown syntax with examples"

+ # Display markdown responses with real-time formatting
+ ngpt --stream-prettify "Explain markdown syntax with examples"
+
  # Use a specific markdown renderer
  ngpt --prettify --renderer=rich "Create a markdown table"

@@ -111,6 +120,7 @@ For more examples and detailed usage, visit the [CLI Usage Guide](https://nazdri
  - 📊 **Streaming Responses**: Real-time output for better user experience
  - 🔍 **Web Search**: Integrated with compatible API endpoints
  - 🎨 **Markdown Rendering**: Beautiful formatting of markdown and code with syntax highlighting
+ - ⚡ **Real-time Markdown**: Stream responses with live updating syntax highlighting and formatting
  - ⚙️ **Multiple Configurations**: Cross-platform config system supporting different profiles
  - 💻 **Shell Command Generation**: OS-aware command execution
  - 🧩 **Clean Code Generation**: Output code without markdown or explanations
@@ -136,11 +146,19 @@ Key documentation sections:
  ## Installation

  ```bash
+ # Basic installation (minimal dependencies)
  pip install ngpt
+
+ # Full installation with all features (recommended)
+ pip install "ngpt[full]"
  ```

  Requires Python 3.8 or newer.

+ The full installation includes:
+ - Enhanced markdown rendering with syntax highlighting
+ - Improved interactive input experience with multiline editing
+
  For detailed installation instructions, see the [Installation Guide](https://nazdridoy.github.io/ngpt/installation.html).

  ## Usage
@@ -280,6 +298,7 @@ You can configure the client using the following options:
  | `--preprompt` | Set custom system prompt to control AI behavior |
  | `--log` | Set filepath to log conversation to (for interactive modes) |
  | `--prettify` | Render markdown responses and code with syntax highlighting |
+ | `--stream-prettify` | Enable real-time markdown rendering with syntax highlighting while streaming |
  | `--renderer` | Select which markdown renderer to use with --prettify (auto, rich, or glow) |
  | `--list-renderers` | Show available markdown renderers for use with --prettify |
  | `--config` | Path to a custom configuration file or, when used without a value, enters interactive configuration mode |
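The METADATA change above is the packaging side of the same feature: `prompt-toolkit` and `rich` move behind a new `full` extra, so a bare `pip install ngpt` no longer pulls them in and the CLI must probe for them at runtime (the `HAS_RICH`/`HAS_GLOW` flags referenced in cli.py). Below is a small sketch of that optional-dependency probe; the exact way the external `glow` binary is detected is an assumption here, not taken from this diff.

```python
import importlib.util
import shutil

# rich is an optional Python dependency (the "full" extra); glow is an external binary.
HAS_RICH = importlib.util.find_spec("rich") is not None
HAS_GLOW = shutil.which("glow") is not None  # assumed detection; glow is not a pip package

if not (HAS_RICH or HAS_GLOW):
    print('No markdown renderer found: pip install "ngpt[full]" or pip install rich')
```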
ngpt-2.7.1.dist-info/RECORD
ADDED
@@ -0,0 +1,9 @@
+ ngpt/__init__.py,sha256=ehInP9w0MZlS1vZ1g6Cm4YE1ftmgF72CnEddQ3Le9n4,368
+ ngpt/cli.py,sha256=TX5amxbysh__U-NPKauLspIGNvwG8mQcJJZfzzYcoto,69769
+ ngpt/client.py,sha256=Rv-JO8RAmw1v3gdLkwaPe_PEw6p83cejO0YNT_DDjeg,15134
+ ngpt/config.py,sha256=WYOk_b1eiYjo6hpV3pfXr2RjqhOnmKqwZwKid1T41I4,10363
+ ngpt-2.7.1.dist-info/METADATA,sha256=tCZroHfff-Q5Ft3wg9kLDDzNjgshvWQVXwuzgsAgUrk,15452
+ ngpt-2.7.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ ngpt-2.7.1.dist-info/entry_points.txt,sha256=1cnAMujyy34DlOahrJg19lePSnb08bLbkUs_kVerqdk,39
+ ngpt-2.7.1.dist-info/licenses/LICENSE,sha256=mQkpWoADxbHqE0HRefYLJdm7OpdrXBr3vNv5bZ8w72M,1065
+ ngpt-2.7.1.dist-info/RECORD,,
ngpt-2.5.1.dist-info/RECORD
DELETED
@@ -1,9 +0,0 @@
- ngpt/__init__.py,sha256=ehInP9w0MZlS1vZ1g6Cm4YE1ftmgF72CnEddQ3Le9n4,368
- ngpt/cli.py,sha256=IiBVelrzhrRDu75B5wbf5GlCbBqgQXMh7tJ3Nk_WDsQ,60095
- ngpt/client.py,sha256=QyPw93oJrMnStOzRqK6AldVqHATH1QgdbJ3vfkFjUsQ,14152
- ngpt/config.py,sha256=WYOk_b1eiYjo6hpV3pfXr2RjqhOnmKqwZwKid1T41I4,10363
- ngpt-2.5.1.dist-info/METADATA,sha256=wdJY5g_7LG2hvj5U3I_uY9Zho02lrqfolgj0G2Gjr0A,14657
- ngpt-2.5.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- ngpt-2.5.1.dist-info/entry_points.txt,sha256=1cnAMujyy34DlOahrJg19lePSnb08bLbkUs_kVerqdk,39
- ngpt-2.5.1.dist-info/licenses/LICENSE,sha256=mQkpWoADxbHqE0HRefYLJdm7OpdrXBr3vNv5bZ8w72M,1065
- ngpt-2.5.1.dist-info/RECORD,,
{ngpt-2.5.1.dist-info → ngpt-2.7.1.dist-info}/WHEEL
File without changes

{ngpt-2.5.1.dist-info → ngpt-2.7.1.dist-info}/entry_points.txt
File without changes

{ngpt-2.5.1.dist-info → ngpt-2.7.1.dist-info}/licenses/LICENSE
File without changes