openai-term 2.0 → 2.2
This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- checksums.yaml +4 -4
- data/bin/openai +82 -27
- metadata +4 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 9cd942dd8d572b2ad6d8825fb3b0871a4a4f9be7297d9af2d01d752f7299455f
+  data.tar.gz: 22b0f14f8c67b9005a2183275be173e78ede4f2a3174b09b734b24ca8e24bb7b
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 2549e8da498c5f09891b2a19d83d1ee3d5c3809ad5c4f08edc81b29152dd0d802d93965f46ab8234b1c1aec24816988a68323a70bd5efea9f2a9bf0ffb5b1faf
+  data.tar.gz: 9bda6974048e71250f2ceae7ba8df5e2f653319adf4a5a1aab4ec608feb7499cbe8cf9109cc5d8e4dd85531f2a6c6b78e67e337e8c25c7b23cab9b9b1629127a
data/bin/openai
CHANGED
@@ -17,9 +17,9 @@ include Rcurses::Cursor
 # Constants
 CONFIG_FILE = File.join(Dir.home, '.openai.conf')
 HISTORY_FILE = File.join(Dir.home, '.openai_history.json')
-DEFAULT_MODEL = "gpt-
+DEFAULT_MODEL = "gpt-4-turbo-preview"
 DEFAULT_MAX_TOKENS = 2048
-VERSION = "2.0"
+VERSION = "2.2"

 # Global variables
 @model = DEFAULT_MODEL
@@ -42,6 +42,7 @@ VERSION = "2.0"
 @input_text = ""
 @chat_scroll = 0
 @selected_model = 0
+@in_editline = false

 # Parse command line options
 def parse_options
@@ -58,7 +59,7 @@ def parse_options
   opts.on('-c', '--config FILE', 'Config file path') { |c| options[:config] = c }
   opts.on('-q', '--quiet', 'Skip TUI and output to stdout directly') { options[:quiet] = true }
   opts.on('-h', '--help', 'Display help') { puts opts; exit }
-  opts.on('-v', '--version', 'Display version') { puts "OpenAI Terminal 2.
+  opts.on('-v', '--version', 'Display version') { puts "OpenAI Terminal 2.1"; exit }
 end

 optparse.parse!
@@ -176,13 +177,13 @@ def setup_ui

   # Ensure status pane is visible at startup and input pane is ready
   @status_pane.refresh
-  @
-
+  @in_editline = false
+  update_input_prompt
 end

 # Update header
 def update_header
-  title = "OpenAI Terminal v2.
+  title = "OpenAI Terminal v2.1".b.fg(226)
   model_info = "Model: #{@model}".fg(117)
   tokens_info = "Max Tokens: #{@max_tokens}".fg(117)

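rcurses styles strings through chainable String helpers, as seen in the header code above and in the new prompt helpers added further down: .b sets bold and .fg(n) sets a 256-color foreground. A minimal sketch, assuming the rcurses String extensions are loaded and that colors 226 and 240 render as bright yellow and dark grey on a typical xterm-256color palette (exact rendering is terminal-dependent):

    require 'rcurses'   # provides the String styling helpers used throughout bin/openai

    title = "OpenAI Terminal v2.1".b.fg(226)   # bold, color 226 (bright yellow on most palettes)
    idle  = "You: ".fg(240)                    # color 240 (dark grey), used for the dimmed prompt
    puts title
    puts idle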
@@ -203,7 +204,8 @@ def update_status
       "C-S:Save",
       "C-Y:Copy",
       "C-V:Version",
-      "C-I:Image"
+      "C-I:Image",
+      "PgUp/PgDn:Scroll"
     ].join(" ")
   when :model_select
     shortcuts = "↑↓:Navigate Enter:Select ESC:Cancel"
@@ -271,7 +273,7 @@ end

 # Send message to OpenAI (includes user message display)
 def send_to_openai(message, generate_image = false)
-  add_to_chat("user", message) unless generate_image
+  add_to_chat("user", message) unless generate_image || !@chat_pane
   get_openai_response(message, generate_image)
 end

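The new guard is denser than it looks: the user's message is echoed into the chat pane only when this is not an image request and a chat pane actually exists (in quiet mode no TUI panes are built). A minimal restatement of the same condition, with a hypothetical helper name:

    # Hypothetical helper, equivalent to: ... unless generate_image || !@chat_pane
    def echo_user_message?(generate_image, chat_pane)
      !generate_image && !chat_pane.nil?
    end

    echo_user_message?(false, :pane)  #=> true   normal chat inside the TUI
    echo_user_message?(false, nil)    #=> false  quiet mode: no chat pane to write to
    echo_user_message?(true,  :pane)  #=> false  image prompts are not echoed as chat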
@@ -305,8 +307,13 @@ def get_openai_response(message, generate_image = false)
     end
   elsif @model.include?("gpt")
     # Prepare messages for chat models
-    messages = @current_conversation.
-
+    messages = if @current_conversation.empty? && !@chat_pane
+      # In quiet mode without conversation history, create a single message
+      [{ role: "user", content: message }]
+    else
+      @current_conversation.map do |msg|
+        { role: msg["role"] == "user" ? "user" : "assistant", content: msg["content"] }
+      end
     end

     response = @client.chat(
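The call to @client.chat is cut off by the diff context here, but the gem declares a dependency on ruby-openai, whose chat endpoint takes a single parameters: hash. A minimal sketch of what the quiet-mode branch would end up sending, assuming the surrounding (not shown) code passes @model and @max_tokens through; parameter names beyond model, messages and max_tokens are not confirmed by this diff:

    require 'openai'   # the ruby-openai gem this package depends on

    client   = OpenAI::Client.new(access_token: ENV['OPENAI_API_KEY'])
    messages = [{ role: "user", content: "Summarise this repository in one line" }]

    response = client.chat(
      parameters: {
        model:      "gpt-4-turbo-preview",   # the new DEFAULT_MODEL
        messages:   messages,
        max_tokens: 2048                     # DEFAULT_MAX_TOKENS
      }
    )
    puts response.dig("choices", 0, "message", "content")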
@@ -416,7 +423,7 @@ def show_help_popup
   @help_visible = true

   help_text = <<~HELP
-    #{"OpenAI Terminal Help".b.fg(226)}
+    #{"OpenAI Terminal Help v2.1".b.fg(226)}

     #{"Keyboard Shortcuts:".b.fg(117)}

@@ -429,6 +436,7 @@ def show_help_popup
     Ctrl-Y - Copy last AI response to clipboard
     Ctrl-V - Show version information
     Ctrl-I - Generate image
+    PgUp/PgDn - Scroll chat window up/down
     Any char - Start typing message

     #{"Features:".b.fg(117)}
@@ -459,6 +467,43 @@ def hide_help_popup
   [@header, @chat_pane, @input_pane, @status_pane].each(&:full_refresh)
 end

+# Update input pane prompt with appropriate styling
+def update_input_prompt(text = "")
+  if @in_editline
+    # Bright prompt when in editline mode (matches chat window)
+    @input_pane.text = "You: ".b.fg(226) + text
+  else
+    # Dimmed prompt when not in editline mode
+    @input_pane.text = "You: ".fg(240) + text
+  end
+  @input_pane.refresh
+end
+
+# Scroll chat pane
+def scroll_chat_pane(lines)
+  return unless @chat_pane
+
+  # Get current scroll position
+  current_scroll = @chat_pane.ix || 0
+
+  # Calculate new scroll position
+  new_scroll = current_scroll + lines
+
+  # Get total lines and visible lines to calculate scroll limits
+  total_lines = @chat_pane.text ? @chat_pane.text.lines.count : 0
+  visible_lines = @chat_pane.h - 2 # Account for border
+  max_scroll = [total_lines - visible_lines, 0].max
+
+  # Constrain scroll position to valid range
+  new_scroll = [[new_scroll, 0].max, max_scroll].min
+
+  # Apply scroll if it changed
+  if new_scroll != current_scroll
+    @chat_pane.ix = new_scroll
+    @chat_pane.refresh
+  end
+end
+
 # Navigate input history
 def navigate_input_history(direction, input_history, current_index)
   return if input_history.empty?
@@ -475,13 +520,13 @@ def navigate_input_history(direction, input_history, current_index)

   if new_index < input_history.size
     # Show historical message
-    @
+    @in_editline = false
+    update_input_prompt(input_history[new_index])
   else
     # Beyond history - empty input
-    @
+    @in_editline = false
+    update_input_prompt
   end
-
-  @input_pane.refresh
 end

 # Main input loop
@@ -493,7 +538,6 @@ def input_loop
   loop do
     key = getchr

-
     # Handle popup input first
     if @help_visible
       case key
@@ -610,11 +654,19 @@ def input_loop
     when "DOWN"
       navigate_input_history(1, input_history, history_index)
       history_index = @current_history_index
+    when "PgUP"
+      # Scroll chat pane up
+      scroll_chat_pane(-10)
+    when "PgDOWN"
+      # Scroll chat pane down
+      scroll_chat_pane(10)
     when "C-I"
       # Image generation
-      @
+      @in_editline = true
+      @input_pane.prompt = "Image: ".b.fg(226)
       @input_pane.text = ""
       @input_pane.editline
+      @in_editline = false

       # Only generate image if user didn't cancel (ESC)
       final_text = @input_pane.text&.strip || ""
@@ -627,16 +679,18 @@ def input_loop
       end
       # Reset input pane completely
       @input_pane.clear
-      @
-
+      @in_editline = false
+      update_input_prompt
     else
       # Any printable character -> Enter input pane editline
       if key && key.length == 1 && key.match?(/[[:print:]]/)
         # Set up for editline
-        @
+        @in_editline = true
+        @input_pane.prompt = "You: ".b.fg(226)
         @input_pane.text = key
         initial_text = key
         @input_pane.editline
+        @in_editline = false

         # After editline returns, check what happened
         final_text = @input_pane.text&.strip || ""
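The same bracketing pattern appears in both the Ctrl-I branch and the printable-character branch above: raise @in_editline, set the pane's prompt and text, run editline, then lower the flag and read back the stripped text. Restated as a hypothetical helper (not in the gem), assuming an rcurses-style pane that responds to prompt=, text= and editline:

    # Hypothetical helper; assumes an rcurses-style pane with prompt=, text= and editline
    def with_editline(pane, prompt, initial = "")
      @in_editline = true
      pane.prompt  = prompt
      pane.text    = initial
      pane.editline              # blocks until the user confirms or cancels
      @in_editline = false
      pane.text&.strip || ""     # "" when the user cancelled or typed nothing
    end

    # e.g. image_prompt = with_editline(@input_pane, "Image: ".b.fg(226))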
@@ -661,10 +715,9 @@ def input_loop
         end

         # Always reset input pane completely (clears any remaining text)
-        @input_pane.text = "You: "
         @input_pane.clear
-        @
-
+        @in_editline = false
+        update_input_prompt
       end
     end
   end
@@ -703,7 +756,7 @@ end
 def update_conversation_list
   content = "Load Conversation (↑↓ to navigate, Enter to load, Esc to cancel):".b.fg(226) + "\n\n"

-  @conversation_history.each_with_index do |conv, i|
+  @conversation_history.reverse.each_with_index do |conv, i|
     timestamp = conv["timestamp"]
     model = conv["model"]
     message_count = conv["messages"].size
@@ -729,7 +782,9 @@ end
 def load_selected_conversation
   return if @selected_conversation >= @conversation_history.size

-
+  # Account for reverse order in display
+  actual_index = @conversation_history.size - 1 - @selected_conversation
+  selected_conv = @conversation_history[actual_index]
   @current_conversation = selected_conv["messages"].dup

   # Update chat display
@@ -868,8 +923,8 @@ def main
   return if result == :exit

   # Initialize input pane with prompt before starting loop
-  @
-
+  @in_editline = false
+  update_input_prompt

   # Main loop
   input_loop
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: openai-term
 version: !ruby/object:Gem::Version
-  version: '2.0'
+  version: '2.2'
 platform: ruby
 authors:
 - Geir Isene
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2025-
+date: 2025-08-12 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ruby-openai
@@ -40,7 +40,8 @@ dependencies:
       version: '3.5'
 description: 'A modern terminal interface to OpenAI with a full TUI using rcurses.
   Features include interactive chat mode, conversation history, model selection, and
-  more. Version 2.
+  more. Version 2.2: Updated default model to GPT-4 Turbo, fixed quiet mode for CLI
+  usage, prepared for GPT-5 compatibility.'
 email: g@isene.com
 executables:
 - openai