openai-term 3.0.1 → 3.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/bin/openai +104 -46
- metadata +4 -4
checksums.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
---
|
|
2
2
|
SHA256:
|
|
3
|
-
metadata.gz:
|
|
4
|
-
data.tar.gz:
|
|
3
|
+
metadata.gz: 1b541d1b3b223edaa022070e38cbf187104deed56fe67f321c3197a3ff7c2992
|
|
4
|
+
data.tar.gz: 5d6f9cf9ee175e59b6b90a790c2ae273a12cad80069ca1662a64b14ba4ecd199
|
|
5
5
|
SHA512:
|
|
6
|
-
metadata.gz:
|
|
7
|
-
data.tar.gz:
|
|
6
|
+
metadata.gz: e4de85e3718a0c94b6d8173a25424222e4b496960207151953a07557046ef81be336f495235439c25c31c324e3998de5b5e2c384aa0dc805d6ed95055aebdc00
|
|
7
|
+
data.tar.gz: 1e0f248ac0d35859d2a2fee6446358205f454f5b4e37b75da83828803f8ca2f512a230f7f139c285e7f1cf7c4748bcfe4e734e21f55011c7c1f55dc573e173e1
|
data/bin/openai
CHANGED
|
@@ -5,21 +5,23 @@
|
|
|
5
5
|
# A modern TUI for interacting with OpenAI's API
|
|
6
6
|
|
|
7
7
|
require 'optparse'
|
|
8
|
-
require 'openai'
|
|
8
|
+
require 'ruby/openai'
|
|
9
9
|
require 'rcurses'
|
|
10
10
|
require 'json'
|
|
11
11
|
require 'fileutils'
|
|
12
|
+
require 'io/console'
|
|
12
13
|
|
|
13
|
-
include Rcurses
|
|
14
14
|
include Rcurses::Input
|
|
15
|
-
include Rcurses::Cursor
|
|
16
15
|
|
|
17
16
|
# Constants
|
|
18
17
|
CONFIG_FILE = File.join(Dir.home, '.openai.conf')
|
|
19
18
|
HISTORY_FILE = File.join(Dir.home, '.openai_history.json')
|
|
20
19
|
DEFAULT_MODEL = "gpt-4-turbo-preview"
|
|
21
20
|
DEFAULT_MAX_TOKENS = 2048
|
|
22
|
-
VERSION = "3.0.1"
|
|
21
|
+
VERSION = "3.0.3"
|
|
22
|
+
REQUEST_TIMEOUT = 30
|
|
23
|
+
MAX_RETRIES = 2
|
|
24
|
+
RETRY_DELAY = 2
|
|
23
25
|
|
|
24
26
|
# Global variables
|
|
25
27
|
@model = DEFAULT_MODEL
|
|
@@ -59,7 +61,7 @@ def parse_options
|
|
|
59
61
|
opts.on('-c', '--config FILE', 'Config file path') { |c| options[:config] = c }
|
|
60
62
|
opts.on('-q', '--quiet', 'Skip TUI and output to stdout directly') { options[:quiet] = true }
|
|
61
63
|
opts.on('-h', '--help', 'Display help') { puts opts; exit }
|
|
62
|
-
opts.on('-v', '--version', 'Display version') { puts "OpenAI Terminal
|
|
64
|
+
opts.on('-v', '--version', 'Display version') { puts "OpenAI Terminal #{VERSION}"; exit }
|
|
63
65
|
end
|
|
64
66
|
|
|
65
67
|
optparse.parse!
|
|
@@ -76,15 +78,30 @@ def load_config(config_path = nil)
|
|
|
76
78
|
else
|
|
77
79
|
FileUtils.mkdir_p(File.dirname(config_file))
|
|
78
80
|
File.write(config_file, "@ai = 'your-secret-openai-key'")
|
|
79
|
-
puts "
|
|
80
|
-
puts "
|
|
81
|
-
puts "
|
|
81
|
+
puts "\nOpenAI Terminal - Configuration Required"
|
|
82
|
+
puts "=" * 50
|
|
83
|
+
puts "\nCreated config file: #{config_file}"
|
|
84
|
+
puts "\nPlease edit it and add your OpenAI API key:"
|
|
85
|
+
puts " @ai = \"sk-...\""
|
|
86
|
+
puts "\nGet your API key from:"
|
|
87
|
+
puts " https://platform.openai.com/api-keys"
|
|
88
|
+
|
|
89
|
+
# Only mention RTFM if it exists
|
|
90
|
+
rtfm_config = File.join(Dir.home, '.rtfm/conf')
|
|
91
|
+
if File.exist?(rtfm_config)
|
|
92
|
+
puts "\nTip: If you use RTFM, you can copy the @ai value from:"
|
|
93
|
+
puts " #{rtfm_config}"
|
|
94
|
+
end
|
|
95
|
+
puts "\nThen run 'openai' again to start the terminal interface."
|
|
82
96
|
exit 1
|
|
83
97
|
end
|
|
84
98
|
|
|
85
99
|
unless @api_key && @api_key != 'your-secret-openai-key'
|
|
86
|
-
puts "
|
|
87
|
-
puts "
|
|
100
|
+
puts "\nError: Invalid API key in #{config_file}"
|
|
101
|
+
puts "\nPlease edit the file and add your OpenAI API key:"
|
|
102
|
+
puts " @ai = \"sk-...\""
|
|
103
|
+
puts "\nGet your API key from:"
|
|
104
|
+
puts " https://platform.openai.com/api-keys"
|
|
88
105
|
exit 1
|
|
89
106
|
end
|
|
90
107
|
end
|
|
@@ -125,44 +142,77 @@ end
|
|
|
125
142
|
def init_client
|
|
126
143
|
@client = OpenAI::Client.new(
|
|
127
144
|
access_token: @api_key,
|
|
128
|
-
|
|
145
|
+
request_timeout: REQUEST_TIMEOUT
|
|
129
146
|
)
|
|
130
147
|
end
|
|
131
148
|
|
|
149
|
+
# Retry wrapper for transient API failures
|
|
150
|
+
def with_retry
|
|
151
|
+
retries = 0
|
|
152
|
+
begin
|
|
153
|
+
yield
|
|
154
|
+
rescue Net::OpenTimeout, Net::ReadTimeout, Errno::ETIMEDOUT => e
|
|
155
|
+
if retries < MAX_RETRIES
|
|
156
|
+
retries += 1
|
|
157
|
+
msg = "Timeout, retrying (#{retries}/#{MAX_RETRIES})..."
|
|
158
|
+
@chat_pane ? add_to_chat("system", msg.fg(208)) : $stderr.puts(msg)
|
|
159
|
+
sleep(RETRY_DELAY)
|
|
160
|
+
retry
|
|
161
|
+
end
|
|
162
|
+
raise
|
|
163
|
+
rescue Faraday::Error => e
|
|
164
|
+
status = e.respond_to?(:response_status) ? e.response_status : nil
|
|
165
|
+
if status && [429, 500, 502, 503].include?(status) && retries < MAX_RETRIES
|
|
166
|
+
retries += 1
|
|
167
|
+
delay = status == 429 ? RETRY_DELAY * 2 : RETRY_DELAY
|
|
168
|
+
msg = "HTTP #{status}, retrying (#{retries}/#{MAX_RETRIES})..."
|
|
169
|
+
@chat_pane ? add_to_chat("system", msg.fg(208)) : $stderr.puts(msg)
|
|
170
|
+
sleep(delay)
|
|
171
|
+
retry
|
|
172
|
+
end
|
|
173
|
+
raise
|
|
174
|
+
end
|
|
175
|
+
end
|
|
176
|
+
|
|
132
177
|
# Setup UI
|
|
133
178
|
def setup_ui
|
|
179
|
+
unless $stdin.tty?
|
|
180
|
+
puts "Error: This program requires a TTY terminal"
|
|
181
|
+
exit 1
|
|
182
|
+
end
|
|
183
|
+
|
|
134
184
|
rows, cols = IO.console.winsize
|
|
135
185
|
|
|
136
186
|
Rcurses.clear_screen
|
|
137
|
-
Cursor.hide
|
|
187
|
+
Rcurses::Cursor.hide
|
|
138
188
|
|
|
139
189
|
# Create panes - accounting for borders being drawn outside pane geometry
|
|
140
|
-
@header = Pane.new(1, 1, cols, 1, 255, 24)
|
|
190
|
+
@header = Rcurses::Pane.new(1, 1, cols, 1, 255, 24)
|
|
141
191
|
@header.border = false # Top pane doesn't need border
|
|
142
192
|
|
|
143
|
-
@chat_pane = Pane.new(1, 3, cols, rows - 7, 255, 232)
|
|
193
|
+
@chat_pane = Rcurses::Pane.new(1, 3, cols, rows - 7, 255, 232)
|
|
144
194
|
@chat_pane.border = true
|
|
145
195
|
|
|
146
|
-
@input_pane = Pane.new(1, rows - 2, cols, 1, 255, 234)
|
|
196
|
+
@input_pane = Rcurses::Pane.new(1, rows - 2, cols, 1, 255, 234)
|
|
147
197
|
@input_pane.border = true
|
|
148
198
|
|
|
149
|
-
@status_pane = Pane.new(1, rows, cols, 1, 255, 236)
|
|
199
|
+
@status_pane = Rcurses::Pane.new(1, rows, cols, 1, 255, 236)
|
|
150
200
|
|
|
151
201
|
# Popup panes (created but not displayed initially)
|
|
152
202
|
help_w = cols * 3 / 4
|
|
153
203
|
help_h = rows * 3 / 4
|
|
154
|
-
@help_pane = Pane.new((cols - help_w) / 2 + 1, (rows - help_h) / 2 + 1, help_w, help_h, 255, 234)
|
|
204
|
+
@help_pane = Rcurses::Pane.new((cols - help_w) / 2 + 1, (rows - help_h) / 2 + 1, help_w, help_h, 255, 234)
|
|
155
205
|
@help_pane.border = true
|
|
156
206
|
|
|
157
207
|
model_w = cols / 2
|
|
158
208
|
model_h = rows / 2
|
|
159
|
-
@model_list_pane = Pane.new((cols - model_w) / 2 + 1, (rows - model_h) / 2 + 1, model_w, model_h, 255, 233)
|
|
209
|
+
@model_list_pane = Rcurses::Pane.new((cols - model_w) / 2 + 1, (rows - model_h) / 2 + 1, model_w, model_h, 255, 233)
|
|
160
210
|
@model_list_pane.border = true
|
|
161
211
|
|
|
162
212
|
# Conversation list pane
|
|
163
213
|
conv_w = cols * 3 / 4
|
|
164
214
|
conv_h = rows * 3 / 4
|
|
165
|
-
@conversation_list_pane = Pane.new((cols - conv_w) / 2 + 1, (rows - conv_h) / 2 + 1, conv_w, conv_h, 255, 235)
|
|
215
|
+
@conversation_list_pane = Rcurses::Pane.new((cols - conv_w) / 2 + 1, (rows - conv_h) / 2 + 1, conv_w, conv_h, 255, 235)
|
|
166
216
|
@conversation_list_pane.border = true
|
|
167
217
|
|
|
168
218
|
# Popup state tracking
|
|
@@ -291,13 +341,15 @@ def get_openai_response(message, generate_image = false)
|
|
|
291
341
|
|
|
292
342
|
begin
|
|
293
343
|
if generate_image
|
|
294
|
-
response =
|
|
295
|
-
|
|
296
|
-
|
|
297
|
-
|
|
298
|
-
|
|
299
|
-
|
|
300
|
-
|
|
344
|
+
response = with_retry do
|
|
345
|
+
@client.images.generate(
|
|
346
|
+
parameters: {
|
|
347
|
+
prompt: message,
|
|
348
|
+
n: 1,
|
|
349
|
+
size: "1024x1024"
|
|
350
|
+
}
|
|
351
|
+
)
|
|
352
|
+
end
|
|
301
353
|
|
|
302
354
|
url = response.dig("data", 0, "url")
|
|
303
355
|
if url
|
|
@@ -316,28 +368,32 @@ def get_openai_response(message, generate_image = false)
|
|
|
316
368
|
end
|
|
317
369
|
end
|
|
318
370
|
|
|
319
|
-
response =
|
|
320
|
-
|
|
321
|
-
|
|
322
|
-
|
|
323
|
-
|
|
324
|
-
|
|
325
|
-
|
|
326
|
-
|
|
371
|
+
response = with_retry do
|
|
372
|
+
@client.chat(
|
|
373
|
+
parameters: {
|
|
374
|
+
model: @model,
|
|
375
|
+
messages: messages,
|
|
376
|
+
max_tokens: @max_tokens,
|
|
377
|
+
temperature: @temperature
|
|
378
|
+
}
|
|
379
|
+
)
|
|
380
|
+
end
|
|
327
381
|
|
|
328
382
|
content = response.dig("choices", 0, "message", "content")
|
|
329
383
|
else
|
|
330
384
|
# Use completion for older models
|
|
331
385
|
prompt = @current_conversation.map { |m| "#{m['role']}: #{m['content']}" }.join("\n") + "\nassistant:"
|
|
332
386
|
|
|
333
|
-
response =
|
|
334
|
-
|
|
335
|
-
|
|
336
|
-
|
|
337
|
-
|
|
338
|
-
|
|
339
|
-
|
|
340
|
-
|
|
387
|
+
response = with_retry do
|
|
388
|
+
@client.completions(
|
|
389
|
+
parameters: {
|
|
390
|
+
model: @model,
|
|
391
|
+
prompt: prompt,
|
|
392
|
+
max_tokens: @max_tokens,
|
|
393
|
+
temperature: @temperature
|
|
394
|
+
}
|
|
395
|
+
)
|
|
396
|
+
end
|
|
341
397
|
|
|
342
398
|
content = response.dig("choices", 0, "text")
|
|
343
399
|
end
|
|
@@ -380,7 +436,7 @@ def show_model_selection
|
|
|
380
436
|
|
|
381
437
|
# Get available models
|
|
382
438
|
begin
|
|
383
|
-
models_response = @client.models.list
|
|
439
|
+
models_response = with_retry { @client.models.list }
|
|
384
440
|
@available_models = models_response["data"]
|
|
385
441
|
.map { |m| m["id"] }
|
|
386
442
|
.select { |id| id.include?("gpt") || id.include?("davinci") || id.include?("curie") }
|
|
@@ -933,9 +989,11 @@ def main
|
|
|
933
989
|
input_loop
|
|
934
990
|
|
|
935
991
|
ensure
|
|
936
|
-
|
|
937
|
-
|
|
938
|
-
|
|
992
|
+
if @chat_pane # Only cleanup UI if TUI was initialized
|
|
993
|
+
save_history if defined?(@conversation_history)
|
|
994
|
+
Rcurses::Cursor.show if defined?(Rcurses::Cursor)
|
|
995
|
+
Rcurses.clear_screen if defined?(Rcurses)
|
|
996
|
+
end
|
|
939
997
|
end
|
|
940
998
|
|
|
941
999
|
# Run the program
|
metadata
CHANGED
|
@@ -1,14 +1,14 @@
|
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
|
2
2
|
name: openai-term
|
|
3
3
|
version: !ruby/object:Gem::Version
|
|
4
|
-
version: 3.0.1
|
|
4
|
+
version: 3.0.3
|
|
5
5
|
platform: ruby
|
|
6
6
|
authors:
|
|
7
7
|
- Geir Isene
|
|
8
8
|
autorequire:
|
|
9
9
|
bindir: bin
|
|
10
10
|
cert_chain: []
|
|
11
|
-
date:
|
|
11
|
+
date: 2026-03-21 00:00:00.000000000 Z
|
|
12
12
|
dependencies:
|
|
13
13
|
- !ruby/object:Gem::Dependency
|
|
14
14
|
name: ruby-openai
|
|
@@ -40,8 +40,8 @@ dependencies:
|
|
|
40
40
|
version: '6.0'
|
|
41
41
|
description: 'A modern terminal interface to OpenAI with a full TUI using rcurses.
|
|
42
42
|
Features include interactive chat mode, conversation history, model selection, and
|
|
43
|
-
more. Version 3.0.
|
|
44
|
-
|
|
43
|
+
more. Version 3.0.3: Add configurable request timeout and retry logic for transient
|
|
44
|
+
API failures (timeout, 429, 500/502/503).'
|
|
45
45
|
email: g@isene.com
|
|
46
46
|
executables:
|
|
47
47
|
- openai
|