ai-chat 0.3.2 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +252 -194
- data/ai-chat.gemspec +4 -4
- data/lib/ai/amazing_print.rb +25 -26
- data/lib/ai/chat.rb +127 -169
- data/lib/ai/http.rb +1 -1
- data/lib/ai/items.rb +54 -0
- data/lib/ai/message.rb +23 -0
- data/lib/ai-chat.rb +11 -0
- metadata +6 -2
data/ai-chat.gemspec
CHANGED

@@ -2,9 +2,9 @@
 
 Gem::Specification.new do |spec|
   spec.name = "ai-chat"
-  spec.version = "0.3.2"
-  spec.authors = ["Raghu Betina"]
-  spec.email = ["raghu@firstdraft.com"]
+  spec.version = "0.5.0"
+  spec.authors = ["Raghu Betina", "Jelani Woods"]
+  spec.email = ["raghu@firstdraft.com", "jelani@firstdraft.com"]
   spec.homepage = "https://github.com/firstdraft/ai-chat"
   spec.summary = "A beginner-friendly Ruby interface for OpenAI's API"
   spec.license = "MIT"

@@ -21,7 +21,7 @@ Gem::Specification.new do |spec|
   spec.required_ruby_version = "~> 3.2"
   spec.add_runtime_dependency "openai", "~> 0.34"
   spec.add_runtime_dependency "marcel", "~> 1.0"
-  spec.add_runtime_dependency "base64", …
+  spec.add_runtime_dependency "base64", "~> 0.1", "> 0.1.1"
   spec.add_runtime_dependency "json", "~> 2.0"
   spec.add_runtime_dependency "ostruct", "~> 0.2"
   spec.add_runtime_dependency "tty-spinner", "~> 0.9.3"
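Not part of the diff, but for readers unfamiliar with compound gem requirements: the new base64 constraint combines the pessimistic operator with an explicit lower bound. A minimal sketch of how RubyGems evaluates it (the candidate versions are illustrative):

```ruby
require "rubygems"

# "~> 0.1" allows >= 0.1 and < 1.0; "> 0.1.1" then excludes 0.1.0 and 0.1.1.
requirement = Gem::Requirement.new("~> 0.1", "> 0.1.1")

["0.1.1", "0.1.2", "0.2.0", "1.0.0"].each do |candidate|
  puts "#{candidate}: #{requirement.satisfied_by?(Gem::Version.new(candidate))}"
end
# => 0.1.1: false, 0.1.2: true, 0.2.0: true, 1.0.0: false
```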
data/lib/ai/amazing_print.rb
CHANGED

@@ -1,4 +1,22 @@
 require "amazing_print"
+
+# Fix AmazingPrint's colorless method to strip HTML tags in addition to ANSI codes.
+# Without this, alignment is broken when html: true because colorless_size
+# doesn't account for <kbd> tag lengths.
+# TODO: Remove if https://github.com/amazing-print/amazing_print/pull/146 is merged.
+module AmazingPrint
+  module Formatters
+    class BaseFormatter
+      alias_method :original_colorless, :colorless
+
+      def colorless(string)
+        result = original_colorless(string)
+        result.gsub(/<kbd[^>]*>|<\/kbd>/, "")
+      end
+    end
+  end
+end
+
 # :reek:IrresponsibleModule
 module AmazingPrint
   module AI

@@ -27,33 +45,11 @@ module AmazingPrint
     end
   end
 
-    # :reek:DuplicateMethodCall
     # :reek:FeatureEnvy
-    # :reek:NilCheck
-    # :reek:TooManyStatements
     def format_ai_chat(chat)
-      vars = …
-
-      # Format messages with truncation
-      if chat.instance_variable_defined?(:@messages)
-        messages = chat.instance_variable_get(:@messages).map do |msg|
-          truncated_msg = msg.dup
-          if msg[:content].is_a?(String) && msg[:content].length > 80
-            truncated_msg[:content] = msg[:content][0..77] + "..."
-          end
-          truncated_msg
-        end
-        vars << ["@messages", messages]
+      vars = chat.inspectable_attributes.map do |(name, value)|
+        [name.to_s, value]
       end
-
-      # Add other variables (except sensitive ones)
-      skip_vars = [:@api_key, :@client, :@messages]
-      chat.instance_variables.sort.each do |var|
-        next if skip_vars.include?(var)
-        value = chat.instance_variable_get(var)
-        vars << [var.to_s, value] unless value.nil?
-      end
-
       format_object(chat, vars)
     end
 

@@ -65,10 +61,13 @@ module AmazingPrint
         "#{name}: #{inspector.awesome(value)}"
       end
 
+      lt = @options[:html] ? "&lt;" : "<"
+      gt = @options[:html] ? "&gt;" : ">"
+
       if @options[:multiline]
-        "…
+        "##{lt}#{object.class}\n#{data.map { |line| " #{line}" }.join("\n")}\n#{gt}"
       else
-        "…
+        "##{lt}#{object.class} #{data.join(", ")}#{gt}"
       end
     end
   end
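The monkey-patch above exists so that AmazingPrint's alignment math uses the visible width of a value rather than its HTML-decorated length. A standalone illustration of the idea, independent of AmazingPrint itself (the decorated string is a placeholder):

```ruby
# Strip <kbd> wrappers before measuring width, as the patched colorless method does.
KBD_TAGS = /<kbd[^>]*>|<\/kbd>/

def visible_width(string)
  string.gsub(KBD_TAGS, "").length
end

decorated = %(<kbd style="color: blue">@model</kbd>)
puts decorated.length         # counts the tag characters, which breaks alignment
puts visible_width(decorated) # => 6, the width the padding should be based on
```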
data/lib/ai/chat.rb
CHANGED

@@ -12,7 +12,6 @@ require "tty-spinner"
 require "timeout"
 
 require_relative "http"
-include AI::Http
 
 module AI
   # :reek:MissingSafeMethod { exclude: [ generate! ] }

@@ -21,20 +20,21 @@ module AI
   # :reek:InstanceVariableAssumption
   # :reek:IrresponsibleModule
   class Chat
+    include AI::Http
+
     # :reek:Attribute
-    attr_accessor :background, :code_interpreter, :conversation_id, :image_generation, :image_folder, :messages, :model, :proxy, :…
-    attr_reader :…
+    attr_accessor :background, :code_interpreter, :conversation_id, :image_generation, :image_folder, :messages, :model, :proxy, :reasoning_effort, :web_search
+    attr_reader :client, :last_response_id, :schema, :schema_file
 
-
-    PROXY_URL = "https://prepend.me/".freeze
+    PROXY_URL = "https://prepend.me/"
 
     def initialize(api_key: nil, api_key_env_var: "OPENAI_API_KEY")
      @api_key = api_key || ENV.fetch(api_key_env_var)
      @messages = []
      @reasoning_effort = nil
-      @model = "gpt-…
+      @model = "gpt-5.2"
      @client = OpenAI::Client.new(api_key: @api_key)
-      @…
+      @last_response_id = nil
      @proxy = false
      @image_generation = false
      @image_folder = "./images"

@@ -43,15 +43,15 @@ module AI
    def self.generate_schema!(description, location: "schema.json", api_key: nil, api_key_env_var: "OPENAI_API_KEY", proxy: false)
      api_key ||= ENV.fetch(api_key_env_var)
      prompt_path = File.expand_path("../prompts/schema_generator.md", __dir__)
-      system_prompt = File.…
+      system_prompt = File.read(prompt_path)
 
      json = if proxy
        uri = URI(PROXY_URL + "api.openai.com/v1/responses")
        parameters = {
-          model: "gpt-5.…
+          model: "gpt-5.2",
          input: [
            {role: :system, content: system_prompt},
-            {role: :user, content: description}
+            {role: :user, content: description}
          ],
          text: {format: {type: "json_object"}},
          reasoning: {effort: "high"}

@@ -61,7 +61,7 @@ module AI
      else
        client = OpenAI::Client.new(api_key: api_key)
        response = client.responses.create(
-          model: "gpt-5.…
+          model: "gpt-5.2",
          input: [
            {role: :system, content: system_prompt},
            {role: :user, content: description}

@@ -77,9 +77,7 @@ module AI
      if location
        path = Pathname.new(location)
        FileUtils.mkdir_p(path.dirname) if path.dirname != "."
-        File.…
-          file.write(content)
-        end
+        File.binwrite(location, content)
      end
      content
    end
@@ -87,15 +85,12 @@ module AI
    # :reek:TooManyStatements
    # :reek:NilCheck
    def add(content, role: "user", response: nil, status: nil, image: nil, images: nil, file: nil, files: nil)
-      if image.nil? && images.nil? && file.nil? && files.nil?
-        …
-        message[:content] = content if content
-        message[:status] = status if status
-        messages.push(message)
+      message = if image.nil? && images.nil? && file.nil? && files.nil?
+        msg = Message[role: role]
+        msg[:content] = content if content
+        msg[:response] = response if response
+        msg[:status] = status if status
+        msg
      else
        text_and_files_array = [
          {

@@ -125,14 +120,15 @@ module AI
          text_and_files_array.push(process_file_input(file))
        end
 
-        …
-        }
-      )
+        Message[
+          role: role,
+          content: text_and_files_array,
+          status: status
+        ]
      end
+
+      messages.push(message)
+      message
    end
 
    def system(message)
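`add` now builds messages through a new `Message` class (added in data/lib/ai/message.rb, whose body is not shown in this diff). Judging only from the call sites here — `Message[role: role]` construction, `msg[:content] = ...` assignment, and `msg.except(:response)` later on — it behaves like a Hash subclass. A hypothetical minimal sketch consistent with those call sites; the real class may do more:

```ruby
# Hypothetical sketch only — the actual implementation lives in
# data/lib/ai/message.rb and is not part of this diff.
class Message < Hash
end

msg = Message[role: "user"]   # Hash.[] returns an instance of the subclass
msg[:content] = "Hello!"
msg.except(:response)         # => {role: "user", content: "Hello!"}
```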
@@ -154,7 +150,7 @@ module AI
      response = create_response
      parse_response(response)
 
-      …
+      @last_response_id = last.dig(:response, :id)
      last
    end
 

@@ -166,29 +162,11 @@ module AI
      response = if wait
        wait_for_response(timeout)
      else
-        retrieve_response(…
+        retrieve_response(last_response_id)
      end
      parse_response(response)
    end
 
-    # :reek:NilCheck
-    # :reek:TooManyStatements
-    def reasoning_effort=(value)
-      if value.nil?
-        @reasoning_effort = nil
-        return
-      end
-
-      normalized_value = value.to_sym
-
-      if VALID_REASONING_EFFORTS.include?(normalized_value)
-        @reasoning_effort = normalized_value
-      else
-        valid_values = VALID_REASONING_EFFORTS.map { |valid_value| ":#{valid_value} or \"#{valid_value}\"" }.join(", ")
-        raise ArgumentError, "Invalid reasoning_effort value: '#{value}'. Must be one of: #{valid_values}"
-      end
-    end
-
    def schema=(value)
      if value.is_a?(String)
        parsed = JSON.parse(value, symbolize_names: true)
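Note that the custom `reasoning_effort=` writer (with its `VALID_REASONING_EFFORTS` check) is removed; `reasoning_effort` is now a plain accessor, and whatever is assigned is passed straight to the API as `reasoning: {effort: ..., summary: "auto"}`. An illustrative use of the attributes visible in this diff (the prompt text is a placeholder):

```ruby
chat = AI::Chat.new
chat.model = "gpt-5.2"
chat.reasoning_effort = "low"   # no longer validated or symbolized client-side
chat.add("Summarize the plot of Hamlet in one sentence.")
chat.generate!
```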
@@ -201,7 +179,7 @@ module AI
      end
 
    def schema_file=(path)
-      content = File.…
+      content = File.read(path)
      @schema_file = path
      self.schema = content
    end

@@ -210,16 +188,16 @@ module AI
      messages.last
    end
 
-    def …
+    def get_items(order: :asc)
      raise "No conversation_id set. Call generate! first to create a conversation." unless conversation_id
 
-      if proxy
-        uri = URI(PROXY_URL + "api.openai.com/v1/conversations/#{conversation_id}/items?order=#{order…
+      raw_items = if proxy
+        uri = URI(PROXY_URL + "api.openai.com/v1/conversations/#{conversation_id}/items?order=#{order}")
        response_hash = send_request(uri, content_type: "json", method: "get")
 
        if response_hash.key?(:data)
          response_hash.dig(:data).map do |hash|
-            # Transform values to allow expected symbols that non-proxied request returns
+            # Transform values to allow expected symbols that non-proxied request returns
 
            hash.transform_values! do |value|
              if hash.key(value) == :type
@@ -236,67 +214,56 @@ module AI
      else
        client.conversations.items.list(conversation_id, order: order)
      end
+
+      Items.new(raw_items, conversation_id: conversation_id)
    end
 
-    def …
-      …
+    def inspectable_attributes
+      attrs = []
+
+      # 1. Model and reasoning (configuration)
+      attrs << [:@model, @model]
+      attrs << [:@reasoning_effort, @reasoning_effort]
+
+      # 2. Conversation state
+      attrs << [:@conversation_id, @conversation_id]
+      attrs << [:@last_response_id, @last_response_id] if @last_response_id
+
+      # 3. Messages (the main content, without response details)
+      display_messages = @messages.map { |msg| msg.except(:response) }
+      attrs << [:@messages, display_messages]
 
-      …
+      # 4. Optional features (only if enabled/changed from default)
+      attrs << [:@proxy, @proxy] if @proxy != false
+      attrs << [:@image_generation, @image_generation] if @image_generation != false
+      attrs << [:@image_folder, @image_folder] if @image_folder != "./images"
 
-      …
+      # 5. Optional state (only if set)
+      attrs << [:@background, @background] if @background
+      attrs << [:@code_interpreter, @code_interpreter] if @code_interpreter
+      attrs << [:@web_search, @web_search] if @web_search
+      attrs << [:@schema, @schema] if @schema
+      attrs << [:@schema_file, @schema_file] if @schema_file
 
-      …
+      attrs
    end
 
    def inspect
-      …
+      ai(plain: !$stdout.tty?, multiline: true)
    end
 
-    …
-    def pretty_print(q)
-      q.group(1, "#<#{self.class}", ">") do
-        q.breakable
-
-        # Show messages with truncation
-        q.text "@messages="
-        truncated_messages = @messages.map do |msg|
-          truncated_msg = msg.dup
-          if msg[:content].is_a?(String) && msg[:content].length > 80
-            truncated_msg[:content] = msg[:content][0..77] + "..."
-          end
-          truncated_msg
-        end
-        q.pp truncated_messages
-
-        # Show other instance variables (except sensitive ones)
-        skip_vars = [:@messages, :@api_key, :@client]
-        instance_variables.sort.each do |var|
-          next if skip_vars.include?(var)
-          value = instance_variable_get(var)
-          unless value.nil?
-            q.text ","
-            q.breakable
-            q.text "#{var}="
-            q.pp value
-          end
-        end
-      end
+    def to_html
+      AI.wrap_html(ai(html: true, multiline: true))
+    end
+
+    def pretty_inspect
+      "#{inspect}\n"
    end
 
    private
 
    class InputClassificationError < StandardError; end
+
    class WrongAPITokenUsedError < StandardError; end
 
    # :reek:FeatureEnvy
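`get_items` now returns the raw API result wrapped in a new `Items` object (data/lib/ai/items.rb, not shown here), and `inspect`/`to_html` render the chat through AmazingPrint using `inspectable_attributes`. Illustrative calls based only on what this diff shows; Items' own API beyond construction is not visible here:

```ruby
items = chat.get_items(order: :desc)  # => an AI::Items wrapping the conversation items

puts chat.inspect   # plain-text AmazingPrint output built from inspectable_attributes
html = chat.to_html # the same data rendered with html: true and wrapped by AI.wrap_html
```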
@@ -332,18 +299,10 @@ module AI
      parameters[:background] = background if background
      parameters[:tools] = tools unless tools.empty?
      parameters[:text] = schema if schema
-      parameters[:reasoning] = {effort: reasoning_effort} if reasoning_effort
-      …
-        parameters[:previous_response_id] = previous_response_id
-      elsif previous_response_id
-        parameters[:previous_response_id] = previous_response_id
-      elsif conversation_id
-        parameters[:conversation] = conversation_id
-      else
-        create_conversation
-      end
+      parameters[:reasoning] = {effort: reasoning_effort, summary: "auto"} if reasoning_effort
+
+      create_conversation unless conversation_id
+      parameters[:conversation] = conversation_id
 
      messages_to_send = prepare_messages_for_api
      parameters[:input] = strip_responses(messages_to_send) unless messages_to_send.empty?

@@ -381,7 +340,7 @@ module AI
        if response.key?(:conversation)
          self.conversation_id = response.dig(:conversation, :id)
        end
-      else
+      else
        text_response = response.output_text
        response_id = response.id
        response_status = response.status

@@ -415,12 +374,12 @@ module AI
        message.dig(:response, :id) == response_id
      end
 
-      message = …
+      message = Message[
        role: "assistant",
        content: response_content,
        response: chat_response,
        status: response_status
-      …
+      ]
 
      message.store(:images, image_filenames) unless image_filenames.empty?
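With this change every request is attached to a server-side conversation: one is created on first use and `parameters[:conversation]` is always sent, replacing the old `previous_response_id`/`conversation` branching. The sketch below assumes that reusing a `conversation_id` across Chat objects is valid, since the accessor is public; that workflow is not documented in this diff:

```ruby
first = AI::Chat.new
first.add("Remember that my favorite color is teal.")
first.generate!

second = AI::Chat.new
second.conversation_id = first.conversation_id  # continue the same server-side conversation
second.add("What is my favorite color?")
second.generate!
```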
@@ -428,21 +387,22 @@ module AI
        messages[existing_message_position] = message
      else
        messages.push(message)
-        message
      end
+
+      message
    end
 
    def cancel_request
-      client.responses.cancel(…
+      client.responses.cancel(last_response_id)
    end
 
    def prepare_messages_for_api
-      return messages unless …
+      return messages unless last_response_id
 
-      …
+      last_response_index = messages.find_index { |message| message.dig(:response, :id) == last_response_id }
 
-      if …
-        messages[(…
+      if last_response_index
+        messages[(last_response_index + 1)..] || []
      else
        messages
      end
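`prepare_messages_for_api` now slices off everything up to and including the message that carries the last response id, so only new input is resent. A standalone illustration of that slicing, with made-up message hashes:

```ruby
messages = [
  {role: "user", content: "Hi"},
  {role: "assistant", content: "Hello!", response: {id: "resp_123"}},
  {role: "user", content: "What's new?"}
]
last_response_id = "resp_123"

index = messages.find_index { |message| message.dig(:response, :id) == last_response_id }
to_send = index ? (messages[(index + 1)..] || []) : messages
p to_send  # => [{:role=>"user", :content=>"What's new?"}]
```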
@@ -578,7 +538,7 @@ module AI
    def tools
      tools_list = []
      if web_search
-        tools_list << {type: "…
+        tools_list << {type: "web_search"}
      end
      if image_generation
        tools_list << {type: "image_generation"}

@@ -619,12 +579,12 @@ module AI
    def extract_and_save_images(response)
      image_filenames = []
 
-      if proxy
-        …
+      image_outputs = if proxy
+        response.dig(:output).select { |output|
          output.dig(:type) == "image_generation_call"
        }
-      else
-        …
+      else
+        response.output.select { |output|
          output.respond_to?(:type) && output.type == :image_generation_call
        }
      end

@@ -708,7 +668,7 @@ module AI
        message_outputs = response.dig(:output).select do |output|
          output.dig(:type) == "message"
        end
-
+
        outputs_with_annotations = message_outputs.map do |message|
          message.dig(:content).find do |content|
            content.dig(:annotations).length.positive?

@@ -718,7 +678,7 @@ module AI
        message_outputs = response.output.select do |output|
          output.respond_to?(:type) && output.type == :message
        end
-
+
        outputs_with_annotations = message_outputs.map do |message|
          message.content.find do |content|
            content.respond_to?(:annotations) && content.annotations.length.positive?
@@ -737,12 +697,12 @@ module AI
          annotation.key?(:filename)
        end
      end.compact
-
+
      annotations.each do |annotation|
        container_id = annotation.dig(:container_id)
        file_id = annotation.dig(:file_id)
        filename = annotation.dig(:filename)
-
+
        warn_if_file_fails_to_save do
          file_content = retrieve_file(file_id, container_id: container_id)
          file_path = File.join(subfolder_path, filename)

@@ -756,18 +716,16 @@ module AI
          annotation.respond_to?(:filename)
        end
      end.compact
-
+
      annotations.each do |annotation|
        container_id = annotation.container_id
        file_id = annotation.file_id
        filename = annotation.filename
-
+
        warn_if_file_fails_to_save do
          file_content = retrieve_file(file_id, container_id: container_id)
          file_path = File.join(subfolder_path, filename)
-          File.…
-            file.write(file_content.read)
-          end
+          File.binwrite(file_path, file_content.read)
          filenames << file_path
        end
      end
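Several call sites (the schema writer earlier and the annotation loop here) swap a `File.open`/`write` block for `File.binwrite`, which writes the whole payload in one binary-mode call. A minimal comparison; the filename and content are placeholders, and the mode of the old block form is truncated in the diff's removed lines:

```ruby
content = "downloaded bytes"

File.open("output.bin", "wb") do |file|  # block form (old code; mode assumed here)
  file.write(content)
end

File.binwrite("output.bin", content)     # one-liner used throughout this release
```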
@@ -790,53 +748,53 @@ module AI
      yield
    end
  rescue Timeout::Error
-    client.responses.cancel(…
+    client.responses.cancel(last_response_id)
  end
 
  # :reek:DuplicateMethodCall
  # :reek:TooManyStatements
  def wait_for_response(timeout)
-    …
+    spinner = TTY::Spinner.new("[:spinner] Thinking ...", format: :dots)
+    spinner.auto_spin
+    api_response = retrieve_response(last_response_id)
+    number_of_times_polled = 0
+    response = timeout_request(timeout) do
+      status = if api_response.respond_to?(:status)
+        api_response.status
+      else
+        api_response.dig(:status)&.to_sym
+      end
+
+      while status != :completed
+        some_amount_of_seconds = calculate_wait(number_of_times_polled)
+        sleep some_amount_of_seconds
+        number_of_times_polled += 1
+        api_response = retrieve_response(last_response_id)
        status = if api_response.respond_to?(:status)
          api_response.status
-        else
+        else
          api_response.dig(:status)&.to_sym
        end
-
-    while status != :completed
-      some_amount_of_seconds = calculate_wait(number_of_times_polled)
-      sleep some_amount_of_seconds
-      number_of_times_polled += 1
-      api_response = retrieve_response(previous_response_id)
-      status = if api_response.respond_to?(:status)
-        api_response.status
-      else
-        api_response.dig(:status)&.to_sym
-      end
-    end
-    api_response
-  end
-
-    status = if api_response.respond_to?(:status)
-      api_response.status
-    else
-      api_response.dig(:status).to_sym
      end
-
-
-
+      api_response
+    end
+
+    status = if api_response.respond_to?(:status)
+      api_response.status
+    else
+      api_response.dig(:status).to_sym
+    end
+    exit_message = (status == :cancelled) ? "request timed out" : "done!"
+    spinner.stop(exit_message)
+    response
  end
 
-  def retrieve_response(…
+  def retrieve_response(response_id)
    if proxy
-      uri = URI(PROXY_URL + "api.openai.com/v1/responses/#{…
+      uri = URI(PROXY_URL + "api.openai.com/v1/responses/#{response_id}")
      send_request(uri, content_type: "json", method: "get")
    else
-      client.responses.retrieve(…
+      client.responses.retrieve(response_id)
    end
  end

@@ -846,7 +804,7 @@ module AI
      send_request(uri, method: "get")
    else
      container_content = client.containers.files.content
-      …
+      container_content.retrieve(file_id, container_id: container_id)
    end
  end
 end
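`wait_for_response` now polls inside `timeout_request`, cancelling the in-flight response when the deadline passes. The opening lines of `timeout_request` fall outside this diff's hunks, so the sketch below of the timeout-and-cancel pattern is an assumption; `poll_until_complete` and `cancel_remote_request` are placeholders:

```ruby
require "timeout"

def run_with_deadline(seconds)
  Timeout.timeout(seconds) { poll_until_complete }  # assumed to mirror timeout_request
rescue Timeout::Error
  cancel_remote_request  # the real code calls client.responses.cancel(last_response_id)
end
```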
data/lib/ai/http.rb
CHANGED

@@ -1,7 +1,7 @@
 require "net/http"
 module AI
   module Http
-    def send_request(uri, content_type: nil, parameters: nil…
+    def send_request(uri, method:, content_type: nil, parameters: nil)
       Net::HTTP.start(uri.host, 443, use_ssl: true) do |http|
         headers = {
           "Authorization" => "Bearer #{@api_key}"