open_ai_bot 0.3.1 → 0.3.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/open_ai/chat_gpt.rb +2 -2
- data/lib/open_ai/chat_thread.rb +1 -6
- data/lib/open_ai/message.rb +0 -11
- data/lib/open_ai/model.rb +10 -11
- data/open_ai_bot.gemspec +1 -1
- metadata +1 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 2aafb706ad49054fb0e35aab308ea6486ab89ea7e6740f081033b49a972b4dfb
+  data.tar.gz: 2b6b9c143e1465bc2e1f984d8ec611b234644b6f99bf3a033ca160a558871625
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f486692224acc9d6897a426b37e56673779d2631bd2577e9a26ed0ff99715a1e8b14972649cfbbaab02db47233a878bd85f00b1a4cb0fb966a169d3a651a56f9
+  data.tar.gz: 6d0c11a1f76f4edf83ec4a011ded1a88bfc73d48ff7a7567c57525d519ec568fc458f5c262787422ac5f35fad97fe91e2f4d0b5c6564a7d5058ec08b3fd3458b
data/lib/open_ai/chat_gpt.rb
CHANGED
@@ -8,6 +8,7 @@ module OpenAI
     end
 
     def new_thread(chat_id, model = nil)
+      model ||= config.open_ai["chat_gpt_model"].to_sym
       msgs = config.open_ai["whitelist"].include?(chat_id) ? initial_messages : []
       new_thread = ChatThread.new(msgs, model)
       threads[chat_id] = new_thread
@@ -144,9 +145,8 @@ module OpenAI
     def get_tokens_info!(response)
       completion_tokens = response.dig("usage", "completion_tokens")
       prompt_tokens = response.dig("usage", "prompt_tokens")
-      vision_tokens = current_thread.claim_vision_tokens!
 
-      result = current_thread.model.request_cost(completion_tokens:, prompt_tokens:,
+      result = current_thread.model.request_cost(completion_tokens:, prompt_tokens:, current_thread:)
     end
 
     def send_chat_gpt_response(text, tokens_info)
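In 0.3.3 the default-model lookup moves out of `ChatThread#initialize` and into `ChatGpt#new_thread`, and `get_tokens_info!` stops claiming vision tokens itself: it now hands `current_thread:` to `Model#request_cost`. A minimal sketch of the new default resolution, assuming a config object shaped like the gem's (`config.open_ai["chat_gpt_model"]`); the `Config` struct and `resolve_model` helper below are illustrative stand-ins, not gem API:

```ruby
# Stand-in for OpenAIBot.config: only the shape used by new_thread is modelled.
Config = Struct.new(:open_ai)
config = Config.new({ "chat_gpt_model" => "gpt-4o", "whitelist" => [42] })

# Mirrors the line added to ChatGpt#new_thread in 0.3.3:
#   model ||= config.open_ai["chat_gpt_model"].to_sym
def resolve_model(config, model = nil)
  model || config.open_ai["chat_gpt_model"].to_sym
end

p resolve_model(config)                    # => :"gpt-4o"  (configured default)
p resolve_model(config, :"gpt-3.5-turbo")  # => :"gpt-3.5-turbo"  (explicit model wins)
```

An explicitly passed model still overrides the configured default; only the place where the fallback is resolved changed.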
data/lib/open_ai/chat_thread.rb
CHANGED
@@ -2,8 +2,7 @@
 
 module OpenAI
   class ChatThread
-    def initialize(defaults = [], model
-      model ||= OpenAIBot.config.open_ai["chat_gpt_model"].to_sym
+    def initialize(defaults = [], model)
       @history ||= defaults
       @model = model.is_a?(Model) ? model : Model.new(model)
       puts @history
@@ -25,10 +24,6 @@ module OpenAI
       @history.map(&:cost).compact.sum
     end
 
-    def claim_vision_tokens!
-      @history.reject(&:vision_tokens_claimed?).map(&:claim_vision_tokens!).compact.sum
-    end
-
     def add(message)
       return false unless message&.valid?
       return false if @history.any? { message.id == _1.id}
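`ChatThread#initialize` no longer reaches into `OpenAIBot.config` for a fallback model, and the `claim_vision_tokens!` aggregation is gone. Callers must now supply the model, either as a symbol or as an already-built `Model`. A sketch of the new constructor contract; the stripped-down `Model` and `ChatThread` below are simplified stand-ins for the gem's classes:

```ruby
# Simplified Model: only what the ChatThread constructor needs.
class Model
  def initialize(model)
    @model = model
  end
end

class ChatThread
  attr_reader :history, :model

  # Optional defaults, required model (matches the 0.3.3 signature).
  def initialize(defaults = [], model)
    @history = defaults
    @model = model.is_a?(Model) ? model : Model.new(model)
  end
end

ChatThread.new([], :"gpt-4o")             # a symbol gets wrapped in Model.new
ChatThread.new([], Model.new(:"gpt-4o"))  # an existing Model is used as-is
# ChatThread.new                          # => ArgumentError: the model can no longer be omitted
```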
data/lib/open_ai/message.rb
CHANGED
@@ -10,17 +10,6 @@ module OpenAI
       kwargs.each_pair { public_send("#{_1}=", _2) }
       @role = :user
       @timestamp = Time.now.to_i
-      @vision_tokens_claimed = !image
-    end
-
-    def vision_tokens_claimed?
-      @vision_tokens_claimed
-    end
-
-    def claim_vision_tokens!
-      # binding.pry
-      @vision_tokens_claimed = true
-      image&.tokens
     end
 
     def valid?
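With per-message vision bookkeeping removed, `Message` construction is reduced to attribute assignment plus `role`/`timestamp`. A sketch of the slimmed-down class; the attribute list (`id`, `text`, `image`) is an assumption for illustration, not the gem's full accessor set:

```ruby
class Message
  attr_accessor :id, :text, :image

  def initialize(**kwargs)
    kwargs.each_pair { public_send("#{_1}=", _2) }  # assign whatever was passed
    @role = :user
    @timestamp = Time.now.to_i
  end
end

msg = Message.new(id: 1, text: "hello")
# msg.claim_vision_tokens!   # => NoMethodError in 0.3.3: per-message vision
#                            #    token accounting was removed
```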
data/lib/open_ai/model.rb
CHANGED
@@ -6,26 +6,27 @@ module OpenAI
         max_context: 128_000,
         prompt_price: 0.005,
         completion_price: 0.015,
-
+        vision: true
       },
       "gpt-3.5-turbo": {
         max_context: 16385,
         prompt_price: 0.0005,
-        completion_price: 0.0015,
-        vision_price: 0
+        completion_price: 0.0015
       }
     }
 
-    attr_accessor :max_context, :prompt_price, :completion_price
+    attr_accessor :max_context, :prompt_price, :completion_price
 
-    [:max_context, :prompt_price, :completion_price
+    [:max_context, :prompt_price, :completion_price].each do |attr|
       define_method(attr) do
         MODEL_INFO[@model][attr]
       end
     end
 
     def initialize(model)
-
+      if MODEL_INFO[model].nil?
+        raise ArgumentError.new("Unknown model: #{model.inspect}.")
+      end
 
       @model = model
     end
@@ -35,21 +36,19 @@ module OpenAI
     end
 
     def has_vision?
-      MODEL_INFO[@model][:
+      MODEL_INFO[@model][:vision]
     end
 
-    def request_cost(prompt_tokens:, completion_tokens:,
+    def request_cost(prompt_tokens:, completion_tokens:, current_thread:)
       prompt_cost = prompt_tokens * prompt_price / 1000
       completion_cost = completion_tokens * completion_price / 1000
-      vision_cost = vision_tokens * vision_price / 1000
 
-      total = prompt_cost + completion_cost
+      total = prompt_cost + completion_cost
       thread_total = current_thread.total_cost
 
       info = "\n\n" + {
         prompt: "#{prompt_tokens} tokens (#{prompt_cost.round(5)}$)",
         completion: "#{completion_tokens} tokens (#{completion_cost.round(5)}$)",
-        vision: "#{vision_tokens} tokens (#{vision_cost.round(5)}$)",
         total: "#{total.round(5)}$",
         total_for_this_conversation: "#{(thread_total + total).round(5)}$",
         max_context: max_context
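`Model` changes in three ways: unknown model symbols are rejected in `initialize` with an `ArgumentError` instead of surfacing later as a `nil` lookup, `has_vision?` reads a boolean `vision` flag rather than a vision price, and `request_cost` drops the vision terms and instead receives `current_thread:` for the running conversation total. A worked example of the per-request cost formula, using the per-1K-token prices from `MODEL_INFO`; the token counts are illustrative inputs, not gem output:

```ruby
# "gpt-4o" prices from MODEL_INFO (per 1K tokens)
prompt_price     = 0.005
completion_price = 0.015

# Hypothetical usage figures, as reported in response["usage"]
prompt_tokens     = 1_200
completion_tokens = 350

prompt_cost     = prompt_tokens * prompt_price / 1000          # ~0.006
completion_cost = completion_tokens * completion_price / 1000  # ~0.00525
total           = prompt_cost + completion_cost                # ~0.01125

puts "prompt: #{prompt_tokens} tokens (#{prompt_cost.round(5)}$)"
puts "completion: #{completion_tokens} tokens (#{completion_cost.round(5)}$)"
puts "total: #{total.round(5)}$"
```

With the new guard, constructing a `Model` with a symbol not present in `MODEL_INFO` raises `ArgumentError` ("Unknown model: ...") up front rather than failing later inside the generated attribute readers.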
data/open_ai_bot.gemspec
CHANGED