open_ai_bot 0.3.0 → 0.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.rubocop.yml +1 -1
- data/.ruby-version +1 -0
- data/Gemfile.lock +28 -23
- data/README.md +2 -2
- data/lib/open_ai/chat_gpt.rb +24 -18
- data/lib/open_ai/chat_thread.rb +10 -1
- data/lib/open_ai/image.rb +38 -0
- data/lib/open_ai/message.rb +19 -8
- data/lib/open_ai/model.rb +63 -0
- data/lib/open_ai/utils.rb +2 -0
- data/lib/open_ai_bot.rb +2 -1
- data/main.rb +1 -0
- data/open_ai_bot.gemspec +1 -1
- metadata +10 -7
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 21fff768a394b10773c77b9fafea42d01500cca136aea8c418753da1ad61024d
+  data.tar.gz: 7b2da8e1e8e0038a5420877ed0644888c0948976876dc065f65bb0f1ba9f94f1
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 82fff5e67a384cbcd0cb6d8a99bb1a3c9a42016ced7dbe9bd2d93d694183ab299f5b9b308662660c3997aa3d6407e420151e28e019ed9d63c1a8716b50bb3b6e
+  data.tar.gz: e525381d25b1e2d81e3bbd73c6c6990753948d0287c476f531fca32f16e3980b0f8380869945e9254e639925a6247ac799a140442899496d74687554631370e2

data/.rubocop.yml
CHANGED
data/.ruby-version
ADDED
@@ -0,0 +1 @@
+3.3.2

data/Gemfile.lock
CHANGED
@@ -3,17 +3,17 @@ GEM
   specs:
     addressable (2.8.6)
       public_suffix (>= 2.0.2, < 6.0)
-    async (2.
-      console (~> 1.
+    async (2.12.0)
+      console (~> 1.25, >= 1.25.2)
       fiber-annotation
-      io-event (~> 1.
-
-
-
-    console (1.23.3)
+      io-event (~> 1.6)
+    bigdecimal (3.1.8)
+    concurrent-ruby (1.3.2)
+    console (1.25.2)
       fiber-annotation
-      fiber-local
-
+      fiber-local (~> 1.1)
+      json
+    down (5.4.2)
       addressable (~> 2.8)
     dry-core (1.0.1)
       concurrent-ruby (~> 1.0)
@@ -28,31 +28,35 @@ GEM
       dry-types (>= 1.7, < 2)
       ice_nine (~> 0.11)
       zeitwerk (~> 2.6)
-    dry-types (1.7.
+    dry-types (1.7.2)
+      bigdecimal (~> 3.0)
       concurrent-ruby (~> 1.0)
       dry-core (~> 1.0)
       dry-inflector (~> 1.0)
       dry-logic (~> 1.4)
       zeitwerk (~> 2.6)
     event_stream_parser (0.3.0)
-    faraday (2.
-
-      faraday-net_http (>= 2.0, < 3.1)
-      ruby2_keywords (>= 0.0.4)
+    faraday (2.9.1)
+      faraday-net_http (>= 2.0, < 3.2)
     faraday-multipart (1.0.4)
       multipart-post (~> 2)
-    faraday-net_http (3.0
+    faraday-net_http (3.1.0)
+      net-http
     fiber-annotation (0.2.0)
-    fiber-local (1.
+    fiber-local (1.1.0)
+      fiber-storage
+    fiber-storage (0.1.1)
     ice_nine (0.11.2)
-    io-event (1.
-
-
+    io-event (1.6.0)
+    json (2.7.2)
+    multipart-post (2.4.1)
+    net-http (0.4.1)
+      uri
+    public_suffix (5.0.5)
     ruby-openai (5.2.0)
       event_stream_parser (>= 0.3.0, < 1.0.0)
       faraday (>= 1)
       faraday-multipart (>= 1)
-    ruby2_keywords (0.0.5)
     rubydium (0.4.1)
       async (~> 2.3)
       telegram-bot-ruby (~> 1.0.0)
@@ -61,10 +65,11 @@ GEM
       faraday (~> 2.0)
       faraday-multipart (~> 1.0)
       zeitwerk (~> 2.6)
-
-    zeitwerk (2.6.
+    uri (0.13.0)
+    zeitwerk (2.6.15)

 PLATFORMS
+  ruby
   x86_64-linux

 DEPENDENCIES
@@ -73,4 +78,4 @@ DEPENDENCIES
   rubydium (>= 0.2.5)

 BUNDLED WITH
-   2.
+   2.5.9

data/README.md
CHANGED
@@ -16,8 +16,8 @@ sudo apt install autoconf patch build-essential rustc libssl-dev libyaml-dev lib
 git clone https://github.com/asdf-vm/asdf.git ~/.asdf --branch v0.13.1
 echo '. "$HOME/.asdf/asdf.sh"' >> ~/.bashrc
 asdf plugin add ruby https://github.com/asdf-vm/asdf-ruby.git
-asdf install ruby 3.
-echo 'ruby 3.
+asdf install ruby 3.3.2
+echo 'ruby 3.3.2' >> ~/.tool-versions
 ```

 2. `ffmpeg`

data/lib/open_ai/chat_gpt.rb
CHANGED
@@ -72,16 +72,6 @@ module OpenAI
       "NULL"
     end

-    def base64(file)
-      return unless config.open_ai["chat_gpt_model"] == "gpt-4-vision-preview"
-      return unless file
-
-      f = download_file(file)
-      res = Base64.encode64(f.read)
-      FileUtils.rm_rf("./#{f.original_filename}")
-      res
-    end
-
     def handle_gpt_command
       return unless bot_mentioned? || bot_replied_to? || private_chat?
       return if self.class.registered_commands.keys.any? { @text.include? _1 }
@@ -97,7 +87,8 @@ module OpenAI
         from: username(@user),
         body: @text_without_bot_mentions,
         chat_id: @chat.id,
-
+        chat_thread: current_thread,
+        image: Image.from_tg_photo(download_file(@msg.photo&.last), model: current_thread.model)
       )

       return unless current_message.valid?
@@ -110,7 +101,8 @@ module OpenAI
           from: username(@target),
           body: @replies_to.text.to_s.gsub(/@#{config.bot_username}\b/, ""),
           chat_id: @chat.id,
-
+          chat_thread: current_thread,
+          image: Image.from_tg_photo(download_file(@replies_to.photo&.last), model: current_thread.model)
         )
       else
         nil
@@ -127,7 +119,7 @@ module OpenAI

       response = open_ai.chat(
         parameters: {
-          model: current_thread.model
+          model: current_thread.model.to_s,
           messages: current_thread.as_json
         }
       )
@@ -138,9 +130,10 @@ module OpenAI
         send_chat_gpt_error(error_text.strip)
       else
         text = response.dig("choices", 0, "message", "content")
-        tokens = response.dig("usage", "total_tokens")

-
+        tokens_info = get_tokens_info!(response)
+
+        send_chat_gpt_response(text, tokens_info)
       end
     end

@@ -148,15 +141,28 @@ module OpenAI
       reply(text, parse_mode: "Markdown")
     end

-    def
-
+    def get_tokens_info!(response)
+      completion_tokens = response.dig("usage", "completion_tokens")
+      prompt_tokens = response.dig("usage", "prompt_tokens")
+      vision_tokens = current_thread.claim_vision_tokens!
+
+      result = current_thread.model.request_cost(completion_tokens:, prompt_tokens:, vision_tokens:, current_thread:)
+    end
+
+    def send_chat_gpt_response(text, tokens_info)
+      tokens_text = tokens_info[:info]
+
+      id = reply(text + tokens_text).dig("result", "message_id")
+
       bot_message = BotMessage.new(
         id: id,
         replies_to: @message_id,
         body: text,
         chat_id: @chat.id,
-
+        chat_thread: current_thread,
+        cost: tokens_info[:total]
       )
+
       current_thread.add(bot_message)
     end
   end

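Note on the reworked handler: instead of the single `total_tokens` figure, `get_tokens_info!` reads the separate prompt and completion counters from the API response and combines them with any unclaimed vision tokens from the thread. As a rough illustration (not code from the gem), with a response hash shaped like ruby-openai's chat output and invented numbers:

```ruby
# Hypothetical response hash; the keys mirror the OpenAI chat completion
# schema that get_tokens_info! digs into, the values are invented.
response = {
  "choices" => [{ "message" => { "content" => "Hello!" } }],
  "usage"   => { "prompt_tokens" => 1200, "completion_tokens" => 80, "total_tokens" => 1280 }
}

text              = response.dig("choices", 0, "message", "content") # => "Hello!"
prompt_tokens     = response.dig("usage", "prompt_tokens")           # => 1200
completion_tokens = response.dig("usage", "completion_tokens")       # => 80
```

These counters, plus the vision tokens claimed from the thread, are what `Model#request_cost` turns into the cost summary appended to the reply.
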
data/lib/open_ai/chat_thread.rb
CHANGED
@@ -3,8 +3,9 @@
 module OpenAI
   class ChatThread
     def initialize(defaults = [], model = nil)
+      model ||= OpenAIBot.config.open_ai["chat_gpt_model"].to_sym
       @history ||= defaults
-      @model = model
+      @model = model.is_a?(Model) ? model : Model.new(model)
       puts @history
     end

@@ -20,6 +21,14 @@ module OpenAI
       true
     end

+    def total_cost
+      @history.map(&:cost).compact.sum
+    end
+
+    def claim_vision_tokens!
+      @history.reject(&:vision_tokens_claimed?).map(&:claim_vision_tokens!).compact.sum
+    end
+
     def add(message)
       return false unless message&.valid?
       return false if @history.any? { message.id == _1.id}

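The two new `ChatThread` helpers fold over the message history: `total_cost` sums whatever per-message costs have been recorded, while `claim_vision_tokens!` sums image tokens only from messages that have not been claimed yet, so a photo is billed once rather than on every follow-up request. A minimal sketch of those semantics, using a hypothetical stand-in for `Message`:

```ruby
# Hypothetical stand-in for OpenAI::Message, just to show the aggregation.
FakeMessage = Struct.new(:cost, :vision_tokens, :claimed) do
  def vision_tokens_claimed? = claimed

  def claim_vision_tokens!
    self.claimed = true
    vision_tokens
  end
end

history = [FakeMessage.new(0.004, nil, true), FakeMessage.new(nil, 765, false)]

history.map(&:cost).compact.sum                                                  # => 0.004 (total_cost)
history.reject(&:vision_tokens_claimed?).map(&:claim_vision_tokens!).compact.sum # => 765
history.reject(&:vision_tokens_claimed?).map(&:claim_vision_tokens!).compact.sum # => 0 (already claimed)
```
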
data/lib/open_ai/image.rb
ADDED
@@ -0,0 +1,38 @@
+module OpenAI
+  class Image
+    BASE_TOKENS = 85
+    TILE_TOKENS = 170
+    TILE_SIZE = 512
+
+    def self.from_tg_photo(file, model:)
+      return unless file
+      return unless model.has_vision?
+
+      base64 = Base64.encode64(file.read)
+      size = `identify -format "%w %h" ./#{file.original_filename}`
+      width, height = size.split(" ")
+      FileUtils.rm_rf("./#{file.original_filename}")
+
+      new(width, height, base64)
+    end
+
+    attr_accessor :width, :height, :base64
+
+    def initialize(width, height, base64)
+      @width = width
+      @height = height
+      @base64 = base64
+    end
+
+    def tokens
+      @tokens ||= begin
+        tiles = tiles(width) * tiles(height)
+        (tiles * TILE_TOKENS) + BASE_TOKENS
+      end
+    end
+
+    def tiles(pixels)
+      (pixels.to_f / TILE_SIZE).ceil
+    end
+  end
+end

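`Image` estimates vision token usage from the photo's dimensions: the picture is treated as a grid of 512-pixel tiles, each tile adding `TILE_TOKENS` (170) on top of a flat `BASE_TOKENS` (85). The width and height come back from ImageMagick's `identify` as strings, which `tiles` handles via `to_f`. A worked example of the arithmetic (a standalone re-derivation, not the gem's code):

```ruby
width, height = "1280", "720"                               # as returned by `identify`
tiles  = (width.to_f / 512).ceil * (height.to_f / 512).ceil # 3 * 2 => 6
tokens = tiles * 170 + 85                                   # => 1105
```
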
data/lib/open_ai/message.rb
CHANGED
@@ -3,29 +3,40 @@ module OpenAI
   # (ChatGPT isn't brilliant at parsing JSON sructures without starting to reply in JSON, so most of it is useless)

   class Message
-    attr_accessor :body, :from, :id, :replies_to, :
+    attr_accessor :body, :from, :id, :replies_to, :chat_id, :image, :chat_thread, :cost
     attr_reader :role, :timestamp

     def initialize(**kwargs)
       kwargs.each_pair { public_send("#{_1}=", _2) }
       @role = :user
       @timestamp = Time.now.to_i
+      @vision_tokens_claimed = !image
+    end
+
+    def vision_tokens_claimed?
+      @vision_tokens_claimed
+    end
+
+    def claim_vision_tokens!
+      # binding.pry
+      @vision_tokens_claimed = true
+      image&.tokens
     end

     def valid?
-      [(
+      [(image || body), from, id, chat_id, chat_thread].all?(&:present?)
     end

     # Format for OpenAI API
     def as_json
       msg = [from, body].compact.join("\n")

-      if
+      if image
         {
           role: role,
           content: [
             { type: "text", text: msg },
-            { type: "image_url", image_url: { url: "data:image/jpeg;base64,#{
+            { type: "image_url", image_url: { url: "data:image/jpeg;base64,#{image.base64}" } }
           ]
         }
       else
@@ -46,8 +57,8 @@ module OpenAI
         "From" => from,
         "To" => replies_to,
         "Body" => body,
-        "Tokens used" => tokens,
-        "Image" => (
+        # "Tokens used" => tokens,
+        "Image" => (image ? "Some image" : "None")
       }.reject { |_k, v|
         v.blank?
       }.map { |k, v|
@@ -69,7 +80,7 @@ module OpenAI
     end

     def valid?
-      body.present?
+      [body, chat_thread].all?(&:present?)
     end
   end

@@ -80,7 +91,7 @@ module OpenAI
     end

     def valid?
-      [body, id, chat_id,
+      [body, id, chat_id, chat_thread].all?(&:present?)
     end
   end
 end

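With an image attached, `as_json` now produces the content-array form of a chat message (a text part plus an `image_url` data URI); without one it keeps the plain `role`/`content` string. Illustratively, an image message serializes to roughly this shape (values invented, the base64 payload elided):

```ruby
payload = {
  role: :user,
  content: [
    { type: "text", text: "alice\nwhat is in this picture?" },
    { type: "image_url", image_url: { url: "data:image/jpeg;base64,..." } }
  ]
}
```
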
data/lib/open_ai/model.rb
ADDED
@@ -0,0 +1,63 @@
+module OpenAI
+  class Model
+    # All prices are per 1K tokens
+    MODEL_INFO = {
+      "gpt-4o": {
+        max_context: 128_000,
+        prompt_price: 0.005,
+        completion_price: 0.015,
+        vision_price: 0.005
+      },
+      "gpt-3.5-turbo": {
+        max_context: 16385,
+        prompt_price: 0.0005,
+        completion_price: 0.0015,
+        vision_price: 0
+      }
+    }
+
+    attr_accessor :max_context, :prompt_price, :completion_price, :vision_price
+
+    [:max_context, :prompt_price, :completion_price, :vision_price].each do |attr|
+      define_method(attr) do
+        MODEL_INFO[@model][attr]
+      end
+    end
+
+    def initialize(model)
+      raise ArgumentError.new("Unknown model: #{model}") unless MODEL_INFO[model]
+
+      @model = model
+    end
+
+    def to_s
+      @model
+    end
+
+    def has_vision?
+      MODEL_INFO[@model][:vision_price].positive?
+    end
+
+    def request_cost(prompt_tokens:, completion_tokens:, vision_tokens:, current_thread:)
+      prompt_cost = prompt_tokens * prompt_price / 1000
+      completion_cost = completion_tokens * completion_price / 1000
+      vision_cost = vision_tokens * vision_price / 1000
+
+      total = prompt_cost + completion_cost + vision_cost
+      thread_total = current_thread.total_cost
+
+      info = "\n\n" + {
+        prompt: "#{prompt_tokens} tokens (#{prompt_cost.round(5)}$)",
+        completion: "#{completion_tokens} tokens (#{completion_cost.round(5)}$)",
+        vision: "#{vision_tokens} tokens (#{vision_cost.round(5)}$)",
+        total: "#{total.round(5)}$",
+        total_for_this_conversation: "#{(thread_total + total).round(5)}$",
+        max_context: max_context
+      }.map { |k, v|
+        "#{k}: #{v}"
+      }.join("\n")
+
+      { info:, total: }
+    end
+  end
+end

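`request_cost` prices each token category against the per-1K rates in `MODEL_INFO` and reports the request total alongside the running thread total. A worked example of the arithmetic alone, using the "gpt-4o" rates above and invented token counts:

```ruby
prompt_tokens, completion_tokens, vision_tokens = 1200, 80, 1105

prompt_cost     = prompt_tokens * 0.005 / 1000      # => 0.006
completion_cost = completion_tokens * 0.015 / 1000  # => 0.0012
vision_cost     = vision_tokens * 0.005 / 1000      # => 0.005525

prompt_cost + completion_cost + vision_cost         # ≈ 0.012725 (about 1.3 cents)
```

The returned hash carries both the human-readable `info` block that gets appended to the Telegram reply and the numeric `total` stored on the resulting `BotMessage` as its `cost`.
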
data/lib/open_ai/utils.rb
CHANGED
data/lib/open_ai_bot.rb
CHANGED
@@ -6,11 +6,12 @@ require_relative "open_ai/message"
 require_relative "open_ai/dalle"
 require_relative "open_ai/utils"
 require_relative "open_ai/whisper"
+require_relative "open_ai/model"
+require_relative "open_ai/image"

 require_relative "ext/blank"
 require_relative "ext/in"

-
 class OpenAIBot < Rubydium::Bot
   include OpenAI::ChatGPT
   include OpenAI::Dalle

data/main.rb
CHANGED
data/open_ai_bot.gemspec
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: open_ai_bot
 version: !ruby/object:Gem::Version
-  version: 0.3.
+  version: 0.3.1
 platform: ruby
 authors:
 - bulgakke
-autorequire:
+autorequire:
 bindir: exe
 cert_chain: []
-date:
+date: 2024-06-08 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: down
@@ -80,7 +80,7 @@ dependencies:
   - - "~>"
   - !ruby/object:Gem::Version
     version: '5.1'
-description:
+description:
 email:
 - vvp835@yandex.ru
 executables: []
@@ -89,6 +89,7 @@ extra_rdoc_files: []
 files:
 - ".gitignore"
 - ".rubocop.yml"
+- ".ruby-version"
 - Gemfile
 - Gemfile.lock
 - README.md
@@ -100,7 +101,9 @@ files:
 - lib/open_ai/chat_gpt.rb
 - lib/open_ai/chat_thread.rb
 - lib/open_ai/dalle.rb
+- lib/open_ai/image.rb
 - lib/open_ai/message.rb
+- lib/open_ai/model.rb
 - lib/open_ai/utils.rb
 - lib/open_ai/whisper.rb
 - lib/open_ai_bot.rb
@@ -113,7 +116,7 @@ metadata:
   homepage_uri: https://github.com/bulgakke/open_ai_bot
   source_code_uri: https://github.com/bulgakke/open_ai_bot
   rubygems_mfa_required: 'true'
-post_install_message:
+post_install_message:
 rdoc_options: []
 require_paths:
 - lib
@@ -128,8 +131,8 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.
-signing_key:
+rubygems_version: 3.5.9
+signing_key:
 specification_version: 4
 summary: Telegram bot for using ChatGPT, DALL-E and Whisper
 test_files: []