omniai-google 3.3.3 → 3.4.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +30 -0
- data/lib/omniai/google/chat/content_serializer.rb +2 -1
- data/lib/omniai/google/chat/message_serializer.rb +1 -0
- data/lib/omniai/google/chat/stream.rb +12 -3
- data/lib/omniai/google/chat/thinking_serializer.rb +27 -0
- data/lib/omniai/google/chat.rb +17 -0
- data/lib/omniai/google/client.rb +3 -2
- data/lib/omniai/google/version.rb +1 -1
- metadata +2 -1
checksums.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
---
|
|
2
2
|
SHA256:
|
|
3
|
-
metadata.gz:
|
|
4
|
-
data.tar.gz:
|
|
3
|
+
metadata.gz: 8ed53a642ea244879948b2831fb9f9dfb301eb7a1cedfff39e26131e6a6bb36e
|
|
4
|
+
data.tar.gz: abe713af3afe2f678e515ea23fd1cc1cc8b6ab0f328d77936e3a3990d3cd06fa
|
|
5
5
|
SHA512:
|
|
6
|
-
metadata.gz:
|
|
7
|
-
data.tar.gz:
|
|
6
|
+
metadata.gz: 154befd060662f97e6a3b8fdb9a2ba55879f3226f89a8f4c539572c7064c973599e32892567af9b91c173ef42da3995b0d2a237765892a735c27227b59b4c1ec
|
|
7
|
+
data.tar.gz: 4d4992ccb4a94e074e2ed185afadebb7dfedb1b44c08ed68485b243876f70b6a6dce4242424538545962bf4992c3176b6d26398a998887b09dd0f5ac45b5cd01
|
data/README.md
CHANGED
|
@@ -118,6 +118,36 @@ end
|
|
|
118
118
|
client.chat('Be poetic.', stream:)
|
|
119
119
|
```
|
|
120
120
|
|
|
121
|
+
#### Extended Thinking
|
|
122
|
+
|
|
123
|
+
Google Gemini 2.0+ models support extended thinking, which shows the model's reasoning process.
|
|
124
|
+
|
|
125
|
+
```ruby
|
|
126
|
+
# Enable thinking
|
|
127
|
+
response = client.chat("What is 25 * 25?", model: "gemini-2.5-pro-preview-05-06", thinking: true)
|
|
128
|
+
```
|
|
129
|
+
|
|
130
|
+
#### Accessing Thinking Content
|
|
131
|
+
|
|
132
|
+
```ruby
|
|
133
|
+
response.choices.first.message.contents.each do |content|
|
|
134
|
+
case content
|
|
135
|
+
when OmniAI::Chat::Thinking
|
|
136
|
+
puts "Thinking: #{content.thinking}"
|
|
137
|
+
when OmniAI::Chat::Text
|
|
138
|
+
puts "Response: #{content.text}"
|
|
139
|
+
end
|
|
140
|
+
end
|
|
141
|
+
```
|
|
142
|
+
|
|
143
|
+
#### Streaming with Thinking
|
|
144
|
+
|
|
145
|
+
```ruby
|
|
146
|
+
client.chat("What are the prime factors of 1234567?", model: "gemini-2.5-pro-preview-05-06", thinking: true, stream: $stdout)
|
|
147
|
+
```
|
|
148
|
+
|
|
149
|
+
[Google API Reference `thinking`](https://ai.google.dev/gemini-api/docs/thinking)
|
|
150
|
+
|
|
121
151
|
### Upload
|
|
122
152
|
|
|
123
153
|
An upload is especially useful when processing audio / image / video / text files. To use:
|
|
@@ -7,9 +7,10 @@ module OmniAI
|
|
|
7
7
|
module ContentSerializer
|
|
8
8
|
# @param data [Hash]
|
|
9
9
|
# @param context [Context]
|
|
10
|
-
# @return [OmniAI::Chat::Text, OmniAI::Chat::ToolCall]
|
|
10
|
+
# @return [OmniAI::Chat::Text, OmniAI::Chat::Thinking, OmniAI::Chat::ToolCall]
|
|
11
11
|
def self.deserialize(data, context:)
|
|
12
12
|
case
|
|
13
|
+
when data["thought"] then OmniAI::Chat::Thinking.deserialize(data, context:)
|
|
13
14
|
when data["text"] then data["text"]
|
|
14
15
|
when data["functionCall"] then OmniAI::Chat::ToolCall.deserialize(data, context:)
|
|
15
16
|
end
|
|
@@ -27,6 +27,7 @@ module OmniAI
|
|
|
27
27
|
role = data["role"]
|
|
28
28
|
parts = arrayify(data["parts"]).map do |part|
|
|
29
29
|
case
|
|
30
|
+
when part["thought"] then OmniAI::Chat::Thinking.deserialize(part, context:)
|
|
30
31
|
when part["text"] then OmniAI::Chat::Text.deserialize(part, context:)
|
|
31
32
|
when part["functionCall"] then OmniAI::Chat::ToolCall.deserialize(part, context:)
|
|
32
33
|
when part["functionResponse"] then OmniAI::Chat::ToolCallResult.deserialize(part, context:)
|
|
@@ -57,7 +57,12 @@ module OmniAI
|
|
|
57
57
|
return unless candidate["content"]
|
|
58
58
|
|
|
59
59
|
candidate["content"]["parts"].each do |part|
|
|
60
|
-
|
|
60
|
+
if part["thought"]
|
|
61
|
+
# Google uses thought: true as a flag, content is in text
|
|
62
|
+
block&.call(OmniAI::Chat::Delta.new(thinking: part["text"]))
|
|
63
|
+
elsif part["text"]
|
|
64
|
+
block&.call(OmniAI::Chat::Delta.new(text: part["text"]))
|
|
65
|
+
end
|
|
61
66
|
end
|
|
62
67
|
|
|
63
68
|
merge_candidate!(candidate:, index:)
|
|
@@ -82,8 +87,12 @@ module OmniAI
|
|
|
82
87
|
# @param part [Hash]
|
|
83
88
|
# @param into [Hash]
|
|
84
89
|
def merge_part!(part:, candidate:)
|
|
85
|
-
|
|
86
|
-
|
|
90
|
+
last_part = candidate["content"]["parts"][-1]
|
|
91
|
+
|
|
92
|
+
if last_part&.key?("text") && part["text"]
|
|
93
|
+
last_part["text"] += part["text"]
|
|
94
|
+
elsif last_part&.key?("thought") && part["thought"]
|
|
95
|
+
last_part["thought"] += part["thought"]
|
|
87
96
|
else
|
|
88
97
|
candidate["content"]["parts"] << part
|
|
89
98
|
end
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module OmniAI
|
|
4
|
+
module Google
|
|
5
|
+
class Chat
|
|
6
|
+
# Overrides thinking serialize / deserialize.
|
|
7
|
+
module ThinkingSerializer
|
|
8
|
+
# @param data [Hash]
|
|
9
|
+
# @param context [Context]
|
|
10
|
+
#
|
|
11
|
+
# @return [OmniAI::Chat::Thinking]
|
|
12
|
+
def self.deserialize(data, context: nil) # rubocop:disable Lint/UnusedMethodArgument
|
|
13
|
+
# Google uses "thought: true" as a flag, with content in "text"
|
|
14
|
+
OmniAI::Chat::Thinking.new(data["text"])
|
|
15
|
+
end
|
|
16
|
+
|
|
17
|
+
# @param thinking [OmniAI::Chat::Thinking]
|
|
18
|
+
# @param context [Context]
|
|
19
|
+
#
|
|
20
|
+
# @return [Hash]
|
|
21
|
+
def self.serialize(thinking, context: nil) # rubocop:disable Lint/UnusedMethodArgument
|
|
22
|
+
{ thought: true, text: thinking.thinking }
|
|
23
|
+
end
|
|
24
|
+
end
|
|
25
|
+
end
|
|
26
|
+
end
|
|
27
|
+
end
|
data/lib/omniai/google/chat.rb
CHANGED
|
@@ -65,6 +65,9 @@ module OmniAI
|
|
|
65
65
|
context.deserializers[:content] = ContentSerializer.method(:deserialize)
|
|
66
66
|
|
|
67
67
|
context.serializers[:tool] = ToolSerializer.method(:serialize)
|
|
68
|
+
|
|
69
|
+
context.serializers[:thinking] = ThinkingSerializer.method(:serialize)
|
|
70
|
+
context.deserializers[:thinking] = ThinkingSerializer.method(:deserialize)
|
|
68
71
|
end
|
|
69
72
|
|
|
70
73
|
protected
|
|
@@ -129,6 +132,7 @@ module OmniAI
|
|
|
129
132
|
end
|
|
130
133
|
|
|
131
134
|
data[:temperature] = @temperature if @temperature
|
|
135
|
+
data[:thinkingConfig] = thinking_config if thinking_config
|
|
132
136
|
|
|
133
137
|
data = data.compact
|
|
134
138
|
data unless data.empty?
|
|
@@ -144,6 +148,19 @@ module OmniAI
|
|
|
144
148
|
stream? ? "streamGenerateContent" : "generateContent"
|
|
145
149
|
end
|
|
146
150
|
|
|
151
|
+
# Translates unified thinking option to Google's thinkingConfig format.
|
|
152
|
+
# Example: `thinking: true` becomes `{ includeThoughts: true }`
|
|
153
|
+
# @return [Hash, nil]
|
|
154
|
+
def thinking_config
|
|
155
|
+
thinking = @options[:thinking]
|
|
156
|
+
return unless thinking
|
|
157
|
+
|
|
158
|
+
case thinking
|
|
159
|
+
when true then { includeThoughts: true }
|
|
160
|
+
when Hash then { includeThoughts: true }.merge(thinking)
|
|
161
|
+
end
|
|
162
|
+
end
|
|
163
|
+
|
|
147
164
|
# @return [Array<Message>]
|
|
148
165
|
def build_tool_call_messages(tool_call_list)
|
|
149
166
|
content = tool_call_list.map do |tool_call|
|
data/lib/omniai/google/client.rb
CHANGED
|
@@ -67,8 +67,9 @@ module OmniAI
|
|
|
67
67
|
# @yieldparam prompt [OmniAI::Chat::Prompt]
|
|
68
68
|
#
|
|
69
69
|
# @return [OmniAI::Chat::Completion]
|
|
70
|
-
def chat(messages = nil, model: Chat::DEFAULT_MODEL, temperature: nil, format: nil, stream: nil, tools: nil,
|
|
71
|
-
|
|
70
|
+
def chat(messages = nil, model: Chat::DEFAULT_MODEL, temperature: nil, format: nil, stream: nil, tools: nil,
|
|
71
|
+
**, &)
|
|
72
|
+
Chat.process!(messages, model:, temperature:, format:, stream:, tools:, client: self, **, &)
|
|
72
73
|
end
|
|
73
74
|
|
|
74
75
|
# @param io [File, String] required - a file or URL
|
metadata
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
|
2
2
|
name: omniai-google
|
|
3
3
|
version: !ruby/object:Gem::Version
|
|
4
|
-
version: 3.3.3
|
|
4
|
+
version: 3.4.3
|
|
5
5
|
platform: ruby
|
|
6
6
|
authors:
|
|
7
7
|
- Kevin Sylvestre
|
|
@@ -113,6 +113,7 @@ files:
|
|
|
113
113
|
- lib/omniai/google/chat/response_serializer.rb
|
|
114
114
|
- lib/omniai/google/chat/stream.rb
|
|
115
115
|
- lib/omniai/google/chat/text_serializer.rb
|
|
116
|
+
- lib/omniai/google/chat/thinking_serializer.rb
|
|
116
117
|
- lib/omniai/google/chat/tool_call_result_serializer.rb
|
|
117
118
|
- lib/omniai/google/chat/tool_call_serializer.rb
|
|
118
119
|
- lib/omniai/google/chat/tool_serializer.rb
|