omniai 3.2.3 → 3.3.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: b3e745edbb32e4102309da753fb4ccdb4cec5b6ab69a4bebf7260a39d99b31b4
-   data.tar.gz: d8546d88f6c5cdc1c665d08bd7624f284c0e5baec04162f3a7ee29474f25beba
+   metadata.gz: 1428efe58af69bd2b3055f547b15234dd5a7d7f642705a2071df5880f894ae6a
+   data.tar.gz: 151da9550296302bd6b69f2e6a4d8c3b5a45451a0691ab7d689d571c92a71c18
  SHA512:
-   metadata.gz: 59798981414c5fb32fd3f59fcc2d159c21b37cc1b6ce979424b91e7154147d05ba36e6e048ef3ff002f70022be5b02af0c7e66eb1c2e6dab6a434cb1be9ee794
-   data.tar.gz: 45f18be41f25a010810901fc9fe001f31698549fdfff6c0a1b59d5eae612f53ad6cd4ff9a857cb11b2a77fe50a1e8f0d000a388f836604e7e26d1d8212635258
+   metadata.gz: 9bd92193965a8d1f3c310d1852fbc730e888963dc206ee53168f0290a633e6c82db4e7c4da7fdfef8cc7d1393a2e20ad6f824e1db1ab8fea425e8aab025efa74
+   data.tar.gz: 3d453e691a5f4a3fb89d5aef94e7cb436517ac328d9ae27ad284312879c8278e4dc857b0ce90d30ba36da034e2e7c99aa79a01778844fea6376a6016ecab7086
data/README.md CHANGED
@@ -532,6 +532,39 @@ end
  client.chat('What is the weather in "London" in Celsius and "Paris" in Fahrenheit?', tools: [WeatherTool.new])
  ```

+ #### Extended Thinking / Reasoning
+
+ Some models support extended thinking or reasoning capabilities. OmniAI provides a unified `thinking:` option that works across all supported providers:
+
+ ```ruby
+ # Enable thinking (provider uses sensible defaults)
+ response = client.chat("What is 25 * 25?", thinking: true)
+
+ # Access thinking content
+ response.choices.first.message.contents.each do |content|
+   case content
+   when OmniAI::Chat::Thinking
+     puts "Thinking: #{content.thinking}"
+   when OmniAI::Chat::Text
+     puts "Response: #{content.text}"
+   end
+ end
+ ```
+
+ With streaming:
+
+ ```ruby
+ client.chat("Solve this step by step: What is 123 * 456?", thinking: true, stream: $stdout)
+ ```
+
+ **Provider Support:**
+
+ | Provider | Option | Notes |
+ |----------|--------|-------|
+ | Anthropic | `thinking: true` or `thinking: { budget_tokens: N }` | Requires Claude 3.5+ models |
+ | Google | `thinking: true` | Requires Gemini 2.0+ with thinking enabled |
+ | OpenAI | `thinking: true` or `thinking: { effort: "high" }` | Requires o1/o3 models |
+
  ### 🎤 Speech to Text

  Clients that support transcribe (e.g. OpenAI w/ "Whisper") convert recordings to text via the following calls:
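The provider table in the README addition above names option forms (`budget_tokens:` for Anthropic, `effort:` for OpenAI) that none of the surrounding examples demonstrate. A minimal sketch, assuming those hashes are forwarded to the provider exactly as the table documents; the prompts are illustrative:

```ruby
# Anthropic: grant an explicit reasoning budget (per the table above).
client.chat("Plan a three-course menu.", thinking: { budget_tokens: 1_024 })

# OpenAI reasoning models (o1/o3): request higher reasoning effort (per the table above).
client.chat("Prove that the sum of two odd numbers is even.", thinking: { effort: "high" })
```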
@@ -37,6 +37,7 @@ module OmniAI

  case data["type"]
  when "text" then Text.deserialize(data, context:)
+ when "thinking" then Thinking.deserialize(data, context:)
  when /(.*)_url/ then URL.deserialize(data, context:)
  else raise ArgumentError, "unknown type=#{data['type'].inspect}"
  end
@@ -6,17 +6,28 @@ module OmniAI
    # back to the caller.
    class Delta
      # @!attribute [rw] text
-     #
+     # @return [String, nil]
      attr_accessor :text

-     # @param text [String]
-     def initialize(text:)
+     # @!attribute [rw] thinking
+     # @return [String, nil]
+     attr_accessor :thinking
+
+     # @param text [String, nil]
+     # @param thinking [String, nil]
+     def initialize(text: nil, thinking: nil)
        @text = text
+       @thinking = thinking
      end

      # @return [Boolean]
      def text?
-       !text.empty?
+       !@text.nil? && !@text.empty?
+     end
+
+     # @return [Boolean]
+     def thinking?
+       !@thinking.nil? && !@thinking.empty?
      end
    end
  end
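The `Delta` changes above let a streaming handler separate reasoning chunks from answer chunks. A minimal sketch, assuming the proc form of the `stream:` option from earlier OmniAI releases still yields these deltas; the prompt and labels are illustrative:

```ruby
handler = proc do |delta|
  print("[thinking] #{delta.thinking}") if delta.thinking? # reasoning chunk
  print(delta.text) if delta.text?                         # answer chunk
end

client.chat("Solve step by step: what is 123 * 456?", thinking: true, stream: handler)
```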
@@ -149,6 +149,19 @@ module OmniAI
    parts.map(&:text).join("\n") unless parts.empty?
  end

+ # @return [Boolean]
+ def thinking?
+   !thinking.nil?
+ end
+
+ # @return [String, nil]
+ def thinking
+   return if @content.nil?
+
+   parts = arrayify(@content).filter { |content| content.is_a?(Thinking) }
+   parts.map(&:thinking).join("\n") unless parts.empty?
+ end
+
  # @param object [Object]
  # @return [Array]
  def arrayify(object)
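This hunk (the diff omits the file name) mirrors the existing `text` helper: `thinking` gathers every `Thinking` part of the content and joins them with newlines. A sketch, assuming the hunk belongs to the message object already used in the README example above:

```ruby
response = client.chat("What is 25 * 25?", thinking: true)
message  = response.choices.first.message

puts message.thinking if message.thinking? # all Thinking parts, joined with "\n"
```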
@@ -83,6 +83,18 @@ module OmniAI
    messages.any?(&:text?)
  end

+ # @return [String, nil]
+ def thinking
+   return unless thinking?
+
+   messages.filter(&:thinking?).map(&:thinking).join("\n\n")
+ end
+
+ # @return [Boolean]
+ def thinking?
+   messages.any?(&:thinking?)
+ end
+
  # @return [ToolCallList, nil]
  def tool_call_list
    tool_call_lists = messages.filter(&:tool_call_list?).map(&:tool_call_list)
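The pair of helpers above aggregates reasoning across a collection of messages, joining each message's `thinking` with blank lines. Reusing the `response` from the previous sketch, and assuming this hunk belongs to the object returned by `client.chat` (which already exposes `text?` over the same `messages`):

```ruby
puts response.thinking if response.thinking? # reasoning from every message, separated by "\n\n"
```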
@@ -0,0 +1,52 @@
+ # frozen_string_literal: true
+
+ module OmniAI
+   class Chat
+     # Represents thinking/reasoning content from a model.
+     class Thinking < Content
+       # @return [String]
+       attr_accessor :thinking
+
+       # @return [Hash] Provider-specific metadata (e.g., Anthropic's signature)
+       attr_accessor :metadata
+
+       # @param thinking [String]
+       # @param metadata [Hash] Provider-specific data for round-tripping
+       def initialize(thinking = nil, metadata: {})
+         super()
+         @thinking = thinking
+         @metadata = metadata || {}
+       end
+
+       # @return [String]
+       def inspect
+         "#<#{self.class} thinking=#{@thinking.inspect}>"
+       end
+
+       # @return [String]
+       def summarize
+         @thinking
+       end
+
+       # @param data [Hash] required
+       # @param context [Context] optional
+       def self.deserialize(data, context: nil)
+         deserialize = context&.deserializer(:thinking)
+         return deserialize.call(data, context:) if deserialize
+
+         new(data["thinking"])
+       end
+
+       # @param context [Context] optional
+       # @param direction [String] optional - either "input" or "output"
+       #
+       # @return [Hash]
+       def serialize(context: nil, direction: nil) # rubocop:disable Lint/UnusedMethodArgument
+         serializer = context&.serializer(:thinking)
+         return serializer.call(self, context:) if serializer
+
+         { type: "thinking", thinking: @thinking }
+       end
+     end
+   end
+ end
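Since the new `Thinking` content class appears in full above, its default round-trip (no custom `context` serializers registered) can be sketched directly from that code; the reasoning string is illustrative:

```ruby
thinking = OmniAI::Chat::Thinking.new("Square 25: 25 * 25 = 625.")

thinking.serialize
#=> { type: "thinking", thinking: "Square 25: 25 * 25 = 625." }

OmniAI::Chat::Thinking.deserialize({ "thinking" => "Square 25: 25 * 25 = 625." }).thinking
#=> "Square 25: 25 * 25 = 625."
```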
@@ -1,5 +1,5 @@
  # frozen_string_literal: true

  module OmniAI
-   VERSION = "3.2.3"
+   VERSION = "3.3.3"
  end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: omniai
  version: !ruby/object:Gem::Version
-   version: 3.2.3
+   version: 3.3.3
  platform: ruby
  authors:
  - Kevin Sylvestre
@@ -107,6 +107,7 @@ files:
  - lib/omniai/chat/response.rb
  - lib/omniai/chat/stream.rb
  - lib/omniai/chat/text.rb
+ - lib/omniai/chat/thinking.rb
  - lib/omniai/chat/tool_call.rb
  - lib/omniai/chat/tool_call_list.rb
  - lib/omniai/chat/tool_call_message.rb