omniai-anthropic 3.1.1 → 3.2.1

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 5077178a43cec44b1c90e0fc272ee5b1e6e8b025387ed6bb71c2238514187631
4
- data.tar.gz: 416834202514d4acfa5e742a8dba508f8aecd9042a5cc5ecff16d22366406cd7
3
+ metadata.gz: acf0d7364e586047dfdad9d4e48892ceaaf18b641e7f9bdcbd43a383c03de6bb
4
+ data.tar.gz: cf134c06b43d88c1e407e4a86a6c5846e9a5322b8b00c9fd8499d1e47f0da74a
5
5
  SHA512:
6
- metadata.gz: bab092c2f66f0ac792f09574974afdaa0f8e6a575482661c168374089aa5380202feeaaa617a2259fd81718f738b6c68c0280bba438ba35de7ae66d154f0a555
7
- data.tar.gz: 5e4b8e0d4db1e9769031d8c11e11eedd4d6ad4ff14c6f06d48ae9271138cbcd187ebf379ad9059907fdf5f92c79004f6973522bc58561ac0b1fc1c456d70e3c2
6
+ metadata.gz: 7322cdbda4727494805ffe2d7dd9593bd21e6d828188b95a767a1194413df9a9cff86fdde3390490fceb7085f1c295ddbc8195c337edb0ad601f1b1cd13c357a
7
+ data.tar.gz: ca7fa497bba5bbf194e1b2d254700ef94498e4b2be7b0ef1ca64e2b6cd1c8b0ad1bc7f45f28a68f46a1edde38ad2ee68ee9f92b6b986cb16abc63974dfd9dc1f
data/README.md CHANGED
@@ -61,7 +61,7 @@ completion.text # 'The capital of Canada is Ottawa.'
61
61
 
62
62
  #### Model
63
63
 
64
- `model` takes an optional string (default is `claude-3-haiku-20240307`):
64
+ `model` takes an optional string (default is `claude-sonnet-4-6`):
65
65
 
66
66
  ```ruby
67
67
  completion = client.chat('Provide code for fibonacci', model: OmniAI::Anthropic::Chat::Model::CLAUDE_SONNET)
@@ -21,7 +21,7 @@ module OmniAI
21
21
  CLAUDE_3_5_HAIKU_20241022 = "claude-3-5-haiku-20241022"
22
22
  CLAUDE_HAIKU_4_5_20251001 = "claude-haiku-4-5-20251001"
23
23
  CLAUDE_3_OPUS_20240229 = "claude-3-opus-20240229"
24
- CLAUDE_3_SONNET_20240209 = "claude-3-sonnet-20240229"
24
+ CLAUDE_3_SONNET_20240229 = "claude-3-sonnet-20240229"
25
25
  CLAUDE_3_SONNET_20240307 = "claude-3-sonnet-20240307"
26
26
  CLAUDE_3_5_SONNET_20240620 = "claude-3-5-sonnet-20240620"
27
27
  CLAUDE_3_5_SONNET_20241022 = "claude-3-5-sonnet-20241022"
@@ -35,19 +35,24 @@ module OmniAI
35
35
  CLAUDE_OPUS_4_20250514 = "claude-opus-4-20250514"
36
36
  CLAUDE_OPUS_4_1_20250805 = "claude-opus-4-1-20250805"
37
37
  CLAUDE_OPUS_4_5_20251101 = "claude-opus-4-5-20251101"
38
+ CLAUDE_OPUS_4_6_20260217 = "claude-opus-4-6-20260217"
38
39
  CLAUDE_SONNET_4_20250514 = "claude-sonnet-4-20250514"
39
- CLAUDE_SONNET_4_5_20240620 = "claude-sonnet-4-5-20250929"
40
+ CLAUDE_SONNET_4_5_20250929 = "claude-sonnet-4-5-20250929"
41
+ CLAUDE_SONNET_4_6_20260217 = "claude-sonnet-4-6-20260217"
40
42
 
41
43
  CLAUDE_HAIKU_4_5 = "claude-haiku-4-5"
42
44
  CLAUDE_OPUS_4_0 = "claude-opus-4-0"
43
45
  CLAUDE_OPUS_4_1 = "claude-opus-4-1"
44
46
  CLAUDE_OPUS_4_5 = "claude-opus-4-5"
47
+ CLAUDE_OPUS_4_6 = "claude-opus-4-6"
48
+ CLAUDE_OPUS_4_7 = "claude-opus-4-7"
45
49
  CLAUDE_SONNET_4_0 = "claude-sonnet-4-0"
46
50
  CLAUDE_SONNET_4_5 = "claude-sonnet-4-5"
51
+ CLAUDE_SONNET_4_6 = "claude-sonnet-4-6"
47
52
 
48
53
  CLAUDE_HAIKU = CLAUDE_HAIKU_4_5
49
- CLAUDE_OPUS = CLAUDE_OPUS_4_5
50
- CLAUDE_SONNET = CLAUDE_SONNET_4_5
54
+ CLAUDE_OPUS = CLAUDE_OPUS_4_7
55
+ CLAUDE_SONNET = CLAUDE_SONNET_4_6
51
56
  end
52
57
 
53
58
  DEFAULT_MODEL = Model::CLAUDE_SONNET
@@ -82,43 +87,73 @@ module OmniAI
82
87
  end
83
88
 
84
89
  # @return [Hash]
90
+ #
91
+ # NOTE: Anthropic requires temperature=1 (default) when thinking is enabled,
92
+ # so temperature is omitted from the payload when thinking_config is present.
85
93
  def payload
86
- data = OmniAI::Anthropic.config.chat_options.merge({
87
- model: @model,
88
- messages:,
89
- system:,
90
- stream: stream? || nil,
91
- temperature: thinking_config ? nil : @temperature, # Anthropic requires temperature=1 (default) when thinking
92
- tools: tools_payload,
93
- thinking: thinking_config,
94
- }).compact
95
-
96
- # When thinking is enabled, ensure max_tokens > budget_tokens
97
- data[:max_tokens] = thinking_max_tokens if thinking_config
94
+ OmniAI::Anthropic.config.chat_options
95
+ .merge({
96
+ model: @model,
97
+ messages:,
98
+ system:,
99
+ stream: stream? || nil,
100
+ temperature: thinking_config ? nil : @temperature,
101
+ tools: tools_payload,
102
+ thinking: thinking_config,
103
+ })
104
+ .merge({ max_tokens:, output_config: }.compact)
105
+ .compact
106
+ end
98
107
 
99
- data
108
+ # Resolved max_tokens. Precedence: thinking floor (when budget set) > per-call kwarg.
109
+ # Returns nil when neither is set, so the config default flows through unchanged.
110
+ def max_tokens
111
+ thinking_max_tokens || @options[:max_tokens]
100
112
  end
101
113
 
102
114
  # Translates unified thinking option to Anthropic's native format.
103
115
  # Example: `thinking: { budget_tokens: 10000 }` becomes `{ type: "enabled", budget_tokens: 10000 }`
116
+ # Example: `thinking: { effort: nil }` becomes `{ type: "adaptive" }` (Claude decides)
117
+ # Example: `thinking: { effort: "medium" }` becomes `{ type: "adaptive" }` + output_config
104
118
  # @return [Hash, nil]
105
119
  def thinking_config
120
+ return @thinking_config if defined?(@thinking_config)
121
+
106
122
  thinking = @options[:thinking]
107
- return unless thinking
108
123
 
109
- case thinking
110
- when true then { type: "enabled", budget_tokens: 10_000 }
111
- when Hash then { type: "enabled" }.merge(thinking)
112
- end
124
+ @thinking_config = case thinking
125
+ when true then { type: "enabled", budget_tokens: 10_000 }
126
+ when Hash
127
+ if thinking.key?(:effort)
128
+ { type: "adaptive" }
129
+ else
130
+ { type: "enabled" }.merge(thinking)
131
+ end
132
+ end
113
133
  end
114
134
 
115
- # Returns max_tokens ensuring it's greater than budget_tokens when thinking is enabled.
116
- # @return [Integer]
135
+ # Adaptive thinking can consume any portion of max_tokens before emitting output.
136
+ # Without a floor, callers using adaptive with the legacy default (4096) silently
137
+ # get empty responses. This constant guarantees enough headroom for thinking + output.
138
+ ADAPTIVE_THINKING_MIN_TOKENS = 32_768
139
+
140
+ # Returns max_tokens ensuring enough headroom when thinking is in play.
141
+ # Enabled-mode path preserves the existing [base, budget+8000].max floor.
142
+ # Adaptive-mode path applies ADAPTIVE_THINKING_MIN_TOKENS as a safety floor.
143
+ # @return [Integer, nil]
117
144
  def thinking_max_tokens
118
- budget = thinking_config[:budget_tokens]
119
- base = @options[:max_tokens] || OmniAI::Anthropic.config.chat_options[:max_tokens] || 0
120
- # Ensure max_tokens > budget_tokens (default to budget + 8000 for response)
121
- [base, budget + 8_000].max
145
+ return unless thinking_config
146
+
147
+ case thinking_config[:type]
148
+ when "adaptive"
149
+ [@options[:max_tokens] || ADAPTIVE_THINKING_MIN_TOKENS, ADAPTIVE_THINKING_MIN_TOKENS].max
150
+ when "enabled"
151
+ budget = thinking_config[:budget_tokens]
152
+ return unless budget
153
+
154
+ base = @options[:max_tokens] || OmniAI::Anthropic.config.chat_options[:max_tokens] || 0
155
+ [base, budget + 8_000].max
156
+ end
122
157
  end
123
158
 
124
159
  # @return [Array<Hash>]
@@ -177,6 +212,19 @@ module OmniAI
177
212
  def tools_payload
178
213
  @tools.map { |tool| tool.serialize(context:) } if @tools&.any?
179
214
  end
215
+
216
+ # @return [Boolean]
217
+ def adaptive_thinking?
218
+ thinking_config&.dig(:type) == "adaptive"
219
+ end
220
+
221
+ # @return [Hash, nil]
222
+ def output_config
223
+ return unless adaptive_thinking?
224
+
225
+ effort = @options.dig(:thinking, :effort)
226
+ { effort: } if effort
227
+ end
180
228
  end
181
229
  end
182
230
  end
@@ -91,7 +91,7 @@ module OmniAI
91
91
  # @param text [String] optional
92
92
  #
93
93
  # @return [Array<Hash>]
94
- def perform(action:, text: nil, coordinate: nil) # rubocop:disable Metrics/CyclomaticComplexity
94
+ def perform(action:, text: nil, coordinate: nil)
95
95
  case action
96
96
  when Action::KEY then key(text:)
97
97
  when Action::TYPE then type(text:)
@@ -2,6 +2,6 @@
2
2
 
3
3
  module OmniAI
4
4
  module Anthropic
5
- VERSION = "3.1.1"
5
+ VERSION = "3.2.1"
6
6
  end
7
7
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: omniai-anthropic
3
3
  version: !ruby/object:Gem::Version
4
- version: 3.1.1
4
+ version: 3.2.1
5
5
  platform: ruby
6
6
  authors:
7
7
  - Kevin Sylvestre