omniai-anthropic 3.2.0 → 3.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/omniai/anthropic/chat.rb +28 -8
- data/lib/omniai/anthropic/computer.rb +1 -1
- data/lib/omniai/anthropic/version.rb +1 -1
- metadata +1 -1
checksums.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
---
|
|
2
2
|
SHA256:
|
|
3
|
-
metadata.gz:
|
|
4
|
-
data.tar.gz:
|
|
3
|
+
metadata.gz: acf0d7364e586047dfdad9d4e48892ceaaf18b641e7f9bdcbd43a383c03de6bb
|
|
4
|
+
data.tar.gz: cf134c06b43d88c1e407e4a86a6c5846e9a5322b8b00c9fd8499d1e47f0da74a
|
|
5
5
|
SHA512:
|
|
6
|
-
metadata.gz:
|
|
7
|
-
data.tar.gz:
|
|
6
|
+
metadata.gz: 7322cdbda4727494805ffe2d7dd9593bd21e6d828188b95a767a1194413df9a9cff86fdde3390490fceb7085f1c295ddbc8195c337edb0ad601f1b1cd13c357a
|
|
7
|
+
data.tar.gz: ca7fa497bba5bbf194e1b2d254700ef94498e4b2be7b0ef1ca64e2b6cd1c8b0ad1bc7f45f28a68f46a1edde38ad2ee68ee9f92b6b986cb16abc63974dfd9dc1f
|
|
@@ -45,12 +45,13 @@ module OmniAI
|
|
|
45
45
|
CLAUDE_OPUS_4_1 = "claude-opus-4-1"
|
|
46
46
|
CLAUDE_OPUS_4_5 = "claude-opus-4-5"
|
|
47
47
|
CLAUDE_OPUS_4_6 = "claude-opus-4-6"
|
|
48
|
+
CLAUDE_OPUS_4_7 = "claude-opus-4-7"
|
|
48
49
|
CLAUDE_SONNET_4_0 = "claude-sonnet-4-0"
|
|
49
50
|
CLAUDE_SONNET_4_5 = "claude-sonnet-4-5"
|
|
50
51
|
CLAUDE_SONNET_4_6 = "claude-sonnet-4-6"
|
|
51
52
|
|
|
52
53
|
CLAUDE_HAIKU = CLAUDE_HAIKU_4_5
|
|
53
|
-
CLAUDE_OPUS = CLAUDE_OPUS_4_6
|
|
54
|
+
CLAUDE_OPUS = CLAUDE_OPUS_4_7
|
|
54
55
|
CLAUDE_SONNET = CLAUDE_SONNET_4_6
|
|
55
56
|
end
|
|
56
57
|
|
|
@@ -100,10 +101,16 @@ module OmniAI
|
|
|
100
101
|
tools: tools_payload,
|
|
101
102
|
thinking: thinking_config,
|
|
102
103
|
})
|
|
103
|
-
.merge({ max_tokens
|
|
104
|
+
.merge({ max_tokens:, output_config: }.compact)
|
|
104
105
|
.compact
|
|
105
106
|
end
|
|
106
107
|
|
|
108
|
+
# Resolved max_tokens. Precedence: thinking floor (when budget set) > per-call kwarg.
|
|
109
|
+
# Returns nil when neither is set, so the config default flows through unchanged.
|
|
110
|
+
def max_tokens
|
|
111
|
+
thinking_max_tokens || @options[:max_tokens]
|
|
112
|
+
end
|
|
113
|
+
|
|
107
114
|
# Translates unified thinking option to Anthropic's native format.
|
|
108
115
|
# Example: `thinking: { budget_tokens: 10000 }` becomes `{ type: "enabled", budget_tokens: 10000 }`
|
|
109
116
|
# Example: `thinking: { effort: nil }` becomes `{ type: "adaptive" }` (Claude decides)
|
|
@@ -125,15 +132,28 @@ module OmniAI
|
|
|
125
132
|
end
|
|
126
133
|
end
|
|
127
134
|
|
|
128
|
-
#
|
|
135
|
+
# Adaptive thinking can consume any portion of max_tokens before emitting output.
|
|
136
|
+
# Without a floor, callers using adaptive with the legacy default (4096) silently
|
|
137
|
+
# get empty responses. This constant guarantees enough headroom for thinking + output.
|
|
138
|
+
ADAPTIVE_THINKING_MIN_TOKENS = 32_768
|
|
139
|
+
|
|
140
|
+
# Returns max_tokens ensuring enough headroom when thinking is in play.
|
|
141
|
+
# Enabled-mode path preserves the existing [base, budget+8000].max floor.
|
|
142
|
+
# Adaptive-mode path applies ADAPTIVE_THINKING_MIN_TOKENS as a safety floor.
|
|
129
143
|
# @return [Integer, nil]
|
|
130
144
|
def thinking_max_tokens
|
|
131
|
-
|
|
132
|
-
return unless budget
|
|
145
|
+
return unless thinking_config
|
|
133
146
|
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
147
|
+
case thinking_config[:type]
|
|
148
|
+
when "adaptive"
|
|
149
|
+
[@options[:max_tokens] || ADAPTIVE_THINKING_MIN_TOKENS, ADAPTIVE_THINKING_MIN_TOKENS].max
|
|
150
|
+
when "enabled"
|
|
151
|
+
budget = thinking_config[:budget_tokens]
|
|
152
|
+
return unless budget
|
|
153
|
+
|
|
154
|
+
base = @options[:max_tokens] || OmniAI::Anthropic.config.chat_options[:max_tokens] || 0
|
|
155
|
+
[base, budget + 8_000].max
|
|
156
|
+
end
|
|
137
157
|
end
|
|
138
158
|
|
|
139
159
|
# @return [Array<Hash>]
|
|
@@ -91,7 +91,7 @@ module OmniAI
|
|
|
91
91
|
# @param text [String] optional
|
|
92
92
|
#
|
|
93
93
|
# @return [Array<Hash>]
|
|
94
|
-
def perform(action:, text: nil, coordinate: nil)
|
|
94
|
+
def perform(action:, text: nil, coordinate: nil)
|
|
95
95
|
case action
|
|
96
96
|
when Action::KEY then key(text:)
|
|
97
97
|
when Action::TYPE then type(text:)
|