smart_prompt 0.4.0 → 0.4.1
- checksums.yaml +4 -4
- data/CHANGELOG.md +8 -0
- data/lib/smart_prompt/anthropic_adapter.rb +254 -0
- data/lib/smart_prompt/version.rb +1 -1
- metadata +2 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 729a2b8524be928407172c7e12f14dc6bebc97f9fdf4e1f3ff35f9e7791a5f11
+  data.tar.gz: a8fa95d01d5c5372bb669d67f5dca337db77c7e8444ddb166ac579318cf38e8d
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: ab384181eba6b17a53505110cc3500c35d5774bbe08d4ea0d7cc367281dcd22c1f45ed0eff367e22aeac382d6f8d29efb20e328f6e07e0b8c64f5e2c42368029
+  data.tar.gz: 62cbb97e5794f77fb467fdef9b5e6403450e5ed7332ca6f0044c967c16a193c70a334a2bd95897af79605ae6d51be8b2317e3011b70cc923dd859cd8b951fd57
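These digests cover the metadata.gz and data.tar.gz entries inside the .gem archive. A minimal sketch of recomputing the SHA256 entries from a downloaded gem, assuming the file is named smart_prompt-0.4.1.gem:

    require "digest"
    require "rubygems/package"

    # A .gem file is a plain tar archive whose entries include metadata.gz
    # and data.tar.gz; hash each entry and compare against checksums.yaml.
    File.open("smart_prompt-0.4.1.gem", "rb") do |io|
      Gem::Package::TarReader.new(io) do |tar|
        tar.each do |entry|
          next unless %w[metadata.gz data.tar.gz].include?(entry.full_name)
          puts "#{entry.full_name}: #{Digest::SHA256.hexdigest(entry.read)}"
        end
      end
    end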
data/CHANGELOG.md
CHANGED
@@ -5,6 +5,14 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [0.4.1] - 2026-04-22
+### Fixed
+- Re-release package with `lib/smart_prompt/anthropic_adapter.rb`, which is required by the gem entrypoint.
+
+## [0.4.0] - 2026-04-22
+### Added
+- Anthropic adapter support.
+
 ## [0.3.6] - 2026-04-08
 ### Changed
 - Bumped `ruby-openai` dependency from `8.1.0` to `8.3.0`
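With the adapter file actually shipped, the gem entrypoint can load it and the class in the diff below becomes usable. A minimal sketch of driving the adapter directly, assuming LLMAdapter#initialize stores the config hash in @config (as the subclass code implies) and using a placeholder model id:

    require "smart_prompt"

    # Config keys mirror the ones the adapter reads; the ENV[...] string
    # form is resolved to the environment variable by resolve_api_key.
    adapter = SmartPrompt::AnthropicAdapter.new(
      "api_key"    => 'ENV["ANTHROPIC_API_KEY"]',
      "model"      => "claude-3-5-sonnet-latest", # placeholder model id
      "max_tokens" => 1024
    )

    reply = adapter.send_request(
      [
        { "role" => "system", "content" => "Answer in one sentence." },
        { "role" => "user",   "content" => "What is a gem?" }
      ]
    )
    puts reply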
data/lib/smart_prompt/anthropic_adapter.rb
ADDED
@@ -0,0 +1,254 @@
+require "net/http"
+require "json"
+require "uri"
+
+module SmartPrompt
+  class AnthropicAdapter < LLMAdapter
+    DEFAULT_URL = "https://api.anthropic.com"
+    DEFAULT_VERSION = "2023-06-01"
+    DEFAULT_MAX_TOKENS = 4096
+
+    def initialize(config)
+      super
+      @api_key = resolve_api_key(@config["api_key"]) || ENV["ANTHROPIC_API_KEY"]
+      @url = (@config["url"] || DEFAULT_URL).chomp("/")
+      @anthropic_version = @config["anthropic_version"] || DEFAULT_VERSION
+      @request_timeout = @config["request_timeout"] || 240
+
+      raise LLMAPIError, "Invalid Anthropic configuration: missing api_key" if @api_key.nil? || @api_key.empty?
+
+      @messages_uri = URI("#{@url}/v1/messages")
+      SmartPrompt.logger.info "Successful creation an Anthropic client."
+    rescue URI::InvalidURIError => e
+      SmartPrompt.logger.error "Failed to initialize Anthropic client: #{e.message}"
+      raise LLMAPIError, "Invalid Anthropic configuration: #{e.message}"
+    rescue LLMAPIError
+      raise
+    rescue => e
+      SmartPrompt.logger.error "Failed to initialize Anthropic client: #{e.message}"
+      raise Error, "Unexpected error initializing Anthropic client: #{e.message}"
+    end
+
+    def send_request(messages, model = nil, temperature = 0.7, tools = nil, proc = nil)
+      SmartPrompt.logger.info "AnthropicAdapter: Sending request to Anthropic"
+      temperature = 0.7 if temperature.nil?
+      model_name = model || @config["model"]
+      SmartPrompt.logger.info "AnthropicAdapter: Using model #{model_name}"
+
+      parameters = build_parameters(messages, model_name, temperature, tools, !proc.nil?)
+      SmartPrompt.logger.info "Send parameters is: #{parameters}"
+
+      response = post_messages(parameters, proc)
+      SmartPrompt.logger.info "AnthropicAdapter: Received response from Anthropic"
+
+      return if proc
+
+      @last_response = response
+      extract_content(response)
+    rescue JSON::ParserError
+      SmartPrompt.logger.error "Failed to parse Anthropic API response"
+      raise LLMAPIError, "Failed to parse Anthropic API response"
+    rescue LLMAPIError
+      raise
+    rescue => e
+      SmartPrompt.logger.error "Unexpected error during Anthropic request: #{e.message}"
+      raise Error, "Unexpected error during Anthropic request: #{e.message}"
+    ensure
+      SmartPrompt.logger.info "Successful send a message"
+    end
+
+    private
+
+    def resolve_api_key(api_key)
+      return api_key unless api_key.is_a?(String)
+
+      match = api_key.match(/\AENV\[(["']?)([A-Za-z_][A-Za-z0-9_]*)\1\]\z/)
+      return ENV[match[2]] if match
+
+      api_key
+    end
+
+    def build_parameters(messages, model_name, temperature, tools, stream)
+      anthropic_messages, system = normalize_messages(messages)
+      parameters = {
+        model: model_name,
+        messages: anthropic_messages,
+        max_tokens: @config["max_tokens"] || @config["max_completion_tokens"] || DEFAULT_MAX_TOKENS,
+        temperature: @config["temperature"] || temperature,
+      }
+      parameters[:system] = system unless system.empty?
+      parameters[:tools] = normalize_tools(tools) if tools
+      parameters[:stream] = true if stream
+      parameters
+    end
+
+    def normalize_messages(messages)
+      system_messages = []
+      anthropic_messages = []
+
+      messages.each do |message|
+        role = message["role"] || message[:role]
+        content = message["content"] || message[:content]
+
+        case role.to_s
+        when "system"
+          system_messages << content.to_s
+        when "user", "assistant"
+          anthropic_messages << {
+            role: role.to_s,
+            content: normalize_content(content),
+          }
+        when "tool"
+          anthropic_messages << {
+            role: "user",
+            content: normalize_tool_result(message),
+          }
+        else
+          anthropic_messages << {
+            role: "user",
+            content: normalize_content(content),
+          }
+        end
+      end
+
+      [anthropic_messages, system_messages.join("\n\n")]
+    end
+
+    def normalize_content(content)
+      return content if content.is_a?(Array)
+
+      content.to_s
+    end
+
+    def normalize_tool_result(message)
+      tool_use_id = message["tool_call_id"] || message[:tool_call_id]
+      content = message["content"] || message[:content]
+
+      [{
+        type: "tool_result",
+        tool_use_id: tool_use_id.to_s,
+        content: content.to_s,
+      }]
+    end
+
+    def normalize_tools(tools)
+      tools.map do |tool|
+        function = tool["function"] || tool[:function] || tool
+        {
+          name: function["name"] || function[:name],
+          description: function["description"] || function[:description],
+          input_schema: function["parameters"] || function[:parameters] || {},
+        }
+      end
+    end
+
+    def post_messages(parameters, stream_proc)
+      http = Net::HTTP.new(@messages_uri.host, @messages_uri.port)
+      http.use_ssl = @messages_uri.scheme == "https"
+      http.read_timeout = @request_timeout
+      http.open_timeout = @request_timeout
+
+      request = Net::HTTP::Post.new(@messages_uri)
+      request["Content-Type"] = "application/json"
+      request["x-api-key"] = @api_key
+      request["anthropic-version"] = @anthropic_version
+      request.body = JSON.generate(parameters)
+
+      if stream_proc
+        handle_streaming_response(http, request, stream_proc)
+      else
+        handle_response(http.request(request))
+      end
+    rescue SocketError => e
+      SmartPrompt.logger.error "Failed to connect to Anthropic API: #{e.message}"
+      raise LLMAPIError, "Network error: Unable to connect to Anthropic API"
+    rescue Net::OpenTimeout, Net::ReadTimeout
+      SmartPrompt.logger.error "Request to Anthropic API timed out"
+      raise LLMAPIError, "Request to Anthropic API timed out"
+    end
+
+    def handle_response(response)
+      body = JSON.parse(response.body)
+      return body if response.is_a?(Net::HTTPSuccess)
+
+      message = body.dig("error", "message") || response.message
+      SmartPrompt.logger.error "Anthropic API error: #{message}"
+      raise LLMAPIError, "Anthropic API error: #{message}"
+    end
+
+    def handle_streaming_response(http, request, stream_proc)
+      accumulated_response = nil
+
+      http.request(request) do |response|
+        unless response.is_a?(Net::HTTPSuccess)
+          body = response.body.to_s.empty? ? {} : JSON.parse(response.body)
+          message = body.dig("error", "message") || response.message
+          SmartPrompt.logger.error "Anthropic API error: #{message}"
+          raise LLMAPIError, "Anthropic API error: #{message}"
+        end
+
+        response.read_body do |chunk|
+          chunk.each_line do |line|
+            next unless line.start_with?("data:")
+
+            data = line.delete_prefix("data:").strip
+            next if data.empty?
+
+            event = JSON.parse(data)
+            accumulated_response = event if event["type"] == "message_start"
+            stream_proc.call(openai_stream_chunk(event), chunk.bytesize)
+          end
+        end
+      end
+
+      accumulated_response
+    end
+
+    def openai_stream_chunk(event)
+      case event["type"]
+      when "message_start"
+        message = event["message"] || {}
+        {
+          "id" => message["id"],
+          "object" => "chat.completion.chunk",
+          "created" => Time.now.to_i,
+          "model" => message["model"],
+          "choices" => [{
+            "index" => 0,
+            "delta" => {},
+          }],
+          "usage" => message["usage"],
+        }
+      when "content_block_delta"
+        {
+          "choices" => [{
+            "index" => 0,
+            "delta" => {
+              "content" => event.dig("delta", "text").to_s,
+            },
+          }],
+        }
+      else
+        {
+          "choices" => [{
+            "index" => 0,
+            "delta" => {},
+          }],
+        }
+      end
+    end
+
+    def extract_content(response)
+      response.fetch("content", []).map do |block|
+        case block["type"]
+        when "text"
+          block["text"].to_s
+        when "tool_use"
+          block.to_s
+        else
+          block.to_s
+        end
+      end.join
+    end
+  end
+end
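Because openai_stream_chunk reshapes Anthropic SSE events into OpenAI-style chat.completion.chunk hashes, a streaming caller can keep OpenAI-style delta handling. A sketch, reusing the adapter constructed above; note that send_request returns nil when a proc is passed:

    # Text deltas arrive under choices[0]["delta"]["content"]; message_start
    # and other event types carry an empty delta, so print emits nothing.
    printer = proc do |chunk, _bytesize|
      print chunk.dig("choices", 0, "delta", "content")
    end

    adapter.send_request(
      [{ "role" => "user", "content" => "Stream a haiku." }],
      nil,     # model: fall back to @config["model"]
      0.7,     # temperature
      nil,     # tools
      printer  # streaming callback
    )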
data/lib/smart_prompt/version.rb
CHANGED
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: smart_prompt
 version: !ruby/object:Gem::Version
-  version: 0.4.0
+  version: 0.4.1
 platform: ruby
 authors:
 - zhuang biaowei
@@ -107,6 +107,7 @@ files:
 - README.md
 - Rakefile
 - lib/smart_prompt.rb
+- lib/smart_prompt/anthropic_adapter.rb
 - lib/smart_prompt/api_handler.rb
 - lib/smart_prompt/conversation.rb
 - lib/smart_prompt/db_adapter.rb