google-adk 0.1.0 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +1 -2
- data/lib/google/adk/agents/llm_agent.rb +28 -4
- data/lib/google/adk/agents/simple_llm_agent.rb +16 -1
- data/lib/google/adk/clients/anthropic_client.rb +237 -0
- data/lib/google/adk/clients/gemini_client.rb +2 -2
- data/lib/google/adk/clients/openrouter_client.rb +235 -0
- data/lib/google/adk/version.rb +1 -1
- data/lib/google/adk.rb +2 -0
- metadata +4 -2
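
The headline change in 0.2.0 is multi-provider model support: new `AnthropicClient` and `OpenRouterClient` classes, client auto-selection in `llm_agent.rb` and `simple_llm_agent.rb` based on the model name, and a `GOOGLE_API_KEY` fallback for `GeminiClient`. A minimal usage sketch against the new Anthropic client, based on the `generate_content` signature added in this release (the model string, prompt, and key handling are illustrative assumptions, not documented usage):

```ruby
require "google/adk"

# Assumes ENV["ANTHROPIC_API_KEY"] is set; otherwise the constructor raises ConfigurationError.
client = Google::ADK::AnthropicClient.new

response = client.generate_content(
  model: "claude-3-5-sonnet-20241022",   # DEFAULT_MODEL in the new client
  messages: [{ role: "user", content: "Say hello in one sentence." }],
  system_instruction: "You are a concise assistant."
)

# Responses are normalized to Gemini's candidates/content/parts shape,
# so existing agent code can consume them unchanged.
puts response.dig("candidates", 0, "content", "parts", 0, "text")
```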
checksums.yaml
CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 7339ca51f7b62eca3faef04fa0bc1270a1b2e2aa62e69450b7d24af003bee3e7
+  data.tar.gz: d7439e8f961885e1e65acc2300606ccf6c5ff0a7c04a5121d85eecf577f09024
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 45a536e4605ad80cc478b307150b90e0d12a1a5137a754735e4125451adbc4da4448331428ac5f61f71964359c3e77ed6644093354c65ec21885cc6deba593d9
+  data.tar.gz: 13269dd0e75c90145cb8b731a312a2338e6f31149dff3b10c75eb8f61007c6731b7b6ae3af8c3725b1c8f8c0ae016bcbe0d29a29c17005beddfe1842b593d5c5

data/README.md
CHANGED

@@ -3,7 +3,6 @@
 Ruby implementation of Google's Agent Development Kit for building AI agents.

 [](https://badge.fury.io/rb/google-adk)
-[](https://github.com/yourusername/google-adk-ruby/actions)

 > **⚠️ DISCLAIMER: This is an UNOFFICIAL Ruby port of Google's Agent Development Kit (ADK). This gem is not affiliated with, endorsed by, or maintained by Google. It is a community-driven implementation based on the public Python ADK repository. Use at your own risk.**

@@ -190,4 +189,4 @@ Bug reports and pull requests are welcome on GitHub at https://github.com/yourus

 ## License

-The gem is available as open source under the terms of the [MIT License](https://opensource.org/licenses/MIT).
+The gem is available as open source under the terms of the [MIT License](https://opensource.org/licenses/MIT).

data/lib/google/adk/agents/llm_agent.rb
CHANGED

@@ -5,6 +5,8 @@ require_relative "../tools/base_tool"
 require_relative "../tools/function_tool"
 require_relative "../tools/agent_tool"
 require_relative "../clients/gemini_client"
+require_relative "../clients/anthropic_client"
+require_relative "../clients/openrouter_client"

 module Google
   module ADK
@@ -119,8 +121,8 @@ module Google
      def run_async(message, context: nil)
        Enumerator.new do |yielder|
          begin
-           # Initialize
-           client =
+           # Initialize appropriate client based on model
+           client = create_client_for_model(canonical_model)

            # Build simple message for now
            messages = [{ role: "user", content: message }]
@@ -211,13 +213,21 @@ module Google

          rescue => e
            # Error handling
-           puts "[DEBUG]
+           puts "[DEBUG] LLM error: #{e.message}" if ENV["DEBUG"]
            puts "[DEBUG] Backtrace: #{e.backtrace.first(3).join(', ')}" if ENV["DEBUG"]

+           api_key_msg = if canonical_model.to_s.downcase.include?("openrouter") || ENV["USE_OPENROUTER"]
+                           "Please check your OPENROUTER_API_KEY."
+                         elsif canonical_model.to_s.downcase.include?("claude") || ENV["USE_ANTHROPIC"]
+                           "Please check your ANTHROPIC_API_KEY."
+                         else
+                           "Please check your GEMINI_API_KEY or GOOGLE_API_KEY."
+                         end
+
            event = Event.new(
              invocation_id: context&.invocation_id || "inv-#{SecureRandom.uuid}",
              author: @name,
-             content: "Error calling
+             content: "Error calling LLM API: #{e.message}. #{api_key_msg}"
            )
            yielder << event
            context&.add_event(event) if context
@@ -303,6 +313,20 @@ module Google
        end
      end

+     # Create appropriate client based on model name
+     #
+     # @param model [String] Model name
+     # @return [GeminiClient, AnthropicClient, OpenRouterClient] Appropriate client instance
+     def create_client_for_model(model)
+       if model.to_s.downcase.include?("openrouter") || ENV["USE_OPENROUTER"]
+         OpenRouterClient.new
+       elsif model.to_s.downcase.include?("claude") || ENV["USE_ANTHROPIC"]
+         AnthropicClient.new
+       else
+         GeminiClient.new
+       end
+     end
+
      # Extract a reasonable name from a callable
      #
      # @param callable [Proc, Method] Callable object

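For reference, the routing rule introduced by `create_client_for_model` above can be summarized in a small sketch. The model-name checks and environment variables come straight from the diff; forcing a provider via an environment variable, as shown, is an assumption about intended usage rather than documented behavior:

```ruby
# Routing added in llm_agent.rb (simple_llm_agent.rb uses the same pattern, minus OpenRouter):
#   model name contains "openrouter", or ENV["USE_OPENROUTER"] set -> OpenRouterClient
#   model name contains "claude",     or ENV["USE_ANTHROPIC"]  set -> AnthropicClient
#   anything else                                                  -> GeminiClient
ENV["USE_OPENROUTER"] = "1" # e.g. route agents through OpenRouter regardless of model name
```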
data/lib/google/adk/agents/simple_llm_agent.rb
CHANGED

@@ -1,6 +1,7 @@
 # frozen_string_literal: true

 require_relative "../clients/gemini_client"
+require_relative "../clients/anthropic_client"
 require "securerandom"

 module Google
@@ -14,7 +15,7 @@ module Google
        @name = name
        @instructions = instructions
        @tools = tools
-       @client =
+       @client = create_client_for_model(model)
      end

      # Simple synchronous call to Gemini
@@ -62,6 +63,20 @@ module Google
          end
        end
      end
+
+     private
+
+     # Create appropriate client based on model name
+     #
+     # @param model [String] Model name
+     # @return [GeminiClient, AnthropicClient] Appropriate client instance
+     def create_client_for_model(model)
+       if model.to_s.downcase.include?("claude") || ENV["USE_ANTHROPIC"]
+         AnthropicClient.new
+       else
+         GeminiClient.new
+       end
+     end
    end
  end
 end

data/lib/google/adk/clients/anthropic_client.rb
ADDED

@@ -0,0 +1,237 @@
+# frozen_string_literal: true
+
+require "faraday"
+require "json"
+require "securerandom"
+
+module Google
+  module ADK
+    # Client for interacting with Anthropic's Claude API
+    class AnthropicClient
+      API_BASE_URL = "https://api.anthropic.com"
+      API_VERSION = "2023-06-01"
+      DEFAULT_MODEL = "claude-3-5-sonnet-20241022"
+
+      attr_reader :api_key
+
+      def initialize(api_key: nil)
+        @api_key = api_key || ENV["ANTHROPIC_API_KEY"]
+        raise ConfigurationError, "ANTHROPIC_API_KEY not set" unless @api_key
+
+        @client = Faraday.new(API_BASE_URL) do |conn|
+          conn.request :json
+          conn.response :json
+          conn.adapter Faraday.default_adapter
+        end
+      end
+
+      # Generate content using Anthropic API
+      #
+      # @param model [String] Model name (e.g., "claude-3-5-sonnet-20241022")
+      # @param messages [Array<Hash>] Conversation messages
+      # @param tools [Array<Hash>] Available tools (optional)
+      # @param system_instruction [String] System instruction (optional)
+      # @return [Hash] API response formatted to match Gemini response structure
+      def generate_content(model:, messages:, tools: nil, system_instruction: nil)
+        url = "/v1/messages"
+
+        # Convert messages to Anthropic format
+        anthropic_messages = format_messages(messages)
+
+        # Adjust max_tokens based on model
+        max_tokens = case model
+                     when /haiku/
+                       4096
+                     when /sonnet/
+                       4096
+                     when /opus/
+                       4096
+                     else
+                       4096 # Safe default for Claude models
+                     end
+
+        payload = {
+          model: model || DEFAULT_MODEL,
+          messages: anthropic_messages,
+          max_tokens: max_tokens,
+          temperature: 0.7
+        }
+
+        # Add system instruction if provided
+        payload[:system] = system_instruction if system_instruction
+
+        # Add tools if provided
+        if tools && !tools.empty?
+          payload[:tools] = format_tools(tools)
+        end
+
+        response = @client.post(url) do |req|
+          req.headers["x-api-key"] = @api_key
+          req.headers["anthropic-version"] = API_VERSION
+          req.headers["content-type"] = "application/json"
+          req.body = payload
+        end
+
+        handle_response(response)
+      end
+
+      private
+
+      # Format messages for Anthropic API
+      def format_messages(messages)
+        @tool_id_map ||= {}
+
+        messages.map do |msg|
+          if msg[:parts]
+            # Handle Gemini-formatted messages
+            convert_gemini_message(msg)
+          else
+            # Simple text messages
+            {
+              role: normalize_role(msg[:role]),
+              content: msg[:content]
+            }
+          end
+        end
+      end
+
+      # Convert Gemini-formatted message to Anthropic format
+      def convert_gemini_message(msg)
+        role = msg[:role] == "model" ? "assistant" : normalize_role(msg[:role])
+
+        # Handle different part types
+        content = msg[:parts].map do |part|
+          # Handle both symbol and string keys
+          text = part[:text] || part["text"]
+          function_call = part[:functionCall] || part["functionCall"]
+          function_response = part[:functionResponse] || part["functionResponse"]
+
+          if text
+            # Text part
+            { type: "text", text: text }
+          elsif function_call
+            # Tool use request - ensure name is a string and handle nested keys
+            fc_name = function_call[:name] || function_call["name"]
+            fc_args = function_call[:args] || function_call["args"] || {}
+
+            # Generate and store tool ID
+            tool_id = "tool_#{SecureRandom.hex(8)}"
+            @tool_id_map[fc_name.to_s] = tool_id
+
+            {
+              type: "tool_use",
+              id: tool_id,
+              name: fc_name.to_s,
+              input: fc_args
+            }
+          elsif function_response
+            # Tool result
+            fr_name = function_response[:name] || function_response["name"]
+            fr_response = function_response[:response] || function_response["response"]
+
+            # Retrieve the tool ID for this function
+            tool_id = @tool_id_map[fr_name.to_s] || "tool_#{SecureRandom.hex(8)}"
+
+            {
+              type: "tool_result",
+              tool_use_id: tool_id,
+              content: JSON.generate(fr_response)
+            }
+          else
+            # Default to text
+            { type: "text", text: part.to_s }
+          end
+        end.flatten
+
+        # Anthropic expects content as array for complex messages, string for simple
+        content = content.first[:text] if content.length == 1 && content.first[:type] == "text"
+
+        { role: role, content: content }
+      end
+
+      # Normalize role names
+      def normalize_role(role)
+        case role.to_s
+        when "model", "assistant"
+          "assistant"
+        when "function", "tool"
+          "user" # Anthropic treats tool responses as user messages
+        else
+          "user"
+        end
+      end
+
+      # Format tools for Anthropic API
+      def format_tools(tools)
+        tools.map do |tool|
+          {
+            name: tool["name"],
+            description: tool["description"],
+            input_schema: tool["parameters"] || {
+              type: "object",
+              properties: {},
+              required: []
+            }
+          }
+        end
+      end
+
+      # Handle API response and convert to Gemini format
+      def handle_response(response)
+        case response.status
+        when 200
+          convert_to_gemini_format(response.body)
+        when 400
+          raise Error, "Bad request: #{response.body.dig('error', 'message') || response.body}"
+        when 401
+          raise ConfigurationError, "Invalid API key"
+        when 429
+          raise Error, "Rate limit exceeded"
+        else
+          raise Error, "API error (#{response.status}): #{response.body}"
+        end
+      end
+
+      # Convert Anthropic response to Gemini format for compatibility
+      def convert_to_gemini_format(anthropic_response)
+        # Build parts from content
+        parts = []
+
+        content = anthropic_response["content"]
+        content = [content] unless content.is_a?(Array)
+
+        content.each do |item|
+          if item.is_a?(String)
+            # Simple text response
+            parts << { "text" => item }
+          elsif item.is_a?(Hash)
+            case item["type"]
+            when "text"
+              parts << { "text" => item["text"] }
+            when "tool_use"
+              # Convert tool use to function call
+              parts << {
+                "functionCall" => {
+                  "name" => item["name"].to_s,
+                  "args" => item["input"]
+                }
+              }
+            end
+          end
+        end
+
+        # Format as Gemini response
+        {
+          "candidates" => [
+            {
+              "content" => {
+                "parts" => parts,
+                "role" => "model"
+              }
+            }
+          ]
+        }
+      end
+    end
+  end
+end

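To make the conversion above concrete, here is a sketch of the kind of Gemini-style history `format_messages` accepts and what it turns the tool-related parts into. The tool name, arguments, and response values are invented for illustration:

```ruby
# Input: Gemini-style messages, some with :parts
history = [
  { role: "user", content: "What's the weather in Paris?" },
  { role: "model", parts: [{ functionCall: { name: "get_weather", args: { city: "Paris" } } }] },
  { role: "function", parts: [{ functionResponse: { name: "get_weather", response: { temp_c: 18 } } }] }
]

# After format_messages:
# - the "model" turn becomes an "assistant" message containing a tool_use block
#   ({ type: "tool_use", id: "tool_<hex>", name: "get_weather", input: { city: "Paris" } })
# - the "function" turn becomes a "user" message containing a tool_result block
#   whose content is the JSON-encoded response, reusing the id stored in @tool_id_map
```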
data/lib/google/adk/clients/gemini_client.rb
CHANGED

@@ -12,8 +12,8 @@ module Google
      attr_reader :api_key

      def initialize(api_key: nil)
-       @api_key = api_key || ENV["GEMINI_API_KEY"]
-       raise ConfigurationError, "GEMINI_API_KEY not set" unless @api_key
+       @api_key = api_key || ENV["GEMINI_API_KEY"] || ENV["GOOGLE_API_KEY"]
+       raise ConfigurationError, "GEMINI_API_KEY or GOOGLE_API_KEY not set" unless @api_key

        @client = Faraday.new(API_BASE_URL) do |conn|
          conn.request :json

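A small consequence of this change: either environment variable now satisfies the constructor, with `GEMINI_API_KEY` checked first. A minimal sketch (the key value is a placeholder):

```ruby
require "google/adk"

# GOOGLE_API_KEY is accepted as a fallback when GEMINI_API_KEY is not set.
ENV["GOOGLE_API_KEY"] ||= "your-gemini-key" # placeholder value
client = Google::ADK::GeminiClient.new
```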
data/lib/google/adk/clients/openrouter_client.rb
ADDED

@@ -0,0 +1,235 @@
+# frozen_string_literal: true
+
+require "faraday"
+require "json"
+require "securerandom"
+
+module Google
+  module ADK
+    # Client for interacting with OpenRouter's OpenAI-compatible API
+    class OpenRouterClient
+      API_BASE_URL = "https://openrouter.ai/api/v1"
+      DEFAULT_MODEL = "openrouter/auto"
+
+      attr_reader :api_key
+
+      def initialize(api_key: nil)
+        @api_key = api_key || ENV["OPENROUTER_API_KEY"]
+        raise ConfigurationError, "OPENROUTER_API_KEY not set" unless @api_key
+
+        @client = Faraday.new(API_BASE_URL) do |conn|
+          conn.request :json
+          conn.response :json
+          conn.adapter Faraday.default_adapter
+        end
+      end
+
+      # Generate content using OpenRouter API
+      #
+      # @param model [String] Model name (e.g., "openrouter/auto", "anthropic/claude-3-haiku")
+      # @param messages [Array<Hash>] Conversation messages
+      # @param tools [Array<Hash>] Available tools (optional)
+      # @param system_instruction [String] System instruction (optional)
+      # @return [Hash] API response formatted to match Gemini response structure
+      def generate_content(model:, messages:, tools: nil, system_instruction: nil)
+        url = "/chat/completions"
+
+        # Convert messages to OpenAI format
+        openai_messages = format_messages(messages)
+
+        # Add system message if provided
+        if system_instruction
+          openai_messages.unshift({
+            role: "system",
+            content: system_instruction
+          })
+        end
+
+        payload = {
+          model: model || DEFAULT_MODEL,
+          messages: openai_messages,
+          max_tokens: 8192,
+          temperature: 0.7,
+          top_p: 0.95
+        }
+
+        # Add tools if provided
+        if tools && !tools.empty?
+          payload[:tools] = format_tools(tools)
+          payload[:tool_choice] = "auto"
+        end
+
+        response = @client.post(url) do |req|
+          req.headers["Authorization"] = "Bearer #{@api_key}"
+          req.headers["Content-Type"] = "application/json"
+          req.headers["HTTP-Referer"] = "https://github.com/google-adk"
+          req.headers["X-Title"] = "Google ADK Client"
+          req.body = payload
+        end
+
+        handle_response(response)
+      end
+
+      private
+
+      # Format messages for OpenRouter/OpenAI API
+      def format_messages(messages)
+        messages.map do |msg|
+          if msg[:parts]
+            # Handle Gemini-formatted messages
+            convert_gemini_message(msg)
+          else
+            # Simple text messages
+            {
+              role: normalize_role(msg[:role]),
+              content: msg[:content]
+            }
+          end
+        end.flatten.compact
+      end
+
+      # Convert Gemini-formatted message to OpenAI format
+      def convert_gemini_message(msg)
+        role = msg[:role] == "model" ? "assistant" : normalize_role(msg[:role])
+
+        # Handle different part types
+        msg[:parts].map do |part|
+          if part[:text]
+            # Text part
+            { role: role, content: part[:text] }
+          elsif part[:functionCall]
+            # Tool call - OpenAI format uses tool_calls array
+            {
+              role: "assistant",
+              content: nil,
+              tool_calls: [{
+                id: "call_#{SecureRandom.hex(8)}",
+                type: "function",
+                function: {
+                  name: part[:functionCall][:name],
+                  arguments: JSON.generate(part[:functionCall][:args] || {})
+                }
+              }]
+            }
+          elsif part[:functionResponse]
+            # Tool response
+            {
+              role: "tool",
+              content: JSON.generate(part[:functionResponse][:response]),
+              tool_call_id: "call_#{SecureRandom.hex(8)}" # Would need to track this properly
+            }
+          else
+            # Default to text
+            { role: role, content: part.to_s }
+          end
+        end
+      end
+
+      # Normalize role names
+      def normalize_role(role)
+        case role.to_s
+        when "model", "assistant"
+          "assistant"
+        when "function"
+          "tool"
+        else
+          role.to_s
+        end
+      end
+
+      # Format tools for OpenRouter/OpenAI API
+      def format_tools(tools)
+        tools.map do |tool|
+          {
+            type: "function",
+            function: {
+              name: tool["name"],
+              description: tool["description"],
+              parameters: tool["parameters"] || {
+                type: "object",
+                properties: {},
+                required: []
+              }
+            }
+          }
+        end
+      end
+
+      # Handle API response and convert to Gemini format
+      def handle_response(response)
+        case response.status
+        when 200
+          convert_to_gemini_format(response.body)
+        when 400
+          error_msg = response.body.dig("error", "message") || response.body["error"] || response.body
+          raise Error, "Bad request: #{error_msg}"
+        when 401
+          raise ConfigurationError, "Invalid API key"
+        when 402
+          raise Error, "Insufficient credits. Please add credits to your OpenRouter account."
+        when 429
+          raise Error, "Rate limit exceeded"
+        when 503
+          raise Error, "Model provider temporarily unavailable"
+        else
+          error_msg = response.body.dig("error", "message") || response.body["error"] || response.body
+          raise Error, "API error (#{response.status}): #{error_msg}"
+        end
+      end
+
+      # Convert OpenRouter/OpenAI response to Gemini format for compatibility
+      def convert_to_gemini_format(openai_response)
+        # Get the first choice (OpenAI returns array of choices)
+        choice = openai_response["choices"]&.first
+        return empty_response unless choice
+
+        message = choice["message"]
+        parts = []
+
+        # Handle text content
+        if message["content"]
+          parts << { "text" => message["content"] }
+        end
+
+        # Handle tool calls
+        if message["tool_calls"]
+          message["tool_calls"].each do |tool_call|
+            if tool_call["type"] == "function"
+              parts << {
+                "functionCall" => {
+                  "name" => tool_call["function"]["name"],
+                  "args" => JSON.parse(tool_call["function"]["arguments"])
+                }
+              }
+            end
+          end
+        end
+
+        # Format as Gemini response
+        {
+          "candidates" => [
+            {
+              "content" => {
+                "parts" => parts,
+                "role" => "model"
+              }
+            }
+          ]
+        }
+      end
+
+      def empty_response
+        {
+          "candidates" => [
+            {
+              "content" => {
+                "parts" => [{ "text" => "" }],
+                "role" => "model"
+              }
+            }
+          ]
+        }
+      end
+    end
+  end
+end

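As with the Anthropic client, OpenRouter responses are converted to the Gemini response shape before being returned. A minimal sketch of direct use, based on the signature above (the model id and prompt are illustrative; `OPENROUTER_API_KEY` must be set):

```ruby
require "google/adk"

client = Google::ADK::OpenRouterClient.new # reads ENV["OPENROUTER_API_KEY"]

response = client.generate_content(
  model: "anthropic/claude-3-haiku", # any OpenRouter model id; nil falls back to DEFAULT_MODEL ("openrouter/auto")
  messages: [{ role: "user", content: "Summarize the difference between 0.1.0 and 0.2.0." }]
)

puts response.dig("candidates", 0, "content", "parts", 0, "text")
```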
data/lib/google/adk/version.rb
CHANGED
data/lib/google/adk.rb
CHANGED

@@ -18,6 +18,8 @@ require_relative "adk/tools/base_tool"
 require_relative "adk/tools/function_tool"
 require_relative "adk/tools/agent_tool"
 require_relative "adk/clients/gemini_client"
+require_relative "adk/clients/anthropic_client"
+require_relative "adk/clients/openrouter_client"
 require_relative "adk/runner"

 module Google

metadata
CHANGED

@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: google-adk
 version: !ruby/object:Gem::Version
-  version: 0.
+  version: 0.2.0
 platform: ruby
 authors:
 - Landon Gray
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2025-12-
+date: 2025-12-31 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: async
@@ -213,7 +213,9 @@ files:
 - lib/google/adk/agents/workflow_agents/loop_agent.rb
 - lib/google/adk/agents/workflow_agents/parallel_agent.rb
 - lib/google/adk/agents/workflow_agents/sequential_agent.rb
+- lib/google/adk/clients/anthropic_client.rb
 - lib/google/adk/clients/gemini_client.rb
+- lib/google/adk/clients/openrouter_client.rb
 - lib/google/adk/context.rb
 - lib/google/adk/events.rb
 - lib/google/adk/runner.rb