ruby_llm-mcp 0.2.1 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 49b1593de3f38afce9d3002a696c7034c2bea82711742ea4bd96a5f155f113e3
-  data.tar.gz: 6fb3c4b87bc82684bb9ef093b70b13eea91e759f5ee44a08b4be543cc45d30d3
+  metadata.gz: c414a0306ebefcb35dcb599406a0092e554e1389aebfa812597e7072e304cf88
+  data.tar.gz: 25b384446ad37b3422bf64e1f9b15413bd8cd64636dbf50ad8ae861e4dc4589d
 SHA512:
-  metadata.gz: 6950867fde16315dba705049ec2d23cc89f3977808d323af20b5bf343b2453228145190e183bfd6350b95df856ae96bf4f60a83145eb0137dad0003454c889d3
-  data.tar.gz: 1ed01c5ec46651c1b420971a429299e60b59707255403f1a167c8832cfde9ee5ba1a5ed00ba1edea8f2bf710006f1d605896781c329f02f5ca056642fe246100
+  metadata.gz: abbca445c4ecb2ee7a3f2af4457f47aff5337ac981426746184cb0cb63e699b606116d5db983d8d4183557896675743a14bada2af4dd3d54d85066183fe34653
+  data.tar.gz: d8007f3ee983eb3a8e2e76ad2b0daa8ead92bc6347e3352d8d569fab770056026a29237a76182c137459663b54cb4b364df93fb7e968a2d783d1c96fe98f4774
data/README.md CHANGED
@@ -12,13 +12,16 @@ This project is a Ruby client for the [Model Context Protocol (MCP)](https://mod
 - 🛠️ **Tool Integration**: Automatically converts MCP tools into RubyLLM-compatible tools
 - 📄 **Resource Management**: Access and include MCP resources (files, data) and resource templates in conversations
 - 🎯 **Prompt Integration**: Use predefined MCP prompts with arguments for consistent interactions
-- 🔄 **Real-time Communication**: Efficient bidirectional communication with MCP servers
 - 🎨 **Enhanced Chat Interface**: Extended RubyLLM chat methods for seamless MCP integration
 - 📚 **Simple API**: Easy-to-use interface that integrates seamlessly with RubyLLM
 
 ## Installation
 
-Add this line to your application's Gemfile:
+```bash
+bundle add ruby_llm-mcp
+```
+
+or add this line to your application's Gemfile:
 
 ```ruby
 gem 'ruby_llm-mcp'
@@ -152,12 +155,12 @@ MCP servers can provide access to resources - structured data that can be includ
 # Get available resources from the MCP server
 resources = client.resources
 puts "Available resources:"
-resources.each do |name, resource|
-  puts "- #{name}: #{resource.description}"
+resources.each do |resource|
+  puts "- #{resource.name}: #{resource.description}"
 end
 
-# Access a specific resource
-file_resource = resources["project_readme"]
+# Access a specific resource by name
+file_resource = client.resource("project_readme")
 content = file_resource.content
 puts "Resource content: #{content}"
 
@@ -179,11 +182,11 @@ Resource templates are parameterized resources that can be dynamically configure
 ```ruby
 # Get available resource templates
 templates = client.resource_templates
-log_template = templates["application_logs"]
+log_template = client.resource_template("application_logs")
 
 # Use a template with parameters
 chat = RubyLLM.chat(model: "gpt-4")
-chat.with_resource(log_template, arguments: {
+chat.with_resource_template(log_template, arguments: {
   date: "2024-01-15",
   level: "error"
 })
@@ -192,7 +195,7 @@ response = chat.ask("What errors occurred on this date?")
 puts response
 
 # You can also get templated content directly
-content = log_template.content(arguments: {
+content = log_template.to_content(arguments: {
   date: "2024-01-15",
   level: "error"
 })
@@ -204,12 +207,12 @@ puts content
 For resource templates, you can get suggested values for arguments:
 
 ```ruby
-template = client.resource_templates["user_profile"]
+template = client.resource_template("user_profile")
 
 # Search for possible values for a specific argument
-suggestions = template.arguments_search("username", "john")
+suggestions = template.complete("username", "john")
 puts "Suggested usernames:"
-suggestions.arg_values.each do |value|
+suggestions.values.each do |value|
   puts "- #{value}"
 end
 puts "Total matches: #{suggestions.total}"
@@ -224,15 +227,15 @@ MCP servers can provide predefined prompts that can be used in conversations:
 # Get available prompts from the MCP server
 prompts = client.prompts
 puts "Available prompts:"
-prompts.each do |name, prompt|
-  puts "- #{name}: #{prompt.description}"
+prompts.each do |prompt|
+  puts "- #{prompt.name}: #{prompt.description}"
   prompt.arguments.each do |arg|
     puts " - #{arg.name}: #{arg.description} (required: #{arg.required})"
   end
 end
 
 # Use a prompt in a conversation
-greeting_prompt = prompts["daily_greeting"]
+greeting_prompt = client.prompt("daily_greeting")
 chat = RubyLLM.chat(model: "gpt-4")
 
 # Method 1: Ask prompt directly
@@ -261,15 +264,15 @@ chat = RubyLLM.chat(model: "gpt-4")
 chat.with_tools(*client.tools)
 
 # Add resources for context
-chat.with_resource(client.resources["project_structure"])
+chat.with_resource(client.resource("project_structure"))
 chat.with_resource(
-  client.resource_templates["recent_commits"],
+  client.resource_template("recent_commits"),
   arguments: { days: 7 }
 )
 
 # Add prompts for guidance
 chat.with_prompt(
-  client.prompts["code_review_checklist"],
+  client.prompt("code_review_checklist"),
   arguments: { focus: "security" }
 )
 
@@ -278,6 +281,88 @@ response = chat.ask("Please review the recent commits using the checklist and su
 puts response
 ```
 
+## Argument Completion
+
+Some MCP servers support argument completion for prompts and resource templates:
+
+```ruby
+# For prompts
+prompt = client.prompt("user_search")
+suggestions = prompt.complete("username", "jo")
+puts "Suggestions: #{suggestions.values}" # ["john", "joanna", "joseph"]
+
+# For resource templates
+template = client.resource_template("user_logs")
+suggestions = template.complete("user_id", "123")
+puts "Total matches: #{suggestions.total}"
+puts "Has more results: #{suggestions.has_more}"
+```
+
+## Additional Chat Methods
+
+The gem extends RubyLLM's chat interface with convenient methods for MCP integration:
+
+```ruby
+chat = RubyLLM.chat(model: "gpt-4")
+
+# Add a single resource
+chat.with_resource(resource)
+
+# Add multiple resources
+chat.with_resources(resource1, resource2, resource3)
+
+# Add a resource template with arguments
+chat.with_resource_template(resource_template, arguments: { key: "value" })
+
+# Add a prompt with arguments
+chat.with_prompt(prompt, arguments: { name: "Alice" })
+
+# Ask using a prompt directly
+response = chat.ask_prompt(prompt, arguments: { name: "Alice" })
+```
+
+## Client Lifecycle Management
+
+You can manage the MCP client connection lifecycle:
+
+```ruby
+client = RubyLLM::MCP.client(name: "my-server", transport_type: :stdio, start: false, config: {...})
+
+# Manually start the connection
+client.start
+
+# Check if connection is alive
+puts client.alive?
+
+# Restart the connection
+client.restart!
+
+# Stop the connection
+client.stop
+```
+
+## Refreshing Cached Data
+
+The client caches the tools, resources, prompts, and resource templates list calls to reduce round trips to the MCP server. You can refresh this cache:
+
+```ruby
+# Refresh all cached tools
+tools = client.tools(refresh: true)
+
+# Refresh a specific tool
+tool = client.tool("search_files", refresh: true)
+
+# Same pattern works for resources, prompts, and resource templates
+resources = client.resources(refresh: true)
+prompts = client.prompts(refresh: true)
+templates = client.resource_templates(refresh: true)
+
+# Or refresh specific items
+resource = client.resource("project_readme", refresh: true)
+prompt = client.prompt("daily_greeting", refresh: true)
+template = client.resource_template("user_logs", refresh: true)
+```
+
 ## Transport Types
 
 ### SSE (Server-Sent Events)
@@ -289,7 +374,8 @@ client = RubyLLM::MCP.client(
   name: "web-mcp-server",
   transport_type: :sse,
   config: {
-    url: "https://your-mcp-server.com/mcp/sse"
+    url: "https://your-mcp-server.com/mcp/sse",
+    headers: { "Authorization" => "Bearer your-token" }
   }
 )
 ```
@@ -329,9 +415,10 @@ client = RubyLLM::MCP.client(
 
 - `name`: A unique identifier for your MCP client
 - `transport_type`: Either `:sse`, `:streamable`, or `:stdio`
+- `start`: Whether to automatically start the connection (default: true)
 - `request_timeout`: Timeout for requests in milliseconds (default: 8000)
 - `config`: Transport-specific configuration
-  - For SSE: `{ url: "http://..." }`
+  - For SSE: `{ url: "http://...", headers: {...} }`
   - For Streamable: `{ url: "http://...", headers: {...} }`
   - For stdio: `{ command: "...", args: [...], env: {...} }`
 
@@ -339,13 +426,16 @@ client = RubyLLM::MCP.client(
 
 After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake spec` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.
 
-To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and the created tag, and push the `.gem` file to [rubygems.org](https://rubygems.org).
+To install this gem onto your local machine, run `bundle exec rake install`. Run `bundle exec rake` to run the specs and linters. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and the created tag, and push the `.gem` file to [rubygems.org](https://rubygems.org).
 
 ## Examples
 
 Check out the `examples/` directory for more detailed usage examples:
 
-- `examples/test_local_mcp.rb` - Complete example with SSE transport
+- `examples/tools/local_mcp.rb` - Complete example with stdio transport
+- `examples/tools/sse_mcp_with_gpt.rb` - Example using SSE transport with GPT
+- `examples/resources/list_resources.rb` - Example of listing and using resources
+- `examples/prompts/streamable_prompt_call.rb` - Example of using prompts with streamable transport
 
 ## Contributing
 
data/lib/ruby_llm/chat.rb CHANGED
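The change below splits the old `with_resource(resource, **args)` chat helper in two: plain resources attach directly, while resource templates are resolved through `fetch_resource` before being included. A short usage sketch of the resulting API, reusing the client and resource names from the README examples above (they are illustrative, not required names):

```ruby
chat = RubyLLM.chat(model: "gpt-4")

# Plain resources no longer take extra arguments:
chat.with_resource(client.resource("project_readme"))

# Parameterized templates go through the new method, which fetches the
# concrete resource for the given arguments and then attaches it:
chat.with_resource_template(
  client.resource_template("application_logs"),
  arguments: { date: "2024-01-15", level: "error" }
)
```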
@@ -10,8 +10,14 @@ module RubyLLM
       self
     end
 
-    def with_resource(resource, **args)
-      resource.include(self, **args)
+    def with_resource(resource)
+      resource.include(self)
+      self
+    end
+
+    def with_resource_template(resource_template, arguments: {})
+      resource = resource_template.fetch_resource(arguments: arguments)
+      resource.include(self)
       self
     end
 
@@ -3,9 +3,9 @@
 module RubyLLM
   module MCP
     class Capabilities
-      attr_reader :capabilities
+      attr_accessor :capabilities
 
-      def initialize(capabilities)
+      def initialize(capabilities = {})
         @capabilities = capabilities
       end
 
@@ -22,7 +22,7 @@ module RubyLLM
       end
 
       def completion?
-        @capabilities["completion"].present?
+        !@capabilities["completions"].nil?
       end
     end
   end
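The capability check above now tests for a `"completions"` entry instead of `"completion"` and drops the ActiveSupport-only `present?` call, while the new default argument allows building an empty `Capabilities` object. A minimal sketch of the resulting behavior, assuming a typical parsed `initialize` result (the exact hash contents are illustrative):

```ruby
# Server advertises completion support:
caps = RubyLLM::MCP::Capabilities.new({ "completions" => {}, "tools" => {} })
caps.completion? # => true

# No capabilities given at all still works now:
RubyLLM::MCP::Capabilities.new.completion? # => false
```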
@@ -9,55 +9,113 @@ module RubyLLM
       attr_reader :name, :config, :transport_type, :transport, :request_timeout, :reverse_proxy_url, :protocol_version,
                   :capabilities
 
-      def initialize(name:, transport_type:, request_timeout: 8000, reverse_proxy_url: nil, config: {})
+      def initialize(name:, transport_type:, start: true, request_timeout: 8000, reverse_proxy_url: nil, config: {}) # rubocop:disable Metrics/ParameterLists
         @name = name
         @config = config
         @protocol_version = PROTOCOL_VERSION
         @headers = config[:headers] || {}
 
         @transport_type = transport_type.to_sym
+        @transport = nil
 
+        @capabilities = nil
+
+        @request_timeout = request_timeout
+        @reverse_proxy_url = reverse_proxy_url
+
+        if start
+          self.start
+        end
+      end
+
+      def request(body, **options)
+        @transport.request(body, **options)
+      end
+
+      def start
         case @transport_type
         when :sse
-          @transport = RubyLLM::MCP::Transport::SSE.new(@config[:url], headers: @headers)
+          @transport = RubyLLM::MCP::Transport::SSE.new(@config[:url], request_timeout: @request_timeout,
+                                                        headers: @headers)
         when :stdio
-          @transport = RubyLLM::MCP::Transport::Stdio.new(@config[:command], args: @config[:args], env: @config[:env])
+          @transport = RubyLLM::MCP::Transport::Stdio.new(@config[:command], request_timeout: @request_timeout,
+                                                          args: @config[:args], env: @config[:env])
         when :streamable
-          @transport = RubyLLM::MCP::Transport::Streamable.new(@config[:url], headers: @headers)
+          @transport = RubyLLM::MCP::Transport::Streamable.new(@config[:url], request_timeout: @request_timeout,
+                                                               headers: @headers)
         else
           raise "Invalid transport type: #{transport_type}"
         end
-        @capabilities = nil
 
-        @request_timeout = request_timeout
-        @reverse_proxy_url = reverse_proxy_url
-
-        initialize_request
+        @initialize_response = initialize_request
+        @capabilities = RubyLLM::MCP::Capabilities.new(@initialize_response["result"]["capabilities"])
         notification_request
       end
 
-      def request(body, **options)
-        @transport.request(body, **options)
+      def stop
+        @transport&.close
+        @transport = nil
+      end
+
+      def restart!
+        stop
+        start
+      end
+
+      def alive?
+        !!@transport&.alive?
       end
 
       def tools(refresh: false)
         @tools = nil if refresh
         @tools ||= fetch_and_create_tools
+        @tools.values
+      end
+
+      def tool(name, refresh: false)
+        @tools = nil if refresh
+        @tools ||= fetch_and_create_tools
+
+        @tools[name]
       end
 
       def resources(refresh: false)
         @resources = nil if refresh
         @resources ||= fetch_and_create_resources
+        @resources.values
+      end
+
+      def resource(name, refresh: false)
+        @resources = nil if refresh
+        @resources ||= fetch_and_create_resources
+
+        @resources[name]
       end
 
       def resource_templates(refresh: false)
         @resource_templates = nil if refresh
-        @resource_templates ||= fetch_and_create_resources(set_as_template: true)
+        @resource_templates ||= fetch_and_create_resource_templates
+        @resource_templates.values
+      end
+
+      def resource_template(name, refresh: false)
+        @resource_templates = nil if refresh
+        @resource_templates ||= fetch_and_create_resource_templates
+
+        @resource_templates[name]
       end
 
       def prompts(refresh: false)
         @prompts = nil if refresh
         @prompts ||= fetch_and_create_prompts
+        @prompts.values
+      end
+
+      def prompt(name, refresh: false)
+        @prompts = nil if refresh
+        @prompts ||= fetch_and_create_prompts
+
+        @prompts[name]
       end
 
       def execute_tool(**args)
@@ -68,8 +126,12 @@ module RubyLLM
         RubyLLM::MCP::Requests::ResourceRead.new(self, **args).call
       end
 
-      def completion(**args)
-        RubyLLM::MCP::Requests::Completion.new(self, **args).call
+      def completion_resource(**args)
+        RubyLLM::MCP::Requests::CompletionResource.new(self, **args).call
+      end
+
+      def completion_prompt(**args)
+        RubyLLM::MCP::Requests::CompletionPrompt.new(self, **args).call
       end
 
       def execute_prompt(**args)
@@ -79,8 +141,7 @@ module RubyLLM
       private
 
       def initialize_request
-        @initialize_response = RubyLLM::MCP::Requests::Initialization.new(self).call
-        @capabilities = RubyLLM::MCP::Capabilities.new(@initialize_response["result"]["capabilities"])
+        RubyLLM::MCP::Requests::Initialization.new(self).call
       end
 
       def notification_request
@@ -107,24 +168,41 @@ module RubyLLM
         tools_response = tool_list_request
         tools_response = tools_response["result"]["tools"]
 
-        @tools = tools_response.map do |tool|
-          RubyLLM::MCP::Tool.new(self, tool)
+        tools = {}
+        tools_response.each do |tool|
+          new_tool = RubyLLM::MCP::Tool.new(self, tool)
+          tools[new_tool.name] = new_tool
         end
+
+        tools
       end
 
-      def fetch_and_create_resources(set_as_template: false)
+      def fetch_and_create_resources
         resources_response = resources_list_request
         resources_response = resources_response["result"]["resources"]
 
         resources = {}
         resources_response.each do |resource|
-          new_resource = RubyLLM::MCP::Resource.new(self, resource, template: set_as_template)
+          new_resource = RubyLLM::MCP::Resource.new(self, resource)
           resources[new_resource.name] = new_resource
         end
 
         resources
       end
 
+      def fetch_and_create_resource_templates
+        resource_templates_response = resource_template_list_request
+        resource_templates_response = resource_templates_response["result"]["resourceTemplates"]
+
+        resource_templates = {}
+        resource_templates_response.each do |resource_template|
+          new_resource_template = RubyLLM::MCP::ResourceTemplate.new(self, resource_template)
+          resource_templates[new_resource_template.name] = new_resource_template
+        end
+
+        resource_templates
+      end
+
       def fetch_and_create_prompts
         prompts_response = prompt_list_request
         prompts_response = prompts_response["result"]["prompts"]
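Taken together, the client changes above cache each list as a hash keyed by name, return the values from the plural readers, add singular by-name lookups, and move the connection handshake into an explicit `start`. A brief sketch of the resulting call patterns (the server command and the `"search_files"` tool name are illustrative placeholders):

```ruby
client = RubyLLM::MCP.client(
  name: "my-server",
  transport_type: :stdio,
  start: false, # new flag: skip connecting inside the constructor
  config: { command: "node", args: ["server.js"], env: {} }
)

client.start                               # initialize request + notification
client.tools.map(&:name)                   # plural readers return cached values as an array
client.tool("search_files")                # singular readers look an item up by name
client.tool("search_files", refresh: true) # refresh: true discards the cache and re-fetches
client.restart! unless client.alive?
client.stop
```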
@@ -3,7 +3,17 @@
 module RubyLLM
   module MCP
     class Parameter < RubyLLM::Parameter
-      attr_accessor :items, :properties
+      attr_accessor :items, :properties, :enum, :union_type
+
+      def initialize(name, type: "string", desc: nil, required: true, union_type: nil)
+        super(name, type: type.to_sym, desc: desc, required: required)
+        @properties = {}
+        @union_type = union_type
+      end
+
+      def item_type
+        @items["type"].to_sym
+      end
     end
   end
 end
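`Parameter` now tracks `enum` values and a `union_type` alongside `items` and `properties`, and derives `item_type` from the raw JSON-schema `items` hash. A sketch of how an array-of-enum tool argument might be represented (values are illustrative; in the gem these fields are filled while parsing a tool's input schema):

```ruby
param = RubyLLM::MCP::Parameter.new("priority", type: "array", desc: "Priority levels", required: false)
param.items = { "type" => "string" }
param.enum  = %w[low medium high]

param.item_type  # => :string
param.union_type # => nil here; only set when the schema declares a union
```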
@@ -19,10 +19,11 @@ module RubyLLM
         @mcp_client = mcp_client
         @name = name
         @description = description
+        @arguments = parse_arguments(arguments)
+      end
 
-        @arguments = arguments.map do |arg|
-          Argument.new(name: arg["name"], description: arg["description"], required: arg["required"])
-        end
+      def fetch(arguments = {})
+        fetch_prompt_messages(arguments)
       end
 
       def include(chat, arguments: {})
@@ -41,14 +42,14 @@ module RubyLLM
 
       alias say ask
 
-      def arguments_search(argument, value)
+      def complete(argument, value)
         if @mcp_client.capabilities.completion?
-          response = @mcp_client.completion(type: :prompt, name: @name, argument: argument, value: value)
+          response = @mcp_client.completion_prompt(name: @name, argument: argument, value: value)
           response = response.dig("result", "completion")
 
           Completion.new(values: response["values"], total: response["total"], has_more: response["hasMore"])
         else
-          raise Errors::CompletionNotAvailable, "Completion is not available for this MCP server"
+          raise Errors::CompletionNotAvailable.new(message: "Completion is not available for this MCP server")
         end
       end
 
@@ -90,6 +91,16 @@ module RubyLLM
           resource.to_content
         end
       end
+
+      def parse_arguments(arguments)
+        if arguments.nil?
+          []
+        else
+          arguments.map do |arg|
+            Argument.new(name: arg["name"], description: arg["description"], required: arg["required"])
+          end
+        end
+      end
     end
   end
 end
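Besides renaming `arguments_search` to `complete` and routing it through the new `completion_prompt` request, `Prompt` gains a `fetch` helper and tolerates prompts that declare no arguments. A hedged sketch (the prompt name and argument are illustrative, and `fetch` presumably returns the rendered prompt messages):

```ruby
prompt = client.prompt("daily_greeting")

# Fetch the prompt's messages for a set of arguments without attaching them to a chat:
messages = prompt.fetch({ "name" => "Alice" })

# Argument completion (only works when the server advertises the completions capability):
suggestions = prompt.complete("name", "Al")
suggestions.values # => e.g. ["Alice", "Alan"]
```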
@@ -20,21 +20,32 @@ module RubyLLM
           def build_properties(param)
             case param.type
             when :array
-              {
-                type: param.type,
-                items: { type: param.item_type }
-              }
+              if param.item_type == :object
+                {
+                  type: param.type,
+                  items: { type: param.item_type, properties: clean_parameters(param.properties) }
+                }
+              else
+                {
+                  type: param.type,
+                  items: { type: param.item_type, enum: param.enum }.compact
+                }
+              end
             when :object
               {
                 type: param.type,
                 properties: clean_parameters(param.properties),
                 required: required_parameters(param.properties)
               }
+            when :union
+              {
+                param.union_type => param.properties.map { |property| build_properties(property) }
+              }
             else
               {
                 type: param.type,
                 description: param.description
-              }
+              }.compact
             end
           end
         end
@@ -43,4 +54,12 @@ module RubyLLM
   end
 end
 
-RubyLLM::Providers::Anthropic.extend(RubyLLM::MCP::Providers::Anthropic::ComplexParameterSupport)
+module RubyLLM::Providers::Anthropic::Tools
+  def self.clean_parameters(parameters)
+    RubyLLM::MCP::Providers::Anthropic::ComplexParameterSupport.clean_parameters(parameters)
+  end
+
+  def self.required_parameters(parameters)
+    RubyLLM::MCP::Providers::Anthropic::ComplexParameterSupport.required_parameters(parameters)
+  end
+end
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  module MCP
+    module Providers
+      module Gemini
+        module ComplexParameterSupport
+          module_function
+
+          # Format tool parameters for Gemini API
+          def format_parameters(parameters)
+            {
+              type: "OBJECT",
+              properties: parameters.transform_values { |param| build_properties(param) },
+              required: parameters.select { |_, p| p.required }.keys.map(&:to_s)
+            }
+          end
+
+          def build_properties(param)
+            properties = case param.type
+                         when :array
+                           if param.item_type == :object
+                             {
+                               type: param_type_for_gemini(param.type),
+                               items: {
+                                 type: param_type_for_gemini(param.item_type),
+                                 properties: param.properties.transform_values { |value| build_properties(value) }
+                               }
+                             }
+                           else
+                             {
+                               type: param_type_for_gemini(param.type),
+                               items: { type: param_type_for_gemini(param.item_type), enum: param.enum }.compact
+                             }
+                           end
+                         when :object
+                           {
+                             type: param_type_for_gemini(param.type),
+                             properties: param.properties.transform_values { |value| build_properties(value) },
+                             required: param.properties.select { |_, p| p.required }.keys
+                           }
+                         when :union
+                           {
+                             param.union_type => param.properties.map { |properties| build_properties(properties) }
+                           }
+                         else
+                           {
+                             type: param_type_for_gemini(param.type),
+                             description: param.description
+                           }
+                         end
+
+            properties.compact
+          end
+        end
+      end
+    end
+  end
+end
+
+RubyLLM::Providers::Gemini.extend(RubyLLM::MCP::Providers::Gemini::ComplexParameterSupport)
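The new Gemini module mirrors the Anthropic complex-parameter support, including the array, object, and union branches. Since `param_type_for_gemini` lives outside this file, the exact type strings are assumptions; the sketch below is a hand-written illustration of the shape `format_parameters` aims to produce, not output captured from the gem:

```ruby
# Target shape for a tool with a required string and an optional array-of-enum parameter.
gemini_parameters = {
  type: "OBJECT",
  properties: {
    query: { type: "STRING", description: "Search text" },                     # scalar branch
    tags: { type: "ARRAY", items: { type: "STRING", enum: ["low", "high"] } }  # array-of-enum branch
  },
  required: ["query"]
}
puts gemini_parameters.inspect
```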