ruby_llm-mcp 0.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. checksums.yaml +7 -0
  2. data/LICENSE +21 -0
  3. data/README.md +446 -0
  4. data/lib/ruby_llm/chat.rb +33 -0
  5. data/lib/ruby_llm/mcp/attachment.rb +18 -0
  6. data/lib/ruby_llm/mcp/capabilities.rb +29 -0
  7. data/lib/ruby_llm/mcp/client.rb +104 -0
  8. data/lib/ruby_llm/mcp/completion.rb +15 -0
  9. data/lib/ruby_llm/mcp/content.rb +20 -0
  10. data/lib/ruby_llm/mcp/coordinator.rb +112 -0
  11. data/lib/ruby_llm/mcp/errors.rb +28 -0
  12. data/lib/ruby_llm/mcp/parameter.rb +19 -0
  13. data/lib/ruby_llm/mcp/prompt.rb +106 -0
  14. data/lib/ruby_llm/mcp/providers/anthropic/complex_parameter_support.rb +65 -0
  15. data/lib/ruby_llm/mcp/providers/gemini/complex_parameter_support.rb +61 -0
  16. data/lib/ruby_llm/mcp/providers/openai/complex_parameter_support.rb +52 -0
  17. data/lib/ruby_llm/mcp/requests/base.rb +31 -0
  18. data/lib/ruby_llm/mcp/requests/completion_prompt.rb +40 -0
  19. data/lib/ruby_llm/mcp/requests/completion_resource.rb +40 -0
  20. data/lib/ruby_llm/mcp/requests/initialization.rb +24 -0
  21. data/lib/ruby_llm/mcp/requests/initialize_notification.rb +14 -0
  22. data/lib/ruby_llm/mcp/requests/prompt_call.rb +32 -0
  23. data/lib/ruby_llm/mcp/requests/prompt_list.rb +23 -0
  24. data/lib/ruby_llm/mcp/requests/resource_list.rb +21 -0
  25. data/lib/ruby_llm/mcp/requests/resource_read.rb +30 -0
  26. data/lib/ruby_llm/mcp/requests/resource_template_list.rb +21 -0
  27. data/lib/ruby_llm/mcp/requests/tool_call.rb +32 -0
  28. data/lib/ruby_llm/mcp/requests/tool_list.rb +17 -0
  29. data/lib/ruby_llm/mcp/resource.rb +77 -0
  30. data/lib/ruby_llm/mcp/resource_template.rb +79 -0
  31. data/lib/ruby_llm/mcp/tool.rb +115 -0
  32. data/lib/ruby_llm/mcp/transport/sse.rb +244 -0
  33. data/lib/ruby_llm/mcp/transport/stdio.rb +210 -0
  34. data/lib/ruby_llm/mcp/transport/streamable.rb +299 -0
  35. data/lib/ruby_llm/mcp/version.rb +7 -0
  36. data/lib/ruby_llm/mcp.rb +27 -0
  37. metadata +175 -0
checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA256:
+ metadata.gz: 4c1c5ae1b340830b5c21a87db8c018d138f6e32b1b16a4f49a8c75d75c5b286f
+ data.tar.gz: edb5dc76cf88f95bbc0248e1e1f3c0a0f620fb6f200f905b018da0a8023413fc
+ SHA512:
+ metadata.gz: fd5ff74ce585ac95cadee17ab523803082b2f8ae13e436b7299155c7e1be2f37ef4bc31fae9f7d05e360637c26ccbe6eb9b30fc28539a8e995e5a60cb47e9016
+ data.tar.gz: 944d96845421634423f9e6729f35bb94798df9c539b425d9a1883021860a2aca945fd9c11fcce84563c9cfed6581acc28254a389ef9ccdb1f4381252c12b6874
data/LICENSE ADDED
@@ -0,0 +1,21 @@
+ The MIT License (MIT)
+
+ Copyright (c) 2025 Patrick Vice
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in
+ all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ THE SOFTWARE.
data/README.md ADDED
@@ -0,0 +1,446 @@
+ # RubyLLM::MCP
+
+ Aiming to make using MCP with RubyLLM as easy as possible.
+
+ This project is a Ruby client for the [Model Context Protocol (MCP)](https://modelcontextprotocol.io/), designed to work seamlessly with [RubyLLM](https://github.com/crmne/ruby_llm). This gem enables Ruby applications to connect to MCP servers and use their tools, resources and prompts as part of LLM conversations.
+
+ **Note:** This project is still under development and the API is subject to change.
+
+ ## Features
+
+ - 🔌 **Multiple Transport Types**: Support for SSE (Server-Sent Events), Streamable HTTP, and stdio transports
+ - 🛠️ **Tool Integration**: Automatically converts MCP tools into RubyLLM-compatible tools
+ - 📄 **Resource Management**: Access and include MCP resources (files, data) and resource templates in conversations
+ - 🎯 **Prompt Integration**: Use predefined MCP prompts with arguments for consistent interactions
+ - 🎨 **Enhanced Chat Interface**: Extended RubyLLM chat methods for seamless MCP integration
+ - 📚 **Simple API**: Easy-to-use interface that integrates seamlessly with RubyLLM
+
+ ## Installation
+
+ ```bash
+ bundle add ruby_llm-mcp
+ ```
+
+ Or add this line to your application's Gemfile:
+
+ ```ruby
+ gem 'ruby_llm-mcp'
+ ```
+
+ And then execute:
+
+ ```bash
+ bundle install
+ ```
+
+ Or install it yourself as:
+
+ ```bash
+ gem install ruby_llm-mcp
+ ```
+
+ ## Usage
+
+ ### Basic Setup
+
+ First, configure your RubyLLM client and create an MCP connection:
+
+ ```ruby
+ require 'ruby_llm/mcp'
+
+ # Configure RubyLLM
+ RubyLLM.configure do |config|
+   config.openai_api_key = "your-api-key"
+ end
+
+ # Connect to an MCP server via SSE
+ client = RubyLLM::MCP.client(
+   name: "my-mcp-server",
+   transport_type: :sse,
+   config: {
+     url: "http://localhost:9292/mcp/sse"
+   }
+ )
+
+ # Or connect via stdio
+ client = RubyLLM::MCP.client(
+   name: "my-mcp-server",
+   transport_type: :stdio,
+   config: {
+     command: "node",
+     args: ["path/to/mcp-server.js"],
+     env: { "NODE_ENV" => "production" }
+   }
+ )
+
+ # Or connect via streamable HTTP
+ client = RubyLLM::MCP.client(
+   name: "my-mcp-server",
+   transport_type: :streamable,
+   config: {
+     url: "http://localhost:8080/mcp",
+     headers: { "Authorization" => "Bearer your-token" }
+   }
+ )
+ ```
+
+ ### Using MCP Tools with RubyLLM
+
+ ```ruby
+ # Get available tools from the MCP server
+ tools = client.tools
+ puts "Available tools:"
+ tools.each do |tool|
+   puts "- #{tool.name}: #{tool.description}"
+ end
+
+ # Create a chat session with MCP tools
+ chat = RubyLLM.chat(model: "gpt-4")
+ chat.with_tools(*client.tools)
+
+ # Ask a question that will use the MCP tools
+ response = chat.ask("Can you help me search for recent files in my project?")
+ puts response
+ ```
+
+ ### Support Complex Parameters
+
+ Supporting complex parameters, such as an array of objects, currently requires a patch to RubyLLM itself. This is intended to be temporary until RubyLLM is updated.
+
+ ```ruby
+ RubyLLM::MCP.support_complex_parameters!
+ ```
+
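+ For illustration, here is a minimal sketch of using a tool with complex parameters once the patch is applied (the `create_tasks` tool and its schema are hypothetical and would come from your MCP server):
+
+ ```ruby
+ # Enable the patch before registering MCP tools with the chat.
+ RubyLLM::MCP.support_complex_parameters!
+
+ chat = RubyLLM.chat(model: "gpt-4")
+ chat.with_tools(*client.tools)
+
+ # If the server exposes a tool whose input schema includes an array of
+ # objects (for example a "create_tasks" tool taking a list of task objects),
+ # the model can now fill in those nested parameters when it calls the tool.
+ response = chat.ask("Create three tasks for tomorrow: standup, code review, and deploy")
+ puts response
+ ```
+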
+ ### Streaming Responses with Tool Calls
+
+ ```ruby
+ chat = RubyLLM.chat(model: "gpt-4")
+ chat.with_tools(*client.tools)
+
+ chat.ask("Analyze my project structure") do |chunk|
+   if chunk.tool_call?
+     chunk.tool_calls.each do |key, tool_call|
+       puts "\n🔧 Using tool: #{tool_call.name}"
+     end
+   else
+     print chunk.content
+   end
+ end
+ ```
+
+ ### Manual Tool Execution
+
+ You can also execute MCP tools directly:
+
+ ```ruby
+ # Execute a specific tool
+ result = client.execute_tool(
+   name: "search_files",
+   parameters: {
+     query: "*.rb",
+     directory: "/path/to/search"
+   }
+ )
+
+ puts result
+ ```
+
+ ### Working with Resources
+
+ MCP servers can provide access to resources - structured data that can be included in conversations. Resources come in two types: normal resources and resource templates.
+
+ #### Normal Resources
+
+ ```ruby
+ # Get available resources from the MCP server
+ resources = client.resources
+ puts "Available resources:"
+ resources.each do |resource|
+   puts "- #{resource.name}: #{resource.description}"
+ end
+
+ # Access a specific resource by name
+ file_resource = client.resource("project_readme")
+ content = file_resource.content
+ puts "Resource content: #{content}"
+
+ # Include a resource in a chat conversation for reference with an LLM
+ chat = RubyLLM.chat(model: "gpt-4")
+ chat.with_resource(file_resource)
+
+ # Or add a resource directly to the conversation
+ file_resource.include(chat)
+
+ response = chat.ask("Can you summarize this README file?")
+ puts response
+ ```
+
+ #### Resource Templates
+
+ Resource templates are parameterized resources that can be dynamically configured:
+
+ ```ruby
+ # Get available resource templates
+ templates = client.resource_templates
+ log_template = client.resource_template("application_logs")
+
+ # Use a template with parameters
+ chat = RubyLLM.chat(model: "gpt-4")
+ chat.with_resource_template(log_template, arguments: {
+   date: "2024-01-15",
+   level: "error"
+ })
+
+ response = chat.ask("What errors occurred on this date?")
+ puts response
+
+ # You can also get templated content directly
+ content = log_template.to_content(arguments: {
+   date: "2024-01-15",
+   level: "error"
+ })
+ puts content
+ ```
+
+ #### Resource Argument Completion
+
+ For resource templates, you can get suggested values for arguments:
+
+ ```ruby
+ template = client.resource_template("user_profile")
+
+ # Search for possible values for a specific argument
+ suggestions = template.complete("username", "john")
+ puts "Suggested usernames:"
+ suggestions.values.each do |value|
+   puts "- #{value}"
+ end
+ puts "Total matches: #{suggestions.total}"
+ puts "Has more: #{suggestions.has_more}"
+ ```
+
+ ### Working with Prompts
+
+ MCP servers can provide predefined prompts that can be used in conversations:
+
+ ```ruby
+ # Get available prompts from the MCP server
+ prompts = client.prompts
+ puts "Available prompts:"
+ prompts.each do |prompt|
+   puts "- #{prompt.name}: #{prompt.description}"
+   prompt.arguments.each do |arg|
+     puts " - #{arg.name}: #{arg.description} (required: #{arg.required})"
+   end
+ end
+
+ # Use a prompt in a conversation
+ greeting_prompt = client.prompt("daily_greeting")
+ chat = RubyLLM.chat(model: "gpt-4")
+
+ # Method 1: Ask prompt directly
+ response = chat.ask_prompt(greeting_prompt, arguments: { name: "Alice", time: "morning" })
+ puts response
+
+ # Method 2: Add prompt to chat and then ask
+ chat.with_prompt(greeting_prompt, arguments: { name: "Alice", time: "morning" })
+ response = chat.ask("Continue with the greeting")
+ ```
+
+ ### Combining Resources, Prompts, and Tools
+
+ You can combine all MCP features for powerful conversations:
+
+ ```ruby
+ client = RubyLLM::MCP.client(
+   name: "development-assistant",
+   transport_type: :sse,
+   config: { url: "http://localhost:9292/mcp/sse" }
+ )
+
+ chat = RubyLLM.chat(model: "gpt-4")
+
+ # Add tools for capabilities
+ chat.with_tools(*client.tools)
+
+ # Add resources for context
+ chat.with_resource(client.resource("project_structure"))
+ chat.with_resource_template(
+   client.resource_template("recent_commits"),
+   arguments: { days: 7 }
+ )
+
+ # Add prompts for guidance
+ chat.with_prompt(
+   client.prompt("code_review_checklist"),
+   arguments: { focus: "security" }
+ )
+
+ # Now ask for analysis
+ response = chat.ask("Please review the recent commits using the checklist and suggest improvements")
+ puts response
+ ```
+
+ ## Argument Completion
+
+ Some MCP servers support argument completion for prompts and resource templates:
+
+ ```ruby
+ # For prompts
+ prompt = client.prompt("user_search")
+ suggestions = prompt.complete("username", "jo")
+ puts "Suggestions: #{suggestions.values}" # ["john", "joanna", "joseph"]
+
+ # For resource templates
+ template = client.resource_template("user_logs")
+ suggestions = template.complete("user_id", "123")
+ puts "Total matches: #{suggestions.total}"
+ puts "Has more results: #{suggestions.has_more}"
+ ```
+
+ ## Additional Chat Methods
+
+ The gem extends RubyLLM's chat interface with convenient methods for MCP integration:
+
+ ```ruby
+ chat = RubyLLM.chat(model: "gpt-4")
+
+ # Add a single resource
+ chat.with_resource(resource)
+
+ # Add multiple resources
+ chat.with_resources(resource1, resource2, resource3)
+
+ # Add a resource template with arguments
+ chat.with_resource_template(resource_template, arguments: { key: "value" })
+
+ # Add a prompt with arguments
+ chat.with_prompt(prompt, arguments: { name: "Alice" })
+
+ # Ask using a prompt directly
+ response = chat.ask_prompt(prompt, arguments: { name: "Alice" })
+ ```
+
+ ## Client Lifecycle Management
+
+ You can manage the MCP client connection lifecycle:
+
+ ```ruby
+ client = RubyLLM::MCP.client(name: "my-server", transport_type: :stdio, start: false, config: {...})
+
+ # Manually start the connection
+ client.start
+
+ # Check if connection is alive
+ puts client.alive?
+
+ # Restart the connection
+ client.restart!
+
+ # Stop the connection
+ client.stop
+ ```
+
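+ For example, a minimal sketch of scoping a deferred connection to a block of work (the stdio config values are illustrative):
+
+ ```ruby
+ client = RubyLLM::MCP.client(
+   name: "my-server",
+   transport_type: :stdio,
+   start: false,
+   config: { command: "node", args: ["path/to/mcp-server.js"] }
+ )
+
+ begin
+   client.start
+   puts client.alive? # => true once the transport is running
+   # ... use client.tools, client.resources, client.prompts, etc.
+ ensure
+   client.stop
+ end
+ ```
+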
+ ## Refreshing Cached Data
+
+ The client caches the results of the tools, resources, prompts, and resource template list calls to reduce round trips to the MCP server. You can refresh this cache:
+
+ ```ruby
+ # Refresh all cached tools
+ tools = client.tools(refresh: true)
+
+ # Refresh a specific tool
+ tool = client.tool("search_files", refresh: true)
+
+ # Same pattern works for resources, prompts, and resource templates
+ resources = client.resources(refresh: true)
+ prompts = client.prompts(refresh: true)
+ templates = client.resource_templates(refresh: true)
+
+ # Or refresh specific items
+ resource = client.resource("project_readme", refresh: true)
+ prompt = client.prompt("daily_greeting", refresh: true)
+ template = client.resource_template("user_logs", refresh: true)
+ ```
+
+ ## Transport Types
+
+ ### SSE (Server-Sent Events)
+
+ Best for web-based MCP servers or when you need HTTP-based communication:
+
+ ```ruby
+ client = RubyLLM::MCP.client(
+   name: "web-mcp-server",
+   transport_type: :sse,
+   config: {
+     url: "https://your-mcp-server.com/mcp/sse",
+     headers: { "Authorization" => "Bearer your-token" }
+   }
+ )
+ ```
+
+ ### Streamable HTTP
+
+ Best for HTTP-based MCP servers that support streaming responses:
+
+ ```ruby
+ client = RubyLLM::MCP.client(
+   name: "streaming-mcp-server",
+   transport_type: :streamable,
+   config: {
+     url: "https://your-mcp-server.com/mcp",
+     headers: { "Authorization" => "Bearer your-token" }
+   }
+ )
+ ```
+
+ ### Stdio
+
+ Best for local MCP servers or command-line tools:
+
+ ```ruby
+ client = RubyLLM::MCP.client(
+   name: "local-mcp-server",
+   transport_type: :stdio,
+   config: {
+     command: "python",
+     args: ["-m", "my_mcp_server"],
+     env: { "DEBUG" => "1" }
+   }
+ )
+ ```
+
+ ## Configuration Options
+
+ - `name`: A unique identifier for your MCP client
+ - `transport_type`: Either `:sse`, `:streamable`, or `:stdio`
+ - `start`: Whether to automatically start the connection (default: true)
+ - `request_timeout`: Timeout for requests in milliseconds (default: 8000)
+ - `config`: Transport-specific configuration
+   - For SSE: `{ url: "http://...", headers: {...} }`
+   - For Streamable: `{ url: "http://...", headers: {...} }`
+   - For stdio: `{ command: "...", args: [...], env: {...} }`
+
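+ Putting these together, a combined sketch of the options above (all values are illustrative):
+
+ ```ruby
+ client = RubyLLM::MCP.client(
+   name: "my-mcp-server",        # unique identifier for this client
+   transport_type: :stdio,       # :sse, :streamable, or :stdio
+   start: true,                  # connect immediately (the default)
+   request_timeout: 8000,        # request timeout in milliseconds (the default)
+   config: {                     # transport-specific settings (stdio shown here)
+     command: "node",
+     args: ["path/to/mcp-server.js"],
+     env: { "NODE_ENV" => "production" }
+   }
+ )
+ ```
+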
+ ## Development
+
+ After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake spec` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.
+
+ To install this gem onto your local machine, run `bundle exec rake install`. Run `bundle exec rake` to run the specs and linters. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and the created tag, and push the `.gem` file to [rubygems.org](https://rubygems.org).
+
+ ## Examples
+
+ Check out the `examples/` directory for more detailed usage examples:
+
+ - `examples/tools/local_mcp.rb` - Complete example with stdio transport
+ - `examples/tools/sse_mcp_with_gpt.rb` - Example using SSE transport with GPT
+ - `examples/resources/list_resources.rb` - Example of listing and using resources
+ - `examples/prompts/streamable_prompt_call.rb` - Example of using prompts with streamable transport
+
+ ## Contributing
+
+ Contributions are welcome! Bug reports and pull requests can be submitted on GitHub at https://github.com/patvice/ruby_llm-mcp.
+
+ ## License
+
+ Released under the MIT License.
data/lib/ruby_llm/chat.rb ADDED
@@ -0,0 +1,33 @@
+ # frozen_string_literal: true
+
+ # This reopens the RubyLLM::Chat class to add convenience methods for easy MCP support
+ module RubyLLM
+   class Chat
+     def with_resources(*resources, **args)
+       resources.each do |resource|
+         resource.include(self, **args)
+       end
+       self
+     end
+
+     def with_resource(resource)
+       resource.include(self)
+       self
+     end
+
+     def with_resource_template(resource_template, arguments: {})
+       resource = resource_template.fetch_resource(arguments: arguments)
+       resource.include(self)
+       self
+     end
+
+     def with_prompt(prompt, arguments: {})
+       prompt.include(self, arguments: arguments)
+       self
+     end
+
+     def ask_prompt(prompt, ...)
+       prompt.ask(self, ...)
+     end
+   end
+ end
data/lib/ruby_llm/mcp/attachment.rb ADDED
@@ -0,0 +1,18 @@
+ # frozen_string_literal: true
+
+ module RubyLLM
+   module MCP
+     class Attachment < RubyLLM::Attachment
+       attr_reader :content, :mime_type
+
+       def initialize(content, mime_type) # rubocop:disable Lint/MissingSuper
+         @content = content
+         @mime_type = mime_type
+       end
+
+       def encoded
+         @content
+       end
+     end
+   end
+ end
data/lib/ruby_llm/mcp/capabilities.rb ADDED
@@ -0,0 +1,29 @@
+ # frozen_string_literal: true
+
+ module RubyLLM
+   module MCP
+     class Capabilities
+       attr_accessor :capabilities
+
+       def initialize(capabilities = {})
+         @capabilities = capabilities
+       end
+
+       def resources_list_changed?
+         @capabilities.dig("resources", "listChanged") || false
+       end
+
+       def resource_subscribe?
+         @capabilities.dig("resources", "subscribe") || false
+       end
+
+       def tools_list_changed?
+         @capabilities.dig("tools", "listChanged") || false
+       end
+
+       def completion?
+         !@capabilities["completions"].nil?
+       end
+     end
+   end
+ end
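A usage sketch of the capability checks above, assuming a connected `client` as in the README (`client.capabilities` is delegated to the coordinator by the `Client` class that follows):
```ruby
caps = client.capabilities

if caps.completion?
  # Only request argument completion when the server advertises the capability.
  suggestions = client.prompt("user_search").complete("username", "jo")
  puts suggestions.values
end

puts "The server may change its tool list at runtime" if caps.tools_list_changed?
```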
data/lib/ruby_llm/mcp/client.rb ADDED
@@ -0,0 +1,104 @@
+ # frozen_string_literal: true
+
+ require "forwardable"
+
+ module RubyLLM
+   module MCP
+     class Client
+       extend Forwardable
+
+       attr_reader :name, :config, :transport_type, :request_timeout
+
+       def initialize(name:, transport_type:, start: true, request_timeout: 8000, config: {})
+         @name = name
+         @config = config.merge(request_timeout: request_timeout)
+         @transport_type = transport_type.to_sym
+         @request_timeout = request_timeout
+
+         @coordinator = Coordinator.new(self, transport_type: @transport_type, config: @config)
+
+         start_transport if start
+       end
+
+       def_delegators :@coordinator, :start_transport, :stop_transport, :restart_transport, :alive?, :capabilities
+
+       alias start start_transport
+       alias stop stop_transport
+       alias restart! restart_transport
+
+       def tools(refresh: false)
+         fetch(:tools, refresh) do
+           tools_data = @coordinator.tool_list.dig("result", "tools")
+           build_map(tools_data, MCP::Tool)
+         end
+
+         @tools.values
+       end
+
+       def tool(name, refresh: false)
+         tools(refresh: refresh)
+
+         @tools[name]
+       end
+
+       def resources(refresh: false)
+         fetch(:resources, refresh) do
+           resources_data = @coordinator.resource_list.dig("result", "resources")
+           build_map(resources_data, MCP::Resource)
+         end
+
+         @resources.values
+       end
+
+       def resource(name, refresh: false)
+         resources(refresh: refresh)
+
+         @resources[name]
+       end
+
+       def resource_templates(refresh: false)
+         fetch(:resource_templates, refresh) do
+           templates_data = @coordinator.resource_template_list.dig("result", "resourceTemplates")
+           build_map(templates_data, MCP::ResourceTemplate)
+         end
+
+         @resource_templates.values
+       end
+
+       def resource_template(name, refresh: false)
+         resource_templates(refresh: refresh)
+
+         @resource_templates[name]
+       end
+
+       def prompts(refresh: false)
+         fetch(:prompts, refresh) do
+           prompts_data = @coordinator.prompt_list.dig("result", "prompts")
+           build_map(prompts_data, MCP::Prompt)
+         end
+
+         @prompts.values
+       end
+
+       def prompt(name, refresh: false)
+         prompts(refresh: refresh)
+
+         @prompts[name]
+       end
+
+       private
+
+       def fetch(cache_key, refresh)
+         instance_variable_set("@#{cache_key}", nil) if refresh
+         instance_variable_get("@#{cache_key}") || instance_variable_set("@#{cache_key}", yield)
+       end
+
+       def build_map(raw_data, klass)
+         raw_data.each_with_object({}) do |item, acc|
+           instance = klass.new(@coordinator, item)
+           acc[instance.name] = instance
+         end
+       end
+     end
+   end
+ end
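A short sketch of the caching behaviour implemented by `Client#fetch` above, assuming a connected `client`:
```ruby
client.tools                  # first call issues a tool list request and memoizes the result in @tools
client.tools                  # subsequent calls reuse the memoized hash, no extra request
client.tools(refresh: true)   # clears the cache and re-fetches from the server
client.tool("search_files")   # single-item lookups go through the same cached hash, keyed by name
```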
data/lib/ruby_llm/mcp/completion.rb ADDED
@@ -0,0 +1,15 @@
+ # frozen_string_literal: true
+
+ module RubyLLM
+   module MCP
+     class Completion
+       attr_reader :values, :total, :has_more
+
+       def initialize(values:, total:, has_more:)
+         @values = values
+         @total = total
+         @has_more = has_more
+       end
+     end
+   end
+ end
data/lib/ruby_llm/mcp/content.rb ADDED
@@ -0,0 +1,20 @@
+ # frozen_string_literal: true
+
+ module RubyLLM
+   module MCP
+     class Content < RubyLLM::Content
+       attr_reader :text, :attachments, :content
+
+       def initialize(text: nil, attachments: nil) # rubocop:disable Lint/MissingSuper
+         @text = text
+         @attachments = attachments || []
+       end
+
+       # This is a workaround that allows the content object to be returned from a tool call
+       # so that audio or image attachments can be passed back.
+       def to_s
+         attachments.empty? ? text : self
+       end
+     end
+   end
+ end
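A small sketch of the `to_s` behaviour above: a text-only result collapses to its string, while a result carrying attachments stays a `Content` object so the attachment can travel with it (the attachment values are illustrative):
```ruby
text_only = RubyLLM::MCP::Content.new(text: "hello")
text_only.to_s # => "hello"

with_audio = RubyLLM::MCP::Content.new(
  text: "transcript",
  attachments: [RubyLLM::MCP::Attachment.new("base64-audio-data", "audio/mpeg")]
)
with_audio.to_s # => the Content instance itself, attachments preserved
```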