ruby_llm-mcp 0.0.2 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +174 -9
- data/lib/ruby_llm/chat.rb +27 -0
- data/lib/ruby_llm/mcp/attachment.rb +18 -0
- data/lib/ruby_llm/mcp/capabilities.rb +29 -0
- data/lib/ruby_llm/mcp/client.rb +85 -16
- data/lib/ruby_llm/mcp/completion.rb +15 -0
- data/lib/ruby_llm/mcp/content.rb +20 -0
- data/lib/ruby_llm/mcp/errors.rb +11 -1
- data/lib/ruby_llm/mcp/prompt.rb +95 -0
- data/lib/ruby_llm/mcp/providers/anthropic/complex_parameter_support.rb +26 -10
- data/lib/ruby_llm/mcp/providers/open_ai/complex_parameter_support.rb +21 -10
- data/lib/ruby_llm/mcp/requests/completion.rb +50 -0
- data/lib/ruby_llm/mcp/requests/initialization.rb +3 -7
- data/lib/ruby_llm/mcp/requests/notification.rb +1 -1
- data/lib/ruby_llm/mcp/requests/prompt_call.rb +32 -0
- data/lib/ruby_llm/mcp/requests/prompt_list.rb +23 -0
- data/lib/ruby_llm/mcp/requests/resource_list.rb +21 -0
- data/lib/ruby_llm/mcp/requests/resource_read.rb +30 -0
- data/lib/ruby_llm/mcp/requests/resource_template_list.rb +21 -0
- data/lib/ruby_llm/mcp/resource.rb +99 -0
- data/lib/ruby_llm/mcp/tool.rb +21 -1
- data/lib/ruby_llm/mcp/transport/sse.rb +47 -15
- data/lib/ruby_llm/mcp/transport/stdio.rb +7 -7
- data/lib/ruby_llm/mcp/transport/streamable.rb +274 -4
- data/lib/ruby_llm/mcp/version.rb +1 -1
- data/lib/ruby_llm/mcp.rb +4 -2
- metadata +18 -6
- data/lib/ruby_llm/overrides.rb +0 -21
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f54f8d20c1ebbaddd31672de6715ca3adc92db0187a98b743711f0aff06f373c
+  data.tar.gz: a41e69179703af28db4f7ea5ad27bc1f47c0f6c9eb189cacfb0392cf9d2adebb
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 0a01c76e79c94eb3cf19a422182ae1427fc5e2ac39f32951c18d32ed7be68007c09155f95e23595c4ae864fb1b192fea963193d325abe9e9d5a4406736f61f82
+  data.tar.gz: 25e4f7611a0a0c7c3462c1114daeeed832fc4900a1abccda65253c5a0b459a6ebf7955f2d3ef3473045d6530f220a8ac69f285ec32acca0e921a12a0f4feeebf
data/README.md
CHANGED
@@ -2,16 +2,19 @@

 Aiming to make using MCP with RubyLLM as easy as possible.

-This project is a Ruby client for the [Model Context Protocol (MCP)](https://modelcontextprotocol.io/), designed to work seamlessly with [RubyLLM](https://github.com/
+This project is a Ruby client for the [Model Context Protocol (MCP)](https://modelcontextprotocol.io/), designed to work seamlessly with [RubyLLM](https://github.com/crmne/ruby_llm). This gem enables Ruby applications to connect to MCP servers and use their tools, resources and prompts as part of LLM conversations.

-**Note:** This project is still under development and the API is subject to change.
+**Note:** This project is still under development and the API is subject to change.

 ## Features

-- 🔌 **Multiple Transport Types**: Support for SSE (Server-Sent Events) and stdio transports
+- 🔌 **Multiple Transport Types**: Support for SSE (Server-Sent Events), Streamable HTTP, and stdio transports
 - 🛠️ **Tool Integration**: Automatically converts MCP tools into RubyLLM-compatible tools
+- 📄 **Resource Management**: Access and include MCP resources (files, data) and resource templates in conversations
+- 🎯 **Prompt Integration**: Use predefined MCP prompts with arguments for consistent interactions
 - 🔄 **Real-time Communication**: Efficient bidirectional communication with MCP servers
-
+- 🎨 **Enhanced Chat Interface**: Extended RubyLLM chat methods for seamless MCP integration
+- 📚 **Simple API**: Easy-to-use interface that integrates seamlessly with RubyLLM

 ## Installation

@@ -50,7 +53,7 @@ end
 # Connect to an MCP server via SSE
 client = RubyLLM::MCP.client(
   name: "my-mcp-server",
-  transport_type:
+  transport_type: :sse,
   config: {
     url: "http://localhost:9292/mcp/sse"
   }
@@ -59,13 +62,23 @@ client = RubyLLM::MCP.client(
 # Or connect via stdio
 client = RubyLLM::MCP.client(
   name: "my-mcp-server",
-  transport_type:
+  transport_type: :stdio,
   config: {
     command: "node",
     args: ["path/to/mcp-server.js"],
     env: { "NODE_ENV" => "production" }
   }
 )
+
+# Or connect via streamable HTTP
+client = RubyLLM::MCP.client(
+  name: "my-mcp-server",
+  transport_type: :streamable,
+  config: {
+    url: "http://localhost:8080/mcp",
+    headers: { "Authorization" => "Bearer your-token" }
+  }
+)
 ```

 ### Using MCP Tools with RubyLLM
@@ -129,6 +142,142 @@ result = client.execute_tool(
 puts result
 ```

+### Working with Resources
+
+MCP servers can provide access to resources - structured data that can be included in conversations. Resources come in two types: normal resources and resource templates.
+
+#### Normal Resources
+
+```ruby
+# Get available resources from the MCP server
+resources = client.resources
+puts "Available resources:"
+resources.each do |name, resource|
+  puts "- #{name}: #{resource.description}"
+end
+
+# Access a specific resource
+file_resource = resources["project_readme"]
+content = file_resource.content
+puts "Resource content: #{content}"
+
+# Include a resource in a chat conversation for reference with an LLM
+chat = RubyLLM.chat(model: "gpt-4")
+chat.with_resource(file_resource)
+
+# Or add a resource directly to the conversation
+file_resource.include(chat)
+
+response = chat.ask("Can you summarize this README file?")
+puts response
+```
+
+#### Resource Templates
+
+Resource templates are parameterized resources that can be dynamically configured:
+
+```ruby
+# Get available resource templates
+templates = client.resource_templates
+log_template = templates["application_logs"]
+
+# Use a template with parameters
+chat = RubyLLM.chat(model: "gpt-4")
+chat.with_resource(log_template, arguments: {
+  date: "2024-01-15",
+  level: "error"
+})
+
+response = chat.ask("What errors occurred on this date?")
+puts response
+
+# You can also get templated content directly
+content = log_template.content(arguments: {
+  date: "2024-01-15",
+  level: "error"
+})
+puts content
+```
+
+#### Resource Argument Completion
+
+For resource templates, you can get suggested values for arguments:
+
+```ruby
+template = client.resource_templates["user_profile"]
+
+# Search for possible values for a specific argument
+suggestions = template.arguments_search("username", "john")
+puts "Suggested usernames:"
+suggestions.arg_values.each do |value|
+  puts "- #{value}"
+end
+puts "Total matches: #{suggestions.total}"
+puts "Has more: #{suggestions.has_more}"
+```
+
+### Working with Prompts
+
+MCP servers can provide predefined prompts that can be used in conversations:
+
+```ruby
+# Get available prompts from the MCP server
+prompts = client.prompts
+puts "Available prompts:"
+prompts.each do |name, prompt|
+  puts "- #{name}: #{prompt.description}"
+  prompt.arguments.each do |arg|
+    puts "  - #{arg.name}: #{arg.description} (required: #{arg.required})"
+  end
+end
+
+# Use a prompt in a conversation
+greeting_prompt = prompts["daily_greeting"]
+chat = RubyLLM.chat(model: "gpt-4")
+
+# Method 1: Ask prompt directly
+response = chat.ask_prompt(greeting_prompt, arguments: { name: "Alice", time: "morning" })
+puts response
+
+# Method 2: Add prompt to chat and then ask
+chat.with_prompt(greeting_prompt, arguments: { name: "Alice", time: "morning" })
+response = chat.ask("Continue with the greeting")
+```
+
+### Combining Resources, Prompts, and Tools
+
+You can combine all MCP features for powerful conversations:
+
+```ruby
+client = RubyLLM::MCP.client(
+  name: "development-assistant",
+  transport_type: :sse,
+  config: { url: "http://localhost:9292/mcp/sse" }
+)
+
+chat = RubyLLM.chat(model: "gpt-4")
+
+# Add tools for capabilities
+chat.with_tools(*client.tools)
+
+# Add resources for context
+chat.with_resource(client.resources["project_structure"])
+chat.with_resource(
+  client.resource_templates["recent_commits"],
+  arguments: { days: 7 }
+)
+
+# Add prompts for guidance
+chat.with_prompt(
+  client.prompts["code_review_checklist"],
+  arguments: { focus: "security" }
+)
+
+# Now ask for analysis
+response = chat.ask("Please review the recent commits using the checklist and suggest improvements")
+puts response
+```
+
 ## Transport Types

 ### SSE (Server-Sent Events)
@@ -138,13 +287,28 @@ Best for web-based MCP servers or when you need HTTP-based communication:
 ```ruby
 client = RubyLLM::MCP.client(
   name: "web-mcp-server",
-  transport_type:
+  transport_type: :sse,
   config: {
     url: "https://your-mcp-server.com/mcp/sse"
   }
 )
 ```

+### Streamable HTTP
+
+Best for HTTP-based MCP servers that support streaming responses:
+
+```ruby
+client = RubyLLM::MCP.client(
+  name: "streaming-mcp-server",
+  transport_type: :streamable,
+  config: {
+    url: "https://your-mcp-server.com/mcp",
+    headers: { "Authorization" => "Bearer your-token" }
+  }
+)
+```
+
 ### Stdio

 Best for local MCP servers or command-line tools:
@@ -152,7 +316,7 @@ Best for local MCP servers or command-line tools:
 ```ruby
 client = RubyLLM::MCP.client(
   name: "local-mcp-server",
-  transport_type:
+  transport_type: :stdio,
   config: {
     command: "python",
     args: ["-m", "my_mcp_server"],
@@ -164,10 +328,11 @@ client = RubyLLM::MCP.client(
 ## Configuration Options

 - `name`: A unique identifier for your MCP client
-- `transport_type`: Either `:sse
+- `transport_type`: Either `:sse`, `:streamable`, or `:stdio`
 - `request_timeout`: Timeout for requests in milliseconds (default: 8000)
 - `config`: Transport-specific configuration
   - For SSE: `{ url: "http://..." }`
+  - For Streamable: `{ url: "http://...", headers: {...} }`
   - For stdio: `{ command: "...", args: [...], env: {...} }`

 ## Development
data/lib/ruby_llm/chat.rb
ADDED
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+# This is an override of the RubyLLM::Chat class to convient methods for easy MCP support
+module RubyLLM
+  class Chat
+    def with_resources(*resources, **args)
+      resources.each do |resource|
+        resource.include(self, **args)
+      end
+      self
+    end
+
+    def with_resource(resource, **args)
+      resource.include(self, **args)
+      self
+    end
+
+    def with_prompt(prompt, arguments: {})
+      prompt.include(self, arguments: arguments)
+      self
+    end
+
+    def ask_prompt(prompt, ...)
+      prompt.ask(self, ...)
+    end
+  end
+end
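Taken together, these overrides let MCP context be layered onto a chat fluently. A minimal usage sketch; the `client`, resource, and prompt names are illustrative, borrowed from the README examples above:

```ruby
# Sketch only: assumes `client` is a connected RubyLLM::MCP.client and that
# the named resource and prompt exist on the server.
chat = RubyLLM.chat(model: "gpt-4")

# with_resource/with_resources/with_prompt each return self, so they chain
chat.with_resource(client.resources["project_readme"])
    .with_prompt(client.prompts["daily_greeting"], arguments: { name: "Alice" })

# ask_prompt forwards to Prompt#ask, which appends the prompt's messages
# and immediately completes the conversation
response = chat.ask_prompt(client.prompts["daily_greeting"], arguments: { name: "Alice" })
puts response
```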
data/lib/ruby_llm/mcp/attachment.rb
ADDED
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  module MCP
+    class Attachment < RubyLLM::Attachment
+      attr_reader :content, :mime_type
+
+      def initialize(content, mime_type) # rubocop:disable Lint/MissingSuper
+        @content = content
+        @mime_type = mime_type
+      end
+
+      def encoded
+        @content
+      end
+    end
+  end
+end
data/lib/ruby_llm/mcp/capabilities.rb
ADDED
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  module MCP
+    class Capabilities
+      attr_reader :capabilities
+
+      def initialize(capabilities)
+        @capabilities = capabilities
+      end
+
+      def resources_list_changed?
+        @capabilities.dig("resources", "listChanged") || false
+      end
+
+      def resource_subscribe?
+        @capabilities.dig("resources", "subscribe") || false
+      end
+
+      def tools_list_changed?
+        @capabilities.dig("tools", "listChanged") || false
+      end
+
+      def completion?
+        @capabilities["completion"].present?
+      end
+    end
+  end
+end
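Capabilities is a thin reader over the `capabilities` hash from the server's initialize response; Client#initialize_request wraps it so callers can check server features. A small sketch of consulting the flags (note that `completion?` appears to rely on `present?`, an ActiveSupport extension rather than core Ruby):

```ruby
# Sketch only: `client` is assumed to be a connected RubyLLM::MCP.client.
caps = client.capabilities

puts "server pushes tool list changes"      if caps.tools_list_changed?
puts "server pushes resource list changes"  if caps.resources_list_changed?
puts "resource subscriptions are supported" if caps.resource_subscribe?
puts "argument completion is available"     if caps.completion?
```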
data/lib/ruby_llm/mcp/client.rb
CHANGED
@@ -4,22 +4,30 @@ module RubyLLM
   module MCP
     class Client
       PROTOCOL_VERSION = "2025-03-26"
-
+      PV_2024_11_05 = "2024-11-05"
+
+      attr_reader :name, :config, :transport_type, :transport, :request_timeout, :reverse_proxy_url, :protocol_version,
+                  :capabilities

       def initialize(name:, transport_type:, request_timeout: 8000, reverse_proxy_url: nil, config: {})
         @name = name
         @config = config
+        @protocol_version = PROTOCOL_VERSION
+        @headers = config[:headers] || {}
+
         @transport_type = transport_type.to_sym

-        # TODO: Add streamable HTTP
         case @transport_type
         when :sse
-          @transport = RubyLLM::MCP::Transport::SSE.new(@config[:url])
+          @transport = RubyLLM::MCP::Transport::SSE.new(@config[:url], headers: @headers)
         when :stdio
           @transport = RubyLLM::MCP::Transport::Stdio.new(@config[:command], args: @config[:args], env: @config[:env])
+        when :streamable
+          @transport = RubyLLM::MCP::Transport::Streamable.new(@config[:url], headers: @headers)
         else
           raise "Invalid transport type: #{transport_type}"
         end
+        @capabilities = nil

         @request_timeout = request_timeout
         @reverse_proxy_url = reverse_proxy_url
@@ -28,8 +36,8 @@ module RubyLLM
         notification_request
       end

-      def request(body,
-        @transport.request(body,
+      def request(body, **options)
+        @transport.request(body, **options)
       end

       def tools(refresh: false)
@@ -37,31 +45,62 @@ module RubyLLM
         @tools ||= fetch_and_create_tools
       end

-      def
-
-
-
-
-
-
+      def resources(refresh: false)
+        @resources = nil if refresh
+        @resources ||= fetch_and_create_resources
+      end
+
+      def resource_templates(refresh: false)
+        @resource_templates = nil if refresh
+        @resource_templates ||= fetch_and_create_resources(set_as_template: true)
+      end
+
+      def prompts(refresh: false)
+        @prompts = nil if refresh
+        @prompts ||= fetch_and_create_prompts
+      end
+
+      def execute_tool(**args)
+        RubyLLM::MCP::Requests::ToolCall.new(self, **args).call
+      end
+
+      def resource_read_request(**args)
+        RubyLLM::MCP::Requests::ResourceRead.new(self, **args).call
+      end
+
+      def completion(**args)
+        RubyLLM::MCP::Requests::Completion.new(self, **args).call
+      end
+
+      def execute_prompt(**args)
+        RubyLLM::MCP::Requests::PromptCall.new(self, **args).call
       end

       private

       def initialize_request
         @initialize_response = RubyLLM::MCP::Requests::Initialization.new(self).call
+        @capabilities = RubyLLM::MCP::Capabilities.new(@initialize_response["result"]["capabilities"])
       end

       def notification_request
-
+        RubyLLM::MCP::Requests::Notification.new(self).call
       end

       def tool_list_request
-
+        RubyLLM::MCP::Requests::ToolList.new(self).call
       end

-      def
-
+      def resources_list_request
+        RubyLLM::MCP::Requests::ResourceList.new(self).call
+      end
+
+      def resource_template_list_request
+        RubyLLM::MCP::Requests::ResourceTemplateList.new(self).call
+      end
+
+      def prompt_list_request
+        RubyLLM::MCP::Requests::PromptList.new(self).call
       end

       def fetch_and_create_tools
@@ -72,6 +111,36 @@ module RubyLLM
           RubyLLM::MCP::Tool.new(self, tool)
         end
       end
+
+      def fetch_and_create_resources(set_as_template: false)
+        resources_response = resources_list_request
+        resources_response = resources_response["result"]["resources"]
+
+        resources = {}
+        resources_response.each do |resource|
+          new_resource = RubyLLM::MCP::Resource.new(self, resource, template: set_as_template)
+          resources[new_resource.name] = new_resource
+        end
+
+        resources
+      end
+
+      def fetch_and_create_prompts
+        prompts_response = prompt_list_request
+        prompts_response = prompts_response["result"]["prompts"]
+
+        prompts = {}
+        prompts_response.each do |prompt|
+          new_prompt = RubyLLM::MCP::Prompt.new(self,
+                                                name: prompt["name"],
+                                                description: prompt["description"],
+                                                arguments: prompt["arguments"])
+
+          prompts[new_prompt.name] = new_prompt
+        end
+
+        prompts
+      end
     end
   end
 end
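With these changes the client exposes memoized collections for tools, resources, resource templates, and prompts, plus low-level request helpers. A short sketch of the expanded surface; the server URL and transport are placeholders:

```ruby
# Sketch only: assumes a reachable MCP server at this URL.
client = RubyLLM::MCP.client(
  name: "example-server",
  transport_type: :streamable,
  config: { url: "http://localhost:8080/mcp" }
)

client.tools              # collection of RubyLLM::MCP::Tool
client.resources          # Hash of name => RubyLLM::MCP::Resource
client.resource_templates # same fetch, built with template: true
client.prompts            # Hash of name => RubyLLM::MCP::Prompt

# Each collection is memoized; pass refresh: true to re-query the server
client.prompts(refresh: true)
```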
data/lib/ruby_llm/mcp/completion.rb
ADDED
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  module MCP
+    class Completion
+      attr_reader :values, :total, :has_more
+
+      def initialize(values:, total:, has_more:)
+        @values = values
+        @total = total
+        @has_more = has_more
+      end
+    end
+  end
+end
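Completion is the small value object returned by argument-completion searches; it simply carries the `values`, `total`, and `hasMore` fields of the server's completion result. A usage sketch, assuming the server advertises the completion capability and a `daily_greeting` prompt (from the README examples) exists:

```ruby
# Sketch only: prompt name and argument are illustrative.
completion = client.prompts["daily_greeting"].arguments_search("name", "Al")

completion.values.each { |value| puts "- #{value}" }
puts "#{completion.total} matches, more available: #{completion.has_more}"
```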
data/lib/ruby_llm/mcp/content.rb
ADDED
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  module MCP
+    class Content < RubyLLM::Content
+      attr_reader :text, :attachments, :content
+
+      def initialize(text: nil, attachments: nil) # rubocop:disable Lint/MissingSuper
+        @text = text
+        @attachments = attachments || []
+      end
+
+      # This is a workaround to allow the content object to be passed as the tool call
+      # to return audio or image attachments.
+      def to_s
+        attachments.empty? ? text : self
+      end
+    end
+  end
+end
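Content wraps either plain text or attachments so prompt and tool results can carry images and audio; `to_s` hands back the raw text when there are no attachments and the object itself otherwise, keeping attachments intact through the tool-call path. A small sketch; the file path is hypothetical and the base64 encoding mirrors how MCP transmits binary content:

```ruby
require "base64"

text_only = RubyLLM::MCP::Content.new(text: "plain result")
puts text_only.to_s # => "plain result"

# Binary MCP content arrives base64-encoded; Attachment#encoded returns it as-is
data = Base64.strict_encode64(File.binread("chart.png")) # hypothetical file
image = RubyLLM::MCP::Attachment.new(data, "image/png")

with_image = RubyLLM::MCP::Content.new(text: nil, attachments: [image])
with_image.to_s # => the Content object itself, so the attachment is preserved
```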
data/lib/ruby_llm/mcp/errors.rb
CHANGED
@@ -3,7 +3,7 @@
 module RubyLLM
   module MCP
     module Errors
-      class
+      class BaseError < StandardError
         attr_reader :message

         def initialize(message:)
@@ -11,6 +11,16 @@ module RubyLLM
           super(message)
         end
       end
+
+      class InvalidProtocolVersionError < BaseError; end
+
+      class SessionExpiredError < BaseError; end
+
+      class TimeoutError < BaseError; end
+
+      class PromptArgumentError < BaseError; end
+
+      class CompletionNotAvailable < BaseError; end
     end
   end
 end
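The new subclasses give callers specific failures to rescue; within this release, `PromptArgumentError` is raised during prompt argument validation and `CompletionNotAvailable` when a server does not advertise the completion capability. A sketch of the intended rescue pattern; the `prompt` and `chat` objects are assumed to already exist:

```ruby
# Sketch only: `prompt` is a RubyLLM::MCP::Prompt, `chat` a RubyLLM chat.
begin
  prompt.include(chat, arguments: { name: "Alice" })
rescue RubyLLM::MCP::Errors::PromptArgumentError => e
  warn "prompt arguments rejected: #{e.message}"
end

begin
  prompt.arguments_search("name", "Al")
rescue RubyLLM::MCP::Errors::CompletionNotAvailable
  # server does not support argument completion; fall back to free-form input
end
```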
data/lib/ruby_llm/mcp/prompt.rb
ADDED
@@ -0,0 +1,95 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  module MCP
+    class Prompt
+      class Argument
+        attr_reader :name, :description, :required
+
+        def initialize(name:, description:, required:)
+          @name = name
+          @description = description
+          @required = required
+        end
+      end
+
+      attr_reader :name, :description, :arguments, :mcp_client
+
+      def initialize(mcp_client, name:, description:, arguments:)
+        @mcp_client = mcp_client
+        @name = name
+        @description = description
+
+        @arguments = arguments.map do |arg|
+          Argument.new(name: arg["name"], description: arg["description"], required: arg["required"])
+        end
+      end
+
+      def include(chat, arguments: {})
+        validate_arguments!(arguments)
+        messages = fetch_prompt_messages(arguments)
+
+        messages.each { |message| chat.add_message(message) }
+        chat
+      end
+
+      def ask(chat, arguments: {}, &)
+        include(chat, arguments: arguments)
+
+        chat.complete(&)
+      end
+
+      alias say ask
+
+      def arguments_search(argument, value)
+        if @mcp_client.capabilities.completion?
+          response = @mcp_client.completion(type: :prompt, name: @name, argument: argument, value: value)
+          response = response.dig("result", "completion")
+
+          Completion.new(values: response["values"], total: response["total"], has_more: response["hasMore"])
+        else
+          raise Errors::CompletionNotAvailable, "Completion is not available for this MCP server"
+        end
+      end
+
+      private
+
+      def fetch_prompt_messages(arguments)
+        response = @mcp_client.execute_prompt(
+          name: @name,
+          arguments: arguments
+        )
+
+        response["result"]["messages"].map do |message|
+          content = create_content_for_message(message["content"])
+
+          RubyLLM::Message.new(
+            role: message["role"],
+            content: content
+          )
+        end
+      end
+
+      def validate_arguments!(incoming_arguments)
+        @arguments.each do |arg|
+          if arg.required && incoming_arguments.key?(arg.name)
+            raise Errors::PromptArgumentError, "Argument #{arg.name} is required"
+          end
+        end
+      end
+
+      def create_content_for_message(content)
+        case content["type"]
+        when "text"
+          MCP::Content.new(text: content["text"])
+        when "image", "audio"
+          attachment = MCP::Attachment.new(content["content"], content["mime_type"])
+          MCP::Content.new(text: nil, attachments: [attachment])
+        when "resource"
+          resource = Resource.new(mcp_client, content["resource"])
+          resource.to_content
+        end
+      end
+    end
+  end
+end
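Prompt#ask (aliased as `say`) appends the fetched prompt messages to the chat and then calls `chat.complete(&)`, so a block can be passed through for streaming the same way it is with a plain RubyLLM completion. A closing sketch, reusing the README's hypothetical `daily_greeting` prompt; the streaming behavior is assumed from RubyLLM's chat API:

```ruby
# Sketch only: assumes a connected client exposing a "daily_greeting" prompt.
prompt = client.prompts["daily_greeting"]
chat = RubyLLM.chat(model: "gpt-4")

prompt.ask(chat, arguments: { name: "Alice", time: "morning" }) do |chunk|
  print chunk.content # streamed chunks forwarded through chat.complete
end
```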