ruby_llm-mcp 0.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/LICENSE +21 -0
- data/README.md +446 -0
- data/lib/ruby_llm/chat.rb +33 -0
- data/lib/ruby_llm/mcp/attachment.rb +18 -0
- data/lib/ruby_llm/mcp/capabilities.rb +29 -0
- data/lib/ruby_llm/mcp/client.rb +104 -0
- data/lib/ruby_llm/mcp/completion.rb +15 -0
- data/lib/ruby_llm/mcp/content.rb +20 -0
- data/lib/ruby_llm/mcp/coordinator.rb +112 -0
- data/lib/ruby_llm/mcp/errors.rb +28 -0
- data/lib/ruby_llm/mcp/parameter.rb +19 -0
- data/lib/ruby_llm/mcp/prompt.rb +106 -0
- data/lib/ruby_llm/mcp/providers/anthropic/complex_parameter_support.rb +65 -0
- data/lib/ruby_llm/mcp/providers/gemini/complex_parameter_support.rb +61 -0
- data/lib/ruby_llm/mcp/providers/openai/complex_parameter_support.rb +52 -0
- data/lib/ruby_llm/mcp/requests/base.rb +31 -0
- data/lib/ruby_llm/mcp/requests/completion_prompt.rb +40 -0
- data/lib/ruby_llm/mcp/requests/completion_resource.rb +40 -0
- data/lib/ruby_llm/mcp/requests/initialization.rb +24 -0
- data/lib/ruby_llm/mcp/requests/initialize_notification.rb +14 -0
- data/lib/ruby_llm/mcp/requests/prompt_call.rb +32 -0
- data/lib/ruby_llm/mcp/requests/prompt_list.rb +23 -0
- data/lib/ruby_llm/mcp/requests/resource_list.rb +21 -0
- data/lib/ruby_llm/mcp/requests/resource_read.rb +30 -0
- data/lib/ruby_llm/mcp/requests/resource_template_list.rb +21 -0
- data/lib/ruby_llm/mcp/requests/tool_call.rb +32 -0
- data/lib/ruby_llm/mcp/requests/tool_list.rb +17 -0
- data/lib/ruby_llm/mcp/resource.rb +77 -0
- data/lib/ruby_llm/mcp/resource_template.rb +79 -0
- data/lib/ruby_llm/mcp/tool.rb +115 -0
- data/lib/ruby_llm/mcp/transport/sse.rb +244 -0
- data/lib/ruby_llm/mcp/transport/stdio.rb +210 -0
- data/lib/ruby_llm/mcp/transport/streamable.rb +299 -0
- data/lib/ruby_llm/mcp/version.rb +7 -0
- data/lib/ruby_llm/mcp.rb +27 -0
- metadata +175 -0
data/lib/ruby_llm/mcp/transport/stdio.rb
ADDED
@@ -0,0 +1,210 @@
+# frozen_string_literal: true
+
+require "open3"
+require "json"
+require "timeout"
+require "securerandom"
+
+module RubyLLM
+  module MCP
+    module Transport
+      class Stdio
+        attr_reader :command, :stdin, :stdout, :stderr, :id
+
+        def initialize(command, request_timeout:, args: [], env: {})
+          @request_timeout = request_timeout
+          @command = command
+          @args = args
+          @env = env || {}
+          @client_id = SecureRandom.uuid
+
+          @id_counter = 0
+          @id_mutex = Mutex.new
+          @pending_requests = {}
+          @pending_mutex = Mutex.new
+          @running = true
+          @reader_thread = nil
+          @stderr_thread = nil
+
+          start_process
+        end
+
+        def request(body, add_id: true, wait_for_response: true)
+          if add_id
+            @id_mutex.synchronize { @id_counter += 1 }
+            request_id = @id_counter
+            body["id"] = request_id
+          end
+
+          response_queue = Queue.new
+          if wait_for_response
+            @pending_mutex.synchronize do
+              @pending_requests[request_id.to_s] = response_queue
+            end
+          end
+
+          begin
+            @stdin.puts(JSON.generate(body))
+            @stdin.flush
+          rescue IOError, Errno::EPIPE => e
+            @pending_mutex.synchronize { @pending_requests.delete(request_id.to_s) }
+            restart_process
+            raise "Failed to send request: #{e.message}"
+          end
+
+          return unless wait_for_response
+
+          begin
+            Timeout.timeout(@request_timeout / 1000) do
+              response_queue.pop
+            end
+          rescue Timeout::Error
+            @pending_mutex.synchronize { @pending_requests.delete(request_id.to_s) }
+            raise RubyLLM::MCP::Errors::TimeoutError.new(
+              message: "Request timed out after #{@request_timeout / 1000} seconds"
+            )
+          end
+        end
+
+        def alive?
+          @running
+        end
+
+        def close # rubocop:disable Metrics/MethodLength
+          @running = false
+
+          begin
+            @stdin&.close
+          rescue StandardError
+            nil
+          end
+
+          begin
+            @wait_thread&.join(1)
+          rescue StandardError
+            nil
+          end
+
+          begin
+            @stdout&.close
+          rescue StandardError
+            nil
+          end
+
+          begin
+            @stderr&.close
+          rescue StandardError
+            nil
+          end
+
+          begin
+            @reader_thread&.join(1)
+          rescue StandardError
+            nil
+          end
+
+          begin
+            @stderr_thread&.join(1)
+          rescue StandardError
+            nil
+          end
+
+          @stdin = nil
+          @stdout = nil
+          @stderr = nil
+          @wait_thread = nil
+          @reader_thread = nil
+          @stderr_thread = nil
+        end
+
+        private
+
+        def start_process
+          close if @stdin || @stdout || @stderr || @wait_thread
+
+          @stdin, @stdout, @stderr, @wait_thread = if @env.empty?
+                                                     Open3.popen3(@command, *@args)
+                                                   else
+                                                     Open3.popen3(@env, @command, *@args)
+                                                   end
+
+          start_reader_thread
+          start_stderr_thread
+        end
+
+        def restart_process
+          puts "Process connection lost. Restarting..."
+          start_process
+        end
+
+        def start_reader_thread
+          @reader_thread = Thread.new do
+            while @running
+              begin
+                if @stdout.closed? || @wait_thread.nil? || !@wait_thread.alive?
+                  sleep 1
+                  restart_process if @running
+                  next
+                end
+
+                line = @stdout.gets
+                next unless line && !line.strip.empty?
+
+                process_response(line.strip)
+              rescue IOError, Errno::EPIPE => e
+                puts "Reader error: #{e.message}. Restarting in 1 second..."
+                sleep 1
+                restart_process if @running
+              rescue StandardError => e
+                puts "Error in reader thread: #{e.message}, #{e.backtrace.join("\n")}"
+                sleep 1
+              end
+            end
+          end
+
+          @reader_thread.abort_on_exception = true
+        end
+
+        def start_stderr_thread
+          @stderr_thread = Thread.new do
+            while @running
+              begin
+                if @stderr.closed? || @wait_thread.nil? || !@wait_thread.alive?
+                  sleep 1
+                  next
+                end
+
+                line = @stderr.gets
+                next unless line && !line.strip.empty?
+
+                puts "STDERR: #{line.strip}"
+              rescue IOError, Errno::EPIPE => e
+                puts "Stderr reader error: #{e.message}"
+                sleep 1
+              rescue StandardError => e
+                puts "Error in stderr thread: #{e.message}"
+                sleep 1
+              end
+            end
+          end
+
+          @stderr_thread.abort_on_exception = true
+        end
+
+        def process_response(line)
+          response = JSON.parse(line)
+          request_id = response["id"]&.to_s
+
+          @pending_mutex.synchronize do
+            if request_id && @pending_requests.key?(request_id)
+              response_queue = @pending_requests.delete(request_id)
+              response_queue&.push(response)
+            end
+          end
+        rescue JSON::ParserError => e
+          RubyLLM.logger.error("Error parsing response as JSON: #{e.message}\nRaw response: #{line}")
+        end
+      end
+    end
+  end
+end
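The Stdio transport above launches an MCP server as a child process with Open3.popen3, writes one line of JSON per JSON-RPC request to the child's stdin, and pairs responses back to callers through per-request queues filled by a background reader thread. A minimal sketch of driving it directly follows (in normal use RubyLLM::MCP::Client builds the transport for you); the server command, its arguments, and the tools/list payload are illustrative placeholders, not taken from this diff:

    require "ruby_llm/mcp"

    transport = RubyLLM::MCP::Transport::Stdio.new(
      "npx",                                                    # hypothetical MCP server command
      args: ["-y", "@modelcontextprotocol/server-everything"],  # hypothetical arguments
      env: {},
      request_timeout: 8_000                                    # milliseconds; divided by 1000 for Timeout
    )

    # request assigns an incrementing "id", writes the JSON line to the child's
    # stdin, and blocks on a Queue until the reader thread sees a response with
    # the matching "id" (or the timeout elapses).
    response = transport.request({ "jsonrpc" => "2.0", "method" => "tools/list", "params" => {} })
    puts response["result"]

    transport.close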
data/lib/ruby_llm/mcp/transport/streamable.rb
ADDED
@@ -0,0 +1,299 @@
+# frozen_string_literal: true
+
+require "json"
+require "uri"
+require "faraday"
+require "timeout"
+require "securerandom"
+
+module RubyLLM
+  module MCP
+    module Transport
+      class Streamable
+        attr_reader :headers, :id, :session_id
+
+        def initialize(url, request_timeout:, headers: {})
+          @url = url
+          @request_timeout = request_timeout
+          @client_id = SecureRandom.uuid
+          @session_id = nil
+          @base_headers = headers.merge({
+            "Content-Type" => "application/json",
+            "Accept" => "application/json, text/event-stream",
+            "Connection" => "keep-alive",
+            "X-CLIENT-ID" => @client_id
+          })
+
+          @id_counter = 0
+          @id_mutex = Mutex.new
+          @pending_requests = {}
+          @pending_mutex = Mutex.new
+          @running = true
+          @sse_streams = {}
+          @sse_mutex = Mutex.new
+
+          # Initialize HTTP connection
+          @connection = create_connection
+        end
+
+        def request(body, add_id: true, wait_for_response: true)
+          # Generate a unique request ID for requests
+          if add_id && body.is_a?(Hash) && !body.key?("id")
+            @id_mutex.synchronize { @id_counter += 1 }
+            body["id"] = @id_counter
+          end
+
+          request_id = body.is_a?(Hash) ? body["id"] : nil
+          is_initialization = body.is_a?(Hash) && body["method"] == "initialize"
+
+          # Create a queue for this request's response if needed
+          response_queue = setup_response_queue(request_id, wait_for_response)
+
+          # Send the HTTP request
+          response = send_http_request(body, request_id, is_initialization: is_initialization)
+
+          # Handle different response types based on content
+          handle_response(response, request_id, response_queue, wait_for_response)
+        end
+
+        def alive?
+          @running
+        end
+
+        def close
+          @running = false
+          @sse_mutex.synchronize do
+            @sse_streams.each_value(&:close)
+            @sse_streams.clear
+          end
+          @connection&.close if @connection.respond_to?(:close)
+          @connection = nil
+        end
+
+        def terminate_session
+          return unless @session_id
+
+          begin
+            response = @connection.delete do |req|
+              build_headers.each { |key, value| req.headers[key] = value }
+            end
+            @session_id = nil if response.status == 200
+          rescue StandardError => e
+            # Server may not support session termination (405), which is allowed
+            puts "Warning: Failed to terminate session: #{e.message}"
+          end
+        end
+
+        private
+
+        def create_connection
+          Faraday.new(url: @url) do |f|
+            f.options.timeout = @request_timeout / 1000
+            f.options.open_timeout = 10
+          end
+        end
+
+        def build_headers
+          headers = @base_headers.dup
+          headers["Mcp-Session-Id"] = @session_id if @session_id
+          headers
+        end
+
+        def build_initialization_headers
+          @base_headers.dup
+        end
+
+        def setup_response_queue(request_id, wait_for_response)
+          response_queue = Queue.new
+          if wait_for_response && request_id
+            @pending_mutex.synchronize do
+              @pending_requests[request_id.to_s] = response_queue
+            end
+          end
+          response_queue
+        end
+
+        def send_http_request(body, request_id, is_initialization: false)
+          @connection.post do |req|
+            headers = is_initialization ? build_initialization_headers : build_headers
+            headers.each { |key, value| req.headers[key] = value }
+            req.body = JSON.generate(body)
+          end
+        rescue StandardError => e
+          @pending_mutex.synchronize { @pending_requests.delete(request_id.to_s) } if request_id
+          raise e
+        end
+
+        def handle_response(response, request_id, response_queue, wait_for_response)
+          case response.status
+          when 200
+            handle_200_response(response, request_id, response_queue, wait_for_response)
+          when 202
+            # Accepted - for notifications/responses only, no body expected
+            nil
+          when 400..499
+            handle_client_error(response)
+          when 404
+            handle_session_expired
+          else
+            raise "HTTP request failed: #{response.status} - #{response.body}"
+          end
+        rescue StandardError => e
+          @pending_mutex.synchronize { @pending_requests.delete(request_id.to_s) } if request_id
+          raise e
+        end
+
+        def handle_200_response(response, request_id, response_queue, wait_for_response)
+          content_type = response.headers["content-type"]
+
+          if content_type&.include?("text/event-stream")
+            handle_sse_response(response, request_id, response_queue, wait_for_response)
+          elsif content_type&.include?("application/json")
+            handle_json_response(response, request_id, response_queue, wait_for_response)
+          else
+            raise "Unexpected content type: #{content_type}"
+          end
+        end
+
+        def handle_sse_response(response, request_id, response_queue, wait_for_response)
+          # Extract session ID from initial response if present
+          extract_session_id(response)
+
+          if wait_for_response && request_id
+            # Process SSE stream for this specific request
+            process_sse_for_request(response.body, request_id.to_s, response_queue)
+            # Wait for the response with timeout
+            wait_for_response_with_timeout(request_id.to_s, response_queue)
+          else
+            # Process general SSE stream
+            process_sse_stream(response.body)
+            nil
+          end
+        end
+
+        def handle_json_response(response, request_id, response_queue, wait_for_response)
+          # Extract session ID from response if present
+          extract_session_id(response)
+
+          begin
+            json_response = JSON.parse(response.body)
+
+            if wait_for_response && request_id && response_queue
+              @pending_mutex.synchronize { @pending_requests.delete(request_id.to_s) }
+              return json_response
+            end
+
+            json_response
+          rescue JSON::ParserError => e
+            raise "Invalid JSON response: #{e.message}"
+          end
+        end
+
+        def extract_session_id(response)
+          session_id = response.headers["Mcp-Session-Id"]
+          @session_id = session_id if session_id
+        end
+
+        def handle_client_error(response)
+          begin
+            error_body = JSON.parse(response.body)
+            if error_body.is_a?(Hash) && error_body["error"]
+              error_message = error_body["error"]["message"] || error_body["error"]["code"]
+
+              if error_message.to_s.downcase.include?("session")
+                raise "Server error: #{error_message} (Current session ID: #{@session_id || 'none'})"
+              end
+
+              raise "Server error: #{error_message}"
+
+            end
+          rescue JSON::ParserError
+            # Fall through to generic error
+          end
+
+          raise "HTTP client error: #{response.status} - #{response.body}"
+        end
+
+        def handle_session_expired
+          @session_id = nil
+          raise RubyLLM::MCP::Errors::SessionExpiredError.new(
+            message: "Session expired, re-initialization required"
+          )
+        end
+
+        def process_sse_for_request(sse_body, request_id, response_queue)
+          Thread.new do
+            process_sse_events(sse_body) do |event_data|
+              if event_data.is_a?(Hash) && event_data["id"]&.to_s == request_id
+                response_queue.push(event_data)
+                @pending_mutex.synchronize { @pending_requests.delete(request_id) }
+                break # Found our response, stop processing
+              end
+            end
+          rescue StandardError => e
+            puts "Error processing SSE stream: #{e.message}"
+            response_queue.push({ "error" => { "message" => e.message } })
+          end
+        end
+
+        def process_sse_stream(sse_body)
+          Thread.new do
+            process_sse_events(sse_body) do |event_data|
+              # Handle server-initiated requests/notifications
+              handle_server_message(event_data) if event_data.is_a?(Hash)
+            end
+          rescue StandardError => e
+            puts "Error processing SSE stream: #{e.message}"
+          end
+        end
+
+        def process_sse_events(sse_body)
+          event_buffer = ""
+          event_id = nil
+
+          sse_body.each_line do |line|
+            line = line.strip
+
+            if line.empty?
+              # End of event, process accumulated data
+              unless event_buffer.empty?
+                begin
+                  event_data = JSON.parse(event_buffer)
+                  yield event_data
+                rescue JSON::ParserError
+                  puts "Warning: Failed to parse SSE event data: #{event_buffer}"
+                end
+                event_buffer = ""
+              end
+            elsif line.start_with?("id:")
+              event_id = line[3..].strip
+            elsif line.start_with?("data:")
+              data = line[5..].strip
+              event_buffer += data
+            elsif line.start_with?("event:")
+              # Event type - could be used for different message types
+              # For now, we treat all as data events
+            end
+          end
+        end
+
+        def handle_server_message(message)
+          # Handle server-initiated requests and notifications
+          # This would typically be passed to a message handler
+          puts "Received server message: #{message.inspect}"
+        end
+
+        def wait_for_response_with_timeout(request_id, response_queue)
+          Timeout.timeout(@request_timeout / 1000) do
+            response_queue.pop
+          end
+        rescue Timeout::Error
+          @pending_mutex.synchronize { @pending_requests.delete(request_id.to_s) }
+          raise RubyLLM::MCP::Errors::TimeoutError.new(
+            message: "Request timed out after #{@request_timeout / 1000} seconds"
+          )
+        end
+      end
+    end
+  end
+end
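The Streamable transport above implements MCP's Streamable HTTP scheme on top of Faraday: every JSON-RPC body is POSTed to a single endpoint, an Mcp-Session-Id response header is captured and replayed on later calls, and the server may answer with either application/json or a text/event-stream body that is parsed event by event. A sketch under the same caveat as before; the URL, token, and payloads are placeholders:

    require "ruby_llm/mcp"

    transport = RubyLLM::MCP::Transport::Streamable.new(
      "http://localhost:3000/mcp",                       # hypothetical Streamable HTTP endpoint
      request_timeout: 8_000,                            # milliseconds
      headers: { "Authorization" => "Bearer <token>" }   # merged into the base headers
    )

    # The initialize call is sent without a session header; the Mcp-Session-Id the
    # server returns is attached to every later request via build_headers.
    transport.request({ "jsonrpc" => "2.0", "method" => "initialize", "params" => {} })
    tools = transport.request({ "jsonrpc" => "2.0", "method" => "tools/list", "params" => {} })

    transport.terminate_session
    transport.close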
data/lib/ruby_llm/mcp.rb
ADDED
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require "ruby_llm"
+require "zeitwerk"
+require_relative "chat"
+
+loader = Zeitwerk::Loader.for_gem_extension(RubyLLM)
+loader.inflector.inflect("mcp" => "MCP")
+loader.inflector.inflect("sse" => "SSE")
+loader.inflector.inflect("openai" => "OpenAI")
+loader.setup
+
+module RubyLLM
+  module MCP
+    module_function
+
+    def client(*args, **kwargs)
+      @client ||= Client.new(*args, **kwargs)
+    end
+
+    def support_complex_parameters!
+      require_relative "mcp/providers/openai/complex_parameter_support"
+      require_relative "mcp/providers/anthropic/complex_parameter_support"
+      require_relative "mcp/providers/gemini/complex_parameter_support"
+    end
+  end
+end
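The entry point above wires Zeitwerk autoloading onto the RubyLLM::MCP namespace (with MCP, SSE, and OpenAI inflections), memoizes a single client behind RubyLLM::MCP.client, and exposes support_complex_parameters! to load the per-provider patches for nested tool parameters. A usage sketch follows; Client.new's keyword arguments live in client.rb and the README, neither of which is shown in this hunk, so the name:, transport_type:, and config: keywords and client.tools below are assumptions:

    require "ruby_llm/mcp"

    RubyLLM::MCP.support_complex_parameters!            # opt-in provider patches

    client = RubyLLM::MCP.client(                       # memoized: later calls return the same instance
      name: "local-server",                             # assumed keyword
      transport_type: :stdio,                           # assumed keyword; SSE and Streamable transports also ship above
      config: {                                         # assumed keyword
        command: "npx",                                 # hypothetical server command
        args: ["-y", "@modelcontextprotocol/server-everything"],
        env: {}
      }
    )

    chat = RubyLLM.chat                                 # ruby_llm chat, extended by data/lib/ruby_llm/chat.rb
    chat.with_tools(*client.tools)                      # client.tools assumed from the gem's README
    puts chat.ask("What tools do you have available?").content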