ruby_llm-mcp 0.3.1 → 0.4.0
This diff compares the contents of two publicly released package versions as they appear in their respective public registries; it is provided for informational purposes only.
- checksums.yaml +4 -4
- data/README.md +121 -2
- data/lib/ruby_llm/mcp/capabilities.rb +22 -2
- data/lib/ruby_llm/mcp/client.rb +106 -18
- data/lib/ruby_llm/mcp/configuration.rb +66 -0
- data/lib/ruby_llm/mcp/coordinator.rb +197 -33
- data/lib/ruby_llm/mcp/error.rb +34 -0
- data/lib/ruby_llm/mcp/errors.rb +37 -4
- data/lib/ruby_llm/mcp/logging.rb +16 -0
- data/lib/ruby_llm/mcp/parameter.rb +2 -0
- data/lib/ruby_llm/mcp/progress.rb +33 -0
- data/lib/ruby_llm/mcp/prompt.rb +12 -5
- data/lib/ruby_llm/mcp/providers/anthropic/complex_parameter_support.rb +5 -2
- data/lib/ruby_llm/mcp/providers/gemini/complex_parameter_support.rb +6 -3
- data/lib/ruby_llm/mcp/providers/openai/complex_parameter_support.rb +6 -3
- data/lib/ruby_llm/mcp/requests/base.rb +3 -3
- data/lib/ruby_llm/mcp/requests/cancelled_notification.rb +32 -0
- data/lib/ruby_llm/mcp/requests/completion_prompt.rb +3 -3
- data/lib/ruby_llm/mcp/requests/completion_resource.rb +3 -3
- data/lib/ruby_llm/mcp/requests/initialization.rb +24 -18
- data/lib/ruby_llm/mcp/requests/initialize_notification.rb +15 -9
- data/lib/ruby_llm/mcp/requests/logging_set_level.rb +28 -0
- data/lib/ruby_llm/mcp/requests/meta.rb +30 -0
- data/lib/ruby_llm/mcp/requests/ping.rb +20 -0
- data/lib/ruby_llm/mcp/requests/ping_response.rb +28 -0
- data/lib/ruby_llm/mcp/requests/prompt_call.rb +3 -3
- data/lib/ruby_llm/mcp/requests/prompt_list.rb +1 -1
- data/lib/ruby_llm/mcp/requests/resource_list.rb +1 -1
- data/lib/ruby_llm/mcp/requests/resource_read.rb +4 -4
- data/lib/ruby_llm/mcp/requests/resource_template_list.rb +1 -1
- data/lib/ruby_llm/mcp/requests/resources_subscribe.rb +30 -0
- data/lib/ruby_llm/mcp/requests/tool_call.rb +6 -3
- data/lib/ruby_llm/mcp/requests/tool_list.rb +17 -11
- data/lib/ruby_llm/mcp/resource.rb +26 -5
- data/lib/ruby_llm/mcp/resource_template.rb +11 -6
- data/lib/ruby_llm/mcp/result.rb +90 -0
- data/lib/ruby_llm/mcp/tool.rb +28 -3
- data/lib/ruby_llm/mcp/transport/sse.rb +81 -75
- data/lib/ruby_llm/mcp/transport/stdio.rb +33 -17
- data/lib/ruby_llm/mcp/transport/streamable_http.rb +647 -0
- data/lib/ruby_llm/mcp/version.rb +1 -1
- data/lib/ruby_llm/mcp.rb +18 -0
- data/lib/tasks/release.rake +23 -0
- metadata +20 -50
- data/lib/ruby_llm/mcp/transport/streamable.rb +0 -299
data/lib/ruby_llm/mcp/transport/streamable_http.rb ADDED
@@ -0,0 +1,647 @@
+# frozen_string_literal: true
+
+require "json"
+require "uri"
+require "httpx"
+require "timeout"
+require "securerandom"
+
+module RubyLLM
+  module MCP
+    module Transport
+      # Configuration options for reconnection behavior
+      class ReconnectionOptions
+        attr_reader :max_reconnection_delay, :initial_reconnection_delay,
+                    :reconnection_delay_grow_factor, :max_retries
+
+        def initialize(
+          max_reconnection_delay: 30_000,
+          initial_reconnection_delay: 1_000,
+          reconnection_delay_grow_factor: 1.5,
+          max_retries: 2
+        )
+          @max_reconnection_delay = max_reconnection_delay
+          @initial_reconnection_delay = initial_reconnection_delay
+          @reconnection_delay_grow_factor = reconnection_delay_grow_factor
+          @max_retries = max_retries
+        end
+      end
+
+      # Options for starting SSE connections
+      class StartSSEOptions
+        attr_reader :resumption_token, :on_resumption_token, :replay_message_id
+
+        def initialize(resumption_token: nil, on_resumption_token: nil, replay_message_id: nil)
+          @resumption_token = resumption_token
+          @on_resumption_token = on_resumption_token
+          @replay_message_id = replay_message_id
+        end
+      end
+
+      # Main StreamableHTTP transport class
+      class StreamableHTTP
+        attr_reader :session_id, :protocol_version, :coordinator
+
+        def initialize( # rubocop:disable Metrics/ParameterLists
+          url,
+          request_timeout:,
+          coordinator:,
+          headers: {},
+          reconnection_options: nil,
+          session_id: nil
+        )
+          @url = URI(url)
+          @coordinator = coordinator
+          @request_timeout = request_timeout
+          @headers = headers || {}
+          @session_id = session_id
+          @reconnection_options = reconnection_options || ReconnectionOptions.new
+          @protocol_version = nil
+          @resource_metadata_url = nil
+          @client_id = SecureRandom.uuid
+
+          @id_counter = 0
+          @id_mutex = Mutex.new
+          @pending_requests = {}
+          @pending_mutex = Mutex.new
+          @running = true
+          @abort_controller = nil
+          @sse_thread = nil
+          @sse_mutex = Mutex.new
+
+          # Thread-safe collection of all HTTPX clients
+          @clients = []
+          @clients_mutex = Mutex.new
+
+          @connection = create_connection
+        end
+
+        def request(body, add_id: true, wait_for_response: true)
+          # Generate a unique request ID for requests
+          if add_id && body.is_a?(Hash) && !body.key?("id")
+            @id_mutex.synchronize { @id_counter += 1 }
+            body["id"] = @id_counter
+          end
+
+          request_id = body.is_a?(Hash) ? body["id"] : nil
+          is_initialization = body.is_a?(Hash) && body["method"] == "initialize"
+
+          response_queue = setup_response_queue(request_id, wait_for_response)
+          result = send_http_request(body, request_id, is_initialization: is_initialization)
+
+          return result if result.is_a?(RubyLLM::MCP::Result)
+
+          if wait_for_response && request_id
+            wait_for_response_with_timeout(request_id.to_s, response_queue)
+          end
+        end
+
+        def alive?
+          @running
+        end
+
+        def close
+          terminate_session
+          cleanup_sse_resources
+          cleanup_connection
+        end
+
+        def start
+          @abort_controller = false
+        end
+
+        def terminate_session
+          return unless @session_id
+
+          begin
+            headers = build_common_headers
+            response = @connection.delete(@url, headers: headers)
+
+            # Handle HTTPX error responses first
+            handle_httpx_error_response!(response, context: { location: "terminating session" })
+
+            # 405 Method Not Allowed is acceptable per spec
+            unless [200, 405].include?(response.status)
+              reason_phrase = response.respond_to?(:reason_phrase) ? response.reason_phrase : nil
+              raise Errors::TransportError.new(
+                code: response.status,
+                message: "Failed to terminate session: #{reason_phrase || response.status}"
+              )
+            end
+
+            @session_id = nil
+          rescue StandardError => e
+            raise Errors::TransportError.new(
+              message: "Failed to terminate session: #{e.message}",
+              code: nil,
+              error: e
+            )
+          end
+        end
+
+        def set_protocol_version(version)
+          @protocol_version = version
+        end
+
+        private
+
+        def handle_httpx_error_response!(response, context:, allow_eof_for_sse: false)
+          return false unless response.is_a?(HTTPX::ErrorResponse)
+
+          error = response.error
+
+          # Special handling for EOFError in SSE contexts
+          if allow_eof_for_sse && error.is_a?(EOFError)
+            RubyLLM::MCP.logger.info "SSE stream closed: #{response.error.message}"
+            return :eof_handled
+          end
+
+          if error.is_a?(HTTPX::ReadTimeoutError)
+            raise Errors::TimeoutError.new(
+              message: "Request timed out after #{@request_timeout / 1000} seconds",
+              request_id: context[:request_id]
+            )
+          end
+
+          error_message = response.error&.message || "Request failed"
+          RubyLLM::MCP.logger.error "HTTPX error in #{context[:location]}: #{error_message}"
+
+          raise Errors::TransportError.new(
+            code: nil,
+            message: "HTTPX Error #{context}: #{error_message}"
+          )
+        end
+
+        def register_client(client)
+          @clients_mutex.synchronize do
+            @clients << client
+          end
+          client
+        end
+
+        def unregister_client(client)
+          @clients_mutex.synchronize do
+            @clients.delete(client)
+          end
+        end
+
+        def close_client(client)
+          client.close if client.respond_to?(:close)
+        rescue StandardError => e
+          RubyLLM::MCP.logger.debug "Error closing HTTPX client: #{e.message}"
+        ensure
+          unregister_client(client)
+        end
+
+        def active_clients_count
+          @clients_mutex.synchronize do
+            @clients.size
+          end
+        end
+
+        def create_connection
+          client = HTTPX.with(
+            timeout: {
+              connect_timeout: 10,
+              read_timeout: @request_timeout / 1000,
+              write_timeout: @request_timeout / 1000,
+              operation_timeout: @request_timeout / 1000
+            }
+          )
+          register_client(client)
+        end
+
+        def build_common_headers
+          headers = @headers.dup
+
+          headers["mcp-session-id"] = @session_id if @session_id
+          headers["mcp-protocol-version"] = @protocol_version if @protocol_version
+          headers["X-CLIENT-ID"] = @client_id
+
+          headers
+        end
+
+        def setup_response_queue(request_id, wait_for_response)
+          response_queue = Queue.new
+          if wait_for_response && request_id
+            @pending_mutex.synchronize do
+              @pending_requests[request_id.to_s] = response_queue
+            end
+          end
+          response_queue
+        end
+
+        def send_http_request(body, request_id, is_initialization: false)
+          headers = build_common_headers
+          headers["Content-Type"] = "application/json"
+          headers["Accept"] = "application/json, text/event-stream"
+
+          json_body = JSON.generate(body)
+          RubyLLM::MCP.logger.debug "Sending Request: #{json_body}"
+
+          begin
+            # Set up connection with streaming callbacks if not initialization
+            connection = if is_initialization
+                           @connection
+                         else
+                           create_connection_with_streaming_callbacks(request_id)
+                         end
+
+            response = connection.post(@url, json: body, headers: headers)
+            handle_response(response, request_id, body)
+          ensure
+            @pending_mutex.synchronize { @pending_requests.delete(request_id.to_s) } if request_id
+          end
+        end
+
+        def create_connection_with_streaming_callbacks(request_id)
+          buffer = +""
+
+          client = HTTPX.plugin(:callbacks).on_response_body_chunk do |request, _response, chunk|
+            next unless @running && !@abort_controller
+
+            RubyLLM::MCP.logger.debug "Received chunk: #{chunk.bytesize} bytes for #{request.uri}"
+            buffer << chunk
+            process_sse_buffer_events(buffer, request_id&.to_s)
+          end
+                        .with(
+                          timeout: {
+                            connect_timeout: 10,
+                            read_timeout: @request_timeout / 1000,
+                            write_timeout: @request_timeout / 1000,
+                            operation_timeout: @request_timeout / 1000
+                          }
+                        )
+          register_client(client)
+        end
+
+        def handle_response(response, request_id, original_message)
+          # Handle HTTPX error responses first
+          handle_httpx_error_response!(response, context: { location: "handling response", request_id: request_id })
+
+          # Extract session ID if present (only for successful responses)
+          session_id = response.headers["mcp-session-id"]
+          @session_id = session_id if session_id
+
+          case response.status
+          when 200
+            handle_success_response(response, request_id, original_message)
+          when 202
+            handle_accepted_response(original_message)
+          when 404
+            handle_session_expired
+          when 405, 401
+            # TODO: Implement 401 handling this once we are adding authorization
+            # Method not allowed - acceptable for some endpoints
+            nil
+          when 400...500
+            handle_client_error(response)
+          else
+            response_body = response.respond_to?(:body) ? response.body.to_s : "Unknown error"
+            raise Errors::TransportError.new(
+              code: response.status,
+              message: "HTTP request failed: #{response.status} - #{response_body}"
+            )
+          end
+        end
+
+        def handle_success_response(response, request_id, _original_message)
+          # Safely access content type
+          content_type = response.respond_to?(:headers) ? response.headers["content-type"] : nil
+
+          if content_type&.include?("text/event-stream")
+            # SSE response - let the streaming handler process it
+            start_sse_stream
+            nil
+          elsif content_type&.include?("application/json")
+            # Direct JSON response
+            response_body = response.respond_to?(:body) ? response.body.to_s : "{}"
+            json_response = JSON.parse(response_body)
+            result = RubyLLM::MCP::Result.new(json_response, session_id: @session_id)
+
+            if request_id
+              @pending_mutex.synchronize { @pending_requests.delete(request_id.to_s) }
+            end
+
+            result
+          else
+            raise Errors::TransportError.new(
+              code: -1,
+              message: "Unexpected content type: #{content_type}"
+            )
+          end
+        rescue StandardError => e
+          raise Errors::TransportError.new(
+            message: "Invalid JSON response: #{e.message}",
+            error: e
+          )
+        end
+
+        def handle_accepted_response(original_message)
+          # 202 Accepted - start SSE stream if this was an initialization
+          if original_message.is_a?(Hash) && original_message["method"] == "initialize"
+            start_sse_stream
+          end
+          nil
+        end
+
+        def handle_client_error(response)
+          begin
+            # Safely access response body
+            response_body = response.respond_to?(:body) ? response.body.to_s : "Unknown error"
+            error_body = JSON.parse(response_body)
+
+            if error_body.is_a?(Hash) && error_body["error"]
+              error_message = error_body["error"]["message"] || error_body["error"]["code"]
+
+              if error_message.to_s.downcase.include?("session")
+                raise Errors::TransportError.new(
+                  code: response.status,
+                  message: "Server error: #{error_message} (Current session ID: #{@session_id || 'none'})"
+                )
+              end
+
+              raise Errors::TransportError.new(
+                code: response.status,
+                message: "Server error: #{error_message}"
+              )
+            end
+          rescue JSON::ParserError
+            # Fall through to generic error
+          end
+
+          # Safely access response attributes
+          response_body = response.respond_to?(:body) ? response.body.to_s : "Unknown error"
+          status_code = response.respond_to?(:status) ? response.status : "Unknown"
+
+          raise Errors::TransportError.new(
+            code: status_code,
+            message: "HTTP client error: #{status_code} - #{response_body}"
+          )
+        end
+
+        def handle_session_expired
+          @session_id = nil
+          raise Errors::SessionExpiredError.new(
+            message: "Session expired, re-initialization required"
+          )
+        end
+
+        def extract_resource_metadata_url(response)
+          # Extract resource metadata URL from response headers if present
+          # Guard against error responses that don't have headers
+          return nil unless response.respond_to?(:headers)
+
+          metadata_url = response.headers["mcp-resource-metadata-url"]
+          metadata_url ? URI(metadata_url) : nil
+        end
+
+        def start_sse_stream(options = StartSSEOptions.new)
+          return unless @running && !@abort_controller
+
+          @sse_mutex.synchronize do
+            return if @sse_thread&.alive?
+
+            @sse_thread = Thread.new do
+              start_sse(options)
+            end
+          end
+        end
+
+        def start_sse(options) # rubocop:disable Metrics/MethodLength
+          attempt_count = 0
+
+          begin
+            headers = build_common_headers
+            headers["Accept"] = "text/event-stream"
+
+            if options.resumption_token
+              headers["Last-Event-ID"] = options.resumption_token
+            end
+
+            # Set up SSE streaming connection with callbacks
+            connection = create_connection_with_sse_callbacks(options)
+            response = connection.get(@url, headers: headers)
+
+            # Handle HTTPX error responses first
+            error_result = handle_httpx_error_response!(response, context: { location: "SSE connection" },
+                                                                  allow_eof_for_sse: true)
+            return if error_result == :eof_handled
+
+            case response.status
+            when 200
+              # SSE stream established successfully
+              RubyLLM::MCP.logger.debug "SSE stream established"
+              # Response will be processed through callbacks
+            when 405, 401
+              # Server doesn't support SSE - this is acceptable
+              RubyLLM::MCP.logger.info "Server does not support SSE streaming"
+              nil
+            else
+              reason_phrase = response.respond_to?(:reason_phrase) ? response.reason_phrase : nil
+              raise Errors::TransportError.new(
+                code: response.status,
+                message: "Failed to open SSE stream: #{reason_phrase || response.status}"
+              )
+            end
+          rescue StandardError => e
+            RubyLLM::MCP.logger.error "SSE stream error: #{e.message}"
+            # Attempt reconnection with exponential backoff
+
+            if @running && !@abort_controller && attempt_count < @reconnection_options.max_retries
+              delay = calculate_reconnection_delay(attempt_count)
+              RubyLLM::MCP.logger.info "Reconnecting SSE stream in #{delay}ms..."
+
+              sleep(delay / 1000.0)
+              attempt_count += 1
+              retry
+            end
+
+            raise e
+          end
+        end
+
+        def create_connection_with_sse_callbacks(options)
+          buffer = +""
+
+          client = HTTPX
+                   .plugin(:callbacks)
+                   .on_response_body_chunk do |request, response, chunk|
+            # Only process chunks for text/event-stream and if still running
+            next unless @running && !@abort_controller
+
+            if chunk.include?("event: stop")
+              RubyLLM::MCP.logger.debug "Closing SSE stream"
+              request.close
+            end
+
+            content_type = response.headers["content-type"]
+            if content_type&.include?("text/event-stream")
+              buffer << chunk.to_s
+
+              while (event_data = extract_sse_event(buffer))
+                raw_event, remaining_buffer = event_data
+                buffer.replace(remaining_buffer)
+
+                next unless raw_event && raw_event[:data]
+
+                if raw_event[:id]
+                  options.on_resumption_token&.call(raw_event[:id])
+                end
+
+                process_sse_event(raw_event, options.replay_message_id)
+              end
+            end
+          end
+                   .with(
+                     timeout: {
+                       connect_timeout: 10,
+                       read_timeout: @request_timeout / 1000,
+                       write_timeout: @request_timeout / 1000,
+                       operation_timeout: @request_timeout / 1000
+                     }
+                   )
+          register_client(client)
+        end
+
+        def calculate_reconnection_delay(attempt)
+          initial = @reconnection_options.initial_reconnection_delay
+          factor = @reconnection_options.reconnection_delay_grow_factor
+          max_delay = @reconnection_options.max_reconnection_delay
+
+          [initial * (factor**attempt), max_delay].min
+        end
+
+        def process_sse_buffer_events(buffer, _request_id)
+          return unless @running && !@abort_controller
+
+          while (event_data = extract_sse_event(buffer))
+            raw_event, remaining_buffer = event_data
+            buffer.replace(remaining_buffer)
+
+            process_sse_event(raw_event, nil) if raw_event && raw_event[:data]
+          end
+        end
+
+        def extract_sse_event(buffer)
+          return nil unless buffer.include?("\n\n")
+
+          raw, rest = buffer.split("\n\n", 2)
+          [parse_sse_event(raw), rest]
+        end
+
+        def parse_sse_event(raw)
+          event = {}
+          raw.each_line do |line|
+            line = line.strip
+            case line
+            when /^data:\s*(.*)/
+              (event[:data] ||= []) << ::Regexp.last_match(1)
+            when /^event:\s*(.*)/
+              event[:event] = ::Regexp.last_match(1)
+            when /^id:\s*(.*)/
+              event[:id] = ::Regexp.last_match(1)
+            end
+          end
+          event[:data] = event[:data]&.join("\n")
+          event
+        end
+
+        def process_sse_event(raw_event, replay_message_id)
+          return unless raw_event[:data]
+          return unless @running && !@abort_controller
+
+          begin
+            event_data = JSON.parse(raw_event[:data])
+
+            # Handle replay message ID if specified
+            if replay_message_id && event_data.is_a?(Hash) && event_data["id"]
+              event_data["id"] = replay_message_id
+            end
+
+            result = RubyLLM::MCP::Result.new(event_data, session_id: @session_id)
+            RubyLLM::MCP.logger.debug "SSE Result Received: #{result.inspect}"
+
+            # Handle different types of messages
+            if result.notification?
+              @coordinator.process_notification(result)
+            elsif result.request?
+              @coordinator.process_request(result)
+            elsif result.response?
+              # Handle response to client request
+              request_id = result.id&.to_s
+              if request_id
+                @pending_mutex.synchronize do
+                  response_queue = @pending_requests.delete(request_id)
+                  response_queue&.push(result)
+                end
+              end
+            end
+          rescue JSON::ParserError => e
+            RubyLLM::MCP.logger.warn "Failed to parse SSE event data: #{raw_event[:data]} - #{e.message}"
+          rescue Errors::UnknownRequest => e
+            RubyLLM::MCP.logger.warn "Unknown request from MCP server: #{e.message}"
+          rescue StandardError => e
+            RubyLLM::MCP.logger.error "Error processing SSE event: #{e.message}"
+            raise Errors::TransportError.new(
+              message: "Error processing SSE event: #{e.message}",
+              error: e
+            )
+          end
+        end
+
+        def wait_for_response_with_timeout(request_id, response_queue)
+          Timeout.timeout(@request_timeout / 1000) do
+            response_queue.pop
+          end
+        rescue Timeout::Error
+          @pending_mutex.synchronize { @pending_requests.delete(request_id.to_s) }
+          raise Errors::TimeoutError.new(
+            message: "Request timed out after #{@request_timeout / 1000} seconds",
+            request_id: request_id
+          )
+        end
+
+        def cleanup_sse_resources
+          @running = false
+          @abort_controller = true
+
+          @sse_mutex.synchronize do
+            if @sse_thread&.alive?
+              @sse_thread.kill
+              @sse_thread.join(5) # Wait up to 5 seconds for thread to finish
+              @sse_thread = nil
+            end
+          end
+
+          # Clear any pending requests
+          @pending_mutex.synchronize do
+            @pending_requests.each_value do |queue|
+              queue.close if queue.respond_to?(:close)
+            rescue StandardError
+              # Ignore errors when closing queues
+            end
+            @pending_requests.clear
+          end
+        end
+
+        def cleanup_connection
+          clients_to_close = []
+
+          @clients_mutex.synchronize do
+            clients_to_close = @clients.dup
+            @clients.clear
+          end
+
+          clients_to_close.each do |client|
+            client.close if client.respond_to?(:close)
+          rescue StandardError => e
+            RubyLLM::MCP.logger.debug "Error closing HTTPX client: #{e.message}"
+          end
+
+          @connection = nil
+        end
+      end
+    end
+  end
+end
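Two details of the new transport are easy to miss when skimming the listing above: all timeouts are expressed in milliseconds and divided by 1000 before being handed to HTTPX and Timeout.timeout, and SSE reconnection follows the exponential backoff computed in calculate_reconnection_delay from the ReconnectionOptions defaults. The sketch below restates both using only what is visible in this diff; the coordinator variable is a stand-in for the RubyLLM::MCP::Coordinator instance the gem normally constructs itself.

# Backoff schedule implied by the ReconnectionOptions defaults above
# (1_000 ms initial delay, 1.5 growth factor, 30_000 ms cap, 2 retries).
initial = 1_000
factor  = 1.5
max     = 30_000
delays  = (0...2).map { |attempt| [initial * (factor**attempt), max].min }
# => [1000.0, 1500.0] milliseconds; start_sse sleeps delay / 1000.0 seconds between retries.

# Constructing the transport directly with custom reconnection options.
# `coordinator` is a placeholder for the coordinator object the gem builds internally.
require "ruby_llm/mcp"

options = RubyLLM::MCP::Transport::ReconnectionOptions.new(max_retries: 5)
transport = RubyLLM::MCP::Transport::StreamableHTTP.new(
  "http://localhost:3000/mcp",
  request_timeout: 8_000, # milliseconds
  coordinator: coordinator,
  headers: { "Authorization" => "Bearer <token>" },
  reconnection_options: options
)
transport.start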
data/lib/ruby_llm/mcp/version.rb CHANGED
data/lib/ruby_llm/mcp.rb CHANGED
@@ -8,7 +8,10 @@ loader = Zeitwerk::Loader.for_gem_extension(RubyLLM)
 loader.inflector.inflect("mcp" => "MCP")
 loader.inflector.inflect("sse" => "SSE")
 loader.inflector.inflect("openai" => "OpenAI")
+loader.inflector.inflect("streamable_http" => "StreamableHTTP")
+
 loader.setup
+loader.eager_load

 module RubyLLM
   module MCP
@@ -23,5 +26,20 @@ module RubyLLM
       require_relative "mcp/providers/anthropic/complex_parameter_support"
       require_relative "mcp/providers/gemini/complex_parameter_support"
     end
+
+    def configure
+      yield config
+    end
+
+    def config
+      @config ||= Configuration.new
+    end
+
+    alias configuration config
+    module_function :configuration
+
+    def logger
+      config.logger
+    end
   end
 end
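The additions above expose a process-wide RubyLLM::MCP.configure block backed by the new Configuration class (configuration.rb, +66 lines, not reproduced in this excerpt) and a logger accessor that the StreamableHTTP transport calls throughout. A hedged usage sketch follows; only configure, config, and the logger reader appear in this diff, so the logger= writer on Configuration is an assumption.

require "logger"
require "ruby_llm/mcp"

# Route the gem's internal logging (RubyLLM::MCP.logger) to a custom logger.
# The logger= writer on Configuration is assumed; only the reader is shown in this diff.
RubyLLM::MCP.configure do |config|
  config.logger = Logger.new($stdout, level: Logger::DEBUG)
end

RubyLLM::MCP.logger.debug "MCP logging configured"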
data/lib/tasks/release.rake ADDED
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+namespace :release do
+  desc "Release a new version of the gem"
+  task :version do
+    # Load the current version from version.rb
+    require_relative "../../lib/ruby_llm/schema/version"
+    version = RubyLlm::Schema::VERSION
+
+    puts "Releasing version #{version}..."
+
+    # Make sure we are on the main branch
+    system "git checkout main"
+    system "git pull origin main"
+
+    # Create a new tag for the version
+    system "git tag -a v#{version} -m 'Release version #{version}'"
+    system "git push origin v#{version}"
+
+    system "gem build ruby_llm-mcp.gemspec"
+    system "gem push ruby_llm-mcp-#{version}.gem"
+  end
+end
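As shipped, the task reads its version from ruby_llm/schema/version and RubyLlm::Schema::VERSION, constants that belong to the ruby_llm-schema gem rather than this gem's own RubyLLM::MCP::VERSION. To make the task available as `rake release:version`, the repository's Rakefile would need to load it along these lines; this snippet is hypothetical, since the gem's Rakefile is not part of this diff.

# Hypothetical Rakefile snippet: load every task file under lib/tasks.
Dir.glob("lib/tasks/**/*.rake").each { |rake_file| load rake_file }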