ruby_llm-mcp 0.4.1 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. checksums.yaml +4 -4
  2. data/README.md +296 -25
  3. data/lib/ruby_llm/chat.rb +2 -1
  4. data/lib/ruby_llm/mcp/client.rb +32 -13
  5. data/lib/ruby_llm/mcp/configuration.rb +123 -3
  6. data/lib/ruby_llm/mcp/coordinator.rb +108 -115
  7. data/lib/ruby_llm/mcp/errors.rb +3 -1
  8. data/lib/ruby_llm/mcp/notification_handler.rb +84 -0
  9. data/lib/ruby_llm/mcp/{requests/cancelled_notification.rb → notifications/cancelled.rb} +2 -2
  10. data/lib/ruby_llm/mcp/{requests/initialize_notification.rb → notifications/initialize.rb} +7 -3
  11. data/lib/ruby_llm/mcp/notifications/roots_list_change.rb +26 -0
  12. data/lib/ruby_llm/mcp/parameter.rb +19 -1
  13. data/lib/ruby_llm/mcp/progress.rb +3 -1
  14. data/lib/ruby_llm/mcp/prompt.rb +18 -0
  15. data/lib/ruby_llm/mcp/railtie.rb +20 -0
  16. data/lib/ruby_llm/mcp/requests/initialization.rb +8 -4
  17. data/lib/ruby_llm/mcp/requests/ping.rb +6 -2
  18. data/lib/ruby_llm/mcp/requests/prompt_list.rb +10 -2
  19. data/lib/ruby_llm/mcp/requests/resource_list.rb +12 -2
  20. data/lib/ruby_llm/mcp/requests/resource_template_list.rb +12 -2
  21. data/lib/ruby_llm/mcp/requests/shared/meta.rb +32 -0
  22. data/lib/ruby_llm/mcp/requests/shared/pagination.rb +17 -0
  23. data/lib/ruby_llm/mcp/requests/tool_call.rb +1 -1
  24. data/lib/ruby_llm/mcp/requests/tool_list.rb +10 -2
  25. data/lib/ruby_llm/mcp/resource.rb +17 -0
  26. data/lib/ruby_llm/mcp/response_handler.rb +58 -0
  27. data/lib/ruby_llm/mcp/responses/error.rb +33 -0
  28. data/lib/ruby_llm/mcp/{requests/ping_response.rb → responses/ping.rb} +2 -2
  29. data/lib/ruby_llm/mcp/responses/roots_list.rb +31 -0
  30. data/lib/ruby_llm/mcp/responses/sampling_create_message.rb +50 -0
  31. data/lib/ruby_llm/mcp/result.rb +21 -8
  32. data/lib/ruby_llm/mcp/roots.rb +45 -0
  33. data/lib/ruby_llm/mcp/sample.rb +148 -0
  34. data/lib/ruby_llm/mcp/{capabilities.rb → server_capabilities.rb} +1 -1
  35. data/lib/ruby_llm/mcp/tool.rb +35 -4
  36. data/lib/ruby_llm/mcp/transport.rb +58 -0
  37. data/lib/ruby_llm/mcp/transports/http_client.rb +26 -0
  38. data/lib/ruby_llm/mcp/{transport → transports}/sse.rb +25 -24
  39. data/lib/ruby_llm/mcp/{transport → transports}/stdio.rb +28 -26
  40. data/lib/ruby_llm/mcp/{transport → transports}/streamable_http.rb +25 -29
  41. data/lib/ruby_llm/mcp/transports/timeout.rb +32 -0
  42. data/lib/ruby_llm/mcp/version.rb +1 -1
  43. data/lib/ruby_llm/mcp.rb +50 -9
  44. metadata +23 -12
  45. data/lib/ruby_llm/mcp/requests/base.rb +0 -31
  46. data/lib/ruby_llm/mcp/requests/meta.rb +0 -30
  47. data/lib/tasks/release.rake +0 -23
data/lib/ruby_llm/mcp/transport.rb ADDED
@@ -0,0 +1,58 @@
+ # frozen_string_literal: true
+
+ module RubyLLM
+   module MCP
+     class Transport
+       class << self
+         def transports
+           @transports ||= {}
+         end
+
+         def register_transport(transport_type, transport_class)
+           transports[transport_type] = transport_class
+         end
+       end
+
+       extend Forwardable
+
+       register_transport(:sse, RubyLLM::MCP::Transports::SSE)
+       register_transport(:stdio, RubyLLM::MCP::Transports::Stdio)
+       register_transport(:streamable, RubyLLM::MCP::Transports::StreamableHTTP)
+       register_transport(:streamable_http, RubyLLM::MCP::Transports::StreamableHTTP)
+
+       attr_reader :transport_type, :coordinator, :config, :pid
+
+       def initialize(transport_type, coordinator, config:)
+         @transport_type = transport_type
+         @coordinator = coordinator
+         @config = config
+         @pid = Process.pid
+       end
+
+       def_delegators :transport_protocol, :request, :alive?, :close, :start, :set_protocol_version
+
+       def transport_protocol
+         if @pid != Process.pid
+           @pid = Process.pid
+           @transport = build_transport
+           coordinator.restart_transport
+         end
+
+         @transport_protocol ||= build_transport
+       end
+
+       private
+
+       def build_transport
+         unless RubyLLM::MCP::Transport.transports.key?(transport_type)
+           supported_types = RubyLLM::MCP::Transport.transports.keys.join(", ")
+           message = "Invalid transport type: :#{transport_type}. Supported types are #{supported_types}"
+           raise Errors::InvalidTransportType.new(message: message)
+         end
+
+         transport_klass = RubyLLM::MCP::Transport.transports[transport_type]
+         transport_klass.new(coordinator: coordinator, **config)
+       end
+     end
+   end
+ end
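The new `Transport` class is both a facade and a registry: the built-in transports register themselves by symbol, and `build_transport` instantiates whichever class is registered for the client's `transport_type`, splatting the client's config hash as keyword arguments. A rough sketch of what plugging in a custom transport could look like, assuming only the contract implied by the delegators above (the `MyWebSocketTransport` class and its options are hypothetical):

```ruby
# Hypothetical custom transport. The registry instantiates it as
# klass.new(coordinator: coordinator, **config), and the delegators expect it
# to respond to request, alive?, close, start and set_protocol_version.
class MyWebSocketTransport
  def initialize(coordinator:, url:, request_timeout:, headers: {})
    @coordinator = coordinator
    @url = url
    @request_timeout = request_timeout
    @headers = headers
    @running = false
  end

  def start
    @running = true # open the connection here
  end

  def alive?
    @running
  end

  def close
    @running = false # tear the connection down here
  end

  def request(body, add_id: true, wait_for_response: true)
    # send body; block for the matching response when wait_for_response is true
  end

  def set_protocol_version(version)
    @protocol_version = version
  end
end

RubyLLM::MCP::Transport.register_transport(:websocket, MyWebSocketTransport)
```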
data/lib/ruby_llm/mcp/transports/http_client.rb ADDED
@@ -0,0 +1,26 @@
+ # frozen_string_literal: true
+
+ require "httpx"
+
+ module RubyLLM
+   module MCP
+     module Transports
+       class HTTPClient
+         CONNECTION_KEY = :ruby_llm_mcp_client_connection
+
+         def self.connection
+           Thread.current[CONNECTION_KEY] ||= build_connection
+         end
+
+         def self.build_connection
+           HTTPX.with(
+             pool_options: {
+               max_connections: RubyLLM::MCP.config.max_connections,
+               pool_timeout: RubyLLM::MCP.config.pool_timeout
+             }
+           )
+         end
+       end
+     end
+   end
+ end
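Both HTTP-based transports now obtain their HTTPX client from this shared, thread-local session instead of calling `HTTPX.with` directly, so connection pooling is governed by gem-wide `max_connections` and `pool_timeout` settings. A minimal sketch of how that could be tuned; only `RubyLLM::MCP.config.max_connections` and `.pool_timeout` appear in this diff, and the `configure` block shape is an assumption based on the expanded `configuration.rb`:

```ruby
# Assumed configuration surface: the diff only shows the two config readers
# used above; a configure block like this is an assumption.
RubyLLM::MCP.configure do |config|
  config.max_connections = 100 # becomes HTTPX pool_options[:max_connections]
  config.pool_timeout = 5      # becomes HTTPX pool_options[:pool_timeout]
end

# Transports layer their own options on top of the one pooled session per thread:
http = RubyLLM::MCP::Transports::HTTPClient.connection
                                           .with(headers: { "Authorization" => "Bearer <token>" })
```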
data/lib/ruby_llm/mcp/{transport → transports}/sse.rb RENAMED
@@ -8,11 +8,13 @@ require "securerandom"
  
  module RubyLLM
    module MCP
-     module Transport
+     module Transports
        class SSE
+         include Timeout
+
          attr_reader :headers, :id, :coordinator
  
-         def initialize(url, coordinator:, request_timeout:, headers: {})
+         def initialize(url:, coordinator:, request_timeout:, headers: {})
            @event_url = url
            @messages_url = nil
            @coordinator = coordinator
@@ -36,13 +38,10 @@ module RubyLLM
            @pending_requests = {}
            @pending_mutex = Mutex.new
            @connection_mutex = Mutex.new
-           @running = true
+           @running = false
            @sse_thread = nil
  
            RubyLLM::MCP.logger.info "Initializing SSE transport to #{@event_url} with client ID #{@client_id}"
-
-           # Start the SSE listener thread
-           start_sse_listener
          end
  
          def request(body, add_id: true, wait_for_response: true) # rubocop:disable Metrics/MethodLength
@@ -60,7 +59,8 @@ module RubyLLM
          end
  
          begin
-           http_client = HTTPX.with(timeout: { request_timeout: @request_timeout / 1000 }, headers: @headers)
+           http_client = HTTPClient.connection.with(timeout: { request_timeout: @request_timeout / 1000 },
+                                                    headers: @headers)
            response = http_client.post(@messages_url, body: JSON.generate(body))
  
            unless response.status == 200
@@ -83,16 +83,13 @@
          return unless wait_for_response
  
          begin
-           Timeout.timeout(@request_timeout / 1000) do
+           with_timeout(@request_timeout / 1000, request_id: request_id) do
              response_queue.pop
            end
-         rescue Timeout::Error
+         rescue RubyLLM::MCP::Errors::TimeoutError => e
            @pending_mutex.synchronize { @pending_requests.delete(request_id.to_s) }
            RubyLLM::MCP.logger.error "SSE request timeout (ID: #{request_id}) after #{@request_timeout / 1000} seconds"
-           raise Errors::TimeoutError.new(
-             message: "Request timed out after #{@request_timeout / 1000} seconds",
-             request_id: request_id
-           )
+           raise e
          end
        end
  
@@ -100,6 +97,13 @@
            @running
          end
  
+         def start
+           return if @running
+
+           @running = true
+           start_sse_listener
+         end
+
          def close
            RubyLLM::MCP.logger.info "Closing SSE transport connection"
            @running = false
@@ -107,6 +111,10 @@
            @sse_thread = nil
          end
  
+         def set_protocol_version(version)
+           @protocol_version = version
+         end
+
          private
  
          def start_sse_listener
@@ -125,7 +133,7 @@
            end
            @sse_thread.abort_on_exception = true
  
-           Timeout.timeout(100) do
+           with_timeout(@request_timeout / 1000) do
              endpoint = response_queue.pop
              set_message_endpoint(endpoint)
            end
@@ -179,7 +187,7 @@
            sleep 1
          end
  
-         def process_event(raw_event) # rubocop:disable Metrics/MethodLength
+         def process_event(raw_event)
            # Return if we believe that are getting a partial event
            return if raw_event[:data].nil?
  
@@ -209,15 +217,8 @@
            request_id = event["id"]&.to_s
            result = RubyLLM::MCP::Result.new(event)
  
-           if result.notification?
-             coordinator.process_notification(result)
-             return
-           end
-
-           if result.request?
-             coordinator.process_request(result) if coordinator.alive?
-             return
-           end
+           result = @coordinator.process_result(result)
+           return if result.nil?
  
            @pending_mutex.synchronize do
              # You can receieve duplicate events for the same request id, and we will ignore thoses
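Two changes recur across all three transports in this release: the connection is no longer opened in the constructor (`@running` starts out `false` and a new `start` method spins up the listener), and incoming messages are routed through a single `coordinator.process_result` call instead of per-transport notification/request branching. A rough sketch of the resulting lifecycle when driving the SSE transport directly; the URL is a placeholder and `coordinator` stands in for the gem's `Coordinator`, which normally owns the transport:

```ruby
transport = RubyLLM::MCP::Transports::SSE.new(
  url: "http://localhost:9292/mcp/sse", # placeholder endpoint
  coordinator: coordinator,             # normally a RubyLLM::MCP::Coordinator
  request_timeout: 8_000                # milliseconds; divided by 1000 internally
)

transport.start   # 0.5.0: the listener thread starts here, not in #initialize
result = transport.request({ jsonrpc: "2.0", method: "ping" })
transport.close
```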
data/lib/ruby_llm/mcp/{transport → transports}/stdio.rb RENAMED
@@ -7,11 +7,13 @@ require "securerandom"
  
  module RubyLLM
    module MCP
-     module Transport
+     module Transports
        class Stdio
+         include Timeout
+
          attr_reader :command, :stdin, :stdout, :stderr, :id, :coordinator
  
-         def initialize(command, request_timeout:, coordinator:, args: [], env: {})
+         def initialize(command:, request_timeout:, coordinator:, args: [], env: {})
            @request_timeout = request_timeout
            @command = command
            @coordinator = coordinator
@@ -23,11 +25,9 @@ module RubyLLM
            @id_mutex = Mutex.new
            @pending_requests = {}
            @pending_mutex = Mutex.new
-           @running = true
+           @running = false
            @reader_thread = nil
            @stderr_thread = nil
-
-           start_process
          end
  
          def request(body, add_id: true, wait_for_response: true)
@@ -58,15 +58,14 @@
          return unless wait_for_response
  
          begin
-           Timeout.timeout(@request_timeout / 1000) do
+           with_timeout(@request_timeout / 1000, request_id: request_id) do
              response_queue.pop
            end
-         rescue Timeout::Error
+         rescue RubyLLM::MCP::Errors::TimeoutError => e
            @pending_mutex.synchronize { @pending_requests.delete(request_id.to_s) }
-           raise RubyLLM::MCP::Errors::TimeoutError.new(
-             message: "Request timed out after #{@request_timeout / 1000} seconds",
-             request_id: request_id
-           )
+           log_message = "Stdio request timeout (ID: #{request_id}) after #{@request_timeout / 1000} seconds"
+           RubyLLM::MCP.logger.error(log_message)
+           raise e
          end
        end
  
@@ -74,6 +73,11 @@
            @running
          end
  
+         def start
+           start_process unless @running
+           @running = true
+         end
+
          def close # rubocop:disable Metrics/MethodLength
            @running = false
  
@@ -121,6 +125,10 @@
            @stderr_thread = nil
          end
  
+         def set_protocol_version(version)
+           @protocol_version = version
+         end
+
          private
  
          def start_process
@@ -199,22 +207,16 @@
            response = JSON.parse(line)
            request_id = response["id"]&.to_s
            result = RubyLLM::MCP::Result.new(response)
-
            RubyLLM::MCP.logger.debug "Result Received: #{result.inspect}"
-           # Handle notifications (process but don't return - continue processing other responses)
-           if result.notification?
-             coordinator.process_notification(result)
-             # Don't return here - continue to process potential tool responses
-           elsif result.request?
-             coordinator.process_request(result)
-             nil
-           else
-             # Handle regular responses (tool calls, etc.)
-             @pending_mutex.synchronize do
-               if result.matching_id?(request_id) && @pending_requests.key?(request_id)
-                 response_queue = @pending_requests.delete(request_id)
-                 response_queue&.push(result)
-               end
+
+           result = @coordinator.process_result(result)
+           return if result.nil?
+
+           # Handle regular responses (tool calls, etc.)
+           @pending_mutex.synchronize do
+             if result.matching_id?(request_id) && @pending_requests.key?(request_id)
+               response_queue = @pending_requests.delete(request_id)
+               response_queue&.push(result)
              end
            end
          rescue JSON::ParserError => e
data/lib/ruby_llm/mcp/{transport → transports}/streamable_http.rb RENAMED
@@ -8,7 +8,7 @@ require "securerandom"
  
  module RubyLLM
    module MCP
-     module Transport
+     module Transports
        # Configuration options for reconnection behavior
        class ReconnectionOptions
          attr_reader :max_reconnection_delay, :initial_reconnection_delay,
@@ -40,10 +40,12 @@
  
        # Main StreamableHTTP transport class
        class StreamableHTTP
+         include Timeout
+
          attr_reader :session_id, :protocol_version, :coordinator
  
          def initialize( # rubocop:disable Metrics/ParameterLists
-           url,
+           url:,
            request_timeout:,
            coordinator:,
            headers: {},
@@ -110,6 +112,12 @@
            @abort_controller = false
          end
  
+         def set_protocol_version(version)
+           @protocol_version = version
+         end
+
+         private
+
          def terminate_session
            return unless @session_id
  
@@ -139,12 +147,6 @@
            end
          end
  
-         def set_protocol_version(version)
-           @protocol_version = version
-         end
-
-         private
-
          def handle_httpx_error_response!(response, context:, allow_eof_for_sse: false)
            return false unless response.is_a?(HTTPX::ErrorResponse)
  
@@ -200,7 +202,7 @@
          end
  
          def create_connection
-           client = HTTPX.with(
+           client = HTTPClient.connection.with(
              timeout: {
                connect_timeout: 10,
                read_timeout: @request_timeout / 1000,
@@ -257,7 +259,7 @@
          def create_connection_with_streaming_callbacks(request_id)
            buffer = +""
  
-           client = HTTPX.plugin(:callbacks).on_response_body_chunk do |request, _response, chunk|
+           client = HTTPClient.connection.plugin(:callbacks).on_response_body_chunk do |request, _response, chunk|
              next unless @running && !@abort_controller
  
              RubyLLM::MCP.logger.debug "Received chunk: #{chunk.bytesize} bytes for #{request.uri}"
@@ -562,19 +564,14 @@
            result = RubyLLM::MCP::Result.new(event_data, session_id: @session_id)
            RubyLLM::MCP.logger.debug "SSE Result Received: #{result.inspect}"
  
-           # Handle different types of messages
-           if result.notification?
-             @coordinator.process_notification(result)
-           elsif result.request?
-             @coordinator.process_request(result)
-           elsif result.response?
-             # Handle response to client request
-             request_id = result.id&.to_s
-             if request_id
-               @pending_mutex.synchronize do
-                 response_queue = @pending_requests.delete(request_id)
-                 response_queue&.push(result)
-               end
+           result = @coordinator.process_result(result)
+           return if result.nil?
+
+           request_id = result.id&.to_s
+           if request_id
+             @pending_mutex.synchronize do
+               response_queue = @pending_requests.delete(request_id)
+               response_queue&.push(result)
              end
            end
          rescue JSON::ParserError => e
@@ -591,15 +588,14 @@
          end
  
          def wait_for_response_with_timeout(request_id, response_queue)
-           Timeout.timeout(@request_timeout / 1000) do
+           with_timeout(@request_timeout / 1000, request_id: request_id) do
              response_queue.pop
            end
-         rescue Timeout::Error
+         rescue RubyLLM::MCP::Errors::TimeoutError => e
+           log_message = "StreamableHTTP request timeout (ID: #{request_id}) after #{@request_timeout / 1000} seconds"
+           RubyLLM::MCP.logger.error(log_message)
            @pending_mutex.synchronize { @pending_requests.delete(request_id.to_s) }
-           raise Errors::TimeoutError.new(
-             message: "Request timed out after #{@request_timeout / 1000} seconds",
-             request_id: request_id
-           )
+           raise e
          end
  
          def cleanup_sse_resources
data/lib/ruby_llm/mcp/transports/timeout.rb ADDED
@@ -0,0 +1,32 @@
+ # frozen_string_literal: true
+
+ module RubyLLM
+   module MCP
+     module Transports
+       module Timeout
+         def with_timeout(seconds, request_id: nil)
+           result = nil
+           exception = nil
+
+           worker = Thread.new do
+             result = yield
+           rescue StandardError => e
+             exception = e
+           end
+
+           if worker.join(seconds)
+             raise exception if exception
+
+             result
+           else
+             worker.kill # stop the thread (can still have some risk if shared resources)
+             raise RubyLLM::MCP::Errors::TimeoutError.new(
+               message: "Request timed out after #{seconds} seconds",
+               request_id: request_id
+             )
+           end
+         end
+       end
+     end
+   end
+ end
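Every transport includes this module and calls `with_timeout` in place of the stdlib `Timeout.timeout`, trading `Timeout::Error` for the gem's own `Errors::TimeoutError` and running the block on a joinable worker thread rather than interrupting arbitrary code. A minimal usage sketch outside the bundled transports (the `SlowCall` class is hypothetical):

```ruby
class SlowCall
  include RubyLLM::MCP::Transports::Timeout

  def fetch(queue)
    # Returns the block's value if it finishes within 5 seconds, re-raises any
    # error the block raised, or raises RubyLLM::MCP::Errors::TimeoutError.
    with_timeout(5, request_id: "42") { queue.pop }
  end
end

queue = Queue.new
Thread.new { sleep 1; queue << :ok }
SlowCall.new.fetch(queue)     # => :ok

SlowCall.new.fetch(Queue.new) # nothing is ever pushed => raises Errors::TimeoutError
```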
data/lib/ruby_llm/mcp/version.rb CHANGED
@@ -2,6 +2,6 @@
  
  module RubyLLM
    module MCP
-     VERSION = "0.4.1"
+     VERSION = "0.5.0"
    end
  end
data/lib/ruby_llm/mcp.rb CHANGED
@@ -4,19 +4,49 @@ require "ruby_llm"
  require "zeitwerk"
  require_relative "chat"
  
- loader = Zeitwerk::Loader.for_gem_extension(RubyLLM)
- loader.inflector.inflect("mcp" => "MCP")
- loader.inflector.inflect("sse" => "SSE")
- loader.inflector.inflect("openai" => "OpenAI")
- loader.inflector.inflect("streamable_http" => "StreamableHTTP")
- loader.setup
-
  module RubyLLM
    module MCP
      module_function
  
-     def client(*args, **kwargs)
-       @client ||= Client.new(*args, **kwargs)
+     def clients(config = RubyLLM::MCP.config.mcp_configuration)
+       @clients ||= {}
+       config.map do |options|
+         @clients[options[:name]] ||= Client.new(**options)
+       end
+     end
+
+     def add_client(options)
+       @clients ||= {}
+       @clients[options[:name]] ||= Client.new(**options)
+     end
+
+     def remove_client(name)
+       @clients ||= {}
+       client = @clients.delete(name)
+       client&.stop
+       client
+     end
+
+     def client(...)
+       Client.new(...)
+     end
+
+     def establish_connection(&)
+       clients.each(&:start)
+       yield clients
+     ensure
+       clients.each do |client|
+         client.stop if client.alive?
+       end
+     end
+
+     def tools(blacklist: [], whitelist: [])
+       tools = @clients.values.map(&:tools)
+                       .flatten
+                       .reject { |tool| blacklist.include?(tool.name) }
+
+       tools = tools.select { |tool| whitelist.include?(tool.name) } if whitelist.any?
+       tools.uniq(&:name)
      end
  
      def support_complex_parameters!
@@ -41,3 +71,14 @@ module RubyLLM
      end
    end
  end
+
+ require_relative "mcp/railtie" if defined?(Rails::Railtie)
+
+ loader = Zeitwerk::Loader.for_gem_extension(RubyLLM)
+ loader.inflector.inflect("mcp" => "MCP")
+ loader.inflector.inflect("sse" => "SSE")
+ loader.inflector.inflect("openai" => "OpenAI")
+ loader.inflector.inflect("streamable_http" => "StreamableHTTP")
+ loader.inflector.inflect("http_client" => "HTTPClient")
+
+ loader.setup
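`RubyLLM::MCP` now keeps a registry of named clients instead of memoizing a single `client`: `establish_connection` starts every registered client, yields them, and stops them on the way out, while `tools` aggregates tools across clients with optional blacklist/whitelist filtering. A hedged sketch of how these entry points compose; the `transport_type` symbols and `config` keys mirror the transport constructors shown earlier in this diff, but the exact options `Client.new` accepts are only summarized here, so treat them as assumptions:

```ruby
# Option keys are assumptions: the diff shows Client.new(**options) keyed by
# options[:name]; command/args and url match the stdio and streamable
# transport constructors above.
RubyLLM::MCP.add_client(
  name: "filesystem",
  transport_type: :stdio,
  config: { command: "npx", args: ["@modelcontextprotocol/server-filesystem", "."] }
)

RubyLLM::MCP.add_client(
  name: "search",
  transport_type: :streamable,
  config: { url: "https://example.com/mcp" }
)

# Starts every registered client, yields them, and stops them afterwards.
RubyLLM::MCP.establish_connection do |clients|
  tools = RubyLLM::MCP.tools(blacklist: ["delete_file"]) # or whitelist: [...]
  chat = RubyLLM.chat.with_tools(*tools)
  chat.ask("What files are in this project?")
end

RubyLLM::MCP.remove_client("filesystem") # stops the client and drops it from the registry
```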
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: ruby_llm-mcp
  version: !ruby/object:Gem::Version
-   version: 0.4.1
+   version: 0.5.0
  platform: ruby
  authors:
  - Patrick Vice
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2025-07-02 00:00:00.000000000 Z
+ date: 2025-07-07 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: httpx
@@ -68,7 +68,6 @@ files:
  - lib/ruby_llm/chat.rb
  - lib/ruby_llm/mcp.rb
  - lib/ruby_llm/mcp/attachment.rb
- - lib/ruby_llm/mcp/capabilities.rb
  - lib/ruby_llm/mcp/client.rb
  - lib/ruby_llm/mcp/completion.rb
  - lib/ruby_llm/mcp/configuration.rb
@@ -77,39 +76,51 @@ files:
  - lib/ruby_llm/mcp/error.rb
  - lib/ruby_llm/mcp/errors.rb
  - lib/ruby_llm/mcp/logging.rb
+ - lib/ruby_llm/mcp/notification_handler.rb
+ - lib/ruby_llm/mcp/notifications/cancelled.rb
+ - lib/ruby_llm/mcp/notifications/initialize.rb
+ - lib/ruby_llm/mcp/notifications/roots_list_change.rb
  - lib/ruby_llm/mcp/parameter.rb
  - lib/ruby_llm/mcp/progress.rb
  - lib/ruby_llm/mcp/prompt.rb
  - lib/ruby_llm/mcp/providers/anthropic/complex_parameter_support.rb
  - lib/ruby_llm/mcp/providers/gemini/complex_parameter_support.rb
  - lib/ruby_llm/mcp/providers/openai/complex_parameter_support.rb
- - lib/ruby_llm/mcp/requests/base.rb
- - lib/ruby_llm/mcp/requests/cancelled_notification.rb
+ - lib/ruby_llm/mcp/railtie.rb
  - lib/ruby_llm/mcp/requests/completion_prompt.rb
  - lib/ruby_llm/mcp/requests/completion_resource.rb
  - lib/ruby_llm/mcp/requests/initialization.rb
- - lib/ruby_llm/mcp/requests/initialize_notification.rb
  - lib/ruby_llm/mcp/requests/logging_set_level.rb
- - lib/ruby_llm/mcp/requests/meta.rb
  - lib/ruby_llm/mcp/requests/ping.rb
- - lib/ruby_llm/mcp/requests/ping_response.rb
  - lib/ruby_llm/mcp/requests/prompt_call.rb
  - lib/ruby_llm/mcp/requests/prompt_list.rb
  - lib/ruby_llm/mcp/requests/resource_list.rb
  - lib/ruby_llm/mcp/requests/resource_read.rb
  - lib/ruby_llm/mcp/requests/resource_template_list.rb
  - lib/ruby_llm/mcp/requests/resources_subscribe.rb
+ - lib/ruby_llm/mcp/requests/shared/meta.rb
+ - lib/ruby_llm/mcp/requests/shared/pagination.rb
  - lib/ruby_llm/mcp/requests/tool_call.rb
  - lib/ruby_llm/mcp/requests/tool_list.rb
  - lib/ruby_llm/mcp/resource.rb
  - lib/ruby_llm/mcp/resource_template.rb
+ - lib/ruby_llm/mcp/response_handler.rb
+ - lib/ruby_llm/mcp/responses/error.rb
+ - lib/ruby_llm/mcp/responses/ping.rb
+ - lib/ruby_llm/mcp/responses/roots_list.rb
+ - lib/ruby_llm/mcp/responses/sampling_create_message.rb
  - lib/ruby_llm/mcp/result.rb
+ - lib/ruby_llm/mcp/roots.rb
+ - lib/ruby_llm/mcp/sample.rb
+ - lib/ruby_llm/mcp/server_capabilities.rb
  - lib/ruby_llm/mcp/tool.rb
- - lib/ruby_llm/mcp/transport/sse.rb
- - lib/ruby_llm/mcp/transport/stdio.rb
- - lib/ruby_llm/mcp/transport/streamable_http.rb
+ - lib/ruby_llm/mcp/transport.rb
+ - lib/ruby_llm/mcp/transports/http_client.rb
+ - lib/ruby_llm/mcp/transports/sse.rb
+ - lib/ruby_llm/mcp/transports/stdio.rb
+ - lib/ruby_llm/mcp/transports/streamable_http.rb
+ - lib/ruby_llm/mcp/transports/timeout.rb
  - lib/ruby_llm/mcp/version.rb
- - lib/tasks/release.rake
  homepage: https://github.com/patvice/ruby_llm-mcp
  licenses:
  - MIT
data/lib/ruby_llm/mcp/requests/base.rb DELETED
@@ -1,31 +0,0 @@
- # frozen_string_literal: true
-
- require "json"
-
- module RubyLLM
-   module MCP
-     module Requests
-       class Base
-         attr_reader :coordinator
-
-         def initialize(coordinator)
-           @coordinator = coordinator
-         end
-
-         def call
-           raise "Not implemented"
-         end
-
-         private
-
-         def validate_response!(response, body)
-           # TODO: Implement response validation
-         end
-
-         def raise_error(error)
-           raise "MCP Error: code: #{error['code']} message: #{error['message']} data: #{error['data']}"
-         end
-       end
-     end
-   end
- end
data/lib/ruby_llm/mcp/requests/meta.rb DELETED
@@ -1,30 +0,0 @@
- # frozen_string_literal: true
-
- require "securerandom"
-
- module RubyLLM
-   module MCP
-     module Requests
-       module Meta
-         def merge_meta(body)
-           meta = {}
-           meta.merge!(progress_token) if @coordinator.client.tracking_progress?
-
-           body[:params] ||= {}
-           body[:params].merge!({ _meta: meta }) unless meta.empty?
-           body
-         end
-
-         private
-
-         def progress_token
-           { progressToken: generate_progress_token }
-         end
-
-         def generate_progress_token
-           SecureRandom.uuid
-         end
-       end
-     end
-   end
- end