model-context-protocol-rb 0.5.1 → 0.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +37 -1
  3. data/README.md +181 -950
  4. data/lib/model_context_protocol/rspec/helpers.rb +54 -0
  5. data/lib/model_context_protocol/rspec/matchers/be_mcp_error_response.rb +123 -0
  6. data/lib/model_context_protocol/rspec/matchers/be_valid_mcp_class.rb +103 -0
  7. data/lib/model_context_protocol/rspec/matchers/be_valid_mcp_prompt_response.rb +126 -0
  8. data/lib/model_context_protocol/rspec/matchers/be_valid_mcp_resource_response.rb +121 -0
  9. data/lib/model_context_protocol/rspec/matchers/be_valid_mcp_tool_response.rb +135 -0
  10. data/lib/model_context_protocol/rspec/matchers/have_audio_content.rb +109 -0
  11. data/lib/model_context_protocol/rspec/matchers/have_embedded_resource_content.rb +150 -0
  12. data/lib/model_context_protocol/rspec/matchers/have_image_content.rb +109 -0
  13. data/lib/model_context_protocol/rspec/matchers/have_message_count.rb +87 -0
  14. data/lib/model_context_protocol/rspec/matchers/have_message_with_role.rb +152 -0
  15. data/lib/model_context_protocol/rspec/matchers/have_resource_annotations.rb +135 -0
  16. data/lib/model_context_protocol/rspec/matchers/have_resource_blob.rb +108 -0
  17. data/lib/model_context_protocol/rspec/matchers/have_resource_link_content.rb +138 -0
  18. data/lib/model_context_protocol/rspec/matchers/have_resource_mime_type.rb +103 -0
  19. data/lib/model_context_protocol/rspec/matchers/have_resource_text.rb +112 -0
  20. data/lib/model_context_protocol/rspec/matchers/have_structured_content.rb +88 -0
  21. data/lib/model_context_protocol/rspec/matchers/have_text_content.rb +113 -0
  22. data/lib/model_context_protocol/rspec/matchers.rb +31 -0
  23. data/lib/model_context_protocol/rspec.rb +23 -0
  24. data/lib/model_context_protocol/server/cancellable.rb +5 -5
  25. data/lib/model_context_protocol/server/{mcp_logger.rb → client_logger.rb} +8 -11
  26. data/lib/model_context_protocol/server/configuration.rb +196 -109
  27. data/lib/model_context_protocol/server/content_helpers.rb +1 -1
  28. data/lib/model_context_protocol/server/global_config/server_logging.rb +78 -0
  29. data/lib/model_context_protocol/server/progressable.rb +43 -21
  30. data/lib/model_context_protocol/server/prompt.rb +12 -21
  31. data/lib/model_context_protocol/server/redis_client_proxy.rb +2 -14
  32. data/lib/model_context_protocol/server/redis_config.rb +5 -7
  33. data/lib/model_context_protocol/server/redis_pool_manager.rb +11 -14
  34. data/lib/model_context_protocol/server/registry.rb +8 -0
  35. data/lib/model_context_protocol/server/resource.rb +7 -4
  36. data/lib/model_context_protocol/server/router.rb +285 -9
  37. data/lib/model_context_protocol/server/server_logger.rb +31 -0
  38. data/lib/model_context_protocol/server/stdio_configuration.rb +114 -0
  39. data/lib/model_context_protocol/server/stdio_transport/request_store.rb +12 -53
  40. data/lib/model_context_protocol/server/stdio_transport.rb +18 -12
  41. data/lib/model_context_protocol/server/streamable_http_configuration.rb +218 -0
  42. data/lib/model_context_protocol/server/streamable_http_transport/event_counter.rb +0 -13
  43. data/lib/model_context_protocol/server/streamable_http_transport/message_poller.rb +9 -9
  44. data/lib/model_context_protocol/server/streamable_http_transport/notification_queue.rb +0 -41
  45. data/lib/model_context_protocol/server/streamable_http_transport/request_store.rb +21 -124
  46. data/lib/model_context_protocol/server/streamable_http_transport/server_request_store.rb +167 -0
  47. data/lib/model_context_protocol/server/streamable_http_transport/session_message_queue.rb +0 -58
  48. data/lib/model_context_protocol/server/streamable_http_transport/session_store.rb +17 -31
  49. data/lib/model_context_protocol/server/streamable_http_transport/stream_registry.rb +0 -34
  50. data/lib/model_context_protocol/server/streamable_http_transport.rb +589 -215
  51. data/lib/model_context_protocol/server/tool.rb +73 -6
  52. data/lib/model_context_protocol/server.rb +204 -261
  53. data/lib/model_context_protocol/version.rb +1 -1
  54. data/lib/model_context_protocol.rb +4 -1
  55. data/lib/puma/plugin/mcp.rb +39 -0
  56. data/tasks/mcp.rake +26 -0
  57. data/tasks/templates/dev-http-puma.erb +251 -0
  58. data/tasks/templates/dev-http.erb +166 -184
  59. data/tasks/templates/dev.erb +29 -7
  60. metadata +33 -6
@@ -1,5 +1,6 @@
1
1
  require "json"
2
2
  require "securerandom"
3
+ require "concurrent"
3
4
 
4
5
  module ModelContextProtocol
5
6
  class Server::StreamableHttpTransport
@@ -15,78 +16,88 @@ module ModelContextProtocol
15
16
  end
16
17
  end
17
18
 
19
+ attr_reader :server_logger
20
+
21
+ # Initialize the HTTP transport with Redis-backed cross-server communication support
22
+ # Sets up background threads for message polling and stream monitoring in multi-server deployments
18
23
  def initialize(router:, configuration:)
19
24
  @router = router
20
25
  @configuration = configuration
26
+ @client_logger = configuration.client_logger
27
+ @server_logger = configuration.server_logger
21
28
 
22
- transport_options = @configuration.transport_options
23
29
  @redis_pool = ModelContextProtocol::Server::RedisConfig.pool
24
- @require_sessions = transport_options.fetch(:require_sessions, false)
25
- @default_protocol_version = transport_options.fetch(:default_protocol_version, "2025-03-26")
26
- @session_protocol_versions = {}
27
- @validate_origin = transport_options.fetch(:validate_origin, true)
28
- @allowed_origins = transport_options.fetch(:allowed_origins, ["http://localhost", "https://localhost", "http://127.0.0.1", "https://127.0.0.1"])
29
30
  @redis = ModelContextProtocol::Server::RedisClientProxy.new(@redis_pool)
30
31
 
31
- @session_store = SessionStore.new(
32
- @redis,
33
- ttl: transport_options[:session_ttl] || 3600
34
- )
32
+ @require_sessions = @configuration.require_sessions
33
+ # Use Concurrent::Map for thread-safe access from multiple request threads
34
+ @session_protocol_versions = Concurrent::Map.new
35
+ @validate_origin = @configuration.validate_origin
36
+ @allowed_origins = @configuration.allowed_origins
35
37
 
38
+ @session_store = SessionStore.new(@redis, ttl: @configuration.session_ttl)
36
39
  @server_instance = "#{Socket.gethostname}-#{Process.pid}-#{SecureRandom.hex(4)}"
37
40
  @stream_registry = StreamRegistry.new(@redis, @server_instance)
38
41
  @notification_queue = NotificationQueue.new(@redis, @server_instance)
39
42
  @event_counter = EventCounter.new(@redis, @server_instance)
40
43
  @request_store = RequestStore.new(@redis, @server_instance)
41
- @stream_monitor_thread = nil
42
- @message_poller = MessagePoller.new(@redis, @stream_registry, @configuration.logger) do |stream, message|
44
+ @server_request_store = ServerRequestStore.new(@redis, @server_instance)
45
+ @ping_timeout = @configuration.ping_timeout
46
+
47
+ @message_poller = MessagePoller.new(@redis, @stream_registry, @client_logger) do |stream, message|
43
48
  send_to_stream(stream, message)
44
49
  end
50
+ @message_poller.start
45
51
 
46
- start_message_poller
52
+ @stream_monitor_running = false
53
+ @stream_monitor_thread = nil
47
54
  start_stream_monitor
48
55
  end
49
56
 
57
+ # Gracefully shut down the transport by stopping background threads and cleaning up resources
58
+ # Closes all active streams. Redis entries are left to expire naturally (they have TTLs).
59
+ # This method is signal-safe and avoids mutex operations.
50
60
  def shutdown
51
- @configuration.logger.info("Shutting down StreamableHttpTransport")
61
+ @server_logger.info("Shutting down StreamableHttpTransport")
52
62
 
53
- # Stop the message poller
54
63
  @message_poller&.stop
55
64
 
56
- # Stop the stream monitor thread
65
+ @stream_monitor_running = false
57
66
  if @stream_monitor_thread&.alive?
58
67
  @stream_monitor_thread.kill
59
- @stream_monitor_thread.join(timeout: 5)
68
+ @stream_monitor_thread.join(5)
60
69
  end
61
70
 
62
- # Unregister all local streams
71
+ # Close streams directly without Redis cleanup (signal-safe).
72
+ # Redis entries will expire naturally via TTL.
63
73
  @stream_registry.get_all_local_streams.each do |session_id, stream|
64
- @stream_registry.unregister_stream(session_id)
65
- @session_store.mark_stream_inactive(session_id)
66
- rescue => e
67
- @configuration.logger.error("Error during stream cleanup", session_id: session_id, error: e.message)
74
+ begin
75
+ stream.close
76
+ rescue IOError, Errno::EPIPE, Errno::ECONNRESET, Errno::ENOTCONN, Errno::EBADF
77
+ # Stream already closed, ignore
78
+ end
79
+ @server_logger.info("← SSE stream [closed] (#{session_id}) [shutdown]")
68
80
  end
69
81
 
70
- @redis_pool.checkin(@redis) if @redis_pool && @redis
71
-
72
- @configuration.logger.info("StreamableHttpTransport shutdown complete")
82
+ @server_logger.info("StreamableHttpTransport shutdown complete")
73
83
  end
74
84
 
75
- def handle
76
- @configuration.logger.connect_transport(self)
77
-
78
- env = @configuration.transport_options[:env]
79
-
80
- unless env
81
- raise ArgumentError, "StreamableHTTP transport requires Rack env hash in transport_options"
82
- end
85
+ # Main entry point for handling HTTP requests (POST, GET, DELETE)
86
+ # Routes requests to appropriate handlers and manages the request/response lifecycle
87
+ # @param env [Hash] Rack environment hash (required)
88
+ # @param session_context [Hash] Per-request context that will be merged with server context
89
+ def handle(env:, session_context: {})
90
+ @server_logger.debug("Handling streamable HTTP transport request")
83
91
 
84
92
  case env["REQUEST_METHOD"]
85
93
  when "POST"
86
- handle_post_request(env)
94
+ @server_logger.debug("Handling POST request")
95
+ handle_post_request(env, session_context: session_context)
87
96
  when "GET"
88
- handle_sse_request(env)
97
+ @server_logger.debug("Handling GET request")
98
+ handle_get_request(env)
89
99
  when "DELETE"
100
+ @server_logger.debug("Handling DELETE request")
90
101
  handle_delete_request(env)
91
102
  else
92
103
  error_response = ErrorResponse[id: nil, error: {code: -32601, message: "Method not allowed"}]
@@ -94,30 +105,71 @@ module ModelContextProtocol
94
105
  end
95
106
  end
96
107
 
97
- def send_notification(method, params)
108
+ # Send real-time notifications to active SSE streams or queue for delivery
109
+ # Used for progress updates, resource changes, and other server-initiated messages
110
+ # @param method [String] the notification method name
111
+ # @param params [Hash] the notification parameters
112
+ # @param session_id [String, nil] optional session ID for targeted delivery
113
+ def send_notification(method, params, session_id: nil)
98
114
  notification = {
99
115
  jsonrpc: "2.0",
100
116
  method: method,
101
117
  params: params
102
118
  }
103
119
 
104
- if @stream_registry.has_any_local_streams?
120
+ log_to_server_with_context do |logger|
121
+ logger.info("← #{method} [outgoing]")
122
+ logger.info(" Notification: #{notification.to_json}")
123
+ end
124
+
125
+ if session_id
126
+ # Deliver to specific session/stream
127
+ @server_logger.debug("Attempting targeted delivery to session: #{session_id}")
128
+ if deliver_to_session_stream(session_id, notification)
129
+ @server_logger.debug("Successfully delivered notification to specific stream: #{session_id}")
130
+ else
131
+ @server_logger.debug("Failed to deliver to specific stream #{session_id}, queuing notification: #{method}")
132
+ @notification_queue.push(notification)
133
+ end
134
+ elsif @stream_registry.get_local_stream(nil) # Check for persistent notification stream (no-session)
135
+ @server_logger.debug("No session_id provided, delivering notification to persistent notification stream")
136
+ if deliver_to_session_stream(nil, notification)
137
+ @server_logger.debug("Successfully delivered notification to persistent notification stream")
138
+ else
139
+ @server_logger.debug("Failed to deliver to persistent notification stream, queuing notification: #{method}")
140
+ @notification_queue.push(notification)
141
+ end
142
+ elsif @stream_registry.has_any_local_streams?
143
+ @server_logger.debug("No persistent notification stream, delivering notification to active streams")
105
144
  deliver_to_active_streams(notification)
106
145
  else
146
+ @server_logger.debug("No active streams, queuing notification: #{method}")
107
147
  @notification_queue.push(notification)
108
148
  end
109
149
  end
110
150
 
111
151
  private
112
152
 
113
- def validate_headers(env)
114
- if @validate_origin
115
- origin = env["HTTP_ORIGIN"]
116
- if origin && !@allowed_origins.any? { |allowed| origin.start_with?(allowed) }
117
- error_response = ErrorResponse[id: nil, error: {code: -32600, message: "Origin not allowed"}]
118
- return {json: error_response.serialized, status: 403}
119
- end
153
+ # Provide logging context with request ID and MCP context information
154
+ # Ensures consistent logging format across all transport operations
155
+ def log_to_server_with_context(request_id: nil, &block)
156
+ original_context = Thread.current[:mcp_context]
157
+ if request_id && !Thread.current[:mcp_context]
158
+ Thread.current[:mcp_context] = {jsonrpc_request_id: request_id}
159
+ end
160
+
161
+ begin
162
+ block.call(@server_logger) if block_given?
163
+ ensure
164
+ Thread.current[:mcp_context] = original_context if request_id && original_context.nil?
120
165
  end
166
+ end
167
+
168
+ # Validate HTTP headers for POST requests: CORS origin, content type, and protocol version.
169
+ # Returns error response hash if headers are invalid, nil if valid.
170
+ def validate_headers(env, session_id: nil)
171
+ origin_error = validate_origin!(env)
172
+ return origin_error if origin_error
121
173
 
122
174
  accept_header = env["HTTP_ACCEPT"]
123
175
  if accept_header
@@ -127,18 +179,58 @@ module ModelContextProtocol
127
179
  end
128
180
  end
129
181
 
182
+ validate_protocol_version!(env, session_id: session_id)
183
+ end
184
+
185
+ # Validate CORS Origin header against allowed origins.
186
+ # The MCP spec requires servers to validate Origin on all incoming connections
187
+ # to prevent DNS rebinding attacks.
188
+ def validate_origin!(env)
189
+ return nil unless @validate_origin
190
+
191
+ origin = env["HTTP_ORIGIN"]
192
+ if origin && !@allowed_origins.any? { |allowed| origin.start_with?(allowed) }
193
+ error_response = ErrorResponse[id: nil, error: {code: -32600, message: "Origin not allowed"}]
194
+ return {json: error_response.serialized, status: 403}
195
+ end
196
+
197
+ nil
198
+ end
199
+
200
+ # Validate MCP-Protocol-Version header against negotiated version.
201
+ # Per the MCP spec, the server MUST respond with 400 Bad Request for invalid
202
+ # or unsupported protocol versions. When a session_id is provided, validation
203
+ # is scoped to that session's negotiated version.
204
+ def validate_protocol_version!(env, session_id: nil)
130
205
  protocol_version = env["HTTP_MCP_PROTOCOL_VERSION"]
131
- if protocol_version
132
- valid_versions = @session_protocol_versions.values.compact.uniq
133
- unless valid_versions.empty? || valid_versions.include?(protocol_version)
134
- error_response = ErrorResponse[id: nil, error: {code: -32600, message: "Invalid MCP protocol version: #{protocol_version}. Expected one of: #{valid_versions.join(", ")}"}]
135
- return {json: error_response.serialized, status: 400}
206
+ return nil unless protocol_version
207
+
208
+ # When a session_id is provided, try session-specific validation first.
209
+ # If the session has a known negotiated version, validate strictly against it.
210
+ if session_id
211
+ expected_version = @session_protocol_versions[session_id]
212
+ if expected_version
213
+ if protocol_version != expected_version
214
+ error_response = ErrorResponse[id: nil, error: {code: -32600, message: "Invalid MCP protocol version: #{protocol_version}. Expected: #{expected_version}"}]
215
+ return {json: error_response.serialized, status: 400}
216
+ end
217
+ return nil
136
218
  end
137
219
  end
138
220
 
221
+ # Fallback: validate against all known negotiated versions (covers cases
222
+ # where session_id is nil or has no entry, e.g. sessions not required).
223
+ valid_versions = @session_protocol_versions.values.compact.uniq
224
+ unless valid_versions.empty? || valid_versions.include?(protocol_version)
225
+ error_response = ErrorResponse[id: nil, error: {code: -32600, message: "Invalid MCP protocol version: #{protocol_version}. Expected one of: #{valid_versions.join(", ")}"}]
226
+ return {json: error_response.serialized, status: 400}
227
+ end
228
+
139
229
  nil
140
230
  end
141
231
 
232
+ # Determine JSON-RPC message type from request body structure
233
+ # Classifies messages as request, notification, response, or unknown
142
234
  def determine_message_type(body)
143
235
  if body.key?("method") && body.key?("id")
144
236
  :request
@@ -151,151 +243,129 @@ module ModelContextProtocol
151
243
  end
152
244
  end
153
245
 
154
- def create_initialization_sse_stream_proc(response_data)
155
- proc do |stream|
156
- event_id = next_event_id
157
- send_sse_event(stream, response_data, event_id)
158
- end
159
- end
160
-
161
- def create_request_sse_stream_proc(response_data)
162
- proc do |stream|
163
- event_id = next_event_id
164
- send_sse_event(stream, response_data, event_id)
165
- end
166
- end
167
-
168
- def create_progressive_request_sse_stream_proc(request_body, session_id)
169
- proc do |stream|
170
- temp_stream_id = session_id || "temp-#{SecureRandom.hex(8)}"
171
- @stream_registry.register_stream(temp_stream_id, stream)
172
-
173
- begin
174
- result = @router.route(request_body, request_store: @request_store, session_id: session_id, transport: self)
175
-
176
- if result
177
- response = Response[id: request_body["id"], result: result.serialized]
178
-
179
- event_id = next_event_id
180
- send_sse_event(stream, response.serialized, event_id)
181
- else
182
- event_id = next_event_id
183
- send_sse_event(stream, {}, event_id)
184
- end
185
-
186
- # Close stream immediately when work is complete
187
- close_stream(temp_stream_id, reason: "request_completed")
188
- rescue IOError, Errno::EPIPE, Errno::ECONNRESET
189
- # Client disconnected during processing
190
- ensure
191
- # Fallback cleanup
192
- @stream_registry.unregister_stream(temp_stream_id)
193
- end
194
- end
195
- end
246
+ # Handle HTTP POST requests containing JSON-RPC messages
247
+ # Parses request body and routes to initialization or regular request handlers
248
+ # @param env [Hash] Rack environment hash
249
+ # @param session_context [Hash] Per-request context for initialization
250
+ def handle_post_request(env, session_context: {})
251
+ session_id = env["HTTP_MCP_SESSION_ID"]
252
+ validation_error = validate_headers(env, session_id: session_id)
253
+ return validation_error if validation_error
196
254
 
197
- def next_event_id
198
- @event_counter.next_event_id
199
- end
255
+ body_string = env["rack.input"].read
256
+ body = JSON.parse(body_string)
257
+ accept_header = env["HTTP_ACCEPT"] || ""
200
258
 
201
- def send_sse_event(stream, data, event_id = nil)
202
- if event_id
203
- stream.write("id: #{event_id}\n")
204
- end
205
- message = data.is_a?(String) ? data : data.to_json
206
- stream.write("data: #{message}\n\n")
207
- stream.flush if stream.respond_to?(:flush)
208
- end
259
+ log_to_server_with_context(request_id: body["id"]) do |logger|
260
+ method = body["method"]
261
+ id = body["id"]
209
262
 
210
- def close_stream(session_id, reason: "completed")
211
- if (stream = @stream_registry.get_local_stream(session_id))
212
- begin
213
- send_sse_event(stream, {type: "stream_complete", reason: reason})
214
- stream.close
215
- rescue IOError, Errno::EPIPE, Errno::ECONNRESET, Errno::ENOTCONN, Errno::EBADF
216
- nil
263
+ if method&.start_with?("notifications/") || id.nil?
264
+ logger.info("→ #{method} [notification]")
265
+ else
266
+ logger.info("→ #{method} (id: #{id}) [request]")
217
267
  end
218
268
 
219
- @stream_registry.unregister_stream(session_id)
220
- @session_store.mark_stream_inactive(session_id) if @require_sessions
269
+ logger.info(" Request: #{body.to_json}")
270
+ logger.debug(" Accept: #{accept_header}") if body["method"] != "notifications/initialized"
271
+ logger.debug(" Redis Pool: #{ModelContextProtocol::Server::RedisConfig.stats}")
221
272
  end
222
- end
223
273
 
224
- def handle_post_request(env)
225
- validation_error = validate_headers(env)
226
- return validation_error if validation_error
227
-
228
- body_string = env["rack.input"].read
229
- body = JSON.parse(body_string)
230
- session_id = env["HTTP_MCP_SESSION_ID"]
231
- accept_header = env["HTTP_ACCEPT"] || ""
232
-
233
- case body["method"]
234
- when "initialize"
235
- handle_initialization(body, accept_header)
274
+ if body["method"] == "initialize"
275
+ handle_initialization(body, accept_header, session_context: session_context)
236
276
  else
237
277
  handle_regular_request(body, session_id, accept_header)
238
278
  end
239
- rescue JSON::ParserError
279
+ rescue JSON::ParserError => e
280
+ log_to_server_with_context do |logger|
281
+ logger.error("JSON parse error in streamable HTTP transport: #{e.message}")
282
+ end
240
283
  error_response = ErrorResponse[id: "", error: {code: -32700, message: "Parse error"}]
284
+ log_to_server_with_context do |logger|
285
+ logger.info("← Error response (code: #{error_response.error[:code]})")
286
+ logger.info(" #{error_response.serialized.to_json}")
287
+ end
241
288
  {json: error_response.serialized, status: 400}
242
289
  rescue ModelContextProtocol::Server::ParameterValidationError => validation_error
243
- @configuration.logger.error("Validation error", error: validation_error.message)
290
+ log_to_server_with_context(request_id: body&.dig("id")) do |logger|
291
+ logger.error("Parameter validation failed in streamable HTTP transport: #{validation_error.message}")
292
+ end
244
293
  error_response = ErrorResponse[id: body&.dig("id"), error: {code: -32602, message: validation_error.message}]
294
+ log_to_server_with_context(request_id: error_response.id) do |logger|
295
+ logger.info("← Error response (code: #{error_response.error[:code]})")
296
+ logger.info(" #{error_response.serialized.to_json}")
297
+ end
245
298
  {json: error_response.serialized, status: 400}
246
299
  rescue => e
247
- @configuration.logger.error("Error handling POST request", error: e.message, backtrace: e.backtrace.first(5))
300
+ log_to_server_with_context(request_id: body&.dig("id")) do |logger|
301
+ logger.error("Internal error handling POST request in streamable HTTP transport: #{e.message}")
302
+ logger.debug("Backtrace: #{e.backtrace.join("\n")}")
303
+ end
248
304
  error_response = ErrorResponse[id: body&.dig("id"), error: {code: -32603, message: "Internal error"}]
305
+ log_to_server_with_context(request_id: error_response.id) do |logger|
306
+ logger.info("← Error response (code: #{error_response.error[:code]})")
307
+ logger.info(" #{error_response.serialized.to_json}")
308
+ end
249
309
  {json: error_response.serialized, status: 500}
250
310
  end
251
311
 
252
- def handle_initialization(body, accept_header)
312
+ # Handle MCP initialization requests to establish protocol version and optional sessions
313
+ # Always returns JSON response regardless of Accept header to keep initialization simple
314
+ # @param body [Hash] Parsed JSON-RPC request body
315
+ # @param accept_header [String] HTTP Accept header value
316
+ # @param session_context [Hash] Per-request context to merge with server context
317
+ def handle_initialization(body, accept_header, session_context: {})
253
318
  result = @router.route(body, transport: self)
254
319
  response = Response[id: body["id"], result: result.serialized]
255
320
  response_headers = {}
256
-
257
321
  negotiated_protocol_version = result.serialized[:protocolVersion] || result.serialized["protocolVersion"]
258
322
 
259
323
  if @require_sessions
260
324
  session_id = SecureRandom.uuid
325
+ # Merge server-level defaults with request-level context
326
+ merged_context = (@configuration.context || {}).merge(session_context)
261
327
  @session_store.create_session(session_id, {
262
328
  server_instance: @server_instance,
263
- context: @configuration.context || {},
329
+ context: merged_context,
264
330
  created_at: Time.now.to_f,
265
331
  negotiated_protocol_version: negotiated_protocol_version
266
332
  })
333
+ # Store initial handler names for list_changed detection
334
+ current_handlers = @configuration.registry.handler_names
335
+ @session_store.store_registered_handlers(session_id, **current_handlers)
267
336
  response_headers["Mcp-Session-Id"] = session_id
268
337
  @session_protocol_versions[session_id] = negotiated_protocol_version
338
+ log_to_server_with_context { |logger| logger.info("Session created: #{session_id} (protocol: #{negotiated_protocol_version})") }
269
339
  else
270
340
  @session_protocol_versions[:default] = negotiated_protocol_version
271
341
  end
272
342
 
273
- if accept_header.include?("text/event-stream") && !accept_header.include?("application/json")
274
- response_headers.merge!({
275
- "Content-Type" => "text/event-stream",
276
- "Cache-Control" => "no-cache",
277
- "Connection" => "keep-alive"
278
- })
343
+ response_headers["Content-Type"] = "application/json"
279
344
 
280
- {
281
- stream: true,
282
- headers: response_headers,
283
- stream_proc: create_initialization_sse_stream_proc(response.serialized)
284
- }
285
- else
286
- response_headers["Content-Type"] = "application/json"
287
- {
288
- json: response.serialized,
289
- status: 200,
290
- headers: response_headers
291
- }
345
+ log_to_server_with_context(request_id: response.id) do |logger|
346
+ logger.info("← #{body["method"]} Response")
347
+ logger.info(" #{response.serialized.to_json}")
292
348
  end
349
+
350
+ {
351
+ json: response.serialized,
352
+ status: 200,
353
+ headers: response_headers
354
+ }
293
355
  end
294
356
 
357
+ # Handle regular MCP requests (tools, resources, prompts) with streaming/JSON decision logic
358
+ # Defaults to SSE streaming but returns JSON when client explicitly requests JSON only
295
359
  def handle_regular_request(body, session_id, accept_header)
360
+ session_context = {}
361
+
296
362
  if @require_sessions
363
+ # Per the MCP spec, servers SHOULD respond to requests without a valid
364
+ # Mcp-Session-Id header (other than initialization) with HTTP 400.
365
+ # The session ID MUST be present on all subsequent requests after initialization,
366
+ # including notifications like notifications/initialized.
297
367
  unless session_id && @session_store.session_exists?(session_id)
298
- if session_id && !@session_store.session_exists?(session_id)
368
+ if session_id
299
369
  error_response = ErrorResponse[id: body["id"], error: {code: -32600, message: "Session terminated"}]
300
370
  return {json: error_response.serialized, status: 404}
301
371
  else
@@ -303,6 +373,9 @@ module ModelContextProtocol
303
373
  return {json: error_response.serialized, status: 400}
304
374
  end
305
375
  end
376
+
377
+ session_context = @session_store.get_session_context(session_id)
378
+ check_and_notify_handler_changes(session_id)
306
379
  end
307
380
 
308
381
  message_type = determine_message_type(body)
@@ -311,17 +384,26 @@ module ModelContextProtocol
311
384
  when :notification, :response
312
385
  if body["method"] == "notifications/cancelled"
313
386
  handle_cancellation(body, session_id)
387
+ elsif message_type == :response && handle_ping_response(body)
388
+ # Ping response handled, don't forward to streams
389
+ log_to_server_with_context do |logger|
390
+ logger.info("← Ping response [accepted]")
391
+ end
314
392
  elsif session_id && @session_store.session_has_active_stream?(session_id)
315
393
  deliver_to_session_stream(session_id, body)
394
+ elsif message_type == :response
395
+ # This might be a ping response for an expired session
396
+ log_to_server_with_context do |logger|
397
+ logger.debug("← Response for expired/unknown session: #{session_id}")
398
+ end
316
399
  end
317
- {json: {}, status: 202}
400
+ log_to_server_with_context do |logger|
401
+ logger.info("← Notification [accepted]")
402
+ end
403
+ {status: 202}
318
404
 
319
405
  when :request
320
- has_progress_token = body.dig("params", "_meta", "progressToken")
321
- should_stream = (accept_header.include?("text/event-stream") && !accept_header.include?("application/json")) ||
322
- has_progress_token
323
-
324
- if should_stream
406
+ if accept_header.include?("text/event-stream")
325
407
  {
326
408
  stream: true,
327
409
  headers: {
@@ -329,32 +411,36 @@ module ModelContextProtocol
329
411
  "Cache-Control" => "no-cache",
330
412
  "Connection" => "keep-alive"
331
413
  },
332
- stream_proc: create_progressive_request_sse_stream_proc(body, session_id)
414
+ stream_proc: create_request_response_sse_stream_proc(body, session_id, session_context: session_context)
333
415
  }
334
- else
335
- result = @router.route(body, request_store: @request_store, session_id: session_id, transport: self)
336
-
337
- if result
338
- response = Response[id: body["id"], result: result.serialized]
416
+ elsif (result = @router.route(body, request_store: @request_store, session_id: session_id, transport: self, session_context: session_context))
417
+ response = Response[id: body["id"], result: result.serialized]
339
418
 
340
- if session_id && @session_store.session_has_active_stream?(session_id)
341
- deliver_to_session_stream(session_id, response.serialized)
342
- return {json: {accepted: true}, status: 200}
343
- end
419
+ log_to_server_with_context(request_id: response.id) do |logger|
420
+ logger.info("← #{body["method"]} Response")
421
+ logger.info(" #{response.serialized.to_json}")
422
+ end
344
423
 
345
- {
346
- json: response.serialized,
347
- status: 200,
348
- headers: {"Content-Type" => "application/json"}
349
- }
350
- else
351
- {json: {}, status: 204}
424
+ {
425
+ json: response.serialized,
426
+ status: 200,
427
+ headers: {"Content-Type" => "application/json"}
428
+ }
429
+ else
430
+ log_to_server_with_context do |logger|
431
+ logger.info("← Response (status: 204)")
352
432
  end
433
+ {json: {}, status: 204}
353
434
  end
354
435
  end
355
436
  end
356
437
 
357
- def handle_sse_request(env)
438
+ # Handle HTTP GET requests to establish persistent SSE connections for notifications
439
+ # Validates session requirements and Accept headers before opening long-lived streams
440
+ def handle_get_request(env)
441
+ origin_error = validate_origin!(env)
442
+ return origin_error if origin_error
443
+
358
444
  accept_header = env["HTTP_ACCEPT"] || ""
359
445
  unless accept_header.include?("text/event-stream")
360
446
  error_response = ErrorResponse[id: nil, error: {code: -32600, message: "Accept header must include text/event-stream"}]
@@ -362,6 +448,10 @@ module ModelContextProtocol
362
448
  end
363
449
 
364
450
  session_id = env["HTTP_MCP_SESSION_ID"]
451
+
452
+ protocol_error = validate_protocol_version!(env, session_id: session_id)
453
+ return protocol_error if protocol_error
454
+
365
455
  last_event_id = env["HTTP_LAST_EVENT_ID"]
366
456
 
367
457
  if @require_sessions
@@ -384,166 +474,450 @@ module ModelContextProtocol
384
474
  "Cache-Control" => "no-cache",
385
475
  "Connection" => "keep-alive"
386
476
  },
387
- stream_proc: create_sse_stream_proc(session_id, last_event_id)
477
+ stream_proc: create_persistent_notification_sse_stream_proc(session_id, last_event_id)
388
478
  }
389
479
  end
390
480
 
481
+ # Handle HTTP DELETE requests to clean up sessions and associated resources
482
+ # Removes session data, closes streams, and cleans up request store entries
391
483
  def handle_delete_request(env)
484
+ origin_error = validate_origin!(env)
485
+ return origin_error if origin_error
486
+
392
487
  session_id = env["HTTP_MCP_SESSION_ID"]
393
488
 
489
+ protocol_error = validate_protocol_version!(env, session_id: session_id)
490
+ return protocol_error if protocol_error
491
+
492
+ @server_logger.info("→ DELETE /mcp [Session cleanup: #{session_id || "unknown"}]")
493
+
494
+ if @require_sessions
495
+ unless session_id
496
+ error_response = ErrorResponse[id: nil, error: {code: -32600, message: "Invalid or missing session ID"}]
497
+ return {json: error_response.serialized, status: 400}
498
+ end
499
+
500
+ unless @session_store.session_exists?(session_id)
501
+ error_response = ErrorResponse[id: nil, error: {code: -32600, message: "Session terminated"}]
502
+ return {json: error_response.serialized, status: 404}
503
+ end
504
+ end
505
+
394
506
  if session_id
395
507
  cleanup_session(session_id)
508
+ log_to_server_with_context { |logger| logger.info("Session cleanup: #{session_id}") }
396
509
  end
397
510
 
511
+ log_to_server_with_context do |logger|
512
+ logger.info("← DELETE Response")
513
+ logger.info(" #{{"success" => true}.to_json}")
514
+ end
398
515
  {json: {success: true}, status: 200}
399
516
  end
400
517
 
401
- def create_sse_stream_proc(session_id, last_event_id = nil)
518
+ # Create SSE stream processor for request-response pattern with real-time progress support
519
+ # Opens stream → Executes request → Sends response → Closes stream
520
+ # Enables progress notifications during long-running operations like tool calls
521
+ # @param request_body [Hash] Parsed JSON-RPC request
522
+ # @param session_id [String, nil] Session ID for this request
523
+ # @param session_context [Hash] Context to pass to handlers
524
+ def create_request_response_sse_stream_proc(request_body, session_id, session_context: {})
525
+ proc do |stream|
526
+ temp_stream_id = "temp-#{SecureRandom.hex(8)}"
527
+ @stream_registry.register_stream(temp_stream_id, stream)
528
+
529
+ log_to_server_with_context(request_id: request_body["id"]) do |logger|
530
+ logger.info("← SSE stream [opened] (#{temp_stream_id})")
531
+ logger.info(" Connection will remain open for real-time notifications")
532
+ end
533
+
534
+ begin
535
+ if (result = @router.route(request_body, request_store: @request_store, session_id: session_id, transport: self, stream_id: temp_stream_id, session_context: session_context))
536
+ response = Response[id: request_body["id"], result: result.serialized]
537
+ event_id = next_event_id
538
+ send_sse_event(stream, response.serialized, event_id)
539
+ @server_logger.debug("Sent response via SSE stream (id: #{request_body["id"]})")
540
+ else
541
+ event_id = next_event_id
542
+ send_sse_event(stream, {}, event_id)
543
+ @server_logger.debug("Sent empty response via SSE stream (id: #{request_body["id"]})")
544
+ end
545
+
546
+ close_stream(temp_stream_id, reason: "request_completed")
547
+ rescue IOError, Errno::EPIPE, Errno::ECONNRESET => e
548
+ @server_logger.debug("Client disconnected during progressive request processing: #{e.class.name}")
549
+ log_to_server_with_context { |logger| logger.info("← SSE stream [closed] (#{temp_stream_id}) [client_disconnected]") }
550
+ rescue ModelContextProtocol::Server::ParameterValidationError => e
551
+ @client_logger.error("Validation error", error: e.message)
552
+ error_response = ErrorResponse[id: request_body["id"], error: {code: -32602, message: e.message}]
553
+ send_sse_event(stream, error_response.serialized, next_event_id)
554
+ close_stream(temp_stream_id, reason: "validation_error")
555
+ rescue => e
556
+ @client_logger.error("Internal error", error: e.message, backtrace: e.backtrace)
557
+ error_response = ErrorResponse[id: request_body["id"], error: {code: -32603, message: e.message}]
558
+ send_sse_event(stream, error_response.serialized, next_event_id)
559
+ close_stream(temp_stream_id, reason: "internal_error")
560
+ ensure
561
+ @stream_registry.unregister_stream(temp_stream_id)
562
+ end
563
+ end
564
+ end
565
+
566
+ # Generate unique sequential event IDs for SSE streams
567
+ # Enables client-side event replay and ordering guarantees
568
+ def next_event_id
569
+ @event_counter.next_event_id
570
+ end
571
+
572
+ # Send formatted SSE event to stream with optional event ID
573
+ # Handles JSON serialization and proper SSE formatting with data/id fields
574
+ def send_sse_event(stream, data, event_id = nil)
575
+ if event_id
576
+ stream.write("id: #{event_id}\n")
577
+ end
578
+ message = data.is_a?(String) ? data : data.to_json
579
+ stream.write("data: #{message}\n\n")
580
+ stream.flush if stream.respond_to?(:flush)
581
+ end
582
+
583
+ # Close an active SSE stream and clean up associated resources
584
+ # Unregisters from stream registry and marks session inactive
585
+ def close_stream(session_id, reason: "completed")
586
+ if (stream = @stream_registry.get_local_stream(session_id))
587
+ begin
588
+ stream.close
589
+ rescue IOError, Errno::EPIPE, Errno::ECONNRESET, Errno::ENOTCONN, Errno::EBADF
590
+ nil
591
+ end
592
+
593
+ reason_text = reason ? " [#{reason}]" : ""
594
+ log_to_server_with_context { |logger| logger.info("← SSE stream [closed] (#{session_id})#{reason_text}") }
595
+ @stream_registry.unregister_stream(session_id)
596
+ @session_store.mark_stream_inactive(session_id) if @require_sessions
597
+ end
598
+ end
599
+
600
+ # Create SSE stream processor for long-lived notification streams
601
+ # Opens stream → Keeps connection alive → Receives notifications over time
602
+ # Supports event replay from last_event_id for client reconnection scenarios
603
+ def create_persistent_notification_sse_stream_proc(session_id, last_event_id = nil)
402
604
  proc do |stream|
403
605
  @stream_registry.register_stream(session_id, stream) if session_id
404
606
 
607
+ log_to_server_with_context do |logger|
608
+ logger.info("← SSE stream [opened] (#{session_id || "no-session"})")
609
+ logger.info(" Connection will remain open for real-time notifications")
610
+ end
611
+
405
612
  if last_event_id
406
613
  replay_messages_after_event_id(stream, session_id, last_event_id)
407
614
  else
408
615
  flush_notifications_to_stream(stream)
409
616
  end
410
617
 
618
+ # Also flush any messages queued in Redis from other server instances
619
+ poll_and_deliver_redis_messages(stream, session_id) if session_id
620
+
411
621
  loop do
412
622
  break unless stream_connected?(stream)
623
+
624
+ # Poll for queued messages from Redis (cross-server delivery)
625
+ poll_and_deliver_redis_messages(stream, session_id) if session_id
626
+
413
627
  sleep 0.1
414
628
  end
415
629
  ensure
416
- @stream_registry.unregister_stream(session_id) if session_id
630
+ if session_id
631
+ log_to_server_with_context { |logger| logger.info("← SSE stream [closed] (#{session_id}) [loop_ended]") }
632
+ @stream_registry.unregister_stream(session_id)
633
+ end
417
634
  end
418
635
  end
419
636
 
637
+ # Test if an SSE stream is still connected by checking its status
638
+ # Returns false if stream has been disconnected due to network issues
639
+ # Actual connectivity testing is done via MCP ping requests in monitor_streams
420
640
  def stream_connected?(stream)
421
641
  return false unless stream
422
642
 
423
643
  begin
424
- stream.write(": ping\n\n")
425
- stream.flush if stream.respond_to?(:flush)
644
+ # Check if stream reports as closed first (quick check)
645
+ if stream.respond_to?(:closed?) && stream.closed?
646
+ return false
647
+ end
648
+
426
649
  true
427
650
  rescue IOError, Errno::EPIPE, Errno::ECONNRESET, Errno::ENOTCONN, Errno::EBADF
428
651
  false
429
652
  end
430
653
  end
431
654
 
655
+ # Start background thread to monitor stream health and clean up disconnected streams
656
+ # Runs every 30 seconds to detect client disconnections and prevent resource leaks
432
657
  def start_stream_monitor
658
+ @stream_monitor_running = true
433
659
  @stream_monitor_thread = Thread.new do
434
- loop do
435
- sleep 30 # Check every 30 seconds
660
+ while @stream_monitor_running
661
+ # Sleep in 1-second intervals to allow quick shutdown response
662
+ 30.times do
663
+ break unless @stream_monitor_running
664
+ sleep 1
665
+ end
666
+
667
+ next unless @stream_monitor_running
436
668
 
437
669
  begin
438
670
  monitor_streams
439
671
  rescue => e
440
- @configuration.logger.error("Stream monitor error", error: e.message)
672
+ @server_logger.error("Stream monitor error: #{e.message}")
441
673
  end
442
674
  end
443
- rescue => e
444
- @configuration.logger.error("Stream monitor thread error", error: e.message)
445
- sleep 5
446
- retry
447
675
  end
448
676
  end
449
677
 
678
+ # Monitor all active streams for connectivity and clean up expired/disconnected ones
679
+ # Sends ping messages and removes streams that fail to respond
450
680
  def monitor_streams
451
681
  expired_sessions = @stream_registry.cleanup_expired_streams
682
+ unless expired_sessions.empty?
683
+ @server_logger.debug("Cleaned up #{expired_sessions.size} expired streams: #{expired_sessions.join(", ")}")
684
+ end
685
+
452
686
  expired_sessions.each do |session_id|
453
687
  @session_store.mark_stream_inactive(session_id)
454
688
  end
455
689
 
690
+ # Check for expired ping requests and close unresponsive streams
691
+ expired_pings = @server_request_store.get_expired_requests(@ping_timeout)
692
+ unless expired_pings.empty?
693
+ @server_logger.debug("Found #{expired_pings.size} expired ping requests")
694
+ expired_pings.each do |ping_info|
695
+ session_id = ping_info[:session_id]
696
+ request_id = ping_info[:request_id]
697
+ age = ping_info[:age]
698
+
699
+ @server_logger.warn("Ping timeout for session #{session_id} (request: #{request_id}, age: #{age.round(2)}s)")
700
+ close_stream(session_id, reason: "ping_timeout")
701
+ @server_request_store.unregister_request(request_id)
702
+ end
703
+ end
704
+
456
705
  @stream_registry.get_all_local_streams.each do |session_id, stream|
457
706
  if stream_connected?(stream)
458
- send_ping_to_stream(stream)
707
+ send_ping_to_stream(stream, session_id)
459
708
  @stream_registry.refresh_heartbeat(session_id)
460
709
  else
710
+ @server_logger.debug("Stream disconnected during monitoring: #{session_id}")
461
711
  close_stream(session_id, reason: "client_disconnected")
462
712
  end
463
- rescue IOError, Errno::EPIPE, Errno::ECONNRESET, Errno::ENOTCONN, Errno::EBADF
713
+ rescue IOError, Errno::EPIPE, Errno::ECONNRESET, Errno::ENOTCONN, Errno::EBADF => e
714
+ @server_logger.debug("Network error during stream monitoring for #{session_id}: #{e.class.name}")
464
715
  close_stream(session_id, reason: "network_error")
465
716
  end
466
717
  end
467
718
 
468
- def send_ping_to_stream(stream)
469
- stream.write(": ping #{Time.now.iso8601}\n\n")
470
- stream.flush if stream.respond_to?(:flush)
719
+ # Send MCP-compliant ping request to test connectivity and expect response
720
+ # Tracks the ping in server request store for timeout detection
721
+ def send_ping_to_stream(stream, session_id)
722
+ ping_id = "ping-#{SecureRandom.hex(8)}"
723
+ ping_request = {
724
+ jsonrpc: "2.0",
725
+ id: ping_id,
726
+ method: "ping"
727
+ }
728
+
729
+ @server_request_store.register_request(ping_id, session_id, type: :ping)
730
+ send_to_stream(stream, ping_request)
731
+
732
+ @server_logger.debug("Sent MCP ping request (id: #{ping_id}) to stream: #{session_id}")
471
733
  end
472
734
 
735
+ # Send data to an SSE stream with proper event formatting and error handling
736
+ # Automatically closes stream on connection errors to prevent resource leaks
473
737
  def send_to_stream(stream, data)
474
738
  event_id = next_event_id
475
739
  send_sse_event(stream, data, event_id)
476
740
  end
477
741
 
742
+ # Replay missed messages from Redis after client reconnection
743
+ # Enables clients to catch up on messages they missed during disconnection
478
744
  def replay_messages_after_event_id(stream, session_id, last_event_id)
479
745
  flush_notifications_to_stream(stream)
480
746
  end
481
747
 
748
+ # Deliver data to a specific session's stream or queue for cross-server delivery
749
+ # Handles both local stream delivery and cross-server message queuing
750
+ # @return [Boolean] true if delivered to active stream, false if queued
482
751
  def deliver_to_session_stream(session_id, data)
483
752
  if @stream_registry.has_local_stream?(session_id)
484
753
  stream = @stream_registry.get_local_stream(session_id)
485
754
  begin
755
+ # MANDATORY connection validation before every delivery
756
+ @server_logger.debug("Validating stream connection for #{session_id}")
757
+ unless stream_connected?(stream)
758
+ @server_logger.warn("Stream #{session_id} failed connection validation - cleaning up")
759
+ close_stream(session_id, reason: "connection_validation_failed")
760
+ return false
761
+ end
762
+
763
+ @server_logger.debug("Stream #{session_id} passed connection validation")
486
764
  send_to_stream(stream, data)
765
+ @server_logger.debug("Successfully delivered message to active stream: #{session_id}")
487
766
  return true
488
- rescue IOError, Errno::EPIPE, Errno::ECONNRESET
489
- close_stream(session_id, reason: "client_disconnected")
767
+ rescue IOError, Errno::EPIPE, Errno::ECONNRESET, Errno::ENOTCONN, Errno::EBADF => e
768
+ @server_logger.warn("Failed to deliver to stream #{session_id}, network error: #{e.class.name}")
769
+ close_stream(session_id, reason: "network_error")
770
+ return false
490
771
  end
491
772
  end
492
773
 
774
+ @server_logger.debug("No local stream found for session #{session_id}, queuing message")
493
775
  @session_store.queue_message_for_session(session_id, data)
776
+ false
494
777
  end
495
778
 
779
+ # Clean up all resources associated with a session
780
+ # Removes from stream registry, session store, request store, and server request store
496
781
  def cleanup_session(session_id)
497
782
  @stream_registry.unregister_stream(session_id)
498
783
  @session_store.cleanup_session(session_id)
499
784
  @request_store.cleanup_session_requests(session_id)
785
+ @server_request_store.cleanup_session_requests(session_id)
500
786
  end
501
787
 
502
- def start_message_poller
503
- @message_poller.start
504
- end
505
-
506
- def has_active_streams?
507
- @stream_registry.has_any_local_streams?
508
- end
509
-
788
+ # Broadcast notification to all active streams on this transport instance
789
+ # Handles connection errors gracefully and removes disconnected streams
510
790
  def deliver_to_active_streams(notification)
791
+ delivered_count = 0
792
+ disconnected_streams = []
793
+
511
794
  @stream_registry.get_all_local_streams.each do |session_id, stream|
795
+ # Verify stream is still connected before attempting delivery
796
+ unless stream_connected?(stream)
797
+ disconnected_streams << session_id
798
+ next
799
+ end
800
+
512
801
  send_to_stream(stream, notification)
513
- rescue IOError, Errno::EPIPE, Errno::ECONNRESET
802
+ delivered_count += 1
803
+ @server_logger.debug("Delivered notification to stream: #{session_id}")
804
+ rescue IOError, Errno::EPIPE, Errno::ECONNRESET, Errno::ENOTCONN, Errno::EBADF => e
805
+ @server_logger.debug("Failed to deliver notification to stream #{session_id}, client disconnected: #{e.class.name}")
806
+ disconnected_streams << session_id
807
+ end
808
+
809
+ # Clean up disconnected streams
810
+ disconnected_streams.each do |session_id|
514
811
  close_stream(session_id, reason: "client_disconnected")
515
812
  end
813
+
814
+ @server_logger.debug("Delivered notifications to #{delivered_count} streams, cleaned up #{disconnected_streams.size} disconnected streams")
815
+ end
816
+
817
+ # Poll for messages queued in Redis and deliver to the stream
818
+ # Handles cross-server message delivery when notifications are queued by other server instances
819
+ def poll_and_deliver_redis_messages(stream, session_id)
820
+ return unless session_id
821
+
822
+ messages = @session_store.poll_messages_for_session(session_id)
823
+ return if messages.empty?
824
+
825
+ @server_logger.debug("Delivering #{messages.size} queued messages from Redis to stream #{session_id}")
826
+ messages.each do |message|
827
+ send_to_stream(stream, message)
828
+ end
829
+ rescue => e
830
+ @server_logger.error("Error polling Redis messages: #{e.message}")
516
831
  end
517
832
 
833
+ # Flush any queued notifications to a newly connected stream
834
+ # Ensures clients receive notifications that were queued while disconnected
518
835
  def flush_notifications_to_stream(stream)
519
836
  notifications = @notification_queue.pop_all
520
- notifications.each do |notification|
521
- send_to_stream(stream, notification)
837
+ @server_logger.debug("Checking notification queue: #{notifications.size} notifications queued")
838
+ if notifications.empty?
839
+ @server_logger.debug("No queued notifications to flush")
840
+ else
841
+ @server_logger.debug("Flushing #{notifications.size} queued notifications to new stream")
842
+ notifications.each do |notification|
843
+ send_to_stream(stream, notification)
844
+ @server_logger.debug("Flushed queued notification: #{notification[:method]}")
845
+ end
522
846
  end
523
847
  end
524
848
 
525
- # Handle a cancellation notification from the client
526
- #
527
- # @param message [Hash] the cancellation notification message
528
- # @param session_id [String, nil] the session ID if available
849
+ # Handle ping responses from clients to mark server-initiated ping requests as completed
850
+ # Returns true if this was a ping response, false otherwise
851
+ def handle_ping_response(message)
852
+ response_id = message["id"]
853
+ return false unless response_id
854
+
855
+ # Check if this response ID corresponds to a pending ping request
856
+ if @server_request_store.pending?(response_id)
857
+ request_info = @server_request_store.get_request(response_id)
858
+ if request_info && request_info["type"] == "ping"
859
+ @server_request_store.mark_completed(response_id)
860
+ @server_logger.debug("Received ping response for request: #{response_id}")
861
+ return true
862
+ end
863
+ end
864
+
865
+ false
866
+ rescue => e
867
+ @server_logger.error("Error processing ping response: #{e.message}")
868
+ false
869
+ end
870
+
871
+ # Handle client cancellation requests to abort in-progress operations
872
+ # Marks requests as cancelled in the request store to stop ongoing work
529
873
  def handle_cancellation(message, session_id = nil)
530
874
  params = message["params"]
531
875
  return unless params
532
876
 
533
- request_id = params["requestId"]
877
+ jsonrpc_request_id = params["requestId"]
534
878
  reason = params["reason"]
535
879
 
536
- return unless request_id
880
+ return unless jsonrpc_request_id
881
+
882
+ log_to_server_with_context(request_id: jsonrpc_request_id) do |logger|
883
+ logger.info("Processing cancellation (reason: #{reason || "unknown"})")
884
+ end
537
885
 
538
- @request_store.mark_cancelled(request_id, reason)
539
- rescue
886
+ @request_store.mark_cancelled(jsonrpc_request_id, reason)
887
+ rescue => e
888
+ log_to_server_with_context(request_id: jsonrpc_request_id) do |logger|
889
+ logger.error("Error processing cancellation: #{e.message}")
890
+ end
540
891
  nil
541
892
  end
542
893
 
543
- def cleanup
544
- @message_poller&.stop
545
- @stream_monitor_thread&.kill
546
- @redis = nil
894
+ # Check if registered handlers have changed for a session and send notifications
895
+ # Compares current handlers against previously stored handlers in Redis
896
+ def check_and_notify_handler_changes(session_id)
897
+ return unless session_id
898
+ return unless @session_store.session_exists?(session_id)
899
+
900
+ current = @configuration.registry.handler_names
901
+ previous = @session_store.get_registered_handlers(session_id)
902
+
903
+ return if previous.nil? # First request after init
904
+
905
+ changed_types = []
906
+ changed_types << :prompts if current[:prompts].sort != previous[:prompts]&.sort
907
+ changed_types << :resources if current[:resources].sort != previous[:resources]&.sort
908
+ changed_types << :tools if current[:tools].sort != previous[:tools]&.sort
909
+
910
+ return if changed_types.empty?
911
+
912
+ changed_types.each do |type|
913
+ send_notification("notifications/#{type}/list_changed", {}, session_id: session_id)
914
+ end
915
+
916
+ @session_store.store_registered_handlers(session_id, **current)
917
+ rescue => e
918
+ @server_logger.error("Error checking handler changes: #{e.class.name}: #{e.message}")
919
+ @server_logger.debug("Backtrace: #{e.backtrace.first(5).join("\n")}")
920
+ # Don't re-raise - handler change detection is optional, allow request to proceed
547
921
  end
548
922
  end
549
923
  end