model-context-protocol-rb 0.5.1 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +13 -1
- data/README.md +60 -25
- data/lib/model_context_protocol/server/cancellable.rb +5 -5
- data/lib/model_context_protocol/server/{mcp_logger.rb → client_logger.rb} +7 -10
- data/lib/model_context_protocol/server/configuration.rb +17 -34
- data/lib/model_context_protocol/server/global_config/server_logging.rb +78 -0
- data/lib/model_context_protocol/server/progressable.rb +43 -21
- data/lib/model_context_protocol/server/prompt.rb +12 -7
- data/lib/model_context_protocol/server/redis_pool_manager.rb +1 -1
- data/lib/model_context_protocol/server/resource.rb +7 -4
- data/lib/model_context_protocol/server/router.rb +8 -7
- data/lib/model_context_protocol/server/server_logger.rb +28 -0
- data/lib/model_context_protocol/server/stdio_transport/request_store.rb +17 -17
- data/lib/model_context_protocol/server/stdio_transport.rb +18 -12
- data/lib/model_context_protocol/server/streamable_http_transport/message_poller.rb +9 -9
- data/lib/model_context_protocol/server/streamable_http_transport/request_store.rb +36 -36
- data/lib/model_context_protocol/server/streamable_http_transport/server_request_store.rb +231 -0
- data/lib/model_context_protocol/server/streamable_http_transport.rb +419 -181
- data/lib/model_context_protocol/server/tool.rb +6 -5
- data/lib/model_context_protocol/server.rb +15 -13
- data/lib/model_context_protocol/version.rb +1 -1
- metadata +9 -6
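Much of this release reworks the streamable HTTP transport shown in the diff below: logging is split into a client logger and a server logger, and the transport now sends JSON-RPC `ping` requests over each SSE stream, tracks them in a new `ServerRequestStore`, and closes streams whose pings go unanswered within `ping_timeout` seconds (default 10). As a rough sketch of that ping lifecycle — using a hypothetical `PingTracker` stand-in, not the gem's internal classes — the bookkeeping looks roughly like this:

```ruby
# Illustrative sketch only: PingTracker is a stand-in, not part of the gem.
# The JSON-RPC payload and the timeout idea mirror the diff below.
require "securerandom"
require "json"

class PingTracker
  def initialize(timeout:)
    @timeout = timeout
    @pending = {} # request id => { session_id:, sent_at: }
  end

  # Build an MCP ping request (the payload matches the diff below)
  # and remember that it is awaiting a response.
  def build_ping(session_id)
    id = "ping-#{SecureRandom.hex(8)}"
    @pending[id] = { session_id: session_id, sent_at: Time.now }
    { jsonrpc: "2.0", id: id, method: "ping" }
  end

  # Mark a ping answered; true only if the response id matches a pending ping.
  def handle_response(message)
    id = message["id"]
    return false unless id && @pending.key?(id)

    @pending.delete(id)
    true
  end

  # Session ids whose pings have been outstanding longer than the timeout.
  def expired_sessions
    now = Time.now
    @pending.select { |_, info| now - info[:sent_at] > @timeout }
            .map { |_, info| info[:session_id] }
  end
end

tracker = PingTracker.new(timeout: 10)
ping = tracker.build_ping("session-abc")
puts ping.to_json                          # {"jsonrpc":"2.0","id":"ping-...","method":"ping"}
tracker.handle_response("id" => ping[:id]) # => true
```

In the gem itself this bookkeeping lives in the new Redis-backed `ServerRequestStore`, and `monitor_streams` closes any stream whose ping has expired.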
The hunks below are from data/lib/model_context_protocol/server/streamable_http_transport.rb, the largest change in this release:
@@ -15,65 +15,73 @@ module ModelContextProtocol
 end
 end
 
+attr_reader :server_logger
+
+# Initialize the HTTP transport with Redis-backed cross-server communication support
+# Sets up background threads for message polling and stream monitoring in multi-server deployments
 def initialize(router:, configuration:)
 @router = router
 @configuration = configuration
+@client_logger = configuration.client_logger
+@server_logger = configuration.server_logger
 
-transport_options = @configuration.transport_options
 @redis_pool = ModelContextProtocol::Server::RedisConfig.pool
+@redis = ModelContextProtocol::Server::RedisClientProxy.new(@redis_pool)
+
+transport_options = @configuration.transport_options
 @require_sessions = transport_options.fetch(:require_sessions, false)
 @default_protocol_version = transport_options.fetch(:default_protocol_version, "2025-03-26")
 @session_protocol_versions = {}
 @validate_origin = transport_options.fetch(:validate_origin, true)
 @allowed_origins = transport_options.fetch(:allowed_origins, ["http://localhost", "https://localhost", "http://127.0.0.1", "https://127.0.0.1"])
-@redis = ModelContextProtocol::Server::RedisClientProxy.new(@redis_pool)
-
-@session_store = SessionStore.new(
-@redis,
-ttl: transport_options[:session_ttl] || 3600
-)
 
+@session_store = SessionStore.new(@redis, ttl: transport_options[:session_ttl] || 3600)
 @server_instance = "#{Socket.gethostname}-#{Process.pid}-#{SecureRandom.hex(4)}"
 @stream_registry = StreamRegistry.new(@redis, @server_instance)
 @notification_queue = NotificationQueue.new(@redis, @server_instance)
 @event_counter = EventCounter.new(@redis, @server_instance)
 @request_store = RequestStore.new(@redis, @server_instance)
-@
-@
+@server_request_store = ServerRequestStore.new(@redis, @server_instance)
+@ping_timeout = transport_options.fetch(:ping_timeout, 10)
+
+@message_poller = MessagePoller.new(@redis, @stream_registry, @client_logger) do |stream, message|
 send_to_stream(stream, message)
 end
+@message_poller.start
 
-
+@stream_monitor_running = false
+@stream_monitor_thread = nil
 start_stream_monitor
 end
 
+# Gracefully shut down the transport by stopping background threads and cleaning up resources
+# Closes all active streams and returns Redis connections to the pool
 def shutdown
-@
+@server_logger.info("Shutting down StreamableHttpTransport")
 
-# Stop the message poller
 @message_poller&.stop
 
-
+@stream_monitor_running = false
 if @stream_monitor_thread&.alive?
 @stream_monitor_thread.kill
-@stream_monitor_thread.join(
+@stream_monitor_thread.join(5)
 end
 
-
-
-@stream_registry.unregister_stream(session_id)
-@session_store.mark_stream_inactive(session_id)
+@stream_registry.get_all_local_streams.each do |session_id, _stream|
+close_stream(session_id, reason: "shutdown")
 rescue => e
-@
+@server_logger.error("Error during stream cleanup for session #{session_id}: #{e.message}")
 end
 
 @redis_pool.checkin(@redis) if @redis_pool && @redis
 
-@
+@server_logger.info("StreamableHttpTransport shutdown complete")
 end
 
+# Main entry point for handling HTTP requests (POST, GET, DELETE)
+# Routes requests to appropriate handlers and manages the request/response lifecycle
 def handle
-@
+@server_logger.debug("Handling streamable HTTP transport request")
 
 env = @configuration.transport_options[:env]
 
@@ -83,10 +91,13 @@ module ModelContextProtocol
 
 case env["REQUEST_METHOD"]
 when "POST"
+@server_logger.debug("Handling POST request")
 handle_post_request(env)
 when "GET"
-
+@server_logger.debug("Handling GET request")
+handle_get_request(env)
 when "DELETE"
+@server_logger.debug("Handling DELETE request")
 handle_delete_request(env)
 else
 error_response = ErrorResponse[id: nil, error: {code: -32601, message: "Method not allowed"}]
@@ -94,22 +105,68 @@ module ModelContextProtocol
 end
 end
 
-
+# Send real-time notifications to active SSE streams or queue for delivery
+# Used for progress updates, resource changes, and other server-initiated messages
+# @param method [String] the notification method name
+# @param params [Hash] the notification parameters
+# @param session_id [String, nil] optional session ID for targeted delivery
+def send_notification(method, params, session_id: nil)
 notification = {
 jsonrpc: "2.0",
 method: method,
 params: params
 }
 
-
+log_to_server_with_context do |logger|
+logger.info("← #{method} [outgoing]")
+logger.info(" Notification: #{notification.to_json}")
+end
+
+if session_id
+# Deliver to specific session/stream
+@server_logger.debug("Attempting targeted delivery to session: #{session_id}")
+if deliver_to_session_stream(session_id, notification)
+@server_logger.debug("Successfully delivered notification to specific stream: #{session_id}")
+else
+@server_logger.debug("Failed to deliver to specific stream #{session_id}, queuing notification: #{method}")
+@notification_queue.push(notification)
+end
+elsif @stream_registry.get_local_stream(nil) # Check for persistent notification stream (no-session)
+@server_logger.debug("No session_id provided, delivering notification to persistent notification stream")
+if deliver_to_session_stream(nil, notification)
+@server_logger.debug("Successfully delivered notification to persistent notification stream")
+else
+@server_logger.debug("Failed to deliver to persistent notification stream, queuing notification: #{method}")
+@notification_queue.push(notification)
+end
+elsif @stream_registry.has_any_local_streams?
+@server_logger.debug("No persistent notification stream, delivering notification to active streams")
 deliver_to_active_streams(notification)
 else
+@server_logger.debug("No active streams, queuing notification: #{method}")
 @notification_queue.push(notification)
 end
 end
 
 private
 
+# Provide logging context with request ID and MCP context information
+# Ensures consistent logging format across all transport operations
+def log_to_server_with_context(request_id: nil, &block)
+original_context = Thread.current[:mcp_context]
+if request_id && !Thread.current[:mcp_context]
+Thread.current[:mcp_context] = {jsonrpc_request_id: request_id}
+end
+
+begin
+block.call(@server_logger) if block_given?
+ensure
+Thread.current[:mcp_context] = original_context if request_id && original_context.nil?
+end
+end
+
+# Validate HTTP headers for required content type and CORS origin
+# Returns error response if headers are invalid, nil if valid
 def validate_headers(env)
 if @validate_origin
 origin = env["HTTP_ORIGIN"]
@@ -139,6 +196,8 @@ module ModelContextProtocol
 nil
 end
 
+# Determine JSON-RPC message type from request body structure
+# Classifies messages as request, notification, response, or unknown
 def determine_message_type(body)
 if body.key?("method") && body.key?("id")
 :request
@@ -151,76 +210,8 @@ module ModelContextProtocol
 end
 end
 
-
-
-event_id = next_event_id
-send_sse_event(stream, response_data, event_id)
-end
-end
-
-def create_request_sse_stream_proc(response_data)
-proc do |stream|
-event_id = next_event_id
-send_sse_event(stream, response_data, event_id)
-end
-end
-
-def create_progressive_request_sse_stream_proc(request_body, session_id)
-proc do |stream|
-temp_stream_id = session_id || "temp-#{SecureRandom.hex(8)}"
-@stream_registry.register_stream(temp_stream_id, stream)
-
-begin
-result = @router.route(request_body, request_store: @request_store, session_id: session_id, transport: self)
-
-if result
-response = Response[id: request_body["id"], result: result.serialized]
-
-event_id = next_event_id
-send_sse_event(stream, response.serialized, event_id)
-else
-event_id = next_event_id
-send_sse_event(stream, {}, event_id)
-end
-
-# Close stream immediately when work is complete
-close_stream(temp_stream_id, reason: "request_completed")
-rescue IOError, Errno::EPIPE, Errno::ECONNRESET
-# Client disconnected during processing
-ensure
-# Fallback cleanup
-@stream_registry.unregister_stream(temp_stream_id)
-end
-end
-end
-
-def next_event_id
-@event_counter.next_event_id
-end
-
-def send_sse_event(stream, data, event_id = nil)
-if event_id
-stream.write("id: #{event_id}\n")
-end
-message = data.is_a?(String) ? data : data.to_json
-stream.write("data: #{message}\n\n")
-stream.flush if stream.respond_to?(:flush)
-end
-
-def close_stream(session_id, reason: "completed")
-if (stream = @stream_registry.get_local_stream(session_id))
-begin
-send_sse_event(stream, {type: "stream_complete", reason: reason})
-stream.close
-rescue IOError, Errno::EPIPE, Errno::ECONNRESET, Errno::ENOTCONN, Errno::EBADF
-nil
-end
-
-@stream_registry.unregister_stream(session_id)
-@session_store.mark_stream_inactive(session_id) if @require_sessions
-end
-end
-
+# Handle HTTP POST requests containing JSON-RPC messages
+# Parses request body and routes to initialization or regular request handlers
 def handle_post_request(env)
 validation_error = validate_headers(env)
 return validation_error if validation_error
@@ -230,30 +221,65 @@ module ModelContextProtocol
 session_id = env["HTTP_MCP_SESSION_ID"]
 accept_header = env["HTTP_ACCEPT"] || ""
 
-
-
+log_to_server_with_context(request_id: body["id"]) do |logger|
+method = body["method"]
+id = body["id"]
+
+if method&.start_with?("notifications/") || id.nil?
+logger.info("→ #{method} [notification]")
+else
+logger.info("→ #{method} (id: #{id}) [request]")
+end
+
+logger.info(" Request: #{body.to_json}")
+logger.debug(" Accept: #{accept_header}") if body["method"] != "notifications/initialized"
+logger.debug(" Redis Pool: #{ModelContextProtocol::Server::RedisConfig.stats}")
+end
+
+if body["method"] == "initialize"
 handle_initialization(body, accept_header)
 else
 handle_regular_request(body, session_id, accept_header)
 end
-rescue JSON::ParserError
+rescue JSON::ParserError => e
+log_to_server_with_context do |logger|
+logger.error("JSON parse error in streamable HTTP transport: #{e.message}")
+end
 error_response = ErrorResponse[id: "", error: {code: -32700, message: "Parse error"}]
+log_to_server_with_context do |logger|
+logger.info("← Error response (code: #{error_response.error[:code]})")
+logger.info(" #{error_response.serialized.to_json}")
+end
 {json: error_response.serialized, status: 400}
 rescue ModelContextProtocol::Server::ParameterValidationError => validation_error
-
+log_to_server_with_context(request_id: body&.dig("id")) do |logger|
+logger.error("Parameter validation failed in streamable HTTP transport: #{validation_error.message}")
+end
 error_response = ErrorResponse[id: body&.dig("id"), error: {code: -32602, message: validation_error.message}]
+log_to_server_with_context(request_id: error_response.id) do |logger|
+logger.info("← Error response (code: #{error_response.error[:code]})")
+logger.info(" #{error_response.serialized.to_json}")
+end
 {json: error_response.serialized, status: 400}
 rescue => e
-
+log_to_server_with_context(request_id: body&.dig("id")) do |logger|
+logger.error("Internal error handling POST request in streamable HTTP transport: #{e.message}")
+logger.debug("Backtrace: #{e.backtrace.join("\n")}")
+end
 error_response = ErrorResponse[id: body&.dig("id"), error: {code: -32603, message: "Internal error"}]
+log_to_server_with_context(request_id: error_response.id) do |logger|
+logger.info("← Error response (code: #{error_response.error[:code]})")
+logger.info(" #{error_response.serialized.to_json}")
+end
 {json: error_response.serialized, status: 500}
 end
 
+# Handle MCP initialization requests to establish protocol version and optional sessions
+# Always returns JSON response regardless of Accept header to keep initialization simple
 def handle_initialization(body, accept_header)
 result = @router.route(body, transport: self)
 response = Response[id: body["id"], result: result.serialized]
 response_headers = {}
-
 negotiated_protocol_version = result.serialized[:protocolVersion] || result.serialized["protocolVersion"]
 
 if @require_sessions
@@ -266,32 +292,27 @@ module ModelContextProtocol
 })
 response_headers["Mcp-Session-Id"] = session_id
 @session_protocol_versions[session_id] = negotiated_protocol_version
+log_to_server_with_context { |logger| logger.info("Session created: #{session_id} (protocol: #{negotiated_protocol_version})") }
 else
 @session_protocol_versions[:default] = negotiated_protocol_version
 end
 
-
-response_headers.merge!({
-"Content-Type" => "text/event-stream",
-"Cache-Control" => "no-cache",
-"Connection" => "keep-alive"
-})
+response_headers["Content-Type"] = "application/json"
 
-
-
-
-stream_proc: create_initialization_sse_stream_proc(response.serialized)
-}
-else
-response_headers["Content-Type"] = "application/json"
-{
-json: response.serialized,
-status: 200,
-headers: response_headers
-}
+log_to_server_with_context(request_id: response.id) do |logger|
+logger.info("← #{body["method"]} Response")
+logger.info(" #{response.serialized.to_json}")
 end
+
+{
+json: response.serialized,
+status: 200,
+headers: response_headers
+}
 end
 
+# Handle regular MCP requests (tools, resources, prompts) with streaming/JSON decision logic
+# Defaults to SSE streaming but returns JSON when client explicitly requests JSON only
 def handle_regular_request(body, session_id, accept_header)
 if @require_sessions
 unless session_id && @session_store.session_exists?(session_id)
@@ -311,17 +332,26 @@ module ModelContextProtocol
 when :notification, :response
 if body["method"] == "notifications/cancelled"
 handle_cancellation(body, session_id)
+elsif message_type == :response && handle_ping_response(body)
+# Ping response handled, don't forward to streams
+log_to_server_with_context do |logger|
+logger.info("← Ping response [accepted]")
+end
 elsif session_id && @session_store.session_has_active_stream?(session_id)
 deliver_to_session_stream(session_id, body)
+elsif message_type == :response
+# This might be a ping response for an expired session
+log_to_server_with_context do |logger|
+logger.debug("← Response for expired/unknown session: #{session_id}")
+end
+end
+log_to_server_with_context do |logger|
+logger.info("← Notification [accepted]")
 end
 {json: {}, status: 202}
 
 when :request
-
-should_stream = (accept_header.include?("text/event-stream") && !accept_header.include?("application/json")) ||
-has_progress_token
-
-if should_stream
+if accept_header.include?("text/event-stream")
 {
 stream: true,
 headers: {
@@ -329,32 +359,33 @@ module ModelContextProtocol
 "Cache-Control" => "no-cache",
 "Connection" => "keep-alive"
 },
-stream_proc:
+stream_proc: create_request_response_sse_stream_proc(body, session_id)
 }
-
-
-
-if result
-response = Response[id: body["id"], result: result.serialized]
+elsif (result = @router.route(body, request_store: @request_store, session_id: session_id, transport: self))
+response = Response[id: body["id"], result: result.serialized]
 
-
-
-
-
+log_to_server_with_context(request_id: response.id) do |logger|
+logger.info("← #{body["method"]} Response")
+logger.info(" #{response.serialized.to_json}")
+end
 
-
-
-
-
-
-
-
+{
+json: response.serialized,
+status: 200,
+headers: {"Content-Type" => "application/json"}
+}
+else
+log_to_server_with_context do |logger|
+logger.info("← Response (status: 204)")
 end
+{json: {}, status: 204}
 end
 end
 end
 
-
+# Handle HTTP GET requests to establish persistent SSE connections for notifications
+# Validates session requirements and Accept headers before opening long-lived streams
+def handle_get_request(env)
 accept_header = env["HTTP_ACCEPT"] || ""
 unless accept_header.include?("text/event-stream")
 error_response = ErrorResponse[id: nil, error: {code: -32600, message: "Accept header must include text/event-stream"}]
@@ -384,24 +415,110 @@ module ModelContextProtocol
 "Cache-Control" => "no-cache",
 "Connection" => "keep-alive"
 },
-stream_proc:
+stream_proc: create_persistent_notification_sse_stream_proc(session_id, last_event_id)
 }
 end
 
+# Handle HTTP DELETE requests to clean up sessions and associated resources
+# Removes session data, closes streams, and cleans up request store entries
 def handle_delete_request(env)
 session_id = env["HTTP_MCP_SESSION_ID"]
 
+@server_logger.info("→ DELETE /mcp [Session cleanup: #{session_id || "unknown"}]")
+
 if session_id
 cleanup_session(session_id)
+log_to_server_with_context { |logger| logger.info("Session cleanup: #{session_id}") }
 end
 
+log_to_server_with_context do |logger|
+logger.info("← DELETE Response")
+logger.info(" #{{"success" => true}.to_json}")
+end
 {json: {success: true}, status: 200}
 end
 
-
+# Create SSE stream processor for request-response pattern with real-time progress support
+# Opens stream → Executes request → Sends response → Closes stream
+# Enables progress notifications during long-running operations like tool calls
+def create_request_response_sse_stream_proc(request_body, session_id)
+proc do |stream|
+temp_stream_id = "temp-#{SecureRandom.hex(8)}"
+@stream_registry.register_stream(temp_stream_id, stream)
+
+log_to_server_with_context(request_id: request_body["id"]) do |logger|
+logger.info("← SSE stream [opened] (#{temp_stream_id})")
+logger.info(" Connection will remain open for real-time notifications")
+end
+
+begin
+if (result = @router.route(request_body, request_store: @request_store, session_id: session_id, transport: self, stream_id: temp_stream_id))
+response = Response[id: request_body["id"], result: result.serialized]
+event_id = next_event_id
+send_sse_event(stream, response.serialized, event_id)
+@server_logger.debug("Sent response via SSE stream (id: #{request_body["id"]})")
+else
+event_id = next_event_id
+send_sse_event(stream, {}, event_id)
+@server_logger.debug("Sent empty response via SSE stream (id: #{request_body["id"]})")
+end
+
+close_stream(temp_stream_id, reason: "request_completed")
+rescue IOError, Errno::EPIPE, Errno::ECONNRESET => e
+@server_logger.debug("Client disconnected during progressive request processing: #{e.class.name}")
+log_to_server_with_context { |logger| logger.info("← SSE stream [closed] (#{temp_stream_id}) [client_disconnected]") }
+ensure
+@stream_registry.unregister_stream(temp_stream_id)
+end
+end
+end
+
+# Generate unique sequential event IDs for SSE streams
+# Enables client-side event replay and ordering guarantees
+def next_event_id
+@event_counter.next_event_id
+end
+
+# Send formatted SSE event to stream with optional event ID
+# Handles JSON serialization and proper SSE formatting with data/id fields
+def send_sse_event(stream, data, event_id = nil)
+if event_id
+stream.write("id: #{event_id}\n")
+end
+message = data.is_a?(String) ? data : data.to_json
+stream.write("data: #{message}\n\n")
+stream.flush if stream.respond_to?(:flush)
+end
+
+# Close an active SSE stream and clean up associated resources
+# Unregisters from stream registry and marks session inactive
+def close_stream(session_id, reason: "completed")
+if (stream = @stream_registry.get_local_stream(session_id))
+begin
+stream.close
+rescue IOError, Errno::EPIPE, Errno::ECONNRESET, Errno::ENOTCONN, Errno::EBADF
+nil
+end
+
+reason_text = reason ? " [#{reason}]" : ""
+log_to_server_with_context { |logger| logger.info("← SSE stream [closed] (#{session_id})#{reason_text}") }
+@stream_registry.unregister_stream(session_id)
+@session_store.mark_stream_inactive(session_id) if @require_sessions
+end
+end
+
+# Create SSE stream processor for long-lived notification streams
+# Opens stream → Keeps connection alive → Receives notifications over time
+# Supports event replay from last_event_id for client reconnection scenarios
+def create_persistent_notification_sse_stream_proc(session_id, last_event_id = nil)
 proc do |stream|
 @stream_registry.register_stream(session_id, stream) if session_id
 
+log_to_server_with_context do |logger|
+logger.info("← SSE stream [opened] (#{session_id || "no-session"})")
+logger.info(" Connection will remain open for real-time notifications")
+end
+
 if last_event_id
 replay_messages_after_event_id(stream, session_id, last_event_id)
 else
@@ -413,137 +530,258 @@ module ModelContextProtocol
 sleep 0.1
 end
 ensure
-
+if session_id
+log_to_server_with_context { |logger| logger.info("← SSE stream [closed] (#{session_id}) [loop_ended]") }
+@stream_registry.unregister_stream(session_id)
+end
 end
 end
 
+# Test if an SSE stream is still connected by checking its status
+# Returns false if stream has been disconnected due to network issues
+# Actual connectivity testing is done via MCP ping requests in monitor_streams
 def stream_connected?(stream)
 return false unless stream
 
 begin
-stream
-
+# Check if stream reports as closed first (quick check)
+if stream.respond_to?(:closed?) && stream.closed?
+return false
+end
+
 true
 rescue IOError, Errno::EPIPE, Errno::ECONNRESET, Errno::ENOTCONN, Errno::EBADF
 false
 end
 end
 
+# Start background thread to monitor stream health and clean up disconnected streams
+# Runs every 30 seconds to detect client disconnections and prevent resource leaks
 def start_stream_monitor
+@stream_monitor_running = true
 @stream_monitor_thread = Thread.new do
-
-
+while @stream_monitor_running
+# Sleep in 1-second intervals to allow quick shutdown response
+30.times do
+break unless @stream_monitor_running
+sleep 1
+end
+
+next unless @stream_monitor_running
 
 begin
 monitor_streams
 rescue => e
-@
+@server_logger.error("Stream monitor error: #{e.message}")
 end
 end
-rescue => e
-@configuration.logger.error("Stream monitor thread error", error: e.message)
-sleep 5
-retry
 end
 end
 
+# Monitor all active streams for connectivity and clean up expired/disconnected ones
+# Sends ping messages and removes streams that fail to respond
 def monitor_streams
 expired_sessions = @stream_registry.cleanup_expired_streams
+unless expired_sessions.empty?
+@server_logger.debug("Cleaned up #{expired_sessions.size} expired streams: #{expired_sessions.join(", ")}")
+end
+
 expired_sessions.each do |session_id|
 @session_store.mark_stream_inactive(session_id)
 end
 
+# Check for expired ping requests and close unresponsive streams
+expired_pings = @server_request_store.get_expired_requests(@ping_timeout)
+unless expired_pings.empty?
+@server_logger.debug("Found #{expired_pings.size} expired ping requests")
+expired_pings.each do |ping_info|
+session_id = ping_info[:session_id]
+request_id = ping_info[:request_id]
+age = ping_info[:age]
+
+@server_logger.warn("Ping timeout for session #{session_id} (request: #{request_id}, age: #{age.round(2)}s)")
+close_stream(session_id, reason: "ping_timeout")
+@server_request_store.unregister_request(request_id)
+end
+end
+
 @stream_registry.get_all_local_streams.each do |session_id, stream|
 if stream_connected?(stream)
-send_ping_to_stream(stream)
+send_ping_to_stream(stream, session_id)
 @stream_registry.refresh_heartbeat(session_id)
 else
+@server_logger.debug("Stream disconnected during monitoring: #{session_id}")
 close_stream(session_id, reason: "client_disconnected")
 end
-rescue IOError, Errno::EPIPE, Errno::ECONNRESET, Errno::ENOTCONN, Errno::EBADF
+rescue IOError, Errno::EPIPE, Errno::ECONNRESET, Errno::ENOTCONN, Errno::EBADF => e
+@server_logger.debug("Network error during stream monitoring for #{session_id}: #{e.class.name}")
 close_stream(session_id, reason: "network_error")
 end
 end
 
-
-
-
+# Send MCP-compliant ping request to test connectivity and expect response
+# Tracks the ping in server request store for timeout detection
+def send_ping_to_stream(stream, session_id)
+ping_id = "ping-#{SecureRandom.hex(8)}"
+ping_request = {
+jsonrpc: "2.0",
+id: ping_id,
+method: "ping"
+}
+
+@server_request_store.register_request(ping_id, session_id, type: :ping)
+send_to_stream(stream, ping_request)
+
+@server_logger.debug("Sent MCP ping request (id: #{ping_id}) to stream: #{session_id}")
 end
 
+# Send data to an SSE stream with proper event formatting and error handling
+# Automatically closes stream on connection errors to prevent resource leaks
 def send_to_stream(stream, data)
 event_id = next_event_id
 send_sse_event(stream, data, event_id)
 end
 
+# Replay missed messages from Redis after client reconnection
+# Enables clients to catch up on messages they missed during disconnection
 def replay_messages_after_event_id(stream, session_id, last_event_id)
 flush_notifications_to_stream(stream)
 end
 
+# Deliver data to a specific session's stream or queue for cross-server delivery
+# Handles both local stream delivery and cross-server message queuing
+# @return [Boolean] true if delivered to active stream, false if queued
 def deliver_to_session_stream(session_id, data)
 if @stream_registry.has_local_stream?(session_id)
 stream = @stream_registry.get_local_stream(session_id)
 begin
+# MANDATORY connection validation before every delivery
+@server_logger.debug("Validating stream connection for #{session_id}")
+unless stream_connected?(stream)
+@server_logger.warn("Stream #{session_id} failed connection validation - cleaning up")
+close_stream(session_id, reason: "connection_validation_failed")
+return false
+end
+
+@server_logger.debug("Stream #{session_id} passed connection validation")
 send_to_stream(stream, data)
+@server_logger.debug("Successfully delivered message to active stream: #{session_id}")
 return true
-rescue IOError, Errno::EPIPE, Errno::ECONNRESET
-
+rescue IOError, Errno::EPIPE, Errno::ECONNRESET, Errno::ENOTCONN, Errno::EBADF => e
+@server_logger.warn("Failed to deliver to stream #{session_id}, network error: #{e.class.name}")
+close_stream(session_id, reason: "network_error")
+return false
 end
 end
 
+@server_logger.debug("No local stream found for session #{session_id}, queuing message")
 @session_store.queue_message_for_session(session_id, data)
+false
 end
 
+# Clean up all resources associated with a session
+# Removes from stream registry, session store, request store, and server request store
 def cleanup_session(session_id)
 @stream_registry.unregister_stream(session_id)
 @session_store.cleanup_session(session_id)
 @request_store.cleanup_session_requests(session_id)
+@server_request_store.cleanup_session_requests(session_id)
 end
 
-
-
-end
-
+# Check if this transport instance has any active local streams
+# Used to determine if notifications should be queued or delivered immediately
 def has_active_streams?
 @stream_registry.has_any_local_streams?
 end
 
+# Broadcast notification to all active streams on this transport instance
+# Handles connection errors gracefully and removes disconnected streams
 def deliver_to_active_streams(notification)
+delivered_count = 0
+disconnected_streams = []
+
 @stream_registry.get_all_local_streams.each do |session_id, stream|
+# Verify stream is still connected before attempting delivery
+unless stream_connected?(stream)
+disconnected_streams << session_id
+next
+end
+
 send_to_stream(stream, notification)
-
+delivered_count += 1
+@server_logger.debug("Delivered notification to stream: #{session_id}")
+rescue IOError, Errno::EPIPE, Errno::ECONNRESET, Errno::ENOTCONN, Errno::EBADF => e
+@server_logger.debug("Failed to deliver notification to stream #{session_id}, client disconnected: #{e.class.name}")
+disconnected_streams << session_id
+end
+
+# Clean up disconnected streams
+disconnected_streams.each do |session_id|
 close_stream(session_id, reason: "client_disconnected")
 end
+
+@server_logger.debug("Delivered notifications to #{delivered_count} streams, cleaned up #{disconnected_streams.size} disconnected streams")
 end
 
+# Flush any queued notifications to a newly connected stream
+# Ensures clients receive notifications that were queued while disconnected
 def flush_notifications_to_stream(stream)
 notifications = @notification_queue.pop_all
-notifications.
-
+@server_logger.debug("Checking notification queue: #{notifications.size} notifications queued")
+if notifications.empty?
+@server_logger.debug("No queued notifications to flush")
+else
+@server_logger.debug("Flushing #{notifications.size} queued notifications to new stream")
+notifications.each do |notification|
+send_to_stream(stream, notification)
+@server_logger.debug("Flushed queued notification: #{notification[:method]}")
+end
+end
+end
+
+# Handle ping responses from clients to mark server-initiated ping requests as completed
+# Returns true if this was a ping response, false otherwise
+def handle_ping_response(message)
+response_id = message["id"]
+return false unless response_id
+
+# Check if this response ID corresponds to a pending ping request
+if @server_request_store.pending?(response_id)
+request_info = @server_request_store.get_request(response_id)
+if request_info && request_info["type"] == "ping"
+@server_request_store.mark_completed(response_id)
+@server_logger.debug("Received ping response for request: #{response_id}")
+return true
+end
 end
+
+false
+rescue => e
+@server_logger.error("Error processing ping response: #{e.message}")
+false
 end
 
-# Handle
-#
-# @param message [Hash] the cancellation notification message
-# @param session_id [String, nil] the session ID if available
+# Handle client cancellation requests to abort in-progress operations
+# Marks requests as cancelled in the request store to stop ongoing work
 def handle_cancellation(message, session_id = nil)
 params = message["params"]
 return unless params
 
-
+jsonrpc_request_id = params["requestId"]
 reason = params["reason"]
 
-return unless
+return unless jsonrpc_request_id
 
-
-
-
-end
+log_to_server_with_context(request_id: jsonrpc_request_id) do |logger|
+logger.info("Processing cancellation (reason: #{reason || "unknown"})")
+end
 
-
-
-
-
+@request_store.mark_cancelled(jsonrpc_request_id, reason)
+rescue => e
+log_to_server_with_context(request_id: jsonrpc_request_id) do |logger|
+logger.error("Error processing cancellation: #{e.message}")
+end
+nil
 end
 end
 end