mcp_on_ruby 0.1.0

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in the public registry.
@@ -0,0 +1,265 @@
+ # frozen_string_literal: true
+
+ module RubyMCP
+   module Providers
+     class Openai < Base
+       def list_engines
+         response = create_client.get('models')
+
+         unless response.success?
+           raise RubyMCP::Errors::ProviderError,
+                 "Failed to list OpenAI models: #{response.body['error']&.dig('message') || response.status}"
+         end
+
+         models = response.body['data']
+
+         models.map do |model_data|
+           next unless model_data['id'].start_with?('gpt')
+
+           capabilities = ['text-generation']
+           capabilities << 'streaming' if model_data['id'].start_with?('gpt-3.5', 'gpt-4')
+           capabilities << 'tool-calls' if model_data['id'].start_with?('gpt-3.5', 'gpt-4')
+
+           RubyMCP::Models::Engine.new(
+             id: "openai/#{model_data['id']}",
+             provider: 'openai',
+             model: model_data['id'],
+             capabilities: capabilities
+           )
+         end.compact
+       end
+
+       def generate(context, options = {})
+         messages = format_messages(context)
+
+         payload = {
+           model: options[:model],
+           messages: messages,
+           max_tokens: options[:max_tokens],
+           temperature: options[:temperature],
+           top_p: options[:top_p],
+           frequency_penalty: options[:frequency_penalty],
+           presence_penalty: options[:presence_penalty],
+           stop: options[:stop]
+         }.compact
+
+         if options[:tools]
+           payload[:tools] = options[:tools]
+           payload[:tool_choice] = options[:tool_choice] || 'auto'
+         end
+
+         response = create_client.post('chat/completions', payload)
+
+         unless response.success?
+           raise RubyMCP::Errors::ProviderError,
+                 "OpenAI generation failed: #{response.body['error']&.dig('message') || response.status}"
+         end
+
+         choice = response.body['choices']&.first
+         content = choice&.dig('message', 'content')
+
+         # Handle tool calls
+         tool_calls = nil
+         if choice&.dig('message', 'tool_calls')
+           tool_calls = choice['message']['tool_calls'].map do |tc|
+             {
+               id: tc['id'],
+               type: 'function',
+               function: {
+                 name: tc['function']['name'],
+                 arguments: tc['function']['arguments']
+               }
+             }
+           end
+         end
+
+         result = {
+           provider: 'openai',
+           model: options[:model],
+           created_at: Time.now.utc.iso8601
+         }
+
+         if tool_calls
+           result[:tool_calls] = tool_calls
+         else
+           result[:content] = content
+         end
+
+         result
+       end
+
+       def generate_stream(context, options = {})
+         messages = format_messages(context)
+
+         payload = {
+           model: options[:model],
+           messages: messages,
+           max_tokens: options[:max_tokens],
+           temperature: options[:temperature],
+           top_p: options[:top_p],
+           frequency_penalty: options[:frequency_penalty],
+           presence_penalty: options[:presence_penalty],
+           stop: options[:stop],
+           stream: true
+         }.compact
+
+         if options[:tools]
+           payload[:tools] = options[:tools]
+           payload[:tool_choice] = options[:tool_choice] || 'auto'
+         end
+
+         conn = create_client
+
+         # Update the client to handle streaming
+         conn.options.timeout = 120 # Longer timeout for streaming
+
+         generation_id = SecureRandom.uuid
+         content_buffer = ''
+         current_tool_calls = []
+
+         # Initial event
+         yield({
+           id: generation_id,
+           event: 'generation.start',
+           created_at: Time.now.utc.iso8601
+         })
+
+         begin
+           conn.post('chat/completions') do |req|
+             req.body = payload.to_json
+             req.options.on_data = proc do |chunk, _size, _total|
+               next if chunk.strip.empty?
+
+               # Process each SSE event
+               chunk.split('data: ').each do |data|
+                 next if data.strip.empty?
+
+                 # Skip "[DONE]" marker
+                 next if data.strip == '[DONE]'
+
+                 begin
+                   json = JSON.parse(data.strip)
+                   delta = json.dig('choices', 0, 'delta')
+
+                   if delta&.key?('content') && delta['content']
+                     content_buffer += delta['content']
+
+                     # Send content update
+                     yield({
+                       id: generation_id,
+                       event: 'generation.content',
+                       created_at: Time.now.utc.iso8601,
+                       content: delta['content']
+                     })
+                   end
+
+                   # Handle tool call updates
+                   if delta&.key?('tool_calls')
+                     delta['tool_calls'].each do |tc|
+                       tc_id = tc['index']
+
+                       # Initialize tool call if it's new
+                       current_tool_calls[tc_id] ||= {
+                         'id' => SecureRandom.uuid,
+                         'type' => 'function',
+                         'function' => {
+                           'name' => '',
+                           'arguments' => ''
+                         }
+                       }
+
+                       # Update function name
+                       if tc.dig('function', 'name')
+                         current_tool_calls[tc_id]['function']['name'] += tc['function']['name']
+                       end
+
+                       # Update arguments
+                       if tc.dig('function', 'arguments')
+                         current_tool_calls[tc_id]['function']['arguments'] += tc['function']['arguments']
+                       end
+
+                       # Send tool call update
+                       yield({
+                         id: generation_id,
+                         event: 'generation.tool_call',
+                         created_at: Time.now.utc.iso8601,
+                         tool_calls: current_tool_calls
+                       })
+                     end
+                   end
+                 rescue JSON::ParserError => e
+                   # Skip invalid JSON
+                   RubyMCP.logger.warn "Invalid JSON in OpenAI stream: #{e.message}"
+                 end
+               end
+             end
+           end
+         rescue Faraday::Error => e
+           raise RubyMCP::Errors::ProviderError, "OpenAI streaming failed: #{e.message}"
+         end
+
+         # Final event
+         if current_tool_calls.any?
+           # Final tool calls event
+           yield({
+             id: generation_id,
+             event: 'generation.complete',
+             created_at: Time.now.utc.iso8601,
+             tool_calls: current_tool_calls
+           })
+         else
+           # Final content event
+           yield({
+             id: generation_id,
+             event: 'generation.complete',
+             created_at: Time.now.utc.iso8601,
+             content: content_buffer
+           })
+         end
+       end
+
+       def abort_generation(_generation_id)
+         # OpenAI doesn't support aborting generations yet
+         raise RubyMCP::Errors::ProviderError, "OpenAI doesn't support aborting generations"
+       end
+
+       protected
+
+       def default_api_base
+         'https://api.openai.com/v1'
+       end
+
+       private
+
+       def format_messages(context)
+         context.messages.map do |msg|
+           # Convert to OpenAI's message format
+           message = { 'role' => msg.role, 'content' => msg.content }
+
+           # Handle structured content
+           if msg.content_type == 'array'
+             content_parts = []
+
+             msg.content.each do |part|
+               if part.is_a?(String)
+                 content_parts << { 'type' => 'text', 'text' => part }
+               elsif part.is_a?(Hash)
+                 if part[:type] == 'text'
+                   content_parts << { 'type' => 'text', 'text' => part[:text] }
+                 elsif part[:type] == 'content_pointer'
+                   # We don't have file IDs for OpenAI here
+                   # In a real implementation, we would upload the file to OpenAI
+                   content_parts << { 'type' => 'text', 'text' => "[Content reference: #{part[:content_id]}]" }
+                 end
+               end
+             end
+
+             message['content'] = content_parts
+           end
+
+           message
+         end
+       end
+     end
+   end
+ end
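A rough usage sketch for the provider above (the Base superclass is not part of this hunk, so the constructor arguments, the behaviour of create_client, and the shape of the context object are assumptions here):

    # Hypothetical wiring; constructor options and the context object are assumed.
    provider = RubyMCP::Providers::Openai.new(api_key: ENV['OPENAI_API_KEY'])

    result = provider.generate(context, model: 'gpt-4', max_tokens: 256)
    puts result[:content] unless result[:tool_calls]

    provider.generate_stream(context, model: 'gpt-4') do |event|
      print event[:content] if event[:event] == 'generation.content'
    end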
@@ -0,0 +1,56 @@
+ # frozen_string_literal: true
+
+ require 'dry-schema'
+
+ module RubyMCP
+   module Schemas
+     # Define schemas using dry-schema
+
+     ContextSchema = Dry::Schema.JSON do
+       optional(:id).maybe(:string).filled(format?: /^ctx_[a-zA-Z0-9]+$/)
+
+       optional(:messages).array(:hash) do
+         required(:role).filled(:string, included_in?: %w[user assistant system tool])
+         required(:content).filled
+         optional(:id).maybe(:string)
+         optional(:metadata).maybe(:hash)
+       end
+
+       optional(:metadata).maybe(:hash)
+     end
+
+     MessageSchema = Dry::Schema.JSON do
+       required(:context_id).filled(:string, format?: /^ctx_[a-zA-Z0-9]+$/)
+       required(:role).filled(:string, included_in?: %w[user assistant system tool])
+       required(:content).filled
+       optional(:id).maybe(:string)
+       optional(:metadata).maybe(:hash)
+     end
+
+     GenerateSchema = Dry::Schema.JSON do
+       required(:context_id).filled(:string, format?: /^ctx_[a-zA-Z0-9]+$/)
+       required(:engine_id).filled(:string, format?: %r{^[a-z0-9-]+/[a-z0-9-]+$})
+
+       optional(:max_tokens).maybe(:integer, gt?: 0)
+       optional(:temperature).maybe(:float, gteq?: 0, lteq?: 2)
+       optional(:top_p).maybe(:float, gteq?: 0, lteq?: 1)
+       optional(:frequency_penalty).maybe(:float, gteq?: -2, lteq?: 2)
+       optional(:presence_penalty).maybe(:float, gteq?: -2, lteq?: 2)
+       optional(:stop).maybe(:string)
+       optional(:update_context).maybe(:bool)
+
+       # Tool calling support could be added here
+     end
+
+     ContentSchema = Dry::Schema.JSON do
+       required(:context_id).filled(:string, format?: /^ctx_[a-zA-Z0-9]+$/)
+       optional(:id).maybe(:string)
+       optional(:type).maybe(:string)
+
+       optional(:file_data).maybe(:string)
+       optional(:filename).maybe(:string)
+       optional(:content_type).maybe(:string)
+       optional(:data).maybe(:hash)
+     end
+   end
+ end
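These are standard dry-schema definitions, so each constant responds to call and returns a result object; a minimal sketch against MessageSchema (string keys, as parsed JSON would provide):

    result = RubyMCP::Schemas::MessageSchema.call(
      'context_id' => 'ctx_abc123',
      'role'       => 'user',
      'content'    => 'Hello'
    )

    result.success?    # => true when the payload satisfies the schema
    result.errors.to_h # => empty hash on success, keyed messages otherwise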
@@ -0,0 +1,84 @@
+ # frozen_string_literal: true
+
+ require 'rack'
+ require 'rack/cors'
+ require 'json'
+
+ module RubyMCP
+   module Server
+     class App
+       attr_reader :config
+
+       def initialize(config = RubyMCP.configuration)
+         @config = config
+         @router = Router.new
+         setup_routes
+       end
+
+       def call(env)
+         request = Rack::Request.new(env)
+
+         # Handle CORS preflight requests
+         return [200, {}, []] if request.request_method == 'OPTIONS'
+
+         # Authenticate if required
+         if @config.auth_required && !authenticate(request)
+           return [401, { 'Content-Type' => 'application/json' }, [{ error: 'Unauthorized' }.to_json]]
+         end
+
+         # Route the request
+         response = @router.route(request)
+
+         # Default to 404 if no route matched
+         response || [404, { 'Content-Type' => 'application/json' }, [{ error: 'Not found' }.to_json]]
+       end
+
+       def rack_app
+         app = self
+
+         Rack::Builder.new do
+           use Rack::Cors do
+             allow do
+               origins '*'
+               resource '*',
+                        headers: :any,
+                        methods: %i[get post put delete options]
+             end
+           end
+
+           run app
+         end
+       end
+
+       private
+
+       def setup_routes
+         @router.add('GET', '/engines', EnginesController, :index)
+         @router.add('POST', '/contexts', ContextsController, :create)
+         @router.add('GET', '/contexts', ContextsController, :index)
+         @router.add('GET', '/contexts/:id', ContextsController, :show)
+         @router.add('DELETE', '/contexts/:id', ContextsController, :destroy)
+         @router.add('POST', '/messages', MessagesController, :create)
+         @router.add('POST', '/generate', GenerateController, :create)
+         @router.add('POST', '/generate/stream', GenerateController, :stream)
+         @router.add('POST', '/content', ContentController, :create)
+         @router.add('GET', '/content/:context_id/:id', ContentController, :show)
+       end
+
+       def authenticate(request)
+         auth_header = request.env['HTTP_AUTHORIZATION']
+         return false unless auth_header
+
+         token = auth_header.split(' ').last
+         return false unless token
+
+         begin
+           JWT.decode(token, @config.jwt_secret, true, { algorithm: 'HS256' })
+           true
+         rescue JWT::DecodeError
+           false
+         end
+       end
+     end
+   end
+ end
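A minimal config.ru sketch for mounting this app under any Rack server; the require path and the RubyMCP.configure block are assumptions based on the configuration object referenced above, not code shown in this diff:

    # config.ru (hypothetical example)
    require 'mcp_on_ruby'   # require path assumed

    RubyMCP.configure do |config|
      config.auth_required = false   # setter name assumed from @config.auth_required above
    end

    run RubyMCP::Server::App.new.rack_app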
@@ -0,0 +1,49 @@
+ # frozen_string_literal: true
+
+ module RubyMCP
+   module Server
+     class BaseController
+       attr_reader :request, :params
+
+       def initialize(request, params = {})
+         @request = request
+         @params = params
+       end
+
+       protected
+
+       def json_response(status, data)
+         body = data.to_json
+         headers = {
+           'Content-Type' => 'application/json',
+           'Content-Length' => body.bytesize.to_s
+         }
+         [status, headers, [body]]
+       end
+
+       def ok(data = {})
+         json_response(200, data)
+       end
+
+       def created(data = {})
+         json_response(201, data)
+       end
+
+       def bad_request(error = 'Bad request')
+         json_response(400, { error: error })
+       end
+
+       def not_found(error = 'Not found')
+         json_response(404, { error: error })
+       end
+
+       def server_error(error = 'Internal server error')
+         json_response(500, { error: error })
+       end
+
+       def storage
+         RubyMCP.configuration.storage_instance
+       end
+     end
+   end
+ end
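The controllers that follow all build on these helpers; a sketch of what a minimal subclass looks like (PingController is hypothetical and not part of the gem; the Router is expected to instantiate controllers with the request and extracted params):

    module RubyMCP
      module Server
        class PingController < BaseController
          def show
            ok({ pong: true, id: params[:id] })
          end
        end
      end
    end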
@@ -0,0 +1,68 @@
+ # frozen_string_literal: true
+
+ require 'base64'
+
+ module RubyMCP
+   module Server
+     class ContentController < BaseController
+       def create
+         context_id = params[:context_id]
+         content_id = params[:id] || "cnt_#{SecureRandom.hex(10)}"
+         content_type = params[:type] || 'file'
+
+         begin
+           # Get context to ensure it exists
+           storage.get_context(context_id)
+
+           # Handle file data (base64 encoded)
+           data = if params[:file_data]
+                    {
+                      filename: params[:filename],
+                      content_type: params[:content_type] || 'application/octet-stream',
+                      data: Base64.strict_decode64(params[:file_data])
+                    }
+                  else
+                    params[:data] || {}
+                  end
+
+           # Store the content
+           storage.add_content(context_id, content_id, data)
+
+           created({
+             id: content_id,
+             context_id: context_id,
+             type: content_type
+           })
+         rescue RubyMCP::Errors::ContextError => e
+           not_found(e.message)
+         rescue ArgumentError => e
+           # Handle base64 decoding errors
+           bad_request("Invalid file_data: #{e.message}")
+         end
+       end
+
+       def show
+         context_id = params[:context_id]
+         content_id = params[:id]
+
+         begin
+           content = storage.get_content(context_id, content_id)
+
+           if content[:filename] && content[:data]
+             # Send file response
+             headers = {
+               'Content-Type' => content[:content_type],
+               'Content-Disposition' => "attachment; filename=\"#{content[:filename]}\""
+             }
+             [200, headers, [content[:data]]]
+           else
+             # Send JSON response
+             ok(content)
+           end
+         rescue RubyMCP::Errors::ContextError, RubyMCP::Errors::ContentError => e
+           not_found(e.message)
+         end
+       end
+     end
+   end
+ end
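A sketch of the JSON body #create expects when uploading a file; the field names come from the controller above, while the file name and context id are illustrative:

    require 'base64'
    require 'json'

    payload = {
      context_id: 'ctx_abc123',
      filename: 'notes.txt',
      content_type: 'text/plain',
      file_data: Base64.strict_encode64(File.read('notes.txt'))
    }.to_json

    # POST payload to /content; the response carries the generated content id, and
    # GET /content/ctx_abc123/<id> then returns the file with a Content-Disposition header.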
@@ -0,0 +1,63 @@
+ # frozen_string_literal: true
+
+ module RubyMCP
+   module Server
+     class ContextsController < BaseController
+       def index
+         limit = (params[:limit] || 50).to_i
+         offset = (params[:offset] || 0).to_i
+
+         contexts = storage.list_contexts(limit: limit, offset: offset)
+         ok({ contexts: contexts.map(&:to_h) })
+       end
+
+       def show
+         context = storage.get_context(params[:id])
+         ok(context.to_h)
+       rescue RubyMCP::Errors::ContextError => e
+         not_found(e.message)
+       end
+
+       def create
+         # Validate the request
+         RubyMCP::Validator.validate_context(params)
+
+         # Create a new context
+         messages = []
+
+         # If messages were provided, create message objects
+         if params[:messages].is_a?(Array)
+           params[:messages].each do |msg|
+             messages << RubyMCP::Models::Message.new(
+               role: msg[:role],
+               content: msg[:content],
+               id: msg[:id],
+               metadata: msg[:metadata]
+             )
+           end
+         end
+
+         # Create the context
+         context = RubyMCP::Models::Context.new(
+           id: params[:id],
+           messages: messages,
+           metadata: params[:metadata]
+         )
+
+         # Store the context
+         storage.create_context(context)
+
+         created(context.to_h)
+       rescue RubyMCP::Errors::ValidationError => e
+         bad_request(e.message)
+       end
+
+       def destroy
+         context = storage.delete_context(params[:id])
+         ok(context.to_h)
+       rescue RubyMCP::Errors::ContextError => e
+         not_found(e.message)
+       end
+     end
+   end
+ end
@@ -0,0 +1,29 @@
+ # frozen_string_literal: true
+
+ require 'rack'
+ require 'rack/handler/webrick'
+
+ module RubyMCP
+   module Server
+     class Controller
+       def initialize(config = RubyMCP.configuration)
+         @config = config
+         @app = App.new(config)
+       end
+
+       def start
+         options = {
+           Host: @config.server_host,
+           Port: @config.server_port
+         }
+
+         RubyMCP.logger.info "Starting RubyMCP server on #{@config.server_host}:#{@config.server_port}"
+         Rack::Handler::WEBrick.run @app.rack_app, **options
+       end
+
+       def stop
+         # Nothing to do here yet, but will be useful if we add a more complex server
+       end
+     end
+   end
+ end
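Starting the WEBrick-backed server from this class is then a one-liner; host and port come from the configuration object (server_host and server_port are read above, but how they are set is outside this diff):

    RubyMCP::Server::Controller.new.start
    # logs "Starting RubyMCP server on <host>:<port>" and blocks while WEBrick serves requests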
@@ -0,0 +1,34 @@
+ # frozen_string_literal: true
+
+ module RubyMCP
+   module Server
+     class EnginesController < BaseController
+       def index
+         engines = []
+
+         RubyMCP.configuration.providers.each do |provider_name, provider_config|
+           provider_class = get_provider_class(provider_name)
+           next unless provider_class
+
+           provider = provider_class.new(provider_config)
+           engines.concat(provider.list_engines)
+         end
+
+         ok({ engines: engines.map(&:to_h) })
+       end
+
+       private
+
+       def get_provider_class(provider_name)
+         class_name = provider_name.to_s.capitalize
+
+         if RubyMCP::Providers.const_defined?(class_name)
+           RubyMCP::Providers.const_get(class_name)
+         else
+           RubyMCP.logger.warn "Provider not found: #{provider_name}"
+           nil
+         end
+       end
+     end
+   end
+ end