ollama-client 0.2.6 → 0.2.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +9 -0
- data/README.md +208 -25
- data/RELEASE_NOTES_v0.2.6.md +41 -0
- data/docs/AREAS_FOR_CONSIDERATION.md +325 -0
- data/docs/FEATURES_ADDED.md +12 -1
- data/examples/README.md +14 -0
- data/examples/basic_chat.rb +0 -0
- data/examples/basic_generate.rb +0 -0
- data/examples/mcp_executor.rb +39 -0
- data/examples/mcp_http_executor.rb +45 -0
- data/examples/tool_calling_parsing.rb +0 -0
- data/examples/tool_dto_example.rb +0 -0
- data/lib/ollama/config.rb +5 -3
- data/lib/ollama/embeddings.rb +40 -22
- data/lib/ollama/mcp/http_client.rb +149 -0
- data/lib/ollama/mcp/stdio_client.rb +146 -0
- data/lib/ollama/mcp/tools_bridge.rb +72 -0
- data/lib/ollama/mcp.rb +31 -0
- data/lib/ollama/options.rb +3 -1
- data/lib/ollama/version.rb +1 -1
- data/lib/ollama_client.rb +10 -5
- metadata +10 -2
data/docs/FEATURES_ADDED.md
CHANGED
@@ -43,7 +43,18 @@ client = Ollama::Client.new(config: config)
 **Usage**:
 ```ruby
 options = Ollama::Options.new(temperature: 0.7, top_p: 0.95)
-
+# Use with chat() - chat() accepts options parameter
+client.chat(
+  messages: [{ role: "user", content: "..." }],
+  format: {...},
+  options: options.to_h,
+  allow_chat: true
+)
+
+# Note: generate() doesn't accept options - use config instead
+# config = Ollama::Config.new
+# config.temperature = 0.7
+# client = Ollama::Client.new(config: config)
 ```
 
 **Files Added**:
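A consolidated, runnable version of the snippet above may help readers trying the Options change end-to-end. This is a minimal sketch, not taken verbatim from the package: the `format:` schema is a placeholder, and the `generate(prompt:)` signature is an assumption based on the gem's `basic_generate.rb` example, not confirmed by this diff.

```ruby
require "ollama_client"

client = Ollama::Client.new
options = Ollama::Options.new(temperature: 0.7, top_p: 0.95)

# chat() accepts per-request sampling options
client.chat(
  messages: [{ role: "user", content: "Reply with a short greeting" }],
  format: { "type" => "object" }, # placeholder schema, see lead-in
  options: options.to_h,
  allow_chat: true
)

# generate() takes no options parameter; configure the client instead
config = Ollama::Config.new
config.temperature = 0.7
configured = Ollama::Client.new(config: config)
configured.generate(prompt: "Say hi") # signature assumed, see lead-in
```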
data/examples/README.md
CHANGED
@@ -25,6 +25,14 @@ This directory contains **minimal examples** demonstrating `ollama-client` usage
   - Demonstrates Tool class serialization/deserialization
   - Shows DTO functionality
 
+- **[mcp_executor.rb](mcp_executor.rb)** - MCP tools with Executor (local stdio)
+  - Connects to a local MCP server (stdio)
+  - Requires Node.js/npx for the filesystem server example
+
+- **[mcp_http_executor.rb](mcp_http_executor.rb)** - MCP tools with Executor (remote URL)
+  - Connects to a remote MCP server via HTTP (e.g. [gitmcp.io](https://gitmcp.io)/owner/repo)
+  - Use the same URL you would add to Cursor’s `mcp.json`
+
 ## Running Examples
 
 All examples are standalone and can be run directly:

@@ -41,6 +49,12 @@ ruby examples/tool_calling_parsing.rb
 
 # Tool DTO
 ruby examples/tool_dto_example.rb
+
+# MCP Executor (local stdio; requires Node.js/npx)
+ruby examples/mcp_executor.rb
+
+# MCP Executor (remote URL, e.g. GitMCP)
+ruby examples/mcp_http_executor.rb
 ```
 
 ### Requirements
data/examples/basic_chat.rb
CHANGED
File without changes

data/examples/basic_generate.rb
CHANGED
File without changes
data/examples/mcp_executor.rb
ADDED
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+# Example: Use a local MCP server's tools with Ollama::Agent::Executor.
+#
+# Prerequisites:
+#   - Ollama running (localhost:11434)
+#   - Node.js/npx (for @modelcontextprotocol/server-filesystem)
+#
+# Run:
+#   ruby examples/mcp_executor.rb
+#
+# This connects to the MCP filesystem server, fetches its tools, and runs
+# the Executor so the LLM can call those tools (e.g. list directory, read file).
+
+require_relative "../lib/ollama_client"
+
+ollama = Ollama::Client.new
+
+# Local MCP server via stdio; allow /tmp and the project directory
+project_root = File.expand_path("..", __dir__)
+mcp_client = Ollama::MCP::StdioClient.new(
+  command: "npx",
+  args: ["-y", "@modelcontextprotocol/server-filesystem", "/tmp", project_root],
+  timeout_seconds: 60
+)
+
+bridge = Ollama::MCP::ToolsBridge.new(stdio_client: mcp_client)
+tools = bridge.tools_for_executor
+
+executor = Ollama::Agent::Executor.new(ollama, tools: tools)
+
+answer = executor.run(
+  system: "You have access to filesystem tools. Use them when the user asks about files or directories.",
+  user: "What files are in ~/project/ollama-client? List a few."
+)
+
+puts answer
+
+mcp_client.close
data/examples/mcp_http_executor.rb
ADDED
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+# Example: Use a remote MCP server (HTTP URL) with Ollama::Agent::Executor.
+#
+# Works with GitMCP and any MCP-over-HTTP endpoint:
+#   https://gitmcp.io/owner/repo → MCP server for that GitHub repo
+#
+# Prerequisites:
+#   - Ollama running (localhost:11434)
+#   - Network access to the MCP URL
+#
+# Run:
+#   ruby examples/mcp_http_executor.rb
+#
+# To add this MCP to Cursor, use ~/.cursor/mcp.json:
+#   {
+#     "mcpServers": {
+#       "agent-runtime Docs": {
+#         "url": "https://gitmcp.io/shubhamtaywade82/agent-runtime"
+#       }
+#     }
+#   }
+
+require "ollama_client"
+
+client = Ollama::Client.new
+
+mcp_client = Ollama::MCP::HttpClient.new(
+  url: "https://gitmcp.io/shubhamtaywade82/agent-runtime",
+  timeout_seconds: 60
+)
+
+bridge = Ollama::MCP::ToolsBridge.new(client: mcp_client)
+tools = bridge.tools_for_executor
+
+executor = Ollama::Agent::Executor.new(client, tools: tools)
+
+answer = executor.run(
+  system: "You have access to the agent-runtime repository docs. Use tools when the user asks about the repo.",
+  user: "What does this repository do? Summarize briefly."
+)
+
+puts answer
+
+mcp_client.close
data/examples/tool_calling_parsing.rb
CHANGED
File without changes

data/examples/tool_dto_example.rb
CHANGED
File without changes
data/lib/ollama/config.rb
CHANGED
@@ -6,14 +6,16 @@ module Ollama
   # Configuration class with safe defaults for agent-grade usage
   #
   # ⚠️ THREAD SAFETY WARNING:
-  # Global configuration
-  #
-  # configuration
+  # Global configuration access is mutex-protected, but modifying global config
+  # while clients are active can cause race conditions. For concurrent agents
+  # or multi-threaded applications, use per-client configuration (recommended):
   #
   #   config = Ollama::Config.new
   #   config.model = "llama3.1"
   #   client = Ollama::Client.new(config: config)
   #
+  # Each client instance with its own config is thread-safe.
+  #
   class Config
     attr_accessor :base_url, :model, :timeout, :retries, :temperature, :top_p, :num_ctx, :on_response, :allow_chat,
                   :streaming_enabled
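The per-client pattern the rewritten comment recommends, sketched as a runnable snippet. Only `Config.new`, `config.model =`, and `Client.new(config:)` come from this diff; the model names and threading scaffold are illustrative.

```ruby
require "ollama_client"

# One Config + Client per thread: no shared mutable state, no races.
threads = %w[llama3.1 qwen2.5].map do |model_name|
  Thread.new do
    config = Ollama::Config.new
    config.model = model_name
    client = Ollama::Client.new(config: config)
    # ... use `client` here; it never touches the global configuration ...
  end
end
threads.each(&:join)
```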
data/lib/ollama/embeddings.rb
CHANGED
@@ -21,7 +21,8 @@ module Ollama
     # @param input [String, Array<String>] Single text or array of texts
     # @return [Array<Float>, Array<Array<Float>>] Embedding vector(s)
     def embed(model:, input:)
-
+      # Use /api/embed (not /api/embeddings) - the working endpoint
+      uri = URI("#{@config.base_url}/api/embed")
       req = Net::HTTP::Post.new(uri)
       req["Content-Type"] = "application/json"
 

@@ -42,11 +43,12 @@ module Ollama
       handle_http_error(res, requested_model: model) unless res.is_a?(Net::HTTPSuccess)
 
       response_body = JSON.parse(res.body)
-
+      # /api/embed returns "embeddings" (plural) as array of arrays
+      embeddings = response_body["embeddings"] || response_body["embedding"]
 
-      validate_embedding_response!(
+      validate_embedding_response!(embeddings, response_body, model)
 
-      format_embedding_result(
+      format_embedding_result(embeddings, input)
     rescue JSON::ParserError => e
       raise InvalidJSONError, "Failed to parse embeddings response: #{e.message}"
     rescue Net::ReadTimeout, Net::OpenTimeout

@@ -57,42 +59,58 @@ module Ollama
 
     private
 
-    def validate_embedding_response!(
-      if
+    def validate_embedding_response!(embeddings, response_body, model)
+      if embeddings.nil?
        keys = response_body.keys.join(", ")
        response_preview = response_body.inspect[0..200]
-        raise Error, "
+        raise Error, "Embeddings not found in response. Response keys: #{keys}. " \
                      "Full response: #{response_preview}"
      end
 
-
+      # Handle both formats: array of arrays [[...]] or single array [...]
+      # Check if it's empty or contains empty arrays
+      if embeddings.is_a?(Array) && (embeddings.empty? || (embeddings.first.is_a?(Array) && embeddings.first.empty?))
+        error_msg = build_empty_embedding_error_message(model, response_body)
+        raise Error, error_msg
+      end
 
-
-        raise Error, error_msg
+      nil
     end
 
     def build_empty_embedding_error_message(model, response_body)
-      curl_command = "curl http://localhost:11434/api/
+      curl_command = "curl -X POST http://localhost:11434/api/embed " \
                      "-d '{\"model\":\"#{model}\",\"input\":\"test\"}'"
       response_preview = response_body.inspect[0..300]
 
+      # Check for error messages in response
+      error_hint = ""
+      if response_body.is_a?(Hash)
+        if response_body.key?("error")
+          error_hint = "\n   Error from Ollama: #{response_body["error"]}"
+        elsif response_body.key?("message")
+          error_hint = "\n   Message from Ollama: #{response_body["message"]}"
+        end
+      end
+
       "Empty embedding returned. This usually means:\n   " \
       "1. The model may not be properly loaded - try: ollama pull #{model}\n   " \
-      "2. The model may
-      "3.
+      "2. The model file may be corrupted - try: ollama rm #{model} && ollama pull #{model}\n   " \
+      "3. The model may not support embeddings - verify it's an embedding model\n   " \
+      "4. Check if the model is working: #{curl_command}#{error_hint}\n" \
       "Response: #{response_preview}"
     end
 
-    def format_embedding_result(
-
-
-
-
-
-      embedding
+    def format_embedding_result(embeddings, input)
+      # /api/embed returns "embeddings" as array of arrays [[...]]
+      # For single input, it's [[...]], for multiple inputs it's [[...], [...], ...]
+      if embeddings.is_a?(Array) && embeddings.first.is_a?(Array)
+        # Already in correct format (array of arrays)
+        # For single input, return first embedding array
+        # For multiple inputs, return all embedding arrays
+        input.is_a?(Array) ? embeddings : embeddings.first
      else
-        #
-        [
+        # Fallback: single array format (shouldn't happen with /api/embed)
+        input.is_a?(Array) ? [embeddings] : embeddings
      end
    end
 
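The net effect of the `format_embedding_result` change is that the return shape follows the input shape, matching the `@return` tag above. A sketch of the expected shapes, assuming `embed` is exposed on the client (the `@config` usage suggests it is mixed into `Ollama::Client`); `nomic-embed-text` is a placeholder embedding model:

```ruby
client = Ollama::Client.new

# Single string in -> single vector out (Array<Float>)
vec = client.embed(model: "nomic-embed-text", input: "hello")
vec.first # => a Float

# Array in -> one vector per string (Array<Array<Float>>)
vecs = client.embed(model: "nomic-embed-text", input: ["hello", "world"])
vecs.length # => 2
```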
data/lib/ollama/mcp/http_client.rb
ADDED
@@ -0,0 +1,149 @@
+# frozen_string_literal: true
+
+require "json"
+require "net/http"
+require "uri"
+
+module Ollama
+  module MCP
+    # Connects to a remote MCP server via HTTP(S).
+    # Sends JSON-RPC via POST; supports session ID from initialize response.
+    # Use for URLs like https://gitmcp.io/owner/repo.
+    class HttpClient
+      PROTOCOL_VERSION = "2025-11-25"
+
+      def initialize(url:, timeout_seconds: 30, headers: {})
+        @uri = URI(url)
+        @uri.path = "/" if @uri.path.nil? || @uri.path.empty?
+        @timeout = timeout_seconds
+        @extra_headers = headers.transform_keys(&:to_s)
+        @request_id = 0
+        @session_id = nil
+        @initialized = false
+      end
+
+      def start
+        return if @initialized
+
+        run_initialize
+        @initialized = true
+      end
+
+      def tools
+        start
+        response = request("tools/list", {})
+        list = response.dig("result", "tools")
+        return [] unless list.is_a?(Array)
+
+        list.map do |t|
+          {
+            name: (t["name"] || t[:name]).to_s,
+            description: (t["description"] || t[:description]).to_s,
+            input_schema: t["inputSchema"] || t[:input_schema] || { "type" => "object" }
+          }
+        end
+      end
+
+      def call_tool(name:, arguments: {})
+        start
+        response = request("tools/call", "name" => name.to_s, "arguments" => stringify_keys(arguments))
+        result = response["result"]
+        raise Ollama::Error, "tools/call failed: #{response["error"]}" if response["error"]
+        raise Ollama::Error, "tools/call returned no result" unless result
+
+        content_to_string(result["content"])
+      end
+
+      def close
+        @session_id = nil
+        @initialized = false
+      end
+
+      private
+
+      def run_initialize
+        init_params = {
+          "protocolVersion" => PROTOCOL_VERSION,
+          "capabilities" => {},
+          "clientInfo" => {
+            "name" => "ollama-client",
+            "version" => Ollama::VERSION
+          }
+        }
+        response = request("initialize", init_params)
+        raise Ollama::Error, "initialize failed: #{response["error"]}" if response["error"]
+
+        send_notification("notifications/initialized", {})
+      end
+
+      def next_id
+        @request_id += 1
+      end
+
+      def request(method, params)
+        id = next_id
+        msg = { "jsonrpc" => "2.0", "id" => id, "method" => method, "params" => params }
+        post_request(msg, method: method)
+      end
+
+      def send_notification(method, params)
+        body = { "jsonrpc" => "2.0", "method" => method, "params" => params }
+        post_request(body, method: method)
+      end
+
+      def post_request(body, method: nil)
+        req = Net::HTTP::Post.new(@uri)
+        req["Content-Type"] = "application/json"
+        req["Accept"] = "application/json, text/event-stream"
+        req["MCP-Protocol-Version"] = PROTOCOL_VERSION
+        req["MCP-Session-Id"] = @session_id if @session_id
+        @extra_headers.each { |k, v| req[k] = v }
+        req.body = body.is_a?(Hash) ? JSON.generate(body) : body.to_s
+
+        res = http_request(req)
+
+        if method == "initialize" && res["MCP-Session-Id"]
+          @session_id = res["MCP-Session-Id"].to_s.strip
+          @session_id = nil if @session_id.empty?
+        end
+
+        return {} if res.code == "202"
+
+        raise Ollama::Error, "MCP HTTP error: #{res.code} #{res.message}" unless res.is_a?(Net::HTTPSuccess)
+
+        JSON.parse(res.body)
+      end
+
+      def http_request(req)
+        Net::HTTP.start(
+          @uri.hostname,
+          @uri.port,
+          use_ssl: @uri.scheme == "https",
+          read_timeout: @timeout,
+          open_timeout: @timeout
+        ) { |http| http.request(req) }
+      rescue Net::ReadTimeout, Net::OpenTimeout
+        raise Ollama::TimeoutError, "MCP server did not respond within #{@timeout}s"
+      rescue Errno::ECONNREFUSED, Errno::EHOSTUNREACH, SocketError => e
+        raise Ollama::Error, "MCP connection failed: #{e.message}"
+      end
+
+      def stringify_keys(hash)
+        return {} if hash.nil?
+
+        hash.transform_keys(&:to_s)
+      end
+
+      def content_to_string(content)
+        return "" unless content.is_a?(Array)
+
+        content.filter_map do |item|
+          next unless item.is_a?(Hash)
+
+          text = item["text"] || item[:text]
+          text&.to_s
+        end.join("\n")
+      end
+    end
+  end
+end
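`HttpClient` can also be driven directly, without `ToolsBridge` or the Executor, using only the public methods shown above. A sketch; the URL and tool name are illustrative, not taken from this diff:

```ruby
mcp = Ollama::MCP::HttpClient.new(url: "https://gitmcp.io/owner/repo")
mcp.start # POSTs initialize, then the notifications/initialized notification

# tools/list, normalized to { name:, description:, input_schema: } hashes
mcp.tools.each { |t| puts "#{t[:name]}: #{t[:description]}" }

# tools/call; the result's text content items come back joined with newlines
puts mcp.call_tool(name: "some_tool", arguments: { "query" => "readme" })

mcp.close # just drops the session ID; there is no persistent connection
```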
data/lib/ollama/mcp/stdio_client.rb
ADDED
@@ -0,0 +1,146 @@
+# frozen_string_literal: true
+
+require "json"
+require "open3"
+
+module Ollama
+  module MCP
+    # Connects to a local MCP server via stdio (spawns subprocess).
+    # Handles JSON-RPC lifecycle: initialize, tools/list, tools/call.
+    class StdioClient
+      PROTOCOL_VERSION = "2025-11-25"
+
+      def initialize(command:, args: [], env: {}, timeout_seconds: 30)
+        @command = command
+        @args = Array(args)
+        @env = env
+        @timeout = timeout_seconds
+        @request_id = 0
+        @reader = nil
+        @writer = nil
+        @initialized = false
+      end
+
+      def start
+        return if @initialized
+
+        env_merged = ENV.to_h.merge(@env.transform_keys(&:to_s))
+        stdin, stdout = Open3.popen2(env_merged, @command, *@args)
+        @writer = stdin
+        @reader = stdout
+        run_initialize
+        @initialized = true
+      end
+
+      def tools
+        start
+        response = request("tools/list", {})
+        list = response.dig("result", "tools")
+        return [] unless list.is_a?(Array)
+
+        list.map do |t|
+          {
+            name: (t["name"] || t[:name]).to_s,
+            description: (t["description"] || t[:description]).to_s,
+            input_schema: t["inputSchema"] || t[:input_schema] || { "type" => "object" }
+          }
+        end
+      end
+
+      def call_tool(name:, arguments: {})
+        start
+        response = request("tools/call", "name" => name.to_s, "arguments" => stringify_keys(arguments))
+        result = response["result"]
+        raise Ollama::Error, "tools/call failed: #{response["error"]}" if response["error"]
+        raise Ollama::Error, "tools/call returned no result" unless result
+
+        content_to_string(result["content"])
+      end
+
+      def close
+        return unless @writer
+
+        @writer.close
+        @writer = nil
+        @reader = nil
+        @initialized = false
+      end
+
+      private
+
+      def run_initialize
+        init_params = {
+          "protocolVersion" => PROTOCOL_VERSION,
+          "capabilities" => {},
+          "clientInfo" => {
+            "name" => "ollama-client",
+            "version" => Ollama::VERSION
+          }
+        }
+        response = request("initialize", init_params)
+        raise Ollama::Error, "initialize failed: #{response["error"]}" if response["error"]
+
+        send_notification("notifications/initialized", {})
+      end
+
+      def next_id
+        @request_id += 1
+      end
+
+      def request(method, params)
+        id = next_id
+        msg = { "jsonrpc" => "2.0", "id" => id, "method" => method, "params" => params }
+        send_message(msg)
+        wait_for_response(id)
+      end
+
+      def send_notification(method, params)
+        send_message("jsonrpc" => "2.0", "method" => method, "params" => params)
+      end
+
+      def send_message(msg)
+        line = "#{JSON.generate(msg)}\n"
+        @writer.write(line)
+        @writer.flush
+      end
+
+      def wait_for_response(expected_id)
+        loop do
+          line = read_line_with_timeout
+          next if line.nil? || line.strip.empty?
+          next unless line.strip.start_with?("{")
+
+          parsed = JSON.parse(line)
+          next if parsed["method"] # notification from server
+
+          return parsed if parsed["id"] == expected_id
+        end
+      end
+
+      def read_line_with_timeout
+        unless @reader.wait_readable(@timeout)
+          raise Ollama::TimeoutError, "MCP server did not respond within #{@timeout}s"
+        end
+
+        @reader.gets
+      end
+
+      def stringify_keys(hash)
+        return {} if hash.nil?
+
+        hash.transform_keys(&:to_s)
+      end
+
+      def content_to_string(content)
+        return "" unless content.is_a?(Array)
+
+        content.filter_map do |item|
+          next unless item.is_a?(Hash)
+
+          text = item["text"] || item[:text]
+          text&.to_s
+        end.join("\n")
+      end
+    end
+  end
+end
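The stdio transport is newline-delimited JSON-RPC: `send_message` writes one JSON object per line to the child's stdin, and `wait_for_response` scans stdout for the matching reply. A sketch of the framing only, no server involved:

```ruby
require "json"

# What send_message writes for a tools/list request (one line + "\n"):
msg = { "jsonrpc" => "2.0", "id" => 1, "method" => "tools/list", "params" => {} }
wire = "#{JSON.generate(msg)}\n"
# => "{\"jsonrpc\":\"2.0\",\"id\":1,\"method\":\"tools/list\",\"params\":{}}\n"

# wait_for_response then reads stdout line by line and:
#   - skips blank lines and lines not starting with "{"
#   - skips server notifications (parsed objects carrying a "method" key)
#   - returns the first parsed object whose "id" equals the request id
```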
data/lib/ollama/mcp/tools_bridge.rb
ADDED
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require_relative "../tool"
+
+module Ollama
+  module MCP
+    # Bridges an MCP server's tools to Ollama::Agent::Executor.
+    # Fetches tools via tools/list, converts them to Ollama tool format,
+    # and provides a callable per tool that invokes tools/call.
+    # Accepts either client: (StdioClient or HttpClient) or stdio_client: for backward compatibility.
+    class ToolsBridge
+      def initialize(stdio_client: nil, client: nil)
+        @client = client || stdio_client
+        raise ArgumentError, "Provide client: or stdio_client:" unless @client
+
+        @tools_cache = nil
+      end
+
+      # Returns a hash suitable for Executor: name => { tool: Ollama::Tool, callable: proc }.
+      # Callable receives keyword args and returns a string (tool result for the LLM).
+      def tools_for_executor
+        fetch_tools unless @tools_cache
+
+        @tools_cache.transform_values do |entry|
+          {
+            tool: entry[:tool],
+            callable: build_callable(entry[:name])
+          }
+        end
+      end
+
+      # Returns raw MCP tool list (name, description, input_schema).
+      def list_tools
+        @client.tools
+      end
+
+      private
+
+      def fetch_tools
+        list = @client.tools
+        @tools_cache = list.each_with_object({}) do |mcp_tool, hash|
+          name = mcp_tool[:name]
+          next if name.nil? || name.to_s.empty?
+
+          hash[name.to_s] = {
+            name: name.to_s,
+            tool: mcp_tool_to_ollama(mcp_tool)
+          }
+        end
+      end
+
+      def mcp_tool_to_ollama(mcp_tool)
+        schema = mcp_tool[:input_schema] || { "type" => "object" }
+        function_hash = {
+          "name" => mcp_tool[:name].to_s,
+          "description" => (mcp_tool[:description] || "MCP tool: #{mcp_tool[:name]}").to_s,
+          "parameters" => schema
+        }
+        Ollama::Tool.from_hash("type" => "function", "function" => function_hash)
+      end
+
+      def build_callable(name)
+        client = @client
+        ->(**kwargs) { client.call_tool(name: name, arguments: stringify_keys(kwargs)) }
+      end
+
+      def stringify_keys(hash)
+        hash.transform_keys(&:to_s)
+      end
+    end
+  end
+end
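The shape `tools_for_executor` returns, and what a callable does, sketched with a hypothetical `read_file` tool name:

```ruby
bridge = Ollama::MCP::ToolsBridge.new(client: mcp_client)
tools = bridge.tools_for_executor
# => { "read_file" => { tool: #<Ollama::Tool>, callable: #<Proc> }, ... }

# Each callable forwards its keyword arguments to tools/call on the MCP
# client and returns the tool's text output as a String:
entry = tools["read_file"]
puts entry[:callable].call(path: "/tmp/notes.txt") if entry
```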
data/lib/ollama/mcp.rb
ADDED
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+# MCP (Model Context Protocol) support for local servers.
+#
+# Connect to MCP servers running via stdio (e.g. npx @modelcontextprotocol/server-filesystem)
+# and use their tools with Ollama::Agent::Executor.
+#
+# Example (remote URL, e.g. GitMCP):
+#   mcp_client = Ollama::MCP::HttpClient.new(url: "https://gitmcp.io/owner/repo")
+#   bridge = Ollama::MCP::ToolsBridge.new(client: mcp_client)
+#   tools = bridge.tools_for_executor
+#   executor = Ollama::Agent::Executor.new(ollama_client, tools: tools)
+#   executor.run(system: "...", user: "What does this repo do?")
+#
+# Example (local stdio):
+#   mcp_client = Ollama::MCP::StdioClient.new(
+#     command: "npx", args: ["-y", "@modelcontextprotocol/server-filesystem", "/tmp"]
+#   )
+#   bridge = Ollama::MCP::ToolsBridge.new(stdio_client: mcp_client)
+#   tools = bridge.tools_for_executor
+#   executor.run(system: "...", user: "List files in /tmp")
+#
+module Ollama
+  # Model Context Protocol client and tools bridge for Executor.
+  module MCP
+  end
+end
+
+require_relative "mcp/stdio_client"
+require_relative "mcp/http_client"
+require_relative "mcp/tools_bridge"
data/lib/ollama/options.rb
CHANGED
@@ -8,7 +8,9 @@ module Ollama
   #
   # Example:
   #   options = Ollama::Options.new(temperature: 0.7, top_p: 0.95)
-  #   client.
+  #   client.chat(messages: [...], format: {...}, options: options.to_h, allow_chat: true)
+  #
+  # Note: generate() doesn't accept options parameter - set options in config instead
   class Options
     VALID_KEYS = %i[temperature top_p top_k num_ctx repeat_penalty seed].freeze
 