net-llm 0.3.1 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +11 -0
- data/README.md +24 -1
- data/lib/net/llm/ollama.rb +2 -1
- data/lib/net/llm/version.rb +1 -1
- metadata +1 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 3a2377d518ae361463c292e8543a1a239a93943dcc64960354769a4bf06b12f6
+  data.tar.gz: 2f4c4da04122b0138bea3c1f57639951d40a4b32307b0c184dcdcad7f6faac14
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 89ea98f7df113797338c601d5fd47864bb0311f2923d1e8ab6bc349e399f998d77cc434bea635b40e8869852b557d87d3ee7cf113ddb20e01122c6a7d5155f1c
+  data.tar.gz: a75047b2104fd0d5d36e3bad3c1ea2a50d6f0930dcb76bc8aad2ffbc7faeec8e77373fe146c8a50f11a9ee47f53d3c51729c7dbfb86f9f3d520c2c8a40f142ae
data/CHANGELOG.md
CHANGED
@@ -1,5 +1,16 @@
 ## [Unreleased]
 
+## [0.4.0] - 2025-10-15
+### Added
+- Added tool/function calling support to Ollama provider
+- Ollama `chat` method now accepts optional `tools` parameter matching OpenAI signature
+- Tools work in both streaming and non-streaming modes
+- Added comprehensive test coverage for tool functionality
+
+### Changed
+- Updated README with Ollama tools example
+- Updated API coverage documentation
+
 ## [0.3.1] - 2025-10-08
 ### Fixed
 - Added missing net-hippie runtime dependency to gemspec
data/README.md
CHANGED
@@ -87,6 +87,29 @@ response = client.chat(messages)
 puts response['message']['content']
 ```
 
+#### With Tools
+
+```ruby
+tools = [
+  {
+    type: 'function',
+    function: {
+      name: 'get_weather',
+      description: 'Get current weather',
+      parameters: {
+        type: 'object',
+        properties: {
+          location: { type: 'string' }
+        },
+        required: ['location']
+      }
+    }
+  }
+]
+
+response = client.chat(messages, tools)
+```
+
 #### Streaming
 
 ```ruby
@@ -195,7 +218,7 @@ Streaming methods still raise exceptions on HTTP errors.
 - `/v1/embeddings`
 
 ### Ollama
-- `/api/chat` (with streaming)
+- `/api/chat` (with streaming and tools)
 - `/api/generate` (with streaming)
 - `/api/embed`
 - `/api/tags`
data/lib/net/llm/ollama.rb
CHANGED
@@ -11,9 +11,10 @@ module Net
         @http = http
       end
 
-      def chat(messages, &block)
+      def chat(messages, tools = [], &block)
         url = build_url("/api/chat")
         payload = { model: model, messages: messages, stream: block_given? }
+        payload[:tools] = tools unless tools.empty?
 
         if block_given?
           stream_request(url, payload, &block)
data/lib/net/llm/version.rb
CHANGED