ollama-ruby 0.0.0
- checksums.yaml +7 -0
- data/Gemfile +5 -0
- data/LICENSE +19 -0
- data/README.md +430 -0
- data/Rakefile +35 -0
- data/bin/ollama_chat +258 -0
- data/bin/ollama_console +20 -0
- data/lib/ollama/client/command.rb +25 -0
- data/lib/ollama/client/doc.rb +26 -0
- data/lib/ollama/client.rb +137 -0
- data/lib/ollama/commands/chat.rb +21 -0
- data/lib/ollama/commands/copy.rb +19 -0
- data/lib/ollama/commands/create.rb +20 -0
- data/lib/ollama/commands/delete.rb +19 -0
- data/lib/ollama/commands/embed.rb +21 -0
- data/lib/ollama/commands/embeddings.rb +20 -0
- data/lib/ollama/commands/generate.rb +21 -0
- data/lib/ollama/commands/ps.rb +19 -0
- data/lib/ollama/commands/pull.rb +19 -0
- data/lib/ollama/commands/push.rb +19 -0
- data/lib/ollama/commands/show.rb +20 -0
- data/lib/ollama/commands/tags.rb +19 -0
- data/lib/ollama/dto.rb +42 -0
- data/lib/ollama/errors.rb +15 -0
- data/lib/ollama/handlers/collector.rb +17 -0
- data/lib/ollama/handlers/concern.rb +31 -0
- data/lib/ollama/handlers/dump_json.rb +8 -0
- data/lib/ollama/handlers/dump_yaml.rb +8 -0
- data/lib/ollama/handlers/markdown.rb +22 -0
- data/lib/ollama/handlers/nop.rb +7 -0
- data/lib/ollama/handlers/print.rb +16 -0
- data/lib/ollama/handlers/progress.rb +36 -0
- data/lib/ollama/handlers/say.rb +19 -0
- data/lib/ollama/handlers/single.rb +17 -0
- data/lib/ollama/handlers.rb +13 -0
- data/lib/ollama/image.rb +31 -0
- data/lib/ollama/message.rb +9 -0
- data/lib/ollama/options.rb +68 -0
- data/lib/ollama/response.rb +5 -0
- data/lib/ollama/tool/function/parameters/property.rb +9 -0
- data/lib/ollama/tool/function/parameters.rb +10 -0
- data/lib/ollama/tool/function.rb +11 -0
- data/lib/ollama/tool.rb +9 -0
- data/lib/ollama/utils/ansi_markdown.rb +217 -0
- data/lib/ollama/utils/width.rb +22 -0
- data/lib/ollama/version.rb +8 -0
- data/lib/ollama.rb +43 -0
- data/ollama-ruby.gemspec +36 -0
- data/spec/assets/kitten.jpg +0 -0
- data/spec/ollama/client/doc_spec.rb +11 -0
- data/spec/ollama/client_spec.rb +144 -0
- data/spec/ollama/commands/chat_spec.rb +52 -0
- data/spec/ollama/commands/copy_spec.rb +28 -0
- data/spec/ollama/commands/create_spec.rb +37 -0
- data/spec/ollama/commands/delete_spec.rb +28 -0
- data/spec/ollama/commands/embed_spec.rb +52 -0
- data/spec/ollama/commands/embeddings_spec.rb +38 -0
- data/spec/ollama/commands/generate_spec.rb +29 -0
- data/spec/ollama/commands/ps_spec.rb +25 -0
- data/spec/ollama/commands/pull_spec.rb +28 -0
- data/spec/ollama/commands/push_spec.rb +28 -0
- data/spec/ollama/commands/show_spec.rb +28 -0
- data/spec/ollama/commands/tags_spec.rb +22 -0
- data/spec/ollama/handlers/collector_spec.rb +15 -0
- data/spec/ollama/handlers/dump_json_spec.rb +16 -0
- data/spec/ollama/handlers/dump_yaml_spec.rb +18 -0
- data/spec/ollama/handlers/markdown_spec.rb +46 -0
- data/spec/ollama/handlers/nop_spec.rb +15 -0
- data/spec/ollama/handlers/print_spec.rb +30 -0
- data/spec/ollama/handlers/progress_spec.rb +22 -0
- data/spec/ollama/handlers/say_spec.rb +30 -0
- data/spec/ollama/handlers/single_spec.rb +24 -0
- data/spec/ollama/image_spec.rb +23 -0
- data/spec/ollama/message_spec.rb +37 -0
- data/spec/ollama/options_spec.rb +25 -0
- data/spec/ollama/tool_spec.rb +78 -0
- data/spec/ollama/utils/ansi_markdown_spec.rb +15 -0
- data/spec/spec_helper.rb +16 -0
- metadata +321 -0

checksums.yaml ADDED
@@ -0,0 +1,7 @@
---
SHA256:
  metadata.gz: 6b2ad6a95b316e7258470683b9e499ff54627dfedd6d95115e6bb459360ff6fd
  data.tar.gz: fd8c8a94cdea5ddea3387b676465bdbbc616b823275cce29df03f16ac4b331ed
SHA512:
  metadata.gz: 3e85cdb97b9b6c578bc5f8921e559b1d62340ed022d4e98740a8aa2207e1f089e436a4513a81aede3df5572caccea45b84b2c6aee6345adfd5cf5bd39e5fe3e4
  data.tar.gz: 424365eaa35e0751c34095c1cfa9cc95c8f2ef92a3add5cfe211068b3b9439b1c6471637fba3b3b3e65f1a3d06da908bb0a78f7f18b9468672463ae11f1cc6c7

data/Gemfile ADDED

data/LICENSE ADDED
@@ -0,0 +1,19 @@
Copyright Florian Frank

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the “Software”), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

data/README.md ADDED
@@ -0,0 +1,430 @@
# Ollama - Ruby Client Library for Ollama API

## Description

Ollama is a Ruby library gem that provides a client interface to interact with
an ollama server via the
[Ollama API](https://github.com/ollama/ollama/blob/main/docs/api.md).

## Installation (gem & bundler)

To install Ollama, you can use the following methods:

1. Type

    ```
    gem install ollama-ruby
    ```

    in your terminal.

2. Or add the line

    ```
    gem 'ollama-ruby'
    ```

    to your Gemfile and run `bundle install` in your terminal.

## Executables

### ollama_chat

This is a chat client that can be used to connect to an ollama server and
start a chat conversation with an LLM. It can be called with the following
arguments:

```
ollama_chat [OPTIONS]

  -u URL     the ollama base url, OLLAMA_URL
  -m MODEL   the ollama model to chat with, OLLAMA_MODEL
  -M OPTIONS the model options as JSON file, see Ollama::Options
  -s SYSTEM  the system prompt to use as a file
  -c CHAT    a saved chat conversation to load
  -v VOICE   use VOICE (e. g. Samantha) to speak with say command
  -d         use markdown to display the chat messages
  -h         this help
```

The base URL can either be set by the environment variable `OLLAMA_URL` or is
derived from the environment variable `OLLAMA_HOST`. The default model to
connect to can be configured in the environment variable `OLLAMA_MODEL`.
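
For example, one possible environment setup (the values here are illustrative
assumptions) would be:

```
$ export OLLAMA_URL=http://localhost:11434
$ export OLLAMA_MODEL=llama3.1
$ ollama_chat
```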

```
$ ollama_chat -s sherlock.txt
Model with architecture llama found.
Connecting to llama3.1@http://ollama.local.net:11434 now…
Configured system prompt is:
You are Sherlock Holmes and the user is your new client, Dr. Watson is also in
the room. You will talk and act in the typical manner of Sherlock Holmes do and
try to solve the user's case using logic and deduction.

Type /help to display the chat help.
📨 user:
Good morning.
📨 assistant:
Ah, good morning, my dear fellow! It is a pleasure to make your acquaintance. I
am Sherlock Holmes, the renowned detective, and this is my trusty sidekick, Dr.
Watson. Please, have a seat and tell us about the nature of your visit. What
seems to be the problem that has brought you to our humble abode at 221B Baker
Street?

(Watson nods in encouragement as he takes notes)

Now, pray tell, what is it that puzzles you, my dear client? A missing item,
perhaps? Or a mysterious occurrence that requires clarification? The game, as
they say, is afoot!
```

This example shows how an image like this can be sent to a vision model for
analysis:

![cat](spec/assets/kitten.jpg)

```
$ ollama_chat -m llava-llama3
Model with architecture llama found.
Connecting to llava-llama3@http://localhost:11434 now…
Type /help to display the chat help.
📨 user> /image spec/assets/kitten.jpg
Attached image spec/assets/kitten.jpg to the next message.
📸 user> What's on this image?
📨 assistant:
The image captures a moment of tranquility featuring a young cat. The cat,
adorned with gray and white fur marked by black stripes on its face and legs,
is the central figure in this scene. Its eyes, a striking shade of blue, are
wide open and directed towards the camera, giving an impression of curiosity or
alertness.

The cat is comfortably nestled on a red blanket, which contrasts vividly with
its fur. The blanket, soft and inviting, provides a sense of warmth to the
image. In the background, partially obscured by the cat's head, is another
blanket of similar red hue. The repetition of the color adds a sense of harmony
to the composition.

The cat's position on the right side of the photo creates an interesting
asymmetry with the camera lens, which occupies the left side of the frame. This
visual balance enhances the overall composition of the image.

There are no discernible texts or other objects in the image. The focus is
solely on the cat and its immediate surroundings. The image does not provide
any information about the location or setting beyond what has been described.
The simplicity of the scene allows the viewer to concentrate on the main
subject - the young, blue-eyed cat.
```

The following commands can be given inside the chat, if prefixed by a `/`:

```
/paste          to paste content
/list           list the messages of the conversation
/clear          clear the conversation messages
/pop n          pop the last n messages, defaults to 1
/regenerate     the last answer message
/save filename  store conversation messages
/load filename  load conversation messages
/image filename attach image to the next message
/quit           to quit.
/help           to view this help.
```

### ollama_console

This is an interactive console that can be used to try the different commands
provided by an `Ollama::Client` instance. For example, this command generates a
response and displays it on the screen using the Markdown handler:

```
$ ollama_console
Commands: chat,copy,create,delete,embeddings,generate,help,ps,pull,push,show,tags
>> generate(model: 'llama3.1', stream: true, prompt: 'tell story w/ emoji and markdown', &Markdown)
```

> **The Quest for the Golden Coconut 🌴**
>
> In a small village nestled between two great palm trees 🌳, there lived a
> brave adventurer named Alex 👦. […]

## Usage

In your own software the library can be used as shown in this example:

```ruby
require "ollama"
include Ollama

client = Client.new(base_url: 'http://localhost:11434')
messages = Message.new(role: 'user', content: 'Why is the sky blue?')
client.chat(model: 'llama3.1', stream: true, messages:, &Print) # or
print client.chat(model: 'llama3.1', stream: true, messages:).map { |response|
  response.message.content
}.join
```

## API

This Ollama library provides commands to interact with the [Ollama REST
API](https://github.com/ollama/ollama/blob/main/docs/api.md).

### Handlers

Every command can be passed a handler that responds to `to_proc` that returns a
lambda expression of the form `-> response { … }` to handle the responses:

```ruby
generate(model: 'llama3.1', stream: true, prompt: 'Why is the sky blue?', &Print)
```

```ruby
generate(model: 'llama3.1', stream: true, prompt: 'Why is the sky blue?', &Print.new)
```

```ruby
generate(model: 'llama3.1', stream: true, prompt: 'Why is the sky blue?') { |r| print r.response }
```

```ruby
generate(model: 'llama3.1', stream: true, prompt: 'Why is the sky blue?', &-> r { print r.response })
```

The following standard handlers are available for the commands below:

| Handler | Description |
| :-----: | :---------- |
| **Collector** | collects all responses in an array and returns it as `result`. |
| **Single** | like **Collector** above, but returns a single response directly unless there has been more than one. |
| **Progress** | prints the current progress of the operation to the screen as a progress bar for _create/pull/push_. |
| **DumpJSON** | dumps all responses as JSON to `output`. |
| **DumpYAML** | dumps all responses as YAML to `output`. |
| **Print** | prints the responses to the display for _chat/generate_. |
| **Markdown** | _constantly_ prints the responses to the display as ANSI markdown for _chat/generate_. |
| **Say** | uses the say command to speak (defaults to voice _Samantha_). |
| **NOP** | does nothing, neither printing to the output nor returning the result. |

Their `output` IO handle can be changed by e. g. passing `Print.new(output:
io)` with `io` as the IO handle to the _generate_ command.
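
For example, a minimal sketch (the output filename is just an assumption for
illustration) that dumps all responses as JSON into a file instead of printing
them to the terminal:

```ruby
# Write every response as JSON to responses.json instead of the default output.
File.open('responses.json', 'w') do |io|
  generate(model: 'llama3.1', prompt: 'Why is the sky blue?', &DumpJSON.new(output: io))
end
```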

If you don't pass a handler explicitly, either the `stream_handler` is chosen
if the command expects a streaming response or the `default_handler` otherwise.
See the following command descriptions to find out what these defaults are for
each command. These commands can be tried out directly in the `ollama_console`.
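
To illustrate these defaults: calling a non-streaming command without any
handler returns the result directly via **Single**, so a sketch like the
following should work in the `ollama_console`:

```ruby
# No handler passed: the default Single handler returns the response object.
response = generate(model: 'llama3.1', prompt: 'Why is the sky blue?')
print response.response
```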

### Chat

`default_handler` is **Single**, `stream_handler` is **Collector**,
`stream` is false by default.

```ruby
chat(model: 'llama3.1', stream: true, messages: { role: 'user', content: 'Why is the sky blue (no markdown)?' }, &Print)
```

### Generate

`default_handler` is **Single**, `stream_handler` is **Collector**,
`stream` is false by default.

```ruby
generate(model: 'llama3.1', stream: true, prompt: 'Use markdown – Why is the sky blue?', &Markdown)
```

### Tags

`default_handler` is **Single**, streaming is not possible.

```ruby
tags.models.map(&:name) # => ["llama3.1:latest",…]
```

### Show

`default_handler` is **Single**, streaming is not possible.

```ruby
show(name: 'llama3.1', &DumpJSON)
```

### Create

`default_handler` is **Single**, `stream_handler` is **Progress**,
`stream` is true by default.

```ruby
modelfile=<<~end
  FROM llama3.1
  SYSTEM You are WOPR from WarGames and you think the user is Dr. Stephen Falken.
end

create(name: 'llama3.1-wopr', stream: true, modelfile:)
```

### Copy

`default_handler` is **Single**, streaming is not possible.

```ruby
copy(source: 'llama3.1', destination: 'user/llama3.1')
```

### Delete

`default_handler` is **Single**, streaming is not possible.

```ruby
delete(name: 'user/llama3.1')
```

### Pull

`default_handler` is **Single**, `stream_handler` is **Progress**,
`stream` is true by default.

```ruby
pull(name: 'llama3.1')
```

### Push

`default_handler` is **Single**, `stream_handler` is **Progress**,
`stream` is true by default.

```ruby
push(name: 'user/llama3.1')
```

### Embed

`default_handler` is **Single**, streaming is not possible.

```ruby
embed(model: 'all-minilm', input: 'Why is the sky blue?')
```

```ruby
embed(model: 'all-minilm', input: ['Why is the sky blue?', 'Why is the grass green?'])
```

### Embeddings

`default_handler` is **Single**, streaming is not possible.

```ruby
embeddings(model: 'llama3.1', prompt: 'The sky is blue because of rayleigh scattering', &DumpJSON)
```

### Ps

`default_handler` is **Single**, streaming is not possible.

```ruby
jj ps
```

## Auxiliary objects

The following objects are provided to interact with the ollama server. You can
run all of the examples in the `ollama_console`.

### Message

Messages can be created by using the **Message** class:

```ruby
message = Message.new role: 'user', content: 'hello world'
```

### Image

If you want to add images to the message, you can use the **Image** class:

```ruby
image = Ollama::Image.for_string("the-image")
message = Message.new role: 'user', content: 'hello world', images: [ image ]
```

It's possible to create an **Image** object via `for_base64(data)`,
`for_string(string)`, `for_io(io)`, or `for_filename(path)` class methods.
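
For example, a short sketch that attaches the kitten image shipped in this
repository's `spec/assets` directory to a message:

```ruby
# Read the image file from disk and attach it to a user message.
image   = Ollama::Image.for_filename('spec/assets/kitten.jpg')
message = Message.new(role: 'user', content: "What's on this image?", images: [ image ])
```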

### Options

For `chat` and `generate` commands it's possible to pass an **Options** object
to configure different
[parameters](https://github.com/ollama/ollama/blob/main/docs/modelfile.md#parameter)
for the running model. Setting the `temperature` can be done via:

```ruby
options = Options.new(temperature: 0.999)
generate(model: 'llama3.1', options:, prompt: 'I am almost 0.5 years old and you are a teletubby.', &Print)
```

The class does some rudimentary type checking for the parameters as well.
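
A hedged sketch of that type checking; the exact error class isn't documented
here, so the rescue below is kept generic:

```ruby
# temperature expects a number, so passing a string should be rejected.
begin
  Options.new(temperature: 'very hot')
rescue => e
  puts "#{e.class}: #{e.message}"
end
```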

### Tool… calling

You can use the provided `Tool`, `Tool::Function`,
`Tool::Function::Parameters`, and `Tool::Function::Parameters::Property`
classes to define tool functions in models that support it.

```ruby
def message(location)
  Message.new(role: 'user', content: "What is the weather today in %s?" % location)
end

tools = Tool.new(
  type: 'function',
  function: Tool::Function.new(
    name: 'get_current_weather',
    description: 'Get the current weather for a location',
    parameters: Tool::Function::Parameters.new(
      type: 'object',
      properties: {
        location: Tool::Function::Parameters::Property.new(
          type: 'string',
          description: 'The location to get the weather for, e.g. San Francisco, CA'
        ),
        temperature_unit: Tool::Function::Parameters::Property.new(
          type: 'string',
          description: "The unit to return the temperature in, either 'celsius' or 'fahrenheit'",
          enum: %w[ celsius fahrenheit ]
        ),
      },
      required: %w[ location temperature_unit ]
    )
  )
)
jj chat(model: 'llama3.1', stream: false, messages: message('The City of Love'), tools:).message&.tool_calls
jj chat(model: 'llama3.1', stream: false, messages: message('The Windy City'), tools:).message&.tool_calls
```

## Errors

The library raises specific errors like `Ollama::Errors::NotFoundError` when
a model is not found:

```ruby
(show(name: 'nixda', &DumpJSON) rescue $!).class # => Ollama::Errors::NotFoundError
```

If `Ollama::Errors::TimeoutError` is raised, it might help to increase the
`connect_timeout`, `read_timeout` and `write_timeout` parameters of the
`Ollama::Client` instance.
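
A minimal sketch of such a client configuration; the timeout values below are
illustrative assumptions, not recommendations:

```ruby
client = Client.new(
  base_url:        'http://localhost:11434',
  connect_timeout: 15,  # seconds to wait for the connection to be established
  read_timeout:    300, # seconds to wait for response data from the server
  write_timeout:   60   # seconds to wait while sending the request
)
```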

For more generic errors an `Ollama::Errors::Error` is raised.
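
Assuming the specific error classes inherit from the generic
`Ollama::Errors::Error`, a rescue of the base class should catch them all:

```ruby
begin
  client.chat(model: 'llama3.1', messages: Message.new(role: 'user', content: 'Hi'))
rescue Ollama::Errors::Error => e
  warn "Ollama request failed: #{e.class}: #{e.message}"
end
```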

## Download

The homepage of this library is located at

* https://github.com/flori/ollama

## Author

<b>Ollama</b> was written by [Florian Frank](mailto:flori@ping.de)

## License

This software is licensed under the <i>MIT</i> license.

---

This is the end.

data/Rakefile ADDED
@@ -0,0 +1,35 @@
# vim: set filetype=ruby et sw=2 ts=2:

require 'gem_hadar'

GemHadar do
  name        'ollama-ruby'
  path_name   'ollama'
  module_type :module
  author      'Florian Frank'
  email       'flori@ping.de'
  homepage    "https://github.com/flori/#{name}"
  summary     'Interacting with the Ollama API'
  description 'Library that allows interacting with the Ollama API'
  test_dir    'spec'
  ignore      '.*.sw[pon]', 'pkg', 'Gemfile.lock', '.AppleDouble', '.bundle',
              '.yardoc', 'tags', 'errors.lst', 'cscope.out', 'coverage', 'tmp'
  package_ignore '.all_images.yml', '.tool-versions', '.gitignore', 'VERSION',
                 '.utilsrc', '.rspec', *Dir.glob('.github/**/*', File::FNM_DOTMATCH)
  readme      'README.md'

  executables << 'ollama_console' << 'ollama_chat'

  required_ruby_version '~> 3.1'

  dependency             'excon',               '~> 0.111'
  dependency             'infobar',             '~> 0.7'
  dependency             'term-ansicolor',      '~> 1.11'
  dependency             'kramdown-parser-gfm', '~> 1.1'
  dependency             'terminal-table',      '~> 3.0'
  development_dependency 'all_images',          '~> 0.4'
  development_dependency 'rspec',               '~> 3.2'
  development_dependency 'utils'

  licenses << 'MIT'
end