ollama-ruby 0.0.0 → 0.0.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 6b2ad6a95b316e7258470683b9e499ff54627dfedd6d95115e6bb459360ff6fd
- data.tar.gz: fd8c8a94cdea5ddea3387b676465bdbbc616b823275cce29df03f16ac4b331ed
+ metadata.gz: 327051092cef37a7fd95d6a5c3a5a13aa6e52df2314da5f4cfd28f2890ffa820
+ data.tar.gz: 86da0d23497f9717239abaa1830ac687562c8e9827a097781f0404e6b65e1e35
  SHA512:
- metadata.gz: 3e85cdb97b9b6c578bc5f8921e559b1d62340ed022d4e98740a8aa2207e1f089e436a4513a81aede3df5572caccea45b84b2c6aee6345adfd5cf5bd39e5fe3e4
- data.tar.gz: 424365eaa35e0751c34095c1cfa9cc95c8f2ef92a3add5cfe211068b3b9439b1c6471637fba3b3b3e65f1a3d06da908bb0a78f7f18b9468672463ae11f1cc6c7
+ metadata.gz: f3a9324f11877b0772d1f4bcb1902ea905e7132d94d73fe8b6b7e11a567c2aea04431838fa2b534557e4e1e76ae1ee47b13832aebb71e144ac329b13d15ea111
+ data.tar.gz: d40adf27de7e7700158027f1c6bfec7e97189b4a6081a7da36758edc31b5229b6efef62ee5a192af19afa5b633224cf8e80666d38154a9d3c1cae974a98f2de5
data/README.md CHANGED
@@ -153,12 +153,12 @@ In your own software the library can be used as shown in this example:
  require "ollama"
  include Ollama

- client = Client.new(base_url: 'http://localhost:11434')
+ ollama = Client.new(base_url: 'http://localhost:11434')
  messages = Message.new(role: 'user', content: 'Why is the sky blue?')
- client.chat(model: 'llama3.1', stream: true, messages:, &Print) # or
- print client.chat(model: 'llama3.1', stream: true, messages:).map { |response|
+ ollama.chat(model: 'llama3.1', stream: true, messages:, &Print) # or
+ print ollama.chat(model: 'llama3.1', stream: true, messages:).lazy.map { |response|
    response.message.content
- }.join
+ }
  ```

  ## API
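The README change above replaces the eager `map { … }.join` with `lazy.map { … }`, so a streamed chat response can be consumed chunk by chunk instead of being buffered into a single string first. As a companion to that example, here is a minimal sketch of the block form, which handles each streamed chunk as it arrives (assuming a local Ollama server with `llama3.1` pulled):

```ruby
require "ollama"
include Ollama

ollama   = Client.new(base_url: 'http://localhost:11434')
messages = Message.new(role: 'user', content: 'Why is the sky blue?')

# With stream: true the block is invoked once per partial response; the
# final chunk may carry an empty message, hence the defensive &. call.
ollama.chat(model: 'llama3.1', stream: true, messages:) do |response|
  print response.message&.content
end
```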
@@ -15,7 +15,8 @@ class Ollama::Client::Doc
  delete: 'https://github.com/ollama/ollama/blob/main/docs/api.md#delete-a-model',
  pull: 'https://github.com/ollama/ollama/blob/main/docs/api.md#pull-a-model',
  push: 'https://github.com/ollama/ollama/blob/main/docs/api.md#push-a-model',
- embeddings: 'https://github.com/ollama/ollama/blob/main/docs/api.md#generate-embeddings',
+ embeddings: 'https://github.com/ollama/ollama/blob/main/docs/api.md#generate-embeddings', # superseded by /api/embed
+ embed: 'https://github.com/ollama/ollama/blob/main/docs/api.md#generate-embeddings',
  ps: 'https://github.com/ollama/ollama/blob/main/docs/api.md#list-running-models',
  )[name]
  end
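The documentation table above now lists the newer `/api/embed` endpoint next to the older `/api/embeddings` one it supersedes; both entries point at the same upstream documentation anchor. A rough sketch of calling each through the client, assuming both commands dispatch by method name the way the other commands in this diff do:

```ruby
require "ollama"
include Ollama

ollama = Client.new(base_url: 'http://localhost:11434')

# Newer endpoint: POST /api/embed, implemented by Ollama::Commands::Embed.
ollama.embed(model: 'all-minilm', input: 'Why is the sky blue?')

# Older endpoint: POST /api/embeddings, kept for compatibility.
ollama.embeddings(model: 'mxbai-embed-large', prompt: 'Why is the sky blue?')
```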
@@ -5,13 +5,13 @@ class Ollama::Commands::Embed
  '/api/embed'
  end

- def initialize(model:, input:, truncate: nil, keep_alive: nil)
- @model, @input, @truncate, @keep_alive =
- model, input, truncate, keep_alive
+ def initialize(model:, input:, options: nil, truncate: nil, keep_alive: nil)
+ @model, @input, @options, @truncate, @keep_alive =
+ model, input, options, truncate, keep_alive
  @stream = false
  end

- attr_reader :model, :input, :truncate, :keep_alive, :stream
+ attr_reader :model, :input, :options, :truncate, :keep_alive, :stream

  attr_writer :client

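With the `options:` keyword added above, `Ollama::Commands::Embed` can now forward model parameters to `/api/embed`; the spec changes further down show an `Ollama::Options` instance with `num_ctx` being serialized into the request body. A small usage sketch along those lines (`num_ctx: 512` is just an illustrative value):

```ruby
require "ollama"
include Ollama

ollama = Client.new(base_url: 'http://localhost:11434')

# options: rides along with model and input in the JSON request body.
ollama.embed(
  model:   'all-minilm',
  input:   'Why is the sky blue?',
  options: Options.new(num_ctx: 512)
)
```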
@@ -1,6 +1,6 @@
  module Ollama
  # Ollama version
- VERSION = '0.0.0'
+ VERSION = '0.0.1'
  VERSION_ARRAY = VERSION.split('.').map(&:to_i) # :nodoc:
  VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc:
  VERSION_MINOR = VERSION_ARRAY[1] # :nodoc:
data/ollama-ruby.gemspec CHANGED
@@ -1,14 +1,14 @@
  # -*- encoding: utf-8 -*-
- # stub: ollama-ruby 0.0.0 ruby lib
+ # stub: ollama-ruby 0.0.1 ruby lib

  Gem::Specification.new do |s|
  s.name = "ollama-ruby".freeze
- s.version = "0.0.0".freeze
+ s.version = "0.0.1".freeze

  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib".freeze]
  s.authors = ["Florian Frank".freeze]
- s.date = "2024-08-12"
+ s.date = "2024-08-16"
  s.description = "Library that allows interacting with the Ollama API".freeze
  s.email = "flori@ping.de".freeze
  s.executables = ["ollama_console".freeze, "ollama_chat".freeze]
@@ -5,12 +5,12 @@ RSpec.describe Ollama::Client do
  'https://ai.foo.bar'
  end

- let :client do
+ let :ollama do
  described_class.new base_url:
  end

  it 'can be instantiated' do
- expect(client).to be_a described_class
+ expect(ollama).to be_a described_class
  end

  it 'can be configured via environment variable' do
@@ -20,7 +20,7 @@ RSpec.describe Ollama::Client do
  end

  it 'can disable ssl peer verification' do
- expect(client).to be_ssl_verify_peer
+ expect(ollama).to be_ssl_verify_peer
  client2 = described_class.new(
  base_url: 'https://ai.foo.bar?ssl_verify_peer=false'
  )
@@ -28,7 +28,7 @@ RSpec.describe Ollama::Client do
  end

  it 'has a string representation' do
- expect(client.to_s).to eq '#<Ollama::Client@https://ai.foo.bar>'
+ expect(ollama.to_s).to eq '#<Ollama::Client@https://ai.foo.bar>'
  end

  let :excon do
@@ -42,35 +42,35 @@ RSpec.describe Ollama::Client do
  it 'can raise error based on status code 500' do
  expect(excon).to receive(:send).and_return(double(status: 500, body: '{}'))
  expect {
- client.generate(model: 'llama3.1', prompt: 'Hello World')
+ ollama.generate(model: 'llama3.1', prompt: 'Hello World')
  }.to raise_error(Ollama::Errors::Error)
  end

  it 'can raise error based on status code 404' do
  expect(excon).to receive(:send).and_return(double(status: 404, body: '{}'))
  expect {
- client.generate(model: 'llama3.1', prompt: 'Hello World')
+ ollama.generate(model: 'llama3.1', prompt: 'Hello World')
  }.to raise_error(Ollama::Errors::NotFoundError)
  end

  it 'can raise error on connection error' do
  allow(excon).to receive(:post).and_raise Excon::Error::Socket
  expect {
- client.generate(model: 'llama3.1', prompt: 'Hello World')
+ ollama.generate(model: 'llama3.1', prompt: 'Hello World')
  }.to raise_error(Ollama::Errors::SocketError)
  end

  it 'can raise error on timeout' do
  allow(excon).to receive(:post).and_raise Excon::Errors::Timeout
  expect {
- client.generate(model: 'llama3.1', prompt: 'Hello World')
+ ollama.generate(model: 'llama3.1', prompt: 'Hello World')
  }.to raise_error(Ollama::Errors::TimeoutError)
  end

  it 'can raise a generic error' do
  allow(excon).to receive(:post).and_raise Excon::Errors::Error
  expect {
- client.generate(model: 'llama3.1', prompt: 'Hello World')
+ ollama.generate(model: 'llama3.1', prompt: 'Hello World')
  }.to raise_error(Ollama::Errors::Error)
  end

@@ -95,20 +95,20 @@ RSpec.describe Ollama::Client do

  it 'can use procs directly' do
  response = nil
- client.ps { |r| response = r }
+ ollama.ps { |r| response = r }
  expect(response).to eq expected_response
  end

  it 'can convert from handler instance to proc' do
  handler = Ollama::Handlers::NOP.new
  expect(handler).to receive(:call).with(expected_response)
- client.ps(&handler)
+ ollama.ps(&handler)
  end

  it 'can convert from handler class to proc' do
  handler = Ollama::Handlers::NOP
  expect_any_instance_of(handler).to receive(:call).with(expected_response)
- client.ps(&handler)
+ ollama.ps(&handler)
  end
  end

@@ -121,7 +121,7 @@ RSpec.describe Ollama::Client do
  'Content-Type' => 'application/json; charset=utf-8',
  )
  ).and_return(double(status: 200, body: '{}'))
- client.generate(model: 'llama3.1', prompt: 'Hello World')
+ ollama.generate(model: 'llama3.1', prompt: 'Hello World')
  end

  it 'can generate with stream' do
@@ -133,12 +133,12 @@ RSpec.describe Ollama::Client do
  ),
  response_block: an_instance_of(Proc)
  ).and_return(double(status: 200, body: '{}'))
- client.generate(model: 'llama3.1', prompt: 'Hello World', stream: true)
+ ollama.generate(model: 'llama3.1', prompt: 'Hello World', stream: true)
  end
  end

  it 'can help' do
  expect($stdout).to receive(:puts).with(/Commands:.*?chat/)
- client.help
+ ollama.help
  end
  end
@@ -41,8 +41,8 @@ RSpec.describe Ollama::Commands::Chat do
  Ollama::Message.new(role: 'user', content: "Let's play Global Thermonuclear War.")
  ]
  chat = described_class.new(model: 'llama3.1', messages:, stream: true)
- chat.client = client = double('client')
- expect(client).to receive(:request).
+ chat.client = ollama = double('Ollama::Client')
+ expect(ollama).to receive(:request).
  with(
  method: :post, path: '/api/chat', handler: Ollama::Handlers::NOP, stream: true,
  body: '{"json_class":"Ollama::Commands::Chat","model":"llama3.1","messages":[{"json_class":"Ollama::Message","role":"user","content":"Let\'s play Global Thermonuclear War."}],"stream":true}'
@@ -18,8 +18,8 @@ RSpec.describe Ollama::Commands::Copy do

  it 'can perform' do
  copy = described_class.new(source: 'llama3.1', destination: 'camell3')
- copy.client = client = double('client')
- expect(client).to receive(:request).with(
+ copy.client = ollama = double('Ollama::Client')
+ expect(ollama).to receive(:request).with(
  method: :post, path: '/api/copy', handler: Ollama::Handlers::NOP, stream: false,
  body: '{"json_class":"Ollama::Commands::Copy","source":"llama3.1","destination":"camell3","stream":false}'
  )
@@ -26,8 +26,8 @@ RSpec.describe Ollama::Commands::Create do
  modelfile: "FROM llama3.1\nSYSTEM You are WOPR from WarGames and you think the user is Dr. Stephen Falken.",
  stream: true
  )
- create.client = client = double('client')
- expect(client).to receive(:request).
+ create.client = ollama = double('Ollama::Client')
+ expect(ollama).to receive(:request).
  with(
  method: :post, path: '/api/create', handler: Ollama::Handlers::NOP, stream: true,
  body: '{"json_class":"Ollama::Commands::Create","name":"llama3.1-wopr","modelfile":"FROM llama3.1\nSYSTEM You are WOPR from WarGames and you think the user is Dr. Stephen Falken.","stream":true}'
@@ -18,8 +18,8 @@ RSpec.describe Ollama::Commands::Delete do

  it 'can perform' do
  delete = described_class.new(name: 'llama3.1')
- delete.client = client = double('client')
- expect(client).to receive(:request).with(
+ delete.client = ollama = double('Ollama::Client')
+ expect(ollama).to receive(:request).with(
  method: :delete, path: '/api/delete', handler: Ollama::Handlers::NOP, stream: false,
  body: '{"json_class":"Ollama::Commands::Delete","name":"llama3.1","stream":false}'
  )
@@ -12,13 +12,14 @@ RSpec.describe Ollama::Commands::Embed do
  it 'can be converted to JSON' do
  embed = described_class.new(
  model: 'all-minilm',
+ options: Ollama::Options.new(num_ctx: 666),
  input: 'Why is the sky blue?'
  )
  expect(embed.as_json).to include(
  model: 'all-minilm', input: 'Why is the sky blue?',
  )
  expect(embed.to_json).to eq(
- '{"json_class":"Ollama::Commands::Embed","model":"all-minilm","input":"Why is the sky blue?","stream":false}'
+ '{"json_class":"Ollama::Commands::Embed","model":"all-minilm","input":"Why is the sky blue?","options":{"json_class":"Ollama::Options","num_ctx":666},"stream":false}'
  )
  end

@@ -41,8 +42,8 @@ RSpec.describe Ollama::Commands::Embed do
  model: 'all-minilm',
  input: 'Why is the sky blue?'
  )
- embed.client = client = double('client')
- expect(client).to receive(:request).
+ embed.client = ollama = double('Ollama::Client')
+ expect(ollama).to receive(:request).
  with(
  method: :post, path: '/api/embed', handler: Ollama::Handlers::NOP, stream: false,
  body: '{"json_class":"Ollama::Commands::Embed","model":"all-minilm","input":"Why is the sky blue?","stream":false}'
@@ -27,8 +27,8 @@ RSpec.describe Ollama::Commands::Embeddings do
  model: 'mxbai-embed-large',
  prompt: 'Here are the coordinates of all Soviet military installations: …'
  )
- embeddings.client = client = double('client')
- expect(client).to receive(:request).
+ embeddings.client = ollama = double('Ollama::Client')
+ expect(ollama).to receive(:request).
  with(
  method: :post, path: '/api/embeddings', handler: Ollama::Handlers::NOP, stream: false,
  body: '{"json_class":"Ollama::Commands::Embeddings","model":"mxbai-embed-large","prompt":"Here are the coordinates of all Soviet military installations: …","stream":false}'
@@ -18,8 +18,8 @@ RSpec.describe Ollama::Commands::Generate do

  it 'can perform' do
  generate = described_class.new(model: 'llama3.1', prompt: 'Hello World', stream: true)
- generate.client = client = double('client')
- expect(client).to receive(:request).
+ generate.client = ollama = double('Ollama::Client')
+ expect(ollama).to receive(:request).
  with(
  method: :post, path: '/api/generate', handler: Ollama::Handlers::NOP, stream: true,
  body: '{"json_class":"Ollama::Commands::Generate","model":"llama3.1","prompt":"Hello World","stream":true}'
@@ -14,8 +14,8 @@ RSpec.describe Ollama::Commands::Ps do
  end

  it 'can perform' do
- ps.client = client = double('client')
- expect(client).to receive(:request).
+ ps.client = ollama = double('Ollama::Client')
+ expect(ollama).to receive(:request).
  with(
  method: :get, path: '/api/ps', handler: Ollama::Handlers::NOP,
  stream: false
@@ -18,8 +18,8 @@ RSpec.describe Ollama::Commands::Pull do

  it 'can perform' do
  pull = described_class.new(name: 'llama3.1', stream: true)
- pull.client = client = double('client')
- expect(client).to receive(:request).with(
+ pull.client = ollama = double('Ollama::Client')
+ expect(ollama).to receive(:request).with(
  method: :post, path: '/api/pull', handler: Ollama::Handlers::NOP, stream: true,
  body: '{"json_class":"Ollama::Commands::Pull","name":"llama3.1","stream":true}'
  )
@@ -18,8 +18,8 @@ RSpec.describe Ollama::Commands::Push do

  it 'can perform' do
  push = described_class.new(name: 'llama3.1', stream: true)
- push.client = client = double('client')
- expect(client).to receive(:request).with(
+ push.client = ollama = double('Ollama::Client')
+ expect(ollama).to receive(:request).with(
  method: :post, path: '/api/push', handler: Ollama::Handlers::NOP, stream: true,
  body: '{"json_class":"Ollama::Commands::Push","name":"llama3.1","stream":true}'
  )
@@ -18,8 +18,8 @@ RSpec.describe Ollama::Commands::Show do

  it 'can perform' do
  show = described_class.new(name: 'llama3.1')
- show.client = client = double('client')
- expect(client).to receive(:request).with(
+ show.client = ollama = double('Ollama::Client')
+ expect(ollama).to receive(:request).with(
  method: :post, path: '/api/show', handler: Ollama::Handlers::NOP, stream: false,
  body: '{"json_class":"Ollama::Commands::Show","name":"llama3.1","stream":false}'
  )
@@ -14,8 +14,8 @@ RSpec.describe Ollama::Commands::Tags do
  end

  it 'can perform' do
- tags.client = client = double('client')
- expect(client).to receive(:request).
+ tags.client = ollama = double('Ollama::Client')
+ expect(ollama).to receive(:request).
  with(method: :get, path: '/api/tags', stream: false, handler: Ollama::Handlers::NOP)
  tags.perform(Ollama::Handlers::NOP)
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: ollama-ruby
  version: !ruby/object:Gem::Version
- version: 0.0.0
+ version: 0.0.1
  platform: ruby
  authors:
  - Florian Frank
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2024-08-12 00:00:00.000000000 Z
+ date: 2024-08-16 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: gem_hadar