ollama-ruby 0.0.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (79) hide show
  1. checksums.yaml +7 -0
  2. data/Gemfile +5 -0
  3. data/LICENSE +19 -0
  4. data/README.md +430 -0
  5. data/Rakefile +35 -0
  6. data/bin/ollama_chat +258 -0
  7. data/bin/ollama_console +20 -0
  8. data/lib/ollama/client/command.rb +25 -0
  9. data/lib/ollama/client/doc.rb +26 -0
  10. data/lib/ollama/client.rb +137 -0
  11. data/lib/ollama/commands/chat.rb +21 -0
  12. data/lib/ollama/commands/copy.rb +19 -0
  13. data/lib/ollama/commands/create.rb +20 -0
  14. data/lib/ollama/commands/delete.rb +19 -0
  15. data/lib/ollama/commands/embed.rb +21 -0
  16. data/lib/ollama/commands/embeddings.rb +20 -0
  17. data/lib/ollama/commands/generate.rb +21 -0
  18. data/lib/ollama/commands/ps.rb +19 -0
  19. data/lib/ollama/commands/pull.rb +19 -0
  20. data/lib/ollama/commands/push.rb +19 -0
  21. data/lib/ollama/commands/show.rb +20 -0
  22. data/lib/ollama/commands/tags.rb +19 -0
  23. data/lib/ollama/dto.rb +42 -0
  24. data/lib/ollama/errors.rb +15 -0
  25. data/lib/ollama/handlers/collector.rb +17 -0
  26. data/lib/ollama/handlers/concern.rb +31 -0
  27. data/lib/ollama/handlers/dump_json.rb +8 -0
  28. data/lib/ollama/handlers/dump_yaml.rb +8 -0
  29. data/lib/ollama/handlers/markdown.rb +22 -0
  30. data/lib/ollama/handlers/nop.rb +7 -0
  31. data/lib/ollama/handlers/print.rb +16 -0
  32. data/lib/ollama/handlers/progress.rb +36 -0
  33. data/lib/ollama/handlers/say.rb +19 -0
  34. data/lib/ollama/handlers/single.rb +17 -0
  35. data/lib/ollama/handlers.rb +13 -0
  36. data/lib/ollama/image.rb +31 -0
  37. data/lib/ollama/message.rb +9 -0
  38. data/lib/ollama/options.rb +68 -0
  39. data/lib/ollama/response.rb +5 -0
  40. data/lib/ollama/tool/function/parameters/property.rb +9 -0
  41. data/lib/ollama/tool/function/parameters.rb +10 -0
  42. data/lib/ollama/tool/function.rb +11 -0
  43. data/lib/ollama/tool.rb +9 -0
  44. data/lib/ollama/utils/ansi_markdown.rb +217 -0
  45. data/lib/ollama/utils/width.rb +22 -0
  46. data/lib/ollama/version.rb +8 -0
  47. data/lib/ollama.rb +43 -0
  48. data/ollama-ruby.gemspec +36 -0
  49. data/spec/assets/kitten.jpg +0 -0
  50. data/spec/ollama/client/doc_spec.rb +11 -0
  51. data/spec/ollama/client_spec.rb +144 -0
  52. data/spec/ollama/commands/chat_spec.rb +52 -0
  53. data/spec/ollama/commands/copy_spec.rb +28 -0
  54. data/spec/ollama/commands/create_spec.rb +37 -0
  55. data/spec/ollama/commands/delete_spec.rb +28 -0
  56. data/spec/ollama/commands/embed_spec.rb +52 -0
  57. data/spec/ollama/commands/embeddings_spec.rb +38 -0
  58. data/spec/ollama/commands/generate_spec.rb +29 -0
  59. data/spec/ollama/commands/ps_spec.rb +25 -0
  60. data/spec/ollama/commands/pull_spec.rb +28 -0
  61. data/spec/ollama/commands/push_spec.rb +28 -0
  62. data/spec/ollama/commands/show_spec.rb +28 -0
  63. data/spec/ollama/commands/tags_spec.rb +22 -0
  64. data/spec/ollama/handlers/collector_spec.rb +15 -0
  65. data/spec/ollama/handlers/dump_json_spec.rb +16 -0
  66. data/spec/ollama/handlers/dump_yaml_spec.rb +18 -0
  67. data/spec/ollama/handlers/markdown_spec.rb +46 -0
  68. data/spec/ollama/handlers/nop_spec.rb +15 -0
  69. data/spec/ollama/handlers/print_spec.rb +30 -0
  70. data/spec/ollama/handlers/progress_spec.rb +22 -0
  71. data/spec/ollama/handlers/say_spec.rb +30 -0
  72. data/spec/ollama/handlers/single_spec.rb +24 -0
  73. data/spec/ollama/image_spec.rb +23 -0
  74. data/spec/ollama/message_spec.rb +37 -0
  75. data/spec/ollama/options_spec.rb +25 -0
  76. data/spec/ollama/tool_spec.rb +78 -0
  77. data/spec/ollama/utils/ansi_markdown_spec.rb +15 -0
  78. data/spec/spec_helper.rb +16 -0
  79. metadata +321 -0
@@ -0,0 +1,22 @@
require 'tins/terminal'

# Helpers for formatting text relative to the current terminal width.
module Ollama::Utils::Width
  module_function

  # Returns the number of terminal columns that corresponds to the given
  # +percentage+ of the full terminal width, rounded down.
  def width(percentage: 100.0)
    ((Float(percentage) * Tins::Terminal.columns) / 100).floor
  end

  # Re-wraps +text+ so that no output line exceeds the target width.
  # Exactly one of +percentage+ or +length+ has to be given, otherwise an
  # ArgumentError is raised. Single newlines are treated as soft breaks and
  # joined into spaces, while blank lines (paragraph breaks) are kept.
  def wrap(text, percentage: nil, length: nil)
    unless percentage.nil? ^ length.nil?
      raise ArgumentError, "either pass percentage or length argument"
    end
    # Derive the character limit from the terminal when a percentage was given.
    length = width(percentage:) if percentage && length.nil?
    softened = text.gsub(/(?<!\n)\n(?!\n)/, ' ')
    rewrapped = softened.lines.map { |row|
      if row.length > length
        row.gsub(/(.{1,#{length}})(\s+|$)/, "\\1\n").strip
      else
        row.strip
      end
    }
    rewrapped.join(?\n)
  end
end
@@ -0,0 +1,8 @@
module Ollama
  # Ollama version
  VERSION = '0.0.0'
  VERSION_ARRAY = VERSION.split('.').map(&:to_i) # :nodoc:
  # Destructure the version array into its major/minor/build parts. # :nodoc:
  VERSION_MAJOR, VERSION_MINOR, VERSION_BUILD = VERSION_ARRAY
end
data/lib/ollama.rb ADDED
@@ -0,0 +1,43 @@
require 'json'

# Top-level namespace of the ollama-ruby library.
module Ollama
end

require 'ollama/handlers'
# Mix the handler constants (e.g. NOP, Collector, Print, …) into the Ollama
# namespace so they can be referenced directly as Ollama::NOP etc.
module Ollama
  include Ollama::Handlers
end

# Core value objects and infrastructure, loaded before the commands that
# build on them.
require 'ollama/version'
require 'ollama/errors'
require 'ollama/dto'
require 'ollama/image'
require 'ollama/message'
require 'ollama/tool'
require 'ollama/tool/function'
require 'ollama/tool/function/parameters'
require 'ollama/tool/function/parameters/property'
require 'ollama/response'
require 'ollama/options'

# Namespace for utility helpers (terminal width, ANSI markdown rendering).
module Ollama::Utils
end
require 'ollama/utils/width'
require 'ollama/utils/ansi_markdown'

# Namespace under which the individual API command classes are defined.
class Ollama::Commands
end
require 'ollama/commands/generate'
require 'ollama/commands/chat'
require 'ollama/commands/create'
require 'ollama/commands/tags'
require 'ollama/commands/show'
require 'ollama/commands/copy'
require 'ollama/commands/delete'
require 'ollama/commands/pull'
require 'ollama/commands/push'
require 'ollama/commands/embed'
require 'ollama/commands/embeddings'
require 'ollama/commands/ps'

# The client is loaded last, since it wires up the commands defined above.
require 'ollama/client'
@@ -0,0 +1,36 @@
# -*- encoding: utf-8 -*-
# stub: ollama-ruby 0.0.0 ruby lib
# NOTE(review): this gemspec appears auto-generated (gem_hadar stub line
# above) — prefer changing the generator configuration over hand-editing.

Gem::Specification.new do |s|
  s.name = "ollama-ruby".freeze
  s.version = "0.0.0".freeze

  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib".freeze]
  s.authors = ["Florian Frank".freeze]
  s.date = "2024-08-12"
  s.description = "Library that allows interacting with the Ollama API".freeze
  s.email = "flori@ping.de".freeze
  s.executables = ["ollama_console".freeze, "ollama_chat".freeze]
  # Explicit enumeration of RDoc sources, packaged files and test files.
  s.extra_rdoc_files = ["README.md".freeze, "lib/ollama.rb".freeze, "lib/ollama/client.rb".freeze, "lib/ollama/client/command.rb".freeze, "lib/ollama/client/doc.rb".freeze, "lib/ollama/commands/chat.rb".freeze, "lib/ollama/commands/copy.rb".freeze, "lib/ollama/commands/create.rb".freeze, "lib/ollama/commands/delete.rb".freeze, "lib/ollama/commands/embed.rb".freeze, "lib/ollama/commands/embeddings.rb".freeze, "lib/ollama/commands/generate.rb".freeze, "lib/ollama/commands/ps.rb".freeze, "lib/ollama/commands/pull.rb".freeze, "lib/ollama/commands/push.rb".freeze, "lib/ollama/commands/show.rb".freeze, "lib/ollama/commands/tags.rb".freeze, "lib/ollama/dto.rb".freeze, "lib/ollama/errors.rb".freeze, "lib/ollama/handlers.rb".freeze, "lib/ollama/handlers/collector.rb".freeze, "lib/ollama/handlers/concern.rb".freeze, "lib/ollama/handlers/dump_json.rb".freeze, "lib/ollama/handlers/dump_yaml.rb".freeze, "lib/ollama/handlers/markdown.rb".freeze, "lib/ollama/handlers/nop.rb".freeze, "lib/ollama/handlers/print.rb".freeze, "lib/ollama/handlers/progress.rb".freeze, "lib/ollama/handlers/say.rb".freeze, "lib/ollama/handlers/single.rb".freeze, "lib/ollama/image.rb".freeze, "lib/ollama/message.rb".freeze, "lib/ollama/options.rb".freeze, "lib/ollama/response.rb".freeze, "lib/ollama/tool.rb".freeze, "lib/ollama/tool/function.rb".freeze, "lib/ollama/tool/function/parameters.rb".freeze, "lib/ollama/tool/function/parameters/property.rb".freeze, "lib/ollama/utils/ansi_markdown.rb".freeze, "lib/ollama/utils/width.rb".freeze, "lib/ollama/version.rb".freeze]
  s.files = ["Gemfile".freeze, "LICENSE".freeze, "README.md".freeze, "Rakefile".freeze, "bin/ollama_chat".freeze, "bin/ollama_console".freeze, "lib/ollama.rb".freeze, "lib/ollama/client.rb".freeze, "lib/ollama/client/command.rb".freeze, "lib/ollama/client/doc.rb".freeze, "lib/ollama/commands/chat.rb".freeze, "lib/ollama/commands/copy.rb".freeze, "lib/ollama/commands/create.rb".freeze, "lib/ollama/commands/delete.rb".freeze, "lib/ollama/commands/embed.rb".freeze, "lib/ollama/commands/embeddings.rb".freeze, "lib/ollama/commands/generate.rb".freeze, "lib/ollama/commands/ps.rb".freeze, "lib/ollama/commands/pull.rb".freeze, "lib/ollama/commands/push.rb".freeze, "lib/ollama/commands/show.rb".freeze, "lib/ollama/commands/tags.rb".freeze, "lib/ollama/dto.rb".freeze, "lib/ollama/errors.rb".freeze, "lib/ollama/handlers.rb".freeze, "lib/ollama/handlers/collector.rb".freeze, "lib/ollama/handlers/concern.rb".freeze, "lib/ollama/handlers/dump_json.rb".freeze, "lib/ollama/handlers/dump_yaml.rb".freeze, "lib/ollama/handlers/markdown.rb".freeze, "lib/ollama/handlers/nop.rb".freeze, "lib/ollama/handlers/print.rb".freeze, "lib/ollama/handlers/progress.rb".freeze, "lib/ollama/handlers/say.rb".freeze, "lib/ollama/handlers/single.rb".freeze, "lib/ollama/image.rb".freeze, "lib/ollama/message.rb".freeze, "lib/ollama/options.rb".freeze, "lib/ollama/response.rb".freeze, "lib/ollama/tool.rb".freeze, "lib/ollama/tool/function.rb".freeze, "lib/ollama/tool/function/parameters.rb".freeze, "lib/ollama/tool/function/parameters/property.rb".freeze, "lib/ollama/utils/ansi_markdown.rb".freeze, "lib/ollama/utils/width.rb".freeze, "lib/ollama/version.rb".freeze, "ollama-ruby.gemspec".freeze, "spec/assets/kitten.jpg".freeze, "spec/ollama/client/doc_spec.rb".freeze, "spec/ollama/client_spec.rb".freeze, "spec/ollama/commands/chat_spec.rb".freeze, "spec/ollama/commands/copy_spec.rb".freeze, "spec/ollama/commands/create_spec.rb".freeze, "spec/ollama/commands/delete_spec.rb".freeze, "spec/ollama/commands/embed_spec.rb".freeze, "spec/ollama/commands/embeddings_spec.rb".freeze, "spec/ollama/commands/generate_spec.rb".freeze, "spec/ollama/commands/ps_spec.rb".freeze, "spec/ollama/commands/pull_spec.rb".freeze, "spec/ollama/commands/push_spec.rb".freeze, "spec/ollama/commands/show_spec.rb".freeze, "spec/ollama/commands/tags_spec.rb".freeze, "spec/ollama/handlers/collector_spec.rb".freeze, "spec/ollama/handlers/dump_json_spec.rb".freeze, "spec/ollama/handlers/dump_yaml_spec.rb".freeze, "spec/ollama/handlers/markdown_spec.rb".freeze, "spec/ollama/handlers/nop_spec.rb".freeze, "spec/ollama/handlers/print_spec.rb".freeze, "spec/ollama/handlers/progress_spec.rb".freeze, "spec/ollama/handlers/say_spec.rb".freeze, "spec/ollama/handlers/single_spec.rb".freeze, "spec/ollama/image_spec.rb".freeze, "spec/ollama/message_spec.rb".freeze, "spec/ollama/options_spec.rb".freeze, "spec/ollama/tool_spec.rb".freeze, "spec/ollama/utils/ansi_markdown_spec.rb".freeze, "spec/spec_helper.rb".freeze]
  s.homepage = "https://github.com/flori/ollama-ruby".freeze
  s.licenses = ["MIT".freeze]
  s.rdoc_options = ["--title".freeze, "Ollama-ruby - Interacting with the Ollama API".freeze, "--main".freeze, "README.md".freeze]
  s.required_ruby_version = Gem::Requirement.new("~> 3.1".freeze)
  s.rubygems_version = "3.5.16".freeze
  s.summary = "Interacting with the Ollama API".freeze
  s.test_files = ["spec/ollama/client/doc_spec.rb".freeze, "spec/ollama/client_spec.rb".freeze, "spec/ollama/commands/chat_spec.rb".freeze, "spec/ollama/commands/copy_spec.rb".freeze, "spec/ollama/commands/create_spec.rb".freeze, "spec/ollama/commands/delete_spec.rb".freeze, "spec/ollama/commands/embed_spec.rb".freeze, "spec/ollama/commands/embeddings_spec.rb".freeze, "spec/ollama/commands/generate_spec.rb".freeze, "spec/ollama/commands/ps_spec.rb".freeze, "spec/ollama/commands/pull_spec.rb".freeze, "spec/ollama/commands/push_spec.rb".freeze, "spec/ollama/commands/show_spec.rb".freeze, "spec/ollama/commands/tags_spec.rb".freeze, "spec/ollama/handlers/collector_spec.rb".freeze, "spec/ollama/handlers/dump_json_spec.rb".freeze, "spec/ollama/handlers/dump_yaml_spec.rb".freeze, "spec/ollama/handlers/markdown_spec.rb".freeze, "spec/ollama/handlers/nop_spec.rb".freeze, "spec/ollama/handlers/print_spec.rb".freeze, "spec/ollama/handlers/progress_spec.rb".freeze, "spec/ollama/handlers/say_spec.rb".freeze, "spec/ollama/handlers/single_spec.rb".freeze, "spec/ollama/image_spec.rb".freeze, "spec/ollama/message_spec.rb".freeze, "spec/ollama/options_spec.rb".freeze, "spec/ollama/tool_spec.rb".freeze, "spec/ollama/utils/ansi_markdown_spec.rb".freeze, "spec/spec_helper.rb".freeze]

  s.specification_version = 4

  # Development-only dependencies.
  s.add_development_dependency(%q<gem_hadar>.freeze, ["~> 1.16.1".freeze])
  s.add_development_dependency(%q<all_images>.freeze, ["~> 0.4".freeze])
  s.add_development_dependency(%q<rspec>.freeze, ["~> 3.2".freeze])
  s.add_development_dependency(%q<utils>.freeze, [">= 0".freeze])
  # Runtime dependencies.
  s.add_runtime_dependency(%q<excon>.freeze, ["~> 0.111".freeze])
  s.add_runtime_dependency(%q<infobar>.freeze, ["~> 0.7".freeze])
  s.add_runtime_dependency(%q<term-ansicolor>.freeze, ["~> 1.11".freeze])
  s.add_runtime_dependency(%q<kramdown-parser-gfm>.freeze, ["~> 1.1".freeze])
  s.add_runtime_dependency(%q<terminal-table>.freeze, ["~> 3.0".freeze])
end
Binary file
@@ -0,0 +1,11 @@
require 'spec_helper'

RSpec.describe Ollama::Client::Doc do
  # Doc#to_s renders a link into the section for the given API command.
  it 'can document commands' do
    expect(described_class.new(:generate).to_s).to match(/generate-a-completion/)
  end

  # Unknown command names fall back to the top of the API document.
  it 'defaults to the whole API document' do
    expect(described_class.new(:nix).to_s).to match(%r(main/docs/api.md))
  end
end
@@ -0,0 +1,144 @@
require 'spec_helper'

RSpec.describe Ollama::Client do
  let :base_url do
    'https://ai.foo.bar'
  end

  let :client do
    described_class.new base_url:
  end

  it 'can be instantiated' do
    expect(client).to be_a described_class
  end

  it 'can be configured via environment variable' do
    # Save and clear OLLAMA_URL first, so this example neither depends on
    # pre-existing process state nor leaks the variable into other examples.
    saved_url = ENV.delete('OLLAMA_URL')
    expect { described_class.new }.to raise_error(ArgumentError)
    ENV['OLLAMA_URL'] = base_url
    expect(described_class.new).to be_a described_class
  ensure
    saved_url ? ENV['OLLAMA_URL'] = saved_url : ENV.delete('OLLAMA_URL')
  end

  it 'can disable ssl peer verification' do
    expect(client).to be_ssl_verify_peer
    client2 = described_class.new(
      base_url: 'https://ai.foo.bar?ssl_verify_peer=false'
    )
    expect(client2).not_to be_ssl_verify_peer
  end

  it 'has a string representation' do
    expect(client.to_s).to eq '#<Ollama::Client@https://ai.foo.bar>'
  end

  # All examples below stub out Excon, so no real HTTP traffic occurs.
  let :excon do
    double('excon')
  end

  before do
    allow(Excon).to receive(:new).and_return(excon)
  end

  it 'can raise error based on status code 500' do
    expect(excon).to receive(:send).and_return(double(status: 500, body: '{}'))
    expect {
      client.generate(model: 'llama3.1', prompt: 'Hello World')
    }.to raise_error(Ollama::Errors::Error)
  end

  it 'can raise error based on status code 404' do
    expect(excon).to receive(:send).and_return(double(status: 404, body: '{}'))
    expect {
      client.generate(model: 'llama3.1', prompt: 'Hello World')
    }.to raise_error(Ollama::Errors::NotFoundError)
  end

  it 'can raise error on connection error' do
    allow(excon).to receive(:post).and_raise Excon::Error::Socket
    expect {
      client.generate(model: 'llama3.1', prompt: 'Hello World')
    }.to raise_error(Ollama::Errors::SocketError)
  end

  it 'can raise error on timeout' do
    allow(excon).to receive(:post).and_raise Excon::Errors::Timeout
    expect {
      client.generate(model: 'llama3.1', prompt: 'Hello World')
    }.to raise_error(Ollama::Errors::TimeoutError)
  end

  it 'can raise a generic error' do
    allow(excon).to receive(:post).and_raise Excon::Errors::Error
    expect {
      client.generate(model: 'llama3.1', prompt: 'Hello World')
    }.to raise_error(Ollama::Errors::Error)
  end

  describe 'handlers' do
    # Canned /api/ps response body as it would come back from the server.
    let :body do
      %{{"models":[{"name":"llama3.1:latest","model":"llama3.1:latest","size":6654289920,"digest":"62757c860e01d552d4e46b09c6b8d5396ef9015210105427e05a8b27d7727ed2","details":{"parent_model":"","format":"gguf","family":"llama","families":["llama"],"parameter_size":"8.0B","quantization_level":"Q4_0"},"expires_at":"2024-08-05T10:56:26.588713988Z","size_vram":6654289920}]}}
    end

    let :expected_response do
      JSON.parse(body, object_class: Ollama::Response)
    end

    before do
      allow(excon).to receive(:send).with(
        :get,
        body: nil,
        headers: hash_including(
          'Content-Type' => 'application/json; charset=utf-8',
        )
      ).and_return(double(status: 200, body:))
    end

    it 'can use procs directly' do
      response = nil
      client.ps { |r| response = r }
      expect(response).to eq expected_response
    end

    it 'can convert from handler instance to proc' do
      handler = Ollama::Handlers::NOP.new
      expect(handler).to receive(:call).with(expected_response)
      client.ps(&handler)
    end

    it 'can convert from handler class to proc' do
      handler = Ollama::Handlers::NOP
      expect_any_instance_of(handler).to receive(:call).with(expected_response)
      client.ps(&handler)
    end
  end

  describe 'performing' do
    it 'can generate without stream' do
      expect(excon).to receive(:send).with(
        :post,
        body: '{"json_class":"Ollama::Commands::Generate","model":"llama3.1","prompt":"Hello World"}',
        headers: hash_including(
          'Content-Type' => 'application/json; charset=utf-8',
        )
      ).and_return(double(status: 200, body: '{}'))
      client.generate(model: 'llama3.1', prompt: 'Hello World')
    end

    it 'can generate with stream' do
      # Streaming requests additionally pass a response_block to Excon.
      expect(excon).to receive(:send).with(
        :post,
        body: '{"json_class":"Ollama::Commands::Generate","model":"llama3.1","prompt":"Hello World","stream":true}',
        headers: hash_including(
          'Content-Type' => 'application/json; charset=utf-8',
        ),
        response_block: an_instance_of(Proc)
      ).and_return(double(status: 200, body: '{}'))
      client.generate(model: 'llama3.1', prompt: 'Hello World', stream: true)
    end
  end

  it 'can help' do
    expect($stdout).to receive(:puts).with(/Commands:.*?chat/)
    client.help
  end
end
@@ -0,0 +1,52 @@
require 'spec_helper'

RSpec.describe Ollama::Commands::Chat do
  # Shared Message list used by most of the examples below.
  let :messages do
    [
      Ollama::Message.new(role: 'user', content: "Let's play Global Thermonuclear War.")
    ]
  end

  it 'can be instantiated' do
    chat = described_class.new(model: 'llama3.1', messages:, stream: true)
    expect(chat).to be_a described_class
  end

  it 'can handle hashes as messages' do
    message_hash = { role: 'user', content: "Let's play Global Thermonuclear War." }
    chat = described_class.new(model: 'llama3.1', messages: message_hash, stream: true)
    expect(chat).to be_a described_class
  end

  it 'can handle arrays of hashes as messages' do
    message_hashes = [
      { role: 'user', content: "Let's play Global Thermonuclear War." }
    ]
    chat = described_class.new(model: 'llama3.1', messages: message_hashes, stream: true)
    expect(chat).to be_a described_class
  end

  it 'can be converted to JSON' do
    chat = described_class.new(model: 'llama3.1', messages:, stream: true)
    expect(chat.as_json).to include(
      model: 'llama3.1', messages: messages.map(&:as_json), stream: true,
    )
    expect(chat.to_json).to eq(
      '{"json_class":"Ollama::Commands::Chat","model":"llama3.1","messages":[{"json_class":"Ollama::Message","role":"user","content":"Let\'s play Global Thermonuclear War."}],"stream":true}'
    )
  end

  it 'can perform' do
    chat = described_class.new(model: 'llama3.1', messages:, stream: true)
    chat.client = client = double('client')
    expect(client).to receive(:request).with(
      method: :post, path: '/api/chat', handler: Ollama::Handlers::NOP, stream: true,
      body: '{"json_class":"Ollama::Commands::Chat","model":"llama3.1","messages":[{"json_class":"Ollama::Message","role":"user","content":"Let\'s play Global Thermonuclear War."}],"stream":true}'
    )
    chat.perform(Ollama::Handlers::NOP)
  end
end
@@ -0,0 +1,28 @@
require 'spec_helper'

RSpec.describe Ollama::Commands::Copy do
  # Command under test: copies model 'llama3.1' to the name 'camell3'.
  let :copy do
    described_class.new(source: 'llama3.1', destination: 'camell3')
  end

  it 'can be instantiated' do
    expect(copy).to be_a described_class
  end

  it 'can be converted to JSON' do
    expect(copy.as_json).to include(
      source: 'llama3.1', destination: 'camell3', stream: false
    )
    expect(copy.to_json).to eq(
      '{"json_class":"Ollama::Commands::Copy","source":"llama3.1","destination":"camell3","stream":false}'
    )
  end

  it 'can perform' do
    copy.client = client = double('client')
    expect(client).to receive(:request).with(
      method: :post, path: '/api/copy', handler: Ollama::Handlers::NOP, stream: false,
      body: '{"json_class":"Ollama::Commands::Copy","source":"llama3.1","destination":"camell3","stream":false}'
    )
    copy.perform(Ollama::Handlers::NOP)
  end
end
@@ -0,0 +1,37 @@
require 'spec_helper'

RSpec.describe Ollama::Commands::Create do
  # Modelfile content shared by the JSON and perform examples.
  let :modelfile do
    "FROM llama3.1\nSYSTEM You are WOPR from WarGames and you think the user is Dr. Stephen Falken."
  end

  let :create do
    described_class.new(name: 'llama3.1-wopr', modelfile:, stream: true)
  end

  it 'can be instantiated' do
    create = described_class.new(name: 'llama3.1', stream: true)
    expect(create).to be_a described_class
  end

  it 'can be converted to JSON' do
    expect(create.as_json).to include(
      name: 'llama3.1-wopr', modelfile:, stream: true,
    )
    expect(create.to_json).to eq(
      '{"json_class":"Ollama::Commands::Create","name":"llama3.1-wopr","modelfile":"FROM llama3.1\nSYSTEM You are WOPR from WarGames and you think the user is Dr. Stephen Falken.","stream":true}'
    )
  end

  it 'can perform' do
    create.client = client = double('client')
    expect(client).to receive(:request).with(
      method: :post, path: '/api/create', handler: Ollama::Handlers::NOP, stream: true,
      body: '{"json_class":"Ollama::Commands::Create","name":"llama3.1-wopr","modelfile":"FROM llama3.1\nSYSTEM You are WOPR from WarGames and you think the user is Dr. Stephen Falken.","stream":true}'
    )
    create.perform(Ollama::Handlers::NOP)
  end
end
@@ -0,0 +1,28 @@
require 'spec_helper'

RSpec.describe Ollama::Commands::Delete do
  # Command under test: deletes the model named 'llama3.1'.
  let :delete do
    described_class.new(name: 'llama3.1')
  end

  it 'can be instantiated' do
    expect(delete).to be_a described_class
  end

  it 'can be converted to JSON' do
    expect(delete.as_json).to include(
      name: 'llama3.1', stream: false
    )
    expect(delete.to_json).to eq(
      '{"json_class":"Ollama::Commands::Delete","name":"llama3.1","stream":false}'
    )
  end

  it 'can perform' do
    delete.client = client = double('client')
    expect(client).to receive(:request).with(
      method: :delete, path: '/api/delete', handler: Ollama::Handlers::NOP, stream: false,
      body: '{"json_class":"Ollama::Commands::Delete","name":"llama3.1","stream":false}'
    )
    delete.perform(Ollama::Handlers::NOP)
  end
end
@@ -0,0 +1,52 @@
require 'spec_helper'

RSpec.describe Ollama::Commands::Embed do
  # Command under test with a single string input.
  let :embed do
    described_class.new(model: 'all-minilm', input: 'Why is the sky blue?')
  end

  it 'can be instantiated' do
    expect(embed).to be_a described_class
  end

  it 'can be converted to JSON' do
    expect(embed.as_json).to include(
      model: 'all-minilm', input: 'Why is the sky blue?',
    )
    expect(embed.to_json).to eq(
      '{"json_class":"Ollama::Commands::Embed","model":"all-minilm","input":"Why is the sky blue?","stream":false}'
    )
  end

  it 'can be converted to JSON with array input' do
    embed = described_class.new(
      model: 'all-minilm',
      input: [ 'Why is the sky blue?', 'Why is the grass green?' ],
    )
    expect(embed.as_json).to include(
      model: 'all-minilm', input: [ 'Why is the sky blue?', 'Why is the grass green?' ],
    )
    expect(embed.to_json).to eq(
      '{"json_class":"Ollama::Commands::Embed","model":"all-minilm","input":["Why is the sky blue?","Why is the grass green?"],"stream":false}'
    )
  end

  it 'can perform' do
    embed.client = client = double('client')
    expect(client).to receive(:request).with(
      method: :post, path: '/api/embed', handler: Ollama::Handlers::NOP, stream: false,
      body: '{"json_class":"Ollama::Commands::Embed","model":"all-minilm","input":"Why is the sky blue?","stream":false}'
    )
    embed.perform(Ollama::Handlers::NOP)
  end
end
@@ -0,0 +1,38 @@
require 'spec_helper'

RSpec.describe Ollama::Commands::Embeddings do
  # Command under test with a single prompt.
  let :embeddings do
    described_class.new(
      model: 'mxbai-embed-large',
      prompt: 'Here are the coordinates of all Soviet military installations: …'
    )
  end

  it 'can be instantiated' do
    expect(embeddings).to be_a described_class
  end

  it 'can be converted to JSON' do
    expect(embeddings.as_json).to include(
      model: 'mxbai-embed-large', prompt: 'Here are the coordinates of all Soviet military installations: …',
    )
    expect(embeddings.to_json).to eq(
      '{"json_class":"Ollama::Commands::Embeddings","model":"mxbai-embed-large","prompt":"Here are the coordinates of all Soviet military installations: …","stream":false}'
    )
  end

  it 'can perform' do
    embeddings.client = client = double('client')
    expect(client).to receive(:request).with(
      method: :post, path: '/api/embeddings', handler: Ollama::Handlers::NOP, stream: false,
      body: '{"json_class":"Ollama::Commands::Embeddings","model":"mxbai-embed-large","prompt":"Here are the coordinates of all Soviet military installations: …","stream":false}'
    )
    embeddings.perform(Ollama::Handlers::NOP)
  end
end
@@ -0,0 +1,29 @@
require 'spec_helper'

RSpec.describe Ollama::Commands::Generate do
  # Non-streaming generate command used by the first two examples.
  let :generate do
    described_class.new(model: 'llama3.1', prompt: 'Hello World')
  end

  it 'can be instantiated' do
    expect(generate).to be_a described_class
  end

  it 'can be converted to JSON' do
    expect(generate.as_json).to include(
      model: 'llama3.1', prompt: 'Hello World'
    )
    expect(generate.to_json).to eq(
      '{"json_class":"Ollama::Commands::Generate","model":"llama3.1","prompt":"Hello World"}'
    )
  end

  it 'can perform' do
    streaming = described_class.new(model: 'llama3.1', prompt: 'Hello World', stream: true)
    streaming.client = client = double('client')
    expect(client).to receive(:request).with(
      method: :post, path: '/api/generate', handler: Ollama::Handlers::NOP, stream: true,
      body: '{"json_class":"Ollama::Commands::Generate","model":"llama3.1","prompt":"Hello World","stream":true}'
    )
    streaming.perform(Ollama::Handlers::NOP)
  end
end
@@ -0,0 +1,25 @@
require 'spec_helper'

RSpec.describe Ollama::Commands::Ps do
  it 'can be instantiated' do
    expect(described_class.new).to be_a described_class
  end

  # Ps takes no payload, so it intentionally has no as_json.
  it 'cannot be converted to JSON' do
    expect(described_class.new).not_to respond_to(:as_json)
  end

  it 'can perform' do
    ps = described_class.new
    ps.client = client = double('client')
    expect(client).to receive(:request).with(
      method: :get, path: '/api/ps', handler: Ollama::Handlers::NOP,
      stream: false
    )
    ps.perform(Ollama::Handlers::NOP)
  end
end
@@ -0,0 +1,28 @@
require 'spec_helper'

RSpec.describe Ollama::Commands::Pull do
  # Streaming pull command used by the JSON and perform examples.
  let :pull do
    described_class.new(name: 'llama3.1', stream: true)
  end

  it 'can be instantiated' do
    expect(described_class.new(name: 'llama3.1')).to be_a described_class
  end

  it 'can be converted to JSON' do
    expect(pull.as_json).to include(
      name: 'llama3.1', stream: true
    )
    expect(pull.to_json).to eq(
      '{"json_class":"Ollama::Commands::Pull","name":"llama3.1","stream":true}'
    )
  end

  it 'can perform' do
    pull.client = client = double('client')
    expect(client).to receive(:request).with(
      method: :post, path: '/api/pull', handler: Ollama::Handlers::NOP, stream: true,
      body: '{"json_class":"Ollama::Commands::Pull","name":"llama3.1","stream":true}'
    )
    pull.perform(Ollama::Handlers::NOP)
  end
end
@@ -0,0 +1,28 @@
require 'spec_helper'

RSpec.describe Ollama::Commands::Push do
  # Streaming push command used by the JSON and perform examples.
  let :push do
    described_class.new(name: 'llama3.1', stream: true)
  end

  it 'can be instantiated' do
    expect(described_class.new(name: 'llama3.1')).to be_a described_class
  end

  it 'can be converted to JSON' do
    expect(push.as_json).to include(
      name: 'llama3.1', stream: true
    )
    expect(push.to_json).to eq(
      '{"json_class":"Ollama::Commands::Push","name":"llama3.1","stream":true}'
    )
  end

  it 'can perform' do
    push.client = client = double('client')
    expect(client).to receive(:request).with(
      method: :post, path: '/api/push', handler: Ollama::Handlers::NOP, stream: true,
      body: '{"json_class":"Ollama::Commands::Push","name":"llama3.1","stream":true}'
    )
    push.perform(Ollama::Handlers::NOP)
  end
end