ollama-ruby 0.0.0

Files changed (79)
  1. checksums.yaml +7 -0
  2. data/Gemfile +5 -0
  3. data/LICENSE +19 -0
  4. data/README.md +430 -0
  5. data/Rakefile +35 -0
  6. data/bin/ollama_chat +258 -0
  7. data/bin/ollama_console +20 -0
  8. data/lib/ollama/client/command.rb +25 -0
  9. data/lib/ollama/client/doc.rb +26 -0
  10. data/lib/ollama/client.rb +137 -0
  11. data/lib/ollama/commands/chat.rb +21 -0
  12. data/lib/ollama/commands/copy.rb +19 -0
  13. data/lib/ollama/commands/create.rb +20 -0
  14. data/lib/ollama/commands/delete.rb +19 -0
  15. data/lib/ollama/commands/embed.rb +21 -0
  16. data/lib/ollama/commands/embeddings.rb +20 -0
  17. data/lib/ollama/commands/generate.rb +21 -0
  18. data/lib/ollama/commands/ps.rb +19 -0
  19. data/lib/ollama/commands/pull.rb +19 -0
  20. data/lib/ollama/commands/push.rb +19 -0
  21. data/lib/ollama/commands/show.rb +20 -0
  22. data/lib/ollama/commands/tags.rb +19 -0
  23. data/lib/ollama/dto.rb +42 -0
  24. data/lib/ollama/errors.rb +15 -0
  25. data/lib/ollama/handlers/collector.rb +17 -0
  26. data/lib/ollama/handlers/concern.rb +31 -0
  27. data/lib/ollama/handlers/dump_json.rb +8 -0
  28. data/lib/ollama/handlers/dump_yaml.rb +8 -0
  29. data/lib/ollama/handlers/markdown.rb +22 -0
  30. data/lib/ollama/handlers/nop.rb +7 -0
  31. data/lib/ollama/handlers/print.rb +16 -0
  32. data/lib/ollama/handlers/progress.rb +36 -0
  33. data/lib/ollama/handlers/say.rb +19 -0
  34. data/lib/ollama/handlers/single.rb +17 -0
  35. data/lib/ollama/handlers.rb +13 -0
  36. data/lib/ollama/image.rb +31 -0
  37. data/lib/ollama/message.rb +9 -0
  38. data/lib/ollama/options.rb +68 -0
  39. data/lib/ollama/response.rb +5 -0
  40. data/lib/ollama/tool/function/parameters/property.rb +9 -0
  41. data/lib/ollama/tool/function/parameters.rb +10 -0
  42. data/lib/ollama/tool/function.rb +11 -0
  43. data/lib/ollama/tool.rb +9 -0
  44. data/lib/ollama/utils/ansi_markdown.rb +217 -0
  45. data/lib/ollama/utils/width.rb +22 -0
  46. data/lib/ollama/version.rb +8 -0
  47. data/lib/ollama.rb +43 -0
  48. data/ollama-ruby.gemspec +36 -0
  49. data/spec/assets/kitten.jpg +0 -0
  50. data/spec/ollama/client/doc_spec.rb +11 -0
  51. data/spec/ollama/client_spec.rb +144 -0
  52. data/spec/ollama/commands/chat_spec.rb +52 -0
  53. data/spec/ollama/commands/copy_spec.rb +28 -0
  54. data/spec/ollama/commands/create_spec.rb +37 -0
  55. data/spec/ollama/commands/delete_spec.rb +28 -0
  56. data/spec/ollama/commands/embed_spec.rb +52 -0
  57. data/spec/ollama/commands/embeddings_spec.rb +38 -0
  58. data/spec/ollama/commands/generate_spec.rb +29 -0
  59. data/spec/ollama/commands/ps_spec.rb +25 -0
  60. data/spec/ollama/commands/pull_spec.rb +28 -0
  61. data/spec/ollama/commands/push_spec.rb +28 -0
  62. data/spec/ollama/commands/show_spec.rb +28 -0
  63. data/spec/ollama/commands/tags_spec.rb +22 -0
  64. data/spec/ollama/handlers/collector_spec.rb +15 -0
  65. data/spec/ollama/handlers/dump_json_spec.rb +16 -0
  66. data/spec/ollama/handlers/dump_yaml_spec.rb +18 -0
  67. data/spec/ollama/handlers/markdown_spec.rb +46 -0
  68. data/spec/ollama/handlers/nop_spec.rb +15 -0
  69. data/spec/ollama/handlers/print_spec.rb +30 -0
  70. data/spec/ollama/handlers/progress_spec.rb +22 -0
  71. data/spec/ollama/handlers/say_spec.rb +30 -0
  72. data/spec/ollama/handlers/single_spec.rb +24 -0
  73. data/spec/ollama/image_spec.rb +23 -0
  74. data/spec/ollama/message_spec.rb +37 -0
  75. data/spec/ollama/options_spec.rb +25 -0
  76. data/spec/ollama/tool_spec.rb +78 -0
  77. data/spec/ollama/utils/ansi_markdown_spec.rb +15 -0
  78. data/spec/spec_helper.rb +16 -0
  79. metadata +321 -0
data/bin/ollama_chat ADDED
@@ -0,0 +1,258 @@
+ #!/usr/bin/env ruby
+
+ require 'ollama'
+ include Ollama
+ require 'term/ansicolor'
+ include Term::ANSIColor
+ require 'tins/go'
+ include Tins::GO
+ require 'reline'
+
+ class FollowChat
+   include Ollama::Handlers::Concern
+   include Term::ANSIColor
+
+   def initialize(messages:, markdown: false, voice: nil, output: $stdout)
+     super(output:)
+     @output.sync = true
+     @markdown = markdown
+     @say = voice ? Ollama::Handlers::Say.new(voice:) : NOP
+     @messages = messages
+     @user = nil
+   end
+
+   def call(response)
+     ENV['DEBUG'].to_i == 1 and jj response
+     if response&.message&.role == 'assistant'
+       if @messages.last.role != 'assistant'
+         @messages << Ollama::Message.new(role: 'assistant', content: '')
+         @user = message_type(@messages.last.images) + " " +
+           bold { color(111) { 'assistant:' } }
+         puts @user unless @markdown
+       end
+       content = response.message&.content
+       @messages.last.content << content
+       if @markdown and @messages.last.content.present?
+         markdown_content = Ollama::Utils::ANSIMarkdown.parse(@messages.last.content)
+         @output.print clear_screen, move_home, @user, ?\n, markdown_content
+       else
+         @output.print content
+       end
+       @say.call(response)
+     end
+     response.done and @output.puts
+     self
+   end
+ end
+
+ def pull_model_unless_present(client, model, options)
+   retried = false
+   begin
+     client.show(name: model) { |response|
+       puts green {
+         "Model with architecture #{response.model_info['general.architecture']} found."
+       }
+       if options
+         puts "Model options are:"
+         jj options
+       end
+       if system = response.system
+         puts "Configured model system prompt is:\n#{italic { system }}"
+         return system
+       else
+         return
+       end
+     }
+   rescue Errors::NotFoundError
+     puts "Model #{model} not found, attempting to pull it now…"
+     client.pull(name: model)
+     if retried
+       exit 1
+     else
+       retried = true
+       retry
+     end
+   rescue Errors::Error => e
+     warn "Caught #{e.class}: #{e} => Exiting."
+     exit 1
+   end
+ end
+
+ def load_conversation(filename)
+   unless File.exist?(filename)
+     puts "File #{filename} doesn't exist. Choose another filename."
+     return
+   end
+   File.open(filename, 'r') do |output|
+     return JSON(output.read, create_additions: true)
+   end
+ end
+
+ def save_conversation(filename, messages)
+   if File.exist?(filename)
+     puts "File #{filename} already exists. Choose another filename."
+     return
+   end
+   File.open(filename, 'w') do |output|
+     output.puts JSON(messages)
+   end
+ end
+
+ def message_type(images)
+   if images.present?
+     ?📸
+   else
+     ?📨
+   end
+ end
+
+ def list_conversation(messages, markdown)
+   messages.each do |m|
+     role_color = case m.role
+                  when 'user' then 172
+                  when 'assistant' then 111
+                  when 'system' then 213
+                  else 210
+                  end
+     content = if markdown && m.content.present?
+                 Ollama::Utils::ANSIMarkdown.parse(m.content)
+               else
+                 m.content
+               end
+     puts message_type(m.images) + " " +
+       bold { color(role_color) { m.role } } + ":\n#{content}"
+   end
+ end
+
+ def display_chat_help
+   puts <<~end
+     /paste to paste content
+     /list list the messages of the conversation
+     /clear clear the conversation messages
+     /pop n pop the last n message, defaults to 1
+     /regenerate the last answer message
+     /save filename store conversation messages
+     /load filename load conversation messages
+     /image filename attach image to the next message
+     /quit to quit.
+     /help to view this help.
+   end
+ end
+
+ def usage
+   puts <<~end
+     #{File.basename($0)} [OPTIONS]
+
+     -u URL the ollama base url, OLLAMA_URL
+     -m MODEL the ollama model to chat with, OLLAMA_MODEL
+     -M OPTIONS the model options as JSON file, see Ollama::Options
+     -s SYSTEM the system prompt to use as a file
+     -c CHAT a saved chat conversation to load
+     -v VOICE use VOICE (e. g. Samantha) to speak with say command
+     -d use markdown to display the chat messages
+     -h this help
+
+   end
+   exit 0
+ end
+
+ opts = go 'u:m:M:s:c:v:dh'
+
+ opts[?h] and usage
+
+ base_url = opts[?u] || ENV['OLLAMA_URL'] || 'http://%s' % ENV.fetch('OLLAMA_HOST')
+ model = opts[?m] || ENV.fetch('OLLAMA_MODEL', 'llama3.1')
+ options = if options_file = opts[?M]
+   JSON(File.read(options_file), create_additions: true)
+ end
+
+ client = Client.new(base_url:)
+
+ model_system = pull_model_unless_present(client, model, options)
+
+ puts green { "Connecting to #{model}@#{base_url} now…" }
+
+ messages = []
+
+ if opts[?c]
+   messages.concat load_conversation(opts[?c])
+ else
+   system = nil
+   if system_prompt_file = opts[?s]
+     system = File.read(system_prompt_file)
+   end
+   system ||= ENV['OLLAMA_SYSTEM']
+
+   if system
+     messages << Message.new(role: 'system', content: system)
+     puts "Configured system prompt is:\n#{italic { system }}"
+   elsif model_system.present?
+     puts "Using model system prompt."
+   end
+ end
+
+ puts "Type /help to display the chat help."
+
+ images = nil
+ loop do
+   prompt = bold { color(172) { message_type(images) + " user" } } + bold { "> " }
+   case content = Reline.readline(prompt, true)&.chomp
+   when %r(^/paste$)
+     puts bold { "Paste your content and then press C-d!" }
+     content = STDIN.read
+   when %r(^/quit$)
+     puts "Goodbye."
+     exit 0
+   when %r(^/list$)
+     list_conversation(messages, opts[?d])
+     next
+   when %r(^/clear$)
+     messages.clear
+     puts "Cleared messages."
+     next
+   when %r(^/pop\s*(\d*)$)
+     n = $1.to_i.clamp(1, Float::INFINITY)
+     messages.pop(n)
+     puts "Popped the last #{n} messages."
+     next
+   when %r(^/regenerate$)
+     if content = messages[-2]&.content
+       images = messages[-2]&.images
+       messages.pop(2)
+     else
+       puts "Not enough messages in this conversation."
+       redo
+     end
+   when %r(^/save (.+)$)
+     save_conversation($1, messages)
+     puts "Saved conversation to #$1."
+     next
+   when %r(^/load (.+)$)
+     messages = load_conversation($1)
+     puts "Loaded conversation from #$1."
+     next
+   when %r(^/image (.+)$)
+     filename = File.expand_path($1)
+     if File.exist?(filename)
+       images = Image.for_filename(filename)
+       puts "Attached image #$1 to the next message."
+       redo
+     else
+       puts "Filename #$1 doesn't exist. Choose another one."
+       next
+     end
+   when %r(^/help$)
+     display_chat_help
+     next
+   when nil
+     puts "Type /quit to quit."
+     next
+   end
+   messages << Message.new(role: 'user', content:, images:)
+   handler = FollowChat.new(messages:, markdown: opts[?d], voice: opts[?v])
+   client.chat(model:, messages:, options:, stream: true, &handler)
+   ENV['DEBUG'].to_i == 1 and jj messages
+   images = nil
+ rescue Interrupt
+   puts "Type /quit to quit."
+ end
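The script above wires everything together: it collects the user's input into an array of Ollama::Message objects and streams each reply through the FollowChat handler. As a rough sketch (not part of the gem's files), the same streaming round trip can be done directly against the client API; the URL and model name are assumptions:

    require 'ollama'

    # Assumed local endpoint and model; adjust to your Ollama instance.
    client   = Ollama::Client.new(base_url: 'http://localhost:11434')
    messages = [ Ollama::Message.new(role: 'user', content: 'Why is the sky blue?') ]

    # Stream the assistant reply chunk by chunk, as FollowChat does above.
    client.chat(model: 'llama3.1', messages:, stream: true) do |response|
      print response.message&.content
      puts if response.done
    end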
data/bin/ollama_console ADDED
@@ -0,0 +1,20 @@
+ #!/usr/bin/env ruby
+
+ require 'ollama'
+ include Ollama
+ require 'irb'
+ require 'irb/history'
+
+ base_url = ENV['OLLAMA_URL'] || 'http://%s' % ENV.fetch('OLLAMA_HOST')
+ client = Client.new(base_url:)
+ IRB.setup nil
+ IRB.conf[:MAIN_CONTEXT] = IRB::Irb.new.context
+ IRB.conf[:HISTORY_FILE] = File.join(ENV.fetch('HOME'), '.ollama_console-history')
+ IRB.conf[:SAVE_HISTORY] = 1000
+ require 'irb/ext/multi-irb'
+ if io = IRB.conf[:MAIN_CONTEXT].io and io.support_history_saving?
+   io.load_history
+   at_exit { io.save_history }
+ end
+ client.help
+ IRB.irb nil, client
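Because IRB.irb nil, client makes the client the session's main object, the generated command methods can be typed directly at the ollama_console prompt. A hypothetical session might look like this (the model name is an assumption):

    # At the ollama_console prompt, `self` is the Ollama::Client:
    help                                         # prints "Commands: chat,copy,create,…"
    ps                                           # list running models via /api/ps
    generate(model: 'llama3.1', prompt: 'Hi!')   # one-shot completion, Single handler by default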
data/lib/ollama/client/command.rb ADDED
@@ -0,0 +1,25 @@
+ module Ollama::Client::Command
+   extend Tins::Concern
+
+   module ClassMethods
+     # Create Command +name+, if +stream+ was true, set stream_handler as
+     # default, otherwise default_handler.
+     def command(name, default_handler:, stream_handler: nil)
+       klass = Ollama::Commands.const_get(name.to_s.camelize)
+       doc Ollama::Client::Doc.new(name)
+       define_method(name) do |**parameters, &handler|
+         instance = klass.new(**parameters)
+         instance.client = self
+         unless handler
+           instance.stream and stream_handler and
+             handler ||= stream_handler
+           handler ||= default_handler
+         end
+         handler.is_a?(Class) and handler = handler.new
+         instance.perform(handler)
+         handler.result if handler.respond_to?(:result)
+       end
+       self
+     end
+   end
+ end
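The command class method is a small code-generation DSL: it resolves the matching Ollama::Commands class, attaches a Doc annotation, and defines a client method that instantiates the command, picks a handler (an explicit block, else the stream handler when the command streams, else the default), and performs the request. Roughly, the declaration command(:chat, default_handler: Single, stream_handler: Collector) used in client.rb below expands to something like this hand-written sketch:

    def chat(**parameters, &handler)
      instance = Ollama::Commands::Chat.new(**parameters)
      instance.client = self
      handler ||= instance.stream ? Collector : Single  # stream handler only when streaming
      handler = handler.new if handler.is_a?(Class)     # handler classes are instantiated on demand
      instance.perform(handler)
      handler.result if handler.respond_to?(:result)
    end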
data/lib/ollama/client/doc.rb ADDED
@@ -0,0 +1,26 @@
+ require 'term/ansicolor'
+
+ class Ollama::Client::Doc
+   include Term::ANSIColor
+
+   def initialize(name)
+     @name = name
+     @url = Hash.new('https://github.com/ollama/ollama/blob/main/docs/api.md').merge(
+       generate: 'https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-completion',
+       chat: 'https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion',
+       create: 'https://github.com/ollama/ollama/blob/main/docs/api.md#create-a-model',
+       tags: 'https://github.com/ollama/ollama/blob/main/docs/api.md#list-local-models',
+       show: 'https://github.com/ollama/ollama/blob/main/docs/api.md#show-model-information',
+       copy: 'https://github.com/ollama/ollama/blob/main/docs/api.md#copy-a-model',
+       delete: 'https://github.com/ollama/ollama/blob/main/docs/api.md#delete-a-model',
+       pull: 'https://github.com/ollama/ollama/blob/main/docs/api.md#pull-a-model',
+       push: 'https://github.com/ollama/ollama/blob/main/docs/api.md#push-a-model',
+       embeddings: 'https://github.com/ollama/ollama/blob/main/docs/api.md#generate-embeddings',
+       ps: 'https://github.com/ollama/ollama/blob/main/docs/api.md#list-running-models',
+     )[name]
+   end
+
+   def to_s
+     (hyperlink(@url) { @name } if @url).to_s
+   end
+ end
data/lib/ollama/client.rb ADDED
@@ -0,0 +1,137 @@
+ require 'tins/xt/string_camelize'
+ require 'tins/annotate'
+ require 'excon'
+
+ class Ollama::Client
+ end
+ require 'ollama/client/doc'
+ require 'ollama/client/command'
+
+ class Ollama::Client
+   include Tins::Annotate
+   include Ollama::Handlers
+   include Ollama::Client::Command
+
+   annotate :doc
+
+   def initialize(base_url: nil, output: $stdout, connect_timeout: nil, read_timeout: nil, write_timeout: nil, debug: nil)
+     base_url.nil? and base_url = ENV.fetch('OLLAMA_URL') do
+       raise ArgumentError,
+         'missing :base_url parameter or OLLAMA_URL environment variable'
+     end
+     base_url.is_a? URI or base_url = URI.parse(base_url)
+     base_url.is_a?(URI::HTTP) || base_url.is_a?(URI::HTTPS) or
+       raise ArgumentError, "require #{base_url.inspect} to be http/https-URI"
+     @ssl_verify_peer = base_url.query.to_s.split(?&).inject({}) { |h, l|
+       h.merge Hash[*l.split(?=)]
+     }['ssl_verify_peer'] != 'false'
+     @base_url, @output, @connect_timeout, @read_timeout, @write_timeout, @debug =
+       base_url, output, connect_timeout, read_timeout, write_timeout, debug
+   end
+
+   attr_accessor :output
+
+   def ssl_verify_peer?
+     !!@ssl_verify_peer
+   end
+
+   command(:chat, default_handler: Single, stream_handler: Collector)
+
+   command(:generate, default_handler: Single, stream_handler: Collector)
+
+   command(:tags, default_handler: Single)
+
+   command(:show, default_handler: Single)
+
+   command(:create, default_handler: Single, stream_handler: Progress)
+
+   command(:copy, default_handler: Single)
+
+   command(:delete, default_handler: Single)
+
+   command(:pull, default_handler: Single, stream_handler: Progress)
+
+   command(:push, default_handler: Single, stream_handler: Progress)
+
+   command(:embed, default_handler: Single)
+
+   command(:embeddings, default_handler: Single)
+
+   command(:ps, default_handler: Single)
+
+   def commands
+     doc_annotations.sort_by(&:first).transpose.last
+   end
+
+   doc Doc.new(:help)
+   def help
+     @output.puts "Commands: %s" % commands.join(?,)
+   end
+
+   def request(method:, path:, handler:, body: nil, stream: nil)
+     url = @base_url + path
+     responses = Enumerator.new do |yielder|
+       if stream
+         response_block = -> chunk, remaining_bytes, total_bytes do
+           response_line = parse_json(chunk)
+           response_line and yielder.yield response_line
+         end
+         response = excon(url).send(method, headers:, body:, response_block:)
+       else
+         response = excon(url).send(method, headers:, body:)
+       end
+
+       case response.status
+       when 200
+         response.body.each_line do |l|
+           response_line = parse_json(l)
+           response_line and yielder.yield response_line
+         end
+       when 404
+         raise Ollama::Errors::NotFoundError, "#{response.status} #{response.body.inspect}"
+       else
+         raise Ollama::Errors::Error, "#{response.status} #{response.body.inspect}"
+       end
+     end
+     responses.each { |response| handler.call(response) }
+     self
+   rescue Excon::Errors::SocketError => e
+     raise Ollama::Errors::SocketError, "Caught #{e.class} #{e.message.inspect} for #{url.to_s.inspect}"
+   rescue Excon::Errors::Timeout => e
+     raise Ollama::Errors::TimeoutError, "Caught #{e.class} #{e.message.inspect} for #{url.to_s.inspect}"
+   rescue Excon::Error => e
+     raise Ollama::Errors::Error, "Caught #{e.class} #{e.message.inspect} for #{url.to_s.inspect}"
+   end
+
+   def inspect
+     "#<#{self.class}@#{@base_url.to_s}>"
+   end
+
+   alias to_s inspect
+
+   private
+
+   def headers
+     {
+       'User-Agent' => '%s/%s' % [ self.class, Ollama::VERSION ],
+       'Content-Type' => 'application/json; charset=utf-8',
+     }
+   end
+
+   def excon(url)
+     params = {
+       connect_timeout: @connect_timeout,
+       read_timeout: @read_timeout,
+       write_timeout: @write_timeout,
+       ssl_verify_peer: @ssl_verify_peer,
+       debug: @debug,
+     }.compact
+     Excon.new(url, params)
+   end
+
+   def parse_json(string)
+     JSON.parse(string, object_class: Ollama::Response)
+   rescue JSON::ParserError
+     return
+   end
+ end
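Taken together, the client validates the base URL (a ?ssl_verify_peer=false query parameter switches off peer verification), issues Excon requests with JSON headers, and feeds every parsed response line to the chosen handler. A short usage sketch; the URL is an assumption:

    require 'ollama'

    client = Ollama::Client.new(base_url: 'https://ollama.example.com?ssl_verify_peer=false')
    client.ssl_verify_peer?                          # => false, parsed from the query string
    client.help                                      # prints the list of generated commands

    # Any command accepts a block as an explicit handler; without one, the
    # default_handler (or stream_handler when streaming) declared above is used.
    client.ps { |response| puts response.inspect }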
data/lib/ollama/commands/chat.rb ADDED
@@ -0,0 +1,21 @@
+ class Ollama::Commands::Chat
+   include Ollama::DTO
+
+   def self.path
+     '/api/chat'
+   end
+
+   def initialize(model:, messages:, tools: nil, format: nil, options: nil, stream: nil, keep_alive: nil)
+     @model, @messages, @tools, @format, @options, @stream, @keep_alive =
+       model, as_array_of_hashes(messages), as_array_of_hashes(tools),
+       format, options, stream, keep_alive
+   end
+
+   attr_reader :model, :messages, :tools, :format, :options, :stream, :keep_alive
+
+   attr_writer :client
+
+   def perform(handler)
+     @client.request(method: :post, path: self.class.path, body: to_json, stream:, handler:)
+   end
+ end
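Chat is a plain DTO: whatever is passed to the constructor becomes the JSON body posted to /api/chat, with messages and tools normalized to arrays of hashes. A hedged sketch of sending an image along with a chat message, reusing Image.for_filename from ollama_chat above (endpoint, file path, and model are assumptions):

    client   = Ollama::Client.new(base_url: 'http://localhost:11434')
    image    = Ollama::Image.for_filename('kitten.jpg')
    messages = [ Ollama::Message.new(role: 'user', content: 'Describe this image.', images: image) ]

    client.chat(model: 'llava', messages:, stream: true) do |response|
      print response.message&.content
    end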
data/lib/ollama/commands/copy.rb ADDED
@@ -0,0 +1,19 @@
+ class Ollama::Commands::Copy
+   include Ollama::DTO
+
+   def self.path
+     '/api/copy'
+   end
+
+   def initialize(source:, destination:)
+     @source, @destination, @stream = source, destination, false
+   end
+
+   attr_reader :source, :destination, :stream
+
+   attr_writer :client
+
+   def perform(handler)
+     @client.request(method: :post, path: self.class.path, body: to_json, stream:, handler:)
+   end
+ end
data/lib/ollama/commands/create.rb ADDED
@@ -0,0 +1,20 @@
+ class Ollama::Commands::Create
+   include Ollama::DTO
+
+   def self.path
+     '/api/create'
+   end
+
+   def initialize(name:, modelfile: nil, quantize: nil, stream: nil, path: nil)
+     @name, @modelfile, @quantize, @stream, @path =
+       name, modelfile, quantize, stream, path
+   end
+
+   attr_reader :name, :modelfile, :quantize, :stream, :path
+
+   attr_writer :client
+
+   def perform(handler)
+     @client.request(method: :post, path: self.class.path, body: to_json, stream:, handler:)
+   end
+ end
data/lib/ollama/commands/delete.rb ADDED
@@ -0,0 +1,19 @@
+ class Ollama::Commands::Delete
+   include Ollama::DTO
+
+   def self.path
+     '/api/delete'
+   end
+
+   def initialize(name:)
+     @name, @stream = name, false
+   end
+
+   attr_reader :name, :stream
+
+   attr_writer :client
+
+   def perform(handler)
+     @client.request(method: :delete, path: self.class.path, body: to_json, stream:, handler:)
+   end
+ end
data/lib/ollama/commands/embed.rb ADDED
@@ -0,0 +1,21 @@
+ class Ollama::Commands::Embed
+   include Ollama::DTO
+
+   def self.path
+     '/api/embed'
+   end
+
+   def initialize(model:, input:, truncate: nil, keep_alive: nil)
+     @model, @input, @truncate, @keep_alive =
+       model, input, truncate, keep_alive
+     @stream = false
+   end
+
+   attr_reader :model, :input, :truncate, :keep_alive, :stream
+
+   attr_writer :client
+
+   def perform(handler)
+     @client.request(method: :post, path: self.class.path, body: to_json, stream:, handler:)
+   end
+ end
data/lib/ollama/commands/embeddings.rb ADDED
@@ -0,0 +1,20 @@
+ class Ollama::Commands::Embeddings
+   include Ollama::DTO
+
+   def self.path
+     '/api/embeddings'
+   end
+
+   def initialize(model:, prompt:, options: nil, keep_alive: nil)
+     @model, @prompt, @options, @keep_alive, @stream =
+       model, prompt, options, keep_alive, false
+   end
+
+   attr_reader :model, :prompt, :options, :keep_alive, :stream
+
+   attr_writer :client
+
+   def perform(handler)
+     @client.request(method: :post, path: self.class.path, body: to_json, stream:, handler:)
+   end
+ end
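Both embedding commands are non-streaming (@stream is hard-wired to false): Embed posts to the newer /api/embed endpoint, which takes a single string or an array of strings as input, while Embeddings posts one prompt at a time to the older /api/embeddings endpoint. A rough sketch, given a client built as in the client.rb example above; the embedding model name is an assumption:

    # /api/embed: input may be a single string or an array of strings.
    client.embed(model: 'mxbai-embed-large', input: ['Hello', 'World']) do |response|
      p response   # inspect the returned vector data
    end

    # /api/embeddings: one prompt per call.
    client.embeddings(model: 'mxbai-embed-large', prompt: 'Hello') do |response|
      p response
    end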
data/lib/ollama/commands/generate.rb ADDED
@@ -0,0 +1,21 @@
+ class Ollama::Commands::Generate
+   include Ollama::DTO
+
+   def self.path
+     '/api/generate'
+   end
+
+   def initialize(model:, prompt:, suffix: nil, images: nil, format: nil, options: nil, system: nil, template: nil, context: nil, stream: nil, raw: nil, keep_alive: nil)
+     @model, @prompt, @suffix, @images, @format, @options, @system, @template, @context, @stream, @raw, @keep_alive =
+       model, prompt, suffix, (Array(images) if images), format, options, system, template, context, stream, raw, keep_alive
+   end
+
+   attr_reader :model, :prompt, :suffix, :images, :format, :options, :system,
+     :template, :context, :stream, :raw, :keep_alive
+
+   attr_writer :client
+
+   def perform(handler)
+     @client.request(method: :post, path: self.class.path, body: to_json, stream:, handler:)
+   end
+ end
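Generate covers /api/generate, including suffix, images, raw mode, and optional streaming. A minimal streaming sketch, again assuming a client and model as above; the streamed text is expected in each chunk's response field per the Ollama API docs:

    client.generate(model: 'llama3.1', prompt: 'Write a haiku about Ruby.', stream: true) do |chunk|
      print chunk.response   # each streamed chunk carries a piece of the completion
      puts if chunk.done
    end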
data/lib/ollama/commands/ps.rb ADDED
@@ -0,0 +1,19 @@
+ class Ollama::Commands::Ps
+   def self.path
+     '/api/ps'
+   end
+
+   def initialize(**parameters)
+     parameters.empty? or raise ArgumentError,
+       "Invalid parameters: #{parameters.keys * ' '}"
+     @stream = false
+   end
+
+   attr_reader :stream
+
+   attr_writer :client
+
+   def perform(handler)
+     @client.request(method: :get, path: self.class.path, stream:, handler:)
+   end
+ end
data/lib/ollama/commands/pull.rb ADDED
@@ -0,0 +1,19 @@
+ class Ollama::Commands::Pull
+   include Ollama::DTO
+
+   def self.path
+     '/api/pull'
+   end
+
+   def initialize(name:, insecure: nil, stream: true)
+     @name, @insecure, @stream = name, insecure, stream
+   end
+
+   attr_reader :name, :insecure, :stream
+
+   attr_writer :client
+
+   def perform(handler)
+     @client.request(method: :post, path: self.class.path, body: to_json, stream:, handler:)
+   end
+ end
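Pull streams by default, and client.rb wires it to the Progress handler, so invoking it without a block renders a progress display; this is what ollama_chat relies on in pull_model_unless_present. For example, assuming a client as above:

    client.pull(name: 'llama3.1')   # streams status updates through the Progress handler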
data/lib/ollama/commands/push.rb ADDED
@@ -0,0 +1,19 @@
+ class Ollama::Commands::Push
+   include Ollama::DTO
+
+   def self.path
+     '/api/push'
+   end
+
+   def initialize(name:, insecure: nil, stream: true)
+     @name, @insecure, @stream = name, insecure, stream
+   end
+
+   attr_reader :name, :insecure, :stream
+
+   attr_writer :client
+
+   def perform(handler)
+     @client.request(method: :post, path: self.class.path, body: to_json, stream:, handler:)
+   end
+ end