ollama-ruby 1.5.0 → 1.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83)
  1. checksums.yaml +4 -4
  2. data/.contexts/code_comment.rb +25 -0
  3. data/.contexts/full.rb +43 -0
  4. data/.contexts/info.rb +17 -0
  5. data/.contexts/lib.rb +27 -0
  6. data/.contexts/yard.md +93 -0
  7. data/CHANGES.md +22 -0
  8. data/README.md +54 -0
  9. data/Rakefile +3 -2
  10. data/bin/ollama_cli +31 -4
  11. data/bin/ollama_console +18 -0
  12. data/lib/ollama/client/command.rb +29 -3
  13. data/lib/ollama/client/configuration/config.rb +114 -3
  14. data/lib/ollama/client/doc.rb +18 -0
  15. data/lib/ollama/client.rb +131 -2
  16. data/lib/ollama/commands/chat.rb +96 -1
  17. data/lib/ollama/commands/copy.rb +59 -1
  18. data/lib/ollama/commands/create.rb +112 -1
  19. data/lib/ollama/commands/delete.rb +53 -1
  20. data/lib/ollama/commands/embed.rb +82 -1
  21. data/lib/ollama/commands/embeddings.rb +72 -1
  22. data/lib/ollama/commands/generate.rb +118 -2
  23. data/lib/ollama/commands/ps.rb +55 -0
  24. data/lib/ollama/commands/pull.rb +72 -1
  25. data/lib/ollama/commands/push.rb +65 -1
  26. data/lib/ollama/commands/show.rb +64 -1
  27. data/lib/ollama/commands/tags.rb +50 -0
  28. data/lib/ollama/commands/version.rb +50 -1
  29. data/lib/ollama/dto.rb +98 -1
  30. data/lib/ollama/errors.rb +50 -0
  31. data/lib/ollama/handlers/collector.rb +34 -0
  32. data/lib/ollama/handlers/concern.rb +60 -2
  33. data/lib/ollama/handlers/dump_json.rb +20 -0
  34. data/lib/ollama/handlers/dump_yaml.rb +22 -0
  35. data/lib/ollama/handlers/markdown.rb +28 -0
  36. data/lib/ollama/handlers/nop.rb +20 -0
  37. data/lib/ollama/handlers/print.rb +27 -0
  38. data/lib/ollama/handlers/progress.rb +38 -0
  39. data/lib/ollama/handlers/say.rb +66 -0
  40. data/lib/ollama/handlers/single.rb +35 -0
  41. data/lib/ollama/handlers.rb +9 -0
  42. data/lib/ollama/image.rb +67 -0
  43. data/lib/ollama/json_loader.rb +17 -0
  44. data/lib/ollama/message.rb +46 -1
  45. data/lib/ollama/options.rb +27 -2
  46. data/lib/ollama/response.rb +17 -0
  47. data/lib/ollama/tool/function/parameters/property.rb +41 -1
  48. data/lib/ollama/tool/function/parameters.rb +40 -1
  49. data/lib/ollama/tool/function.rb +44 -1
  50. data/lib/ollama/tool.rb +37 -1
  51. data/lib/ollama/version.rb +1 -1
  52. data/lib/ollama.rb +26 -0
  53. data/ollama-ruby.gemspec +6 -5
  54. data/spec/ollama/client/doc_spec.rb +1 -1
  55. data/spec/ollama/client_spec.rb +19 -1
  56. data/spec/ollama/commands/chat_spec.rb +1 -1
  57. data/spec/ollama/commands/copy_spec.rb +1 -1
  58. data/spec/ollama/commands/create_spec.rb +1 -1
  59. data/spec/ollama/commands/delete_spec.rb +1 -1
  60. data/spec/ollama/commands/embed_spec.rb +1 -1
  61. data/spec/ollama/commands/embeddings_spec.rb +1 -1
  62. data/spec/ollama/commands/generate_spec.rb +1 -1
  63. data/spec/ollama/commands/ps_spec.rb +1 -1
  64. data/spec/ollama/commands/pull_spec.rb +1 -1
  65. data/spec/ollama/commands/push_spec.rb +1 -1
  66. data/spec/ollama/commands/show_spec.rb +1 -1
  67. data/spec/ollama/commands/tags_spec.rb +1 -1
  68. data/spec/ollama/commands/version_spec.rb +1 -1
  69. data/spec/ollama/handlers/collector_spec.rb +1 -1
  70. data/spec/ollama/handlers/dump_json_spec.rb +1 -1
  71. data/spec/ollama/handlers/dump_yaml_spec.rb +1 -1
  72. data/spec/ollama/handlers/markdown_spec.rb +1 -1
  73. data/spec/ollama/handlers/nop_spec.rb +2 -2
  74. data/spec/ollama/handlers/print_spec.rb +1 -1
  75. data/spec/ollama/handlers/progress_spec.rb +1 -1
  76. data/spec/ollama/handlers/say_spec.rb +1 -1
  77. data/spec/ollama/handlers/single_spec.rb +1 -1
  78. data/spec/ollama/image_spec.rb +1 -1
  79. data/spec/ollama/message_spec.rb +1 -1
  80. data/spec/ollama/options_spec.rb +1 -1
  81. data/spec/ollama/tool_spec.rb +1 -1
  82. data/spec/spec_helper.rb +2 -6
  83. metadata +24 -5
data/lib/ollama/client.rb CHANGED
@@ -1,3 +1,16 @@
1
+ # A class that serves as the main entry point for interacting with the Ollama API.
2
+ #
3
+ # The Client class provides methods to communicate with an Ollama server, handling
4
+ # various API endpoints such as chat, generate, create, and model management commands.
5
+ # It manages configuration settings like base URL, timeouts, and output streams,
6
+ # and supports different response handlers for processing API results.
7
+ #
8
+ # @example Initializing a client with a base URL
9
+ # client = Ollama::Client.new(base_url: 'http://localhost:11434')
10
+ #
11
+ # @example Configuring a client using a configuration object
12
+ # config = Ollama::Client::Config[base_url: 'http://localhost:11434']
13
+ # client = Ollama::Client.configure_with(config)
1
14
  class Ollama::Client
2
15
  end
3
16
  require 'ollama/client/doc'
@@ -12,6 +25,22 @@ class Ollama::Client
12
25
 
13
26
  annotate :doc
14
27
 
28
+ # The initialize method sets up a new client instance with the specified configuration parameters.
29
+ #
30
+ # This method is responsible for initializing a new Ollama::Client instance by processing
31
+ # various configuration options including the base URL, output stream, timeouts, and debug settings.
32
+ # It handles default values for the base URL by falling back to an environment variable,
33
+ # validates that the base URL is a valid HTTP or HTTPS URI, and extracts SSL verification
34
+ # settings from query parameters. The method also sets up instance variables for all
35
+ # configuration options, making them available for use in subsequent client operations.
36
+ #
37
+ # @param base_url [ String, nil ] the base URL of the Ollama API endpoint, defaults to nil
38
+ # @param output [ IO ] the output stream to be used for handling responses, defaults to $stdout
39
+ # @param connect_timeout [ Integer, nil ] the connection timeout value in seconds, defaults to nil
40
+ # @param read_timeout [ Integer, nil ] the read timeout value in seconds, defaults to nil
41
+ # @param write_timeout [ Integer, nil ] the write timeout value in seconds, defaults to nil
42
+ # @param debug [ Boolean, nil ] the debug flag indicating whether debug output is enabled, defaults to nil
43
+ # @param user_agent [ String, nil ] the user agent string to be used for API requests, defaults to nil
15
44
  def initialize(base_url: nil, output: $stdout, connect_timeout: nil, read_timeout: nil, write_timeout: nil, debug: nil, user_agent: nil)
16
45
  base_url.nil? and base_url = ENV.fetch('OLLAMA_URL') do
17
46
  raise ArgumentError,
@@ -27,14 +56,39 @@ class Ollama::Client
27
56
  base_url, output, connect_timeout, read_timeout, write_timeout, debug, user_agent
28
57
  end
29
58
 
59
+ # The output attribute accessor allows reading and setting the output stream
60
+ # used for handling responses and messages.
61
+ #
62
+ # @attr [ IO ] the output stream, typically $stdout, to which responses and
63
+ # messages are written
30
64
  attr_accessor :output
31
65
 
66
+ # The base_url attribute reader returns the base URL used for making requests to the Ollama API.
67
+ #
68
+ # @return [ URI ] the base URL configured for API requests
32
69
  attr_reader :base_url
33
70
 
71
+ # The ssl_verify_peer? method checks whether SSL peer verification is enabled.
72
+ #
73
+ # This method returns a boolean value indicating if the client should verify
74
+ # the SSL certificate of the Ollama server during communication. It converts
75
+ # the internal SSL verification flag to a boolean value for easy checking.
76
+ #
77
+ # @return [ TrueClass, FalseClass ] true if SSL peer verification is enabled,
78
+ # false otherwise
34
79
  def ssl_verify_peer?
35
80
  !!@ssl_verify_peer
36
81
  end
37
82
 
83
+ # Defines a command method with its associated command class and handlers.
84
+ #
85
+ # This is an example of Ruby's metaprogramming capabilities where we dynamically
86
+ # create methods that delegate to specific command classes. The client supports
87
+ # many commands including chat, generate, tags, show, create, copy, delete,
88
+ # pull, push, embed, embeddings, ps, and version.
89
+ #
90
+ # @example Generated command method
91
+ # client.chat(model: 'llama3.1', messages: [{role: 'user', content: 'Hello'}])
38
92
  command(:chat, default_handler: Single, stream_handler: Collector)
39
93
 
40
94
  command(:generate, default_handler: Single, stream_handler: Collector)
@@ -61,15 +115,45 @@ class Ollama::Client
61
115
 
62
116
  command(:version, default_handler: Single)
63
117
 
118
+ # The commands method retrieves and sorts the documented commands available
119
+ # in the client.
120
+ #
121
+ # This method extracts all command annotations from the class, sorts them by
122
+ # their names, and returns an array containing only the command names in
123
+ # alphabetical order.
124
+ #
125
+ # @return [ Array<String> ] an array of command names sorted alphabetically
64
126
  def commands
65
127
  doc_annotations.sort_by(&:first).transpose.last
66
128
  end
67
129
 
68
130
  doc Doc.new(:help)
131
+ # The help method displays a list of available commands to the output stream.
132
+ #
133
+ # This method retrieves the sorted list of documented commands from the client
134
+ # and outputs them as a comma-separated string to the configured output stream.
135
+ # It is typically used to provide users with information about which commands
136
+ # are available for execution through the client interface.
69
137
  def help
70
138
  @output.puts "Commands: %s" % commands.join(?,)
71
139
  end
72
140
 
141
+ # The request method sends an HTTP request to the Ollama API and processes
142
+ # responses through a handler.
143
+ #
144
+ # This method constructs an HTTP request to the specified API endpoint,
145
+ # handling both streaming and non-streaming responses. It manages different
146
+ # HTTP status codes, including success (200), not found (404), and other
147
+ # error cases. The method also includes comprehensive error handling for
148
+ # network-related issues such as socket errors and timeouts.
149
+ #
150
+ # @param method [ Symbol ] the HTTP method to use for the request (:get, :post, :delete)
151
+ # @param path [ String ] the API endpoint path to request
152
+ # @param handler [ Ollama::Handler ] the handler object responsible for processing API responses
153
+ # @param body [ String, nil ] the request body content, if applicable
154
+ # @param stream [ TrueClass, FalseClass, nil ] whether to enable streaming for the operation
155
+ #
156
+ # @return [ Ollama::Client ] returns the client instance itself after initiating the request
73
157
  def request(method:, path:, handler:, body: nil, stream: nil)
74
158
  url = @base_url + path
75
159
  responses = Enumerator.new do |yielder|
@@ -105,14 +189,28 @@ class Ollama::Client
105
189
  raise Ollama::Errors::Error, "Caught #{e.class} #{e.message.inspect} for #{url.to_s.inspect}"
106
190
  end
107
191
 
192
+ # The inspect method returns a string representation of the client instance.
193
+ #
194
+ # This method provides a human-readable description of the client object,
195
+ # including its class name and the base URL it is configured to use.
196
+ #
197
+ # @return [ String ] a string representation in the format "#<Ollama::Client@http://localhost:11434>"
108
198
  def inspect
109
- "#<#{self.class}@#{@base_url.to_s}>"
199
+ "#<#{self.class}@#{@base_url}>"
110
200
  end
111
201
 
112
202
  alias to_s inspect
113
203
 
114
204
  private
115
205
 
206
+ # The headers method constructs and returns a hash of HTTP headers.
207
+ #
208
+ # This method generates a set of standard HTTP headers required for making
209
+ # requests to the Ollama API, including the User-Agent and Content-Type. It
210
+ # uses the instance's configured user agent or falls back to the class-level
211
+ # user agent if none is set.
212
+ #
213
+ # @return [ Hash ] a hash containing the HTTP headers with keys 'User-Agent' and 'Content-Type'
116
214
  def headers
117
215
  {
118
216
  'User-Agent' => @user_agent || self.class.user_agent,
@@ -120,10 +218,29 @@ class Ollama::Client
120
218
  }
121
219
  end
122
220
 
221
+ # The user_agent method generates a formatted user agent string for API requests.
222
+ #
223
+ # This method creates a user agent identifier that combines the class name
224
+ # with the library version, which is used to identify the client making
225
+ # requests to the Ollama API.
226
+ #
227
+ # @return [ String ] a formatted user agent string in the format "Ollama::Client/1.2.3"
123
228
  def self.user_agent
124
229
  '%s/%s' % [ self, Ollama::VERSION ]
125
230
  end
126
231
 
232
+ # The excon method creates and returns a new Excon client instance configured
233
+ # with the receiver's timeout and debugging settings.
234
+ #
235
+ # This method constructs an Excon client object using the provided URL and
236
+ # configures it with connection, read, and write timeouts, SSL verification
237
+ # settings, and debug mode based on the instance variables of the receiver.
238
+ # It compacts the parameters hash to remove any nil values before passing
239
+ # them to Excon.new.
240
+ #
241
+ # @param url [ String ] the URL to be used for the Excon client
242
+ #
243
+ # @return [ Excon ] a new Excon client instance configured with the specified parameters
127
244
  def excon(url)
128
245
  params = {
129
246
  connect_timeout: @connect_timeout,
@@ -135,9 +252,21 @@ class Ollama::Client
135
252
  Excon.new(url, params)
136
253
  end
137
254
 
255
+ # The parse_json method attempts to parse a JSON string into a structured
256
+ # object.
257
+ #
258
+ # This method takes a string containing JSON data and converts it into a Ruby
259
+ # object using the JSON.parse method. It specifies Ollama::Response as the
260
+ # object class to ensure that the parsed data is wrapped in the appropriate
261
+ # response structure.
262
+ #
263
+ # @param string [ String ] the JSON string to be parsed
264
+ #
265
+ # @return [ Ollama::Response, nil ] the parsed JSON object or nil if parsing fails
138
266
  def parse_json(string)
139
267
  JSON.parse(string, object_class: Ollama::Response)
140
- rescue JSON::ParserError
268
+ rescue JSON::ParserError => e
269
+ warn "Caught #{e.class}: #{e}"
141
270
  return
142
271
  end
143
272
  end
@@ -1,20 +1,115 @@
1
+ # A command class that represents the chat API endpoint for Ollama.
2
+ #
3
+ # This class is used to interact with the Ollama API's chat endpoint, which
4
+ # generates conversational responses using a specified model. It inherits from
5
+ # the base command structure and provides the necessary functionality to execute
6
+ # chat requests for interactive conversations with language models.
7
+ #
8
+ # @example Initiating a chat conversation
9
+ # messages = [
10
+ # Ollama::Message.new(role: 'user', content: 'Hello, how are you?'),
11
+ # Ollama::Message.new(role: 'assistant', content: 'I am doing well, thank you!')
12
+ # ]
13
+ # chat = ollama.chat(model: 'llama3.1', stream: true, messages:)
1
14
  class Ollama::Commands::Chat
2
15
  include Ollama::DTO
3
16
 
17
+ # The path method returns the API endpoint path for chat requests.
18
+ #
19
+ # This class method provides the specific URL path used to interact with the
20
+ # Ollama API's chat endpoint. It is utilized internally by the command
21
+ # structure to determine the correct API route for conversational interactions.
22
+ #
23
+ # @return [ String ] the API endpoint path '/api/chat' for chat requests
4
24
  def self.path
5
25
  '/api/chat'
6
26
  end
7
27
 
28
+ # The initialize method sets up a new instance with streaming behavior.
29
+ #
30
+ # This method is responsible for initializing a new object instance and
31
+ # configuring it with parameters required for chat interactions. It sets up
32
+ # the model, conversation messages, tools, format, options, streaming behavior,
33
+ # keep-alive duration, and thinking mode.
34
+ #
35
+ # @param model [ String ] the name of the model to use for chat responses
36
+ # @param messages [ Array<Ollama::Message>, Hash, nil ] conversation history with roles and content
37
+ # @param tools [ Array<Ollama::Tool>, Hash, nil ] tools available for function calling
38
+ # @param format [ String, nil ] response format (e.g., 'json')
39
+ # @param options [ Ollama::Options, nil ] configuration parameters for the model
40
+ # @param stream [ TrueClass, FalseClass, nil ] whether to enable streaming for the operation
41
+ # @param keep_alive [ String, nil ] duration to keep the model loaded in memory
42
+ # @param think [ Boolean, nil ] whether to enable thinking mode for reasoning
8
43
  def initialize(model:, messages:, tools: nil, format: nil, options: nil, stream: nil, keep_alive: nil, think: nil)
9
44
  @model, @messages, @tools, @format, @options, @stream, @keep_alive, @think =
10
45
  model, as_array_of_hashes(messages), as_array_of_hashes(tools),
11
46
  format, options, stream, keep_alive, think
12
47
  end
13
48
 
14
- attr_reader :model, :messages, :tools, :format, :options, :stream, :keep_alive, :think
49
+ # The model attribute reader returns the model name associated with the object.
50
+ #
51
+ # @return [ String ] the name of the model to use for chat responses
52
+ attr_reader :model
15
53
 
54
+ # The messages attribute reader returns the conversation history associated with the object.
55
+ #
56
+ # @return [ Array<Ollama::Message>, nil ] conversation history with roles and content
57
+ attr_reader :messages
58
+
59
+ # The tools attribute reader returns the available tools associated with the object.
60
+ #
61
+ # @return [ Array<Ollama::Tool>, nil ] tools available for function calling
62
+ attr_reader :tools
63
+
64
+ # The format attribute reader returns the response format associated with the object.
65
+ #
66
+ # @return [ String, nil ] response format (e.g., 'json')
67
+ attr_reader :format
68
+
69
+ # The options attribute reader returns the model configuration parameters associated with the object.
70
+ #
71
+ # @return [ Ollama::Options, nil ] configuration parameters for the model
72
+ attr_reader :options
73
+
74
+ # The stream attribute reader returns the streaming behavior setting
75
+ # associated with the object.
76
+ #
77
+ # @return [ TrueClass, FalseClass, nil ] the streaming behavior flag, indicating whether
78
+ # streaming is enabled for the command execution (nil by default)
79
+ attr_reader :stream
80
+
81
+ # The keep_alive attribute reader returns the keep-alive duration associated with the object.
82
+ #
83
+ # @return [ String, nil ] duration to keep the model loaded in memory
84
+ attr_reader :keep_alive
85
+
86
+ # The think attribute reader returns the thinking mode setting associated with the object.
87
+ #
88
+ # @return [ Boolean, nil ] whether thinking mode is enabled for reasoning
89
+ attr_reader :think
90
+
91
+ # The client attribute writer allows setting the client instance associated
92
+ # with the object.
93
+ #
94
+ # This method assigns the client that will be used to perform requests and
95
+ # handle responses for this command. It is typically called internally when a
96
+ # command is executed through a client instance.
97
+ #
98
+ # @attr_writer [ Ollama::Client ] the assigned client instance
16
99
  attr_writer :client
17
100
 
101
+
102
+ # The perform method executes a command request using the specified handler.
103
+ #
104
+ # This method initiates a POST request to the Ollama API's chat endpoint,
105
+ # utilizing the client instance to send the request and process responses
106
+ # through the provided handler. It handles both streaming and non-streaming
107
+ # scenarios based on the command's configuration.
108
+ #
109
+ # @param handler [ Ollama::Handler ] the handler object responsible for processing API
110
+ # responses
111
+ #
112
+ # @return [ self ] returns the current instance after initiating the request
18
113
  def perform(handler)
19
114
  @client.request(method: :post, path: self.class.path, body: to_json, stream:, handler:)
20
115
  end
@@ -1,18 +1,76 @@
1
+ # A command class that represents the copy API endpoint for Ollama.
2
+ #
3
+ # This class is used to interact with the Ollama API's copy endpoint, which
4
+ # creates a copy of an existing model with a new name. It inherits from the base
5
+ # command structure and provides the necessary functionality to execute copy
6
+ # requests for model duplication.
7
+ #
8
+ # @example Copying a model to a new name
9
+ # copy = ollama.copy(source: 'llama3.1', destination: 'user/llama3.1')
1
10
  class Ollama::Commands::Copy
2
11
  include Ollama::DTO
3
12
 
13
+ # The path method returns the API endpoint path for copy requests.
14
+ #
15
+ # This class method provides the specific URL path used to interact with the
16
+ # Ollama API's copy endpoint. It is utilized internally by the command
17
+ # structure to determine the correct API route for duplicating models.
18
+ #
19
+ # @return [ String ] the API endpoint path '/api/copy' for copy requests
4
20
  def self.path
5
21
  '/api/copy'
6
22
  end
7
23
 
24
+ # The initialize method sets up a new instance with streaming disabled.
25
+ #
26
+ # This method is responsible for initializing a new object instance and
27
+ # configuring it with the source and destination model names. It explicitly
28
+ # disables streaming since copy operations are typically non-streaming.
29
+ #
30
+ # @param source [ String ] the name of the source model to be copied
31
+ # @param destination [ String ] the name of the new model to be created
8
32
  def initialize(source:, destination:)
9
33
  @source, @destination, @stream = source, destination, false
10
34
  end
11
35
 
12
- attr_reader :source, :destination, :stream
36
+ # The source attribute reader returns the source model name associated with the object.
37
+ #
38
+ # @return [ String ] the name of the source model to be copied
39
+ attr_reader :source
13
40
 
41
+ # The destination attribute reader returns the destination model name associated with the object.
42
+ #
43
+ # @return [ String ] the name of the new model to be created
44
+ attr_reader :destination
45
+
46
+ # The stream attribute reader returns the streaming behavior setting
47
+ # associated with the object.
48
+ #
49
+ # @return [ FalseClass ] the streaming behavior flag, indicating whether
50
+ # streaming is enabled for the command execution (always false for copy commands)
51
+ attr_reader :stream
52
+
53
+ # The client attribute writer allows setting the client instance associated
54
+ # with the object.
55
+ #
56
+ # This method assigns the client that will be used to perform requests and
57
+ # handle responses for this command. It is typically called internally when a
58
+ # command is executed through a client instance.
59
+ #
60
+ # @attr_writer [ Ollama::Client ] the assigned client instance
14
61
  attr_writer :client
15
62
 
63
+ # The perform method executes a command request using the specified handler.
64
+ #
65
+ # This method initiates a POST request to the Ollama API's copy endpoint,
66
+ # utilizing the client instance to send the request and process responses
67
+ # through the provided handler. It handles non-streaming scenarios since
68
+ # copy commands do not support streaming.
69
+ #
70
+ # @param handler [ Ollama::Handler ] the handler object responsible for processing API
71
+ # responses
72
+ #
73
+ # @return [ self ] returns the current instance after initiating the request
16
74
  def perform(handler)
17
75
  @client.request(method: :post, path: self.class.path, body: to_json, stream:, handler:)
18
76
  end
@@ -1,20 +1,131 @@
1
+ # A command class that represents the create API endpoint for Ollama.
2
+ #
3
+ # This class is used to interact with the Ollama API's create endpoint, which
4
+ # creates a new model based on a modelfile or existing model. It inherits from
5
+ # the base command structure and provides the necessary functionality to execute
6
+ # model creation requests.
7
+ #
8
+ # @example Creating a new model from an existing model
9
+ # create = ollama.create(model: 'llama3.1-wopr', from: 'llama3.1', system: 'You are WOPR from WarGames')
10
+ #
11
+ # @example Creating a model with files and parameters
12
+ # create = ollama.create(
13
+ # model: 'my-model',
14
+ # from: 'llama3.1',
15
+ # files: { 'modelfile' => 'FROM llama3.1\nPARAMETER temperature 0.7' },
16
+ # parameters: Ollama::Options.new(temperature: 0.7, num_ctx: 8192)
17
+ # )
1
18
  class Ollama::Commands::Create
2
19
  include Ollama::DTO
3
20
 
21
+ # The path method returns the API endpoint path for create requests.
22
+ #
23
+ # This class method provides the specific URL path used to interact with the
24
+ # Ollama API's create endpoint. It is utilized internally by the command
25
+ # structure to determine the correct API route for creating new models.
26
+ #
27
+ # @return [ String ] the API endpoint path '/api/create' for create requests
4
28
  def self.path
5
29
  '/api/create'
6
30
  end
7
31
 
32
+ # The initialize method sets up a new instance with streaming enabled by default.
33
+ #
34
+ # This method is responsible for initializing a new object instance and
35
+ # configuring it with parameters required for model creation. It sets up the
36
+ # model name, source model (if any), files, adapters, template, license,
37
+ # system prompt, parameters, messages, and streaming behavior.
38
+ #
39
+ # @param model [ String ] the name of the new model to be created
40
+ # @param from [ String, nil ] the base model to create from (e.g., 'llama3.1')
41
+ # @param files [ Hash, nil ] file contents for the modelfile and other files
42
+ # @param adapters [ Hash, nil ] adapter files to use for quantization
43
+ # @param template [ String, nil ] the template to use for the model
44
+ # @param license [ String, Array<String>, nil ] the license(s) for the model
45
+ # @param system [ String, nil ] the system prompt to use for the model
46
+ # @param parameters [ Ollama::Options, nil ] configuration parameters for the model
47
+ # @param messages [ Array<Ollama::Message>, nil ] initial conversation messages
48
+ # @param stream [ TrueClass, FalseClass ] whether to enable streaming for the operation, defaults to true
49
+ # @param quantize [ String, nil ] quantization method to use (e.g., 'Q4_0')
8
50
  def initialize(model:, from: nil, files: nil, adapters: nil, template: nil, license: nil, system: nil, parameters: nil, messages: nil, stream: true, quantize: nil)
9
51
  @model, @from, @files, @adapters, @license, @system, @parameters, @messages, @stream, @quantize =
10
52
  model, from, as_hash(files), as_hash(adapters), as_array(license), system,
11
53
  as_hash(parameters), as_array_of_hashes(messages), stream, quantize
12
54
  end
13
55
 
14
- attr_reader :model, :from, :files, :adapters, :license, :system, :parameters, :messages, :stream, :quantize
56
+ # The model attribute reader returns the model name associated with the object.
57
+ #
58
+ # @return [ String ] the name of the new model to be created
59
+ attr_reader :model
15
60
 
61
+ # The from attribute reader returns the base model name associated with the object.
62
+ #
63
+ # @return [ String, nil ] the base model to create from (e.g., 'llama3.1')
64
+ attr_reader :from
65
+
66
+ # The files attribute reader returns the file contents associated with the object.
67
+ #
68
+ # @return [ Hash, nil ] file contents for the modelfile and other files
69
+ attr_reader :files
70
+
71
+ # The adapters attribute reader returns the adapter files associated with the object.
72
+ #
73
+ # @return [ Hash, nil ] adapter files to use for quantization
74
+ attr_reader :adapters
75
+
76
+ # The license attribute reader returns the license(s) associated with the object.
77
+ #
78
+ # @return [ String, Array<String>, nil ] the license(s) for the model
79
+ attr_reader :license
80
+
81
+ # The system attribute reader returns the system prompt associated with the object.
82
+ #
83
+ # @return [ String, nil ] the system prompt to use for the model
84
+ attr_reader :system
85
+
86
+ # The parameters attribute reader returns the model configuration parameters associated with the object.
87
+ #
88
+ # @return [ Ollama::Options, nil ] configuration parameters for the model
89
+ attr_reader :parameters
90
+
91
+ # The messages attribute reader returns the initial conversation messages associated with the object.
92
+ #
93
+ # @return [ Array<Ollama::Message>, nil ] initial conversation messages
94
+ attr_reader :messages
95
+
96
+ # The stream attribute reader returns the streaming behavior setting
97
+ # associated with the object.
98
+ #
99
+ # @return [ TrueClass, FalseClass ] the streaming behavior flag, indicating whether
100
+ # streaming is enabled for the command execution (defaults to true for create commands)
101
+ attr_reader :stream
102
+
103
+ # The quantize attribute reader returns the quantization method associated with the object.
104
+ #
105
+ # @return [ String, nil ] quantization method to use (e.g., 'Q4_0')
106
+ attr_reader :quantize
107
+
108
+ # The client attribute writer allows setting the client instance associated
109
+ # with the object.
110
+ #
111
+ # This method assigns the client that will be used to perform requests and
112
+ # handle responses for this command. It is typically called internally when a
113
+ # command is executed through a client instance.
114
+ #
115
+ # @attr_writer [ Ollama::Client ] the assigned client instance
16
116
  attr_writer :client
17
117
 
118
+ # The perform method executes a command request using the specified handler.
119
+ #
120
+ # This method initiates a POST request to the Ollama API's create endpoint,
121
+ # utilizing the client instance to send the request and process responses
122
+ # through the provided handler. It handles both streaming and non-streaming
123
+ # scenarios based on the command's configuration.
124
+ #
125
+ # @param handler [ Ollama::Handler ] the handler object responsible for processing API
126
+ # responses
127
+ #
128
+ # @return [ self ] returns the current instance after initiating the request
18
129
  def perform(handler)
19
130
  @client.request(method: :post, path: self.class.path, body: to_json, stream:, handler:)
20
131
  end
@@ -1,18 +1,70 @@
1
+ # A command class that represents the delete API endpoint for Ollama.
2
+ #
3
+ # This class is used to interact with the Ollama API's delete endpoint, which
4
+ # removes a specified model from the local system. It inherits from the base
5
+ # command structure and provides the necessary functionality to execute delete
6
+ # requests for model removal.
7
+ #
8
+ # @example Deleting a local model
9
+ # delete = ollama.delete(model: 'user/llama3.1')
1
10
  class Ollama::Commands::Delete
2
11
  include Ollama::DTO
3
12
 
13
+ # The path method returns the API endpoint path for delete requests.
14
+ #
15
+ # This class method provides the specific URL path used to interact with the
16
+ # Ollama API's delete endpoint. It is utilized internally by the command
17
+ # structure to determine the correct API route for removing models from local storage.
18
+ #
19
+ # @return [ String ] the API endpoint path '/api/delete' for delete requests
4
20
  def self.path
5
21
  '/api/delete'
6
22
  end
7
23
 
24
+ # The initialize method sets up a new instance with streaming disabled.
25
+ #
26
+ # This method is responsible for initializing a new object instance and
27
+ # configuring it with the model name to be deleted. It explicitly disables
28
+ # streaming since delete operations are typically non-streaming.
29
+ #
30
+ # @param model [ String ] the name of the model to be deleted
8
31
  def initialize(model:)
9
32
  @model, @stream = model, false
10
33
  end
11
34
 
12
- attr_reader :model, :stream
35
+ # The model attribute reader returns the model name associated with the object.
36
+ #
37
+ # @return [ String ] the name of the model to be deleted
38
+ attr_reader :model
13
39
 
40
+ # The stream attribute reader returns the streaming behavior setting
41
+ # associated with the object.
42
+ #
43
+ # @return [ FalseClass ] the streaming behavior flag, indicating whether
44
+ # streaming is enabled for the command execution (always false for delete commands)
45
+ attr_reader :stream
46
+
47
+ # The client attribute writer allows setting the client instance associated
48
+ # with the object.
49
+ #
50
+ # This method assigns the client that will be used to perform requests and
51
+ # handle responses for this command. It is typically called internally when a
52
+ # command is executed through a client instance.
53
+ #
54
+ # @attr_writer [ Ollama::Client ] the assigned client instance
14
55
  attr_writer :client
15
56
 
57
+ # The perform method executes a command request using the specified handler.
58
+ #
59
+ # This method initiates a DELETE request to the Ollama API's delete endpoint,
60
+ # utilizing the client instance to send the request and process responses
61
+ # through the provided handler. It handles non-streaming scenarios since
62
+ # delete commands do not support streaming.
63
+ #
64
+ # @param handler [ Ollama::Handler ] the handler object responsible for processing API
65
+ # responses
66
+ #
67
+ # @return [ self ] returns the current instance after initiating the request
16
68
  def perform(handler)
17
69
  @client.request(method: :delete, path: self.class.path, body: to_json, stream:, handler:)
18
70
  end