ruby_llm_community 0.0.6 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (97)
  1. checksums.yaml +4 -4
  2. data/README.md +3 -3
  3. data/lib/generators/ruby_llm/install/templates/create_models_migration.rb.tt +34 -0
  4. data/lib/generators/ruby_llm/install/templates/initializer.rb.tt +5 -0
  5. data/lib/generators/ruby_llm/install/templates/model_model.rb.tt +6 -0
  6. data/lib/generators/ruby_llm/install_generator.rb +27 -2
  7. data/lib/ruby_llm/active_record/acts_as.rb +163 -24
  8. data/lib/ruby_llm/aliases.json +58 -5
  9. data/lib/ruby_llm/aliases.rb +7 -25
  10. data/lib/ruby_llm/chat.rb +10 -17
  11. data/lib/ruby_llm/configuration.rb +5 -12
  12. data/lib/ruby_llm/connection.rb +4 -4
  13. data/lib/ruby_llm/connection_multipart.rb +19 -0
  14. data/lib/ruby_llm/content.rb +5 -2
  15. data/lib/ruby_llm/embedding.rb +1 -2
  16. data/lib/ruby_llm/error.rb +0 -8
  17. data/lib/ruby_llm/image.rb +23 -8
  18. data/lib/ruby_llm/image_attachment.rb +21 -0
  19. data/lib/ruby_llm/message.rb +6 -6
  20. data/lib/ruby_llm/model/info.rb +12 -10
  21. data/lib/ruby_llm/model/pricing.rb +0 -3
  22. data/lib/ruby_llm/model/pricing_category.rb +0 -2
  23. data/lib/ruby_llm/model/pricing_tier.rb +0 -1
  24. data/lib/ruby_llm/models.json +2147 -470
  25. data/lib/ruby_llm/models.rb +65 -34
  26. data/lib/ruby_llm/provider.rb +8 -8
  27. data/lib/ruby_llm/providers/anthropic/capabilities.rb +1 -46
  28. data/lib/ruby_llm/providers/anthropic/chat.rb +2 -2
  29. data/lib/ruby_llm/providers/anthropic/media.rb +0 -1
  30. data/lib/ruby_llm/providers/anthropic/tools.rb +1 -2
  31. data/lib/ruby_llm/providers/anthropic.rb +1 -2
  32. data/lib/ruby_llm/providers/bedrock/chat.rb +2 -4
  33. data/lib/ruby_llm/providers/bedrock/media.rb +0 -1
  34. data/lib/ruby_llm/providers/bedrock/models.rb +0 -2
  35. data/lib/ruby_llm/providers/bedrock/streaming/base.rb +0 -12
  36. data/lib/ruby_llm/providers/bedrock/streaming/content_extraction.rb +0 -7
  37. data/lib/ruby_llm/providers/bedrock/streaming/message_processing.rb +0 -12
  38. data/lib/ruby_llm/providers/bedrock/streaming/payload_processing.rb +0 -12
  39. data/lib/ruby_llm/providers/bedrock/streaming/prelude_handling.rb +0 -13
  40. data/lib/ruby_llm/providers/bedrock/streaming.rb +0 -18
  41. data/lib/ruby_llm/providers/bedrock.rb +1 -2
  42. data/lib/ruby_llm/providers/deepseek/capabilities.rb +1 -2
  43. data/lib/ruby_llm/providers/deepseek/chat.rb +0 -1
  44. data/lib/ruby_llm/providers/gemini/capabilities.rb +28 -100
  45. data/lib/ruby_llm/providers/gemini/chat.rb +57 -29
  46. data/lib/ruby_llm/providers/gemini/embeddings.rb +0 -2
  47. data/lib/ruby_llm/providers/gemini/images.rb +1 -2
  48. data/lib/ruby_llm/providers/gemini/media.rb +0 -1
  49. data/lib/ruby_llm/providers/gemini/models.rb +1 -2
  50. data/lib/ruby_llm/providers/gemini/streaming.rb +15 -1
  51. data/lib/ruby_llm/providers/gemini/tools.rb +0 -5
  52. data/lib/ruby_llm/providers/gpustack/chat.rb +11 -1
  53. data/lib/ruby_llm/providers/gpustack/media.rb +45 -0
  54. data/lib/ruby_llm/providers/gpustack/models.rb +44 -9
  55. data/lib/ruby_llm/providers/gpustack.rb +1 -0
  56. data/lib/ruby_llm/providers/mistral/capabilities.rb +2 -10
  57. data/lib/ruby_llm/providers/mistral/chat.rb +0 -2
  58. data/lib/ruby_llm/providers/mistral/embeddings.rb +0 -3
  59. data/lib/ruby_llm/providers/mistral/models.rb +0 -1
  60. data/lib/ruby_llm/providers/ollama/chat.rb +0 -1
  61. data/lib/ruby_llm/providers/ollama/media.rb +1 -6
  62. data/lib/ruby_llm/providers/ollama/models.rb +36 -0
  63. data/lib/ruby_llm/providers/ollama.rb +1 -0
  64. data/lib/ruby_llm/providers/openai/capabilities.rb +3 -16
  65. data/lib/ruby_llm/providers/openai/chat.rb +1 -3
  66. data/lib/ruby_llm/providers/openai/embeddings.rb +0 -3
  67. data/lib/ruby_llm/providers/openai/images.rb +73 -3
  68. data/lib/ruby_llm/providers/openai/media.rb +0 -1
  69. data/lib/ruby_llm/providers/openai/response.rb +120 -29
  70. data/lib/ruby_llm/providers/openai/response_media.rb +2 -2
  71. data/lib/ruby_llm/providers/openai/streaming.rb +107 -47
  72. data/lib/ruby_llm/providers/openai/tools.rb +1 -1
  73. data/lib/ruby_llm/providers/openai.rb +1 -3
  74. data/lib/ruby_llm/providers/openai_base.rb +2 -2
  75. data/lib/ruby_llm/providers/openrouter/models.rb +1 -16
  76. data/lib/ruby_llm/providers/perplexity/capabilities.rb +0 -1
  77. data/lib/ruby_llm/providers/perplexity/chat.rb +0 -1
  78. data/lib/ruby_llm/providers/perplexity.rb +1 -5
  79. data/lib/ruby_llm/providers/vertexai/chat.rb +14 -0
  80. data/lib/ruby_llm/providers/vertexai/embeddings.rb +32 -0
  81. data/lib/ruby_llm/providers/vertexai/models.rb +130 -0
  82. data/lib/ruby_llm/providers/vertexai/streaming.rb +14 -0
  83. data/lib/ruby_llm/providers/vertexai.rb +55 -0
  84. data/lib/ruby_llm/railtie.rb +0 -1
  85. data/lib/ruby_llm/stream_accumulator.rb +72 -10
  86. data/lib/ruby_llm/streaming.rb +16 -25
  87. data/lib/ruby_llm/tool.rb +2 -19
  88. data/lib/ruby_llm/tool_call.rb +0 -9
  89. data/lib/ruby_llm/version.rb +1 -1
  90. data/lib/ruby_llm_community.rb +5 -3
  91. data/lib/tasks/models.rake +525 -0
  92. data/lib/tasks/release.rake +37 -2
  93. data/lib/tasks/vcr.rake +0 -7
  94. metadata +13 -4
  95. data/lib/tasks/aliases.rake +0 -235
  96. data/lib/tasks/models_docs.rake +0 -224
  97. data/lib/tasks/models_update.rake +0 -108
data/lib/ruby_llm/chat.rb CHANGED
@@ -1,13 +1,7 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module RubyLLM
4
- # Represents a conversation with an AI model. Handles message history,
5
- # streaming responses, and tool integration with a simple, conversational API.
6
- #
7
- # Example:
8
- # chat = RubyLLM.chat
9
- # chat.ask "What's the best way to learn Ruby?"
10
- # chat.ask "Can you elaborate on that?"
4
+ # Represents a conversation with an AI model
11
5
  class Chat
12
6
  include Enumerable
13
7
 
@@ -22,7 +16,7 @@ module RubyLLM
22
16
  @config = context&.config || RubyLLM.config
23
17
  model_id = model || @config.default_model
24
18
  with_model(model_id, provider: provider, assume_exists: assume_model_exists)
25
- @temperature = 0.7
19
+ @temperature = nil
26
20
  @messages = []
27
21
  @tools = {}
28
22
  @cache_prompts = { system: false, user: false, tools: false }
@@ -138,8 +132,8 @@ module RubyLLM
138
132
  messages,
139
133
  tools: @tools,
140
134
  temperature: @temperature,
141
- model: @model.id,
142
135
  cache_prompts: @cache_prompts.dup,
136
+ model: @model,
143
137
  params: @params,
144
138
  headers: @headers,
145
139
  schema: @schema,
@@ -148,7 +142,6 @@ module RubyLLM
148
142
 
149
143
  @on[:new_message]&.call unless block_given?
150
144
 
151
- # Parse JSON if schema was set
152
145
  if @schema && response.content.is_a?(String)
153
146
  begin
154
147
  response.content = JSON.parse(response.content)
@@ -177,6 +170,10 @@ module RubyLLM
177
170
  @messages.clear
178
171
  end
179
172
 
173
+ def instance_variables
174
+ super - %i[@connection @config]
175
+ end
176
+
180
177
  private
181
178
 
182
179
  def wrap_streaming_block(&block)
@@ -191,12 +188,11 @@ module RubyLLM
191
188
  @on[:new_message]&.call
192
189
  end
193
190
 
194
- # Pass chunk to user's block
195
191
  block.call chunk
196
192
  end
197
193
  end
198
194
 
199
- def handle_tool_calls(response, &)
195
+ def handle_tool_calls(response, &) # rubocop:disable Metrics/PerceivedComplexity
200
196
  halt_result = nil
201
197
 
202
198
  response.tool_calls.each_value do |tool_call|
@@ -204,7 +200,8 @@ module RubyLLM
204
200
  @on[:tool_call]&.call(tool_call)
205
201
  result = execute_tool tool_call
206
202
  @on[:tool_result]&.call(result)
207
- message = add_message role: :tool, content: result.to_s, tool_call_id: tool_call.id
203
+ content = result.is_a?(Content) ? result : result.to_s
204
+ message = add_message role: :tool, content:, tool_call_id: tool_call.id
208
205
  @on[:end_message]&.call(message)
209
206
 
210
207
  halt_result = result if result.is_a?(Tool::Halt)
@@ -218,9 +215,5 @@ module RubyLLM
218
215
  args = tool_call.arguments
219
216
  tool.call(args)
220
217
  end
221
-
222
- def instance_variables
223
- super - %i[@connection @config]
224
- end
225
218
  end
226
219
  end
@@ -1,16 +1,8 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module RubyLLM
4
- # Global configuration for RubyLLM. Manages API keys, default models,
5
- # and provider-specific settings.
6
- #
7
- # Configure via:
8
- # RubyLLM.configure do |config|
9
- # config.openai_api_key = ENV['OPENAI_API_KEY']
10
- # config.anthropic_api_key = ENV['ANTHROPIC_API_KEY']
11
- # end
4
+ # Global configuration for RubyLLM
12
5
  class Configuration
13
- # Provider-specific configuration
14
6
  attr_accessor :openai_api_key,
15
7
  :openai_api_base,
16
8
  :openai_organization_id,
@@ -18,6 +10,8 @@ module RubyLLM
18
10
  :openai_use_system_role,
19
11
  :anthropic_api_key,
20
12
  :gemini_api_key,
13
+ :vertexai_project_id,
14
+ :vertexai_location,
21
15
  :deepseek_api_key,
22
16
  :perplexity_api_key,
23
17
  :bedrock_api_key,
@@ -33,6 +27,8 @@ module RubyLLM
33
27
  :default_model,
34
28
  :default_embedding_model,
35
29
  :default_image_model,
30
+ # Model registry
31
+ :model_registry_class,
36
32
  # Connection configuration
37
33
  :request_timeout,
38
34
  :max_retries,
@@ -47,7 +43,6 @@ module RubyLLM
47
43
  :log_stream_debug
48
44
 
49
45
  def initialize
50
- # Connection configuration
51
46
  @request_timeout = 120
52
47
  @max_retries = 3
53
48
  @retry_interval = 0.1
@@ -55,12 +50,10 @@ module RubyLLM
55
50
  @retry_interval_randomness = 0.5
56
51
  @http_proxy = nil
57
52
 
58
- # Default models
59
53
  @default_model = 'gpt-4.1-nano'
60
54
  @default_embedding_model = 'text-embedding-3-small'
61
55
  @default_image_model = 'gpt-image-1'
62
56
 
63
- # Logging configuration
64
57
  @log_file = $stdout
65
58
  @log_level = ENV['RUBYLLM_DEBUG'] ? Logger::DEBUG : Logger::INFO
66
59
  @log_stream_debug = ENV['RUBYLLM_STREAM_DEBUG'] == 'true'
@@ -48,6 +48,10 @@ module RubyLLM
48
48
  end
49
49
  end
50
50
 
51
+ def instance_variables
52
+ super - %i[@config @connection]
53
+ end
54
+
51
55
  private
52
56
 
53
57
  def setup_timeout(faraday)
@@ -118,9 +122,5 @@ module RubyLLM
118
122
  raise ConfigurationError,
119
123
  "#{@provider.name} provider is not configured. Add this to your initialization:\n\n#{config_block}"
120
124
  end
121
-
122
- def instance_variables
123
- super - %i[@config @connection]
124
- end
125
125
  end
126
126
  end
@@ -0,0 +1,19 @@
1
+ # frozen_string_literal: true
2
+
3
+ module RubyLLM
4
+ # A connection that uses multipart/form-data for file uploads
5
+ class ConnectionMultipart < Connection
6
+ def post(url, payload, &)
7
+ @connection.post url, payload do |req|
8
+ req.headers.merge! @provider.headers if @provider.respond_to?(:headers)
9
+ req.headers['Content-Type'] = 'multipart/form-data'
10
+ yield req if block_given?
11
+ end
12
+ end
13
+
14
+ def setup_middleware(faraday)
15
+ super
16
+ faraday.request :multipart, content_type: 'multipart/form-data'
17
+ end
18
+ end
19
+ end
@@ -2,7 +2,6 @@
2
2
 
3
3
  module RubyLLM
4
4
  # Represents the content sent to or received from an LLM.
5
- # Selects the appropriate attachment class based on the content type.
6
5
  class Content
7
6
  attr_reader :text, :attachments
8
7
 
@@ -19,6 +18,11 @@ module RubyLLM
19
18
  self
20
19
  end
21
20
 
21
+ def attach(attachment)
22
+ @attachments << attachment
23
+ self
24
+ end
25
+
22
26
  def format
23
27
  if @text && @attachments.empty?
24
28
  @text
@@ -42,7 +46,6 @@ module RubyLLM
42
46
 
43
47
  def process_attachments(attachments)
44
48
  if attachments.is_a?(Hash)
45
- # Ignores types (like :image, :audio, :text, :pdf) since we have robust MIME type detection
46
49
  attachments.each_value { |attachment| process_attachments_array_or_string(attachment) }
47
50
  else
48
51
  process_attachments_array_or_string attachments
@@ -1,8 +1,7 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module RubyLLM
4
- # Core embedding interface. Provides a clean way to generate embeddings
5
- # from text using various provider models.
4
+ # Core embedding interface.
6
5
  class Embedding
7
6
  attr_reader :vectors, :model, :input_tokens
8
7
 
@@ -3,13 +3,6 @@
3
3
  module RubyLLM
4
4
  # Custom error class that wraps API errors from different providers
5
5
  # into a consistent format with helpful error messages.
6
- #
7
- # Example:
8
- # begin
9
- # chat.ask "What's 2+2?"
10
- # rescue RubyLLM::Error => e
11
- # puts "Couldn't chat with AI: #{e.message}"
12
- # end
13
6
  class Error < StandardError
14
7
  attr_reader :response
15
8
 
@@ -36,7 +29,6 @@ module RubyLLM
36
29
  class UnauthorizedError < Error; end
37
30
 
38
31
  # Faraday middleware that maps provider-specific API errors to RubyLLM errors.
39
- # Uses provider's parse_error method to extract meaningful error messages.
40
32
  class ErrorMiddleware < Faraday::Middleware
41
33
  def initialize(app, options = {})
42
34
  super(app)
@@ -2,24 +2,22 @@
2
2
 
3
3
  module RubyLLM
4
4
  # Represents a generated image from an AI model.
5
- # Provides an interface to image generation capabilities
6
- # from providers like DALL-E and Gemini's Imagen.
7
5
  class Image
8
- attr_reader :url, :data, :mime_type, :revised_prompt, :model_id
6
+ attr_reader :url, :data, :mime_type, :revised_prompt, :model_id, :usage
9
7
 
10
- def initialize(url: nil, data: nil, mime_type: nil, revised_prompt: nil, model_id: nil)
8
+ def initialize(url: nil, data: nil, mime_type: nil, revised_prompt: nil, model_id: nil, usage: {}) # rubocop:disable Metrics/ParameterLists
11
9
  @url = url
12
10
  @data = data
13
11
  @mime_type = mime_type
14
12
  @revised_prompt = revised_prompt
15
13
  @model_id = model_id
14
+ @usage = usage
16
15
  end
17
16
 
18
17
  def base64?
19
18
  !@data.nil?
20
19
  end
21
20
 
22
- # Returns the raw binary image data regardless of source
23
21
  def to_blob
24
22
  if base64?
25
23
  Base64.decode64 @data
@@ -29,7 +27,6 @@ module RubyLLM
29
27
  end
30
28
  end
31
29
 
32
- # Saves the image to a file path
33
30
  def save(path)
34
31
  File.binwrite(File.expand_path(path), to_blob)
35
32
  path
@@ -40,14 +37,32 @@ module RubyLLM
40
37
  provider: nil,
41
38
  assume_model_exists: false,
42
39
  size: '1024x1024',
43
- context: nil)
40
+ context: nil,
41
+ with: nil,
42
+ params: {})
44
43
  config = context&.config || RubyLLM.config
45
44
  model ||= config.default_image_model
46
45
  model, provider_instance = Models.resolve(model, provider: provider, assume_exists: assume_model_exists,
47
46
  config: config)
48
47
  model_id = model.id
49
48
 
50
- provider_instance.paint(prompt, model: model_id, size:)
49
+ provider_instance.paint(prompt, model: model_id, size:, with:, params:)
50
+ end
51
+
52
+ def total_cost
53
+ input_cost + output_cost
54
+ end
55
+
56
+ def model_info
57
+ @model_info ||= RubyLLM.models.find(model_id)
58
+ end
59
+
60
+ def input_cost
61
+ usage['input_tokens'] * model_info.input_price_per_million / 1_000_000
62
+ end
63
+
64
+ def output_cost
65
+ usage['output_tokens'] * model_info.output_price_per_million / 1_000_000
51
66
  end
52
67
  end
53
68
  end
@@ -0,0 +1,21 @@
1
+ # frozen_string_literal: true
2
+
3
+ module RubyLLM
4
+ # A class representing a file attachment that is an image generated by an LLM.
5
+ class ImageAttachment < Attachment
6
+ attr_reader :image, :content, :id, :reasoning_id
7
+
8
+ def initialize(data:, mime_type:, model_id:, id: nil, reasoning_id: nil)
9
+ super(nil, filename: nil)
10
+ @image = Image.new(data:, mime_type:, model_id:)
11
+ @content = Base64.strict_decode64(data)
12
+ @mime_type = mime_type
13
+ @id = id
14
+ @reasoning_id = reasoning_id
15
+ end
16
+
17
+ def image?
18
+ true
19
+ end
20
+ end
21
+ end
@@ -1,14 +1,12 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module RubyLLM
4
- # A single message in a chat conversation. Can represent user input,
5
- # AI responses, or tool interactions. Tracks token usage and handles
6
- # the complexities of tool calls and responses.
4
+ # A single message in a chat conversation.
7
5
  class Message
8
6
  ROLES = %i[system user assistant tool].freeze
9
7
 
10
8
  attr_reader :role, :tool_calls, :tool_call_id, :input_tokens, :output_tokens, :model_id, :raw,
11
- :cached_tokens, :cache_creation_tokens
9
+ :cached_tokens, :cache_creation_tokens, :reasoning_id
12
10
  attr_writer :content
13
11
 
14
12
  def initialize(options = {})
@@ -21,6 +19,7 @@ module RubyLLM
21
19
  @tool_call_id = options[:tool_call_id]
22
20
  @cached_tokens = options[:cached_tokens]
23
21
  @cache_creation_tokens = options[:cache_creation_tokens]
22
+ @reasoning_id = options[:reasoning_id]
24
23
  @raw = options[:raw]
25
24
 
26
25
  ensure_valid_role
@@ -56,7 +55,8 @@ module RubyLLM
56
55
  output_tokens: output_tokens,
57
56
  model_id: model_id,
58
57
  cache_creation_tokens: cache_creation_tokens,
59
- cached_tokens: cached_tokens
58
+ cached_tokens: cached_tokens,
59
+ reasoning_id: reasoning_id
60
60
  }.compact
61
61
  end
62
62
 
@@ -70,7 +70,7 @@ module RubyLLM
70
70
  case content
71
71
  when String then Content.new(content)
72
72
  when Hash then Content.new(content[:text], content)
73
- else content # Pass through nil, Content, or other types
73
+ else content
74
74
  end
75
75
  end
76
76
 
@@ -3,18 +3,22 @@
3
3
  module RubyLLM
4
4
  module Model
5
5
  # Information about an AI model's capabilities, pricing, and metadata.
6
- # Used by the Models registry to help developers choose the right model
7
- # for their needs.
8
- #
9
- # Example:
10
- # model = RubyLLM.models.find('gpt-4')
11
- # model.supports_vision? # => true
12
- # model.supports_functions? # => true
13
- # model.input_price_per_million # => 30.0
14
6
  class Info
15
7
  attr_reader :id, :name, :provider, :family, :created_at, :context_window, :max_output_tokens, :knowledge_cutoff,
16
8
  :modalities, :capabilities, :pricing, :metadata
17
9
 
10
+ # Create a default model with assumed capabilities
11
+ def self.default(model_id, provider)
12
+ new(
13
+ id: model_id,
14
+ name: model_id.tr('-', ' ').capitalize,
15
+ provider: provider,
16
+ capabilities: %w[function_calling streaming vision structured_output],
17
+ modalities: { input: %w[text image], output: %w[text] },
18
+ metadata: { warning: 'Assuming model exists, capabilities may not be accurate' }
19
+ )
20
+ end
21
+
18
22
  def initialize(data)
19
23
  @id = data[:id]
20
24
  @name = data[:name]
@@ -30,7 +34,6 @@ module RubyLLM
30
34
  @metadata = data[:metadata] || {}
31
35
  end
32
36
 
33
- # Capability methods
34
37
  def supports?(capability)
35
38
  capabilities.include?(capability.to_s)
36
39
  end
@@ -41,7 +44,6 @@ module RubyLLM
41
44
  end
42
45
  end
43
46
 
44
- # Backward compatibility methods
45
47
  def display_name
46
48
  name
47
49
  end
@@ -3,12 +3,10 @@
3
3
  module RubyLLM
4
4
  module Model
5
5
  # A collection that manages and provides access to different categories of pricing information
6
- # (text tokens, images, audio tokens, embeddings)
7
6
  class Pricing
8
7
  def initialize(data)
9
8
  @data = {}
10
9
 
11
- # Initialize pricing categories
12
10
  %i[text_tokens images audio_tokens embeddings].each do |category|
13
11
  @data[category] = PricingCategory.new(data[category]) if data[category] && !empty_pricing?(data[category])
14
12
  end
@@ -33,7 +31,6 @@ module RubyLLM
33
31
  private
34
32
 
35
33
  def empty_pricing?(data)
36
- # Check if all pricing values in this category are zero or nil
37
34
  return true unless data
38
35
 
39
36
  %i[standard batch].each do |tier|
@@ -11,7 +11,6 @@ module RubyLLM
11
11
  @batch = PricingTier.new(data[:batch] || {}) unless empty_tier?(data[:batch])
12
12
  end
13
13
 
14
- # Shorthand methods that default to standard tier
15
14
  def input
16
15
  standard&.input_per_million
17
16
  end
@@ -24,7 +23,6 @@ module RubyLLM
24
23
  standard&.cached_input_per_million
25
24
  end
26
25
 
27
- # Get value for a specific tier
28
26
  def [](key)
29
27
  key == :batch ? batch : standard
30
28
  end
@@ -7,7 +7,6 @@ module RubyLLM
7
7
  def initialize(data = {})
8
8
  @values = {}
9
9
 
10
- # Only store non-zero values
11
10
  data.each do |key, value|
12
11
  @values[key.to_sym] = value if value && value != 0.0
13
12
  end