durable-llm 0.1.5 → 0.1.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,201 @@
+ # frozen_string_literal: true
+
+ # This module provides utility functions for working with LLM providers, including
+ # provider discovery, comparison, and model routing capabilities. It helps developers
+ # choose and switch between providers efficiently.
+
+ module Durable
+ module Llm
+ # Utility methods for provider management and comparison
+ #
+ # This module offers helper methods for:
+ # - Discovering available providers
+ # - Finding providers that support specific models
+ # - Comparing provider capabilities
+ # - Routing requests to appropriate providers
+ #
+ # @example Find provider for a model
+ # provider = ProviderUtilities.provider_for_model('gpt-4')
+ # # => :openai
+ module ProviderUtilities
+ module_function
+
+ # Lists all available providers
+ #
+ # @return [Array<Symbol>] Array of provider names
+ # @example List providers
+ # providers = ProviderUtilities.available_providers
+ # # => [:openai, :anthropic, :google, ...]
+ def available_providers
+ Providers.available_providers
+ end
+
+ # Finds the provider that supports a given model
+ #
+ # @param model_id [String] The model identifier
+ # @return [Symbol, nil] The provider name or nil if not found
+ # @example Find provider for GPT-4
+ # provider = ProviderUtilities.provider_for_model('gpt-4')
+ # # => :openai
+ # @example Find provider for Claude
+ # provider = ProviderUtilities.provider_for_model('claude-3-opus-20240229')
+ # # => :anthropic
+ def provider_for_model(model_id)
+ Providers.model_id_to_provider(model_id)
+ end
+
+ # Gets all models available for a provider
+ #
+ # @param provider_name [Symbol, String] The provider name
+ # @param options [Hash] Provider configuration options
+ # @return [Array<String>] Array of model IDs
+ # @example Get OpenAI models
+ # models = ProviderUtilities.models_for_provider(:openai)
+ def models_for_provider(provider_name, **options)
+ Durable::Llm.models(provider_name, **options)
+ rescue StandardError
+ []
+ end
+
+ # Checks if a provider supports a specific capability
+ #
+ # @param provider_name [Symbol, String] The provider name
+ # @param capability [Symbol] The capability to check (:streaming, :embeddings, :chat)
+ # @return [Boolean] True if capability is supported
+ # @example Check streaming support
+ # supports = ProviderUtilities.supports_capability?(:openai, :streaming)
+ # # => true
+ def supports_capability?(provider_name, capability)
+ provider_class = Providers.provider_class_for(provider_name)
+ instance = provider_class.new
+
+ case capability
+ when :streaming
+ instance.respond_to?(:stream?) && instance.stream?
+ when :embeddings
+ instance.respond_to?(:embedding)
+ when :chat, :completion
+ instance.respond_to?(:completion)
+ else
+ false
+ end
+ rescue StandardError
+ false
+ end
+
+ # Finds all providers that support a specific capability
+ #
+ # @param capability [Symbol] The capability to filter by
+ # @return [Array<Symbol>] Providers supporting the capability
+ # @example Find streaming providers
+ # providers = ProviderUtilities.providers_with_capability(:streaming)
+ # # => [:openai, :anthropic, :google, ...]
+ def providers_with_capability(capability)
+ available_providers.select do |provider|
+ supports_capability?(provider, capability)
+ end
+ end
+
+ # Compares models across providers based on common characteristics
+ #
+ # @param model_ids [Array<String>] Models to compare
+ # @return [Array<Hash>] Comparison data, one hash per model
+ # @example Compare models
+ # comparison = ProviderUtilities.compare_models(['gpt-4', 'claude-3-opus-20240229'])
+ def compare_models(model_ids)
+ model_ids.map do |model_id|
+ provider = provider_for_model(model_id)
+ {
+ model: model_id,
+ provider: provider,
+ streaming: provider ? supports_capability?(provider, :streaming) : false
+ }
+ end
+ end
+
+ # Creates a fallback chain of providers for redundancy
+ #
+ # This method helps build resilient systems by providing fallback options
+ # when a primary provider is unavailable.
+ #
+ # @param providers [Array<Symbol>] Ordered list of providers to try
+ # @param options [Hash] Configuration options
+ # @return [Array<Durable::Llm::Client>] Array of clients in fallback order
+ # @example Create fallback chain
+ # clients = ProviderUtilities.fallback_chain(
+ # [:openai, :anthropic, :google],
+ # model_map: {
+ # openai: 'gpt-4',
+ # anthropic: 'claude-3-opus-20240229',
+ # google: 'gemini-pro'
+ # }
+ # )
+ def fallback_chain(providers, options = {})
+ model_map = options[:model_map] || {}
+
+ providers.map do |provider|
+ model = model_map[provider]
+ Durable::Llm.new(provider, model: model)
+ rescue StandardError => e
+ warn "Failed to create client for #{provider}: #{e.message}"
+ nil
+ end.compact
+ end
+
+ # Executes a completion with automatic provider fallback
+ #
+ # @param text [String] The input text
+ # @param providers [Array<Symbol>] Ordered providers to try
+ # @param model_map [Hash] Map of provider to model
+ # @return [String, nil] The completion text or nil if all fail
+ # @example Completion with fallback
+ # result = ProviderUtilities.complete_with_fallback(
+ # 'Hello!',
+ # providers: [:openai, :anthropic],
+ # model_map: { openai: 'gpt-4', anthropic: 'claude-3-opus-20240229' }
+ # )
+ def complete_with_fallback(text, providers:, model_map: {})
+ providers.each do |provider|
+ begin
+ client = Durable::Llm.new(provider, model: model_map[provider])
+ return client.complete(text)
+ rescue StandardError => e
+ warn "Provider #{provider} failed: #{e.message}"
+ next
+ end
+ end
+
+ nil # All providers failed
+ end
+
+ # Gets provider information including capabilities
+ #
+ # @param provider_name [Symbol, String] The provider name
+ # @return [Hash] Provider information
+ # @example Get provider info
+ # info = ProviderUtilities.provider_info(:openai)
+ # # => { name: :openai, streaming: true, embeddings: true, ... }
+ def provider_info(provider_name)
+ {
+ name: provider_name,
+ streaming: supports_capability?(provider_name, :streaming),
+ embeddings: supports_capability?(provider_name, :embeddings),
+ chat: supports_capability?(provider_name, :chat)
+ }
+ rescue StandardError => e
+ { name: provider_name, error: e.message }
+ end
+
+ # Lists all providers with their capabilities
+ #
+ # @return [Array<Hash>] Array of provider information hashes
+ # @example List all provider capabilities
+ # all = ProviderUtilities.all_provider_info
+ def all_provider_info
+ available_providers.map { |p| provider_info(p) }
+ end
+ end
+ end
+ end
+
+ # Copyright (c) 2025 Durable Programming, LLC. All rights reserved.
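Taken together, the new utilities support a route-then-fall-back pattern: pick providers by capability, then let complete_with_fallback try them in order. The sketch below is illustrative and not part of the release; it assumes the gem's top-level require path and that the listed providers are configured with API keys (the provider symbols and model IDs are examples only).

    require 'durable/llm' # assumed entry point for the gem

    # Keep only providers that report streaming support, in preference order.
    streaming = Durable::Llm::ProviderUtilities.providers_with_capability(:streaming)
    preferred = %i[openai anthropic] & streaming

    # Try each provider in turn; returns the first successful completion or nil.
    answer = Durable::Llm::ProviderUtilities.complete_with_fallback(
      'Summarize the 0.1.6 changes in one sentence.',
      providers: preferred,
      model_map: { openai: 'gpt-4', anthropic: 'claude-3-opus-20240229' }
    )
    puts(answer || 'All providers failed')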
@@ -21,15 +21,40 @@ module Durable
  # implementations for optional features.
  #
  # Subclasses must implement the following methods:
- # - default_api_key
- # - completion
- # - models
- # - handle_response
+ # - {#default_api_key} - Returns the default API key from configuration
+ # - {#completion} - Performs a completion request
+ # - {#models} - Returns list of available models
+ # - {#handle_response} - Processes API responses
  #
- # Subclasses may override:
- # - stream?
- # - stream
- # - embedding
+ # Subclasses may optionally override:
+ # - {#stream?} - Check if streaming is supported
+ # - {#stream} - Perform streaming requests
+ # - {#embedding} - Generate embeddings
+ #
+ # @abstract Subclass and implement required methods
+ # @example Implementing a custom provider
+ # class MyProvider < Durable::Llm::Providers::Base
+ # def default_api_key
+ # Durable::Llm.configuration.my_provider&.api_key ||
+ # ENV['MY_PROVIDER_API_KEY']
+ # end
+ #
+ # def completion(options)
+ # # Make API request
+ # response = make_request(options)
+ # handle_response(response)
+ # end
+ #
+ # def models
+ # ['model-1', 'model-2']
+ # end
+ #
+ # private
+ #
+ # def handle_response(response)
+ # # Process and return response
+ # end
+ # end
  class Base
  # @return [String, nil] The default API key for this provider, or nil if not configured
  # @raise [NotImplementedError] Subclasses must implement this method
@@ -44,6 +69,10 @@ module Durable
  # Initializes a new provider instance
  #
  # @param api_key [String, nil] The API key to use for authentication. If nil, uses default_api_key
+ # @example Initialize with explicit API key
+ # provider = Durable::Llm::Providers::OpenAI.new(api_key: 'sk-...')
+ # @example Initialize with default API key from configuration
+ # provider = Durable::Llm::Providers::OpenAI.new
  def initialize(api_key: nil)
  @api_key = api_key || default_api_key
  end
@@ -59,25 +88,58 @@ module Durable

  # Retrieves the list of available models, with caching
  #
+ # Models are cached in `~/.local/durable-llm/cache/` for 1 hour to reduce
+ # API calls. The cache is automatically refreshed after expiration.
+ #
  # @return [Array<String>] The list of available model names
+ # @example Get available models for OpenAI
+ # models = Durable::Llm::Providers::OpenAI.models
+ # # => ["gpt-4", "gpt-3.5-turbo", ...]
  def self.models
- cache_dir = File.expand_path("#{Dir.home}/.local/durable-llm/cache")
+ cache_file = model_cache_file
+ return cached_models(cache_file) if cache_valid?(cache_file)

+ fetch_and_cache_models(cache_file)
+ end
+
+ # Returns the path to the model cache file
+ #
+ # @return [String] The cache file path
+ def self.model_cache_file
+ cache_dir = File.expand_path("#{Dir.home}/.local/durable-llm/cache")
  FileUtils.mkdir_p(cache_dir) unless File.directory?(cache_dir)
- cache_file = File.join(cache_dir, "#{name.split('::').last}.json")
+ File.join(cache_dir, "#{name.split('::').last}.json")
+ end

- file_exists = File.exist?(cache_file)
- file_new_enough = file_exists && File.mtime(cache_file) > Time.now - 3600
+ # Checks if the cache file is valid (exists and not expired)
+ #
+ # @param cache_file [String] The cache file path
+ # @return [Boolean] True if cache is valid, false otherwise
+ def self.cache_valid?(cache_file)
+ File.exist?(cache_file) && File.mtime(cache_file) > Time.now - 3600
+ end

- if file_exists && file_new_enough
- JSON.parse(File.read(cache_file))
- else
- models = new.models
- File.write(cache_file, JSON.generate(models)) if models.length.positive?
- models
- end
+ # Reads models from cache file
+ #
+ # @param cache_file [String] The cache file path
+ # @return [Array<String>] The cached model names
+ def self.cached_models(cache_file)
+ JSON.parse(File.read(cache_file))
  end

+ # Fetches models from API and caches them
+ #
+ # @param cache_file [String] The cache file path
+ # @return [Array<String>] The fetched model names
+ def self.fetch_and_cache_models(cache_file)
+ models = new.models
+ File.write(cache_file, JSON.generate(models)) if models.length.positive?
+ models
+ end
+
+ private_class_method :model_cache_file, :cache_valid?, :cached_models, :fetch_and_cache_models
+
+
  # Returns the list of supported option names for completions
  #
  # @return [Array<String>] The supported option names
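Because the cache file name comes from name.split('::').last, each provider class gets its own JSON file under ~/.local/durable-llm/cache/. A hedged sketch of forcing a refresh before the one-hour TTL expires (the OpenAI provider class, its file name, and the require path are assumptions based on the examples above; only the cache-path construction follows directly from the code shown):

    require 'durable/llm' # assumed entry point for the gem

    # Deleting the per-provider cache file forces the next call to refetch and re-cache.
    cache_file = File.expand_path('~/.local/durable-llm/cache/OpenAI.json')
    File.delete(cache_file) if File.exist?(cache_file)

    models = Durable::Llm::Providers::OpenAI.models # hits the API and rewrites the cache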
@@ -139,6 +201,71 @@ module Durable
  def handle_response(response)
  raise NotImplementedError, 'Subclasses must implement handle_response'
  end
+
+ # Validates that required parameters are present in the options hash
+ #
+ # @param options [Hash] The options hash to validate
+ # @param required_params [Array<Symbol, String>] List of required parameter names
+ # @raise [ArgumentError] If any required parameters are missing or empty
+ # @return [void]
+ # @example Validate completion parameters
+ # validate_required_params(options, [:model, :messages])
+ def validate_required_params(options, required_params)
+ missing = required_params.select do |param|
+ value = options[param] || options[param.to_s]
+ value.nil? || (value.respond_to?(:empty?) && value.empty?)
+ end
+
+ return if missing.empty?
+
+ raise ArgumentError, "Missing required parameters: #{missing.join(', ')}. " \
+ "Please provide these parameters in your request."
+ end
+
+ # Validates that a parameter is within a specified range
+ #
+ # @param value [Numeric] The value to validate
+ # @param param_name [String, Symbol] The parameter name for error messages
+ # @param min [Numeric] The minimum allowed value (inclusive)
+ # @param max [Numeric] The maximum allowed value (inclusive)
+ # @raise [ArgumentError] If the value is outside the allowed range
+ # @return [void]
+ # @example Validate temperature parameter
+ # validate_range(options[:temperature], :temperature, 0.0, 2.0)
+ def validate_range(value, param_name, min, max)
+ return if value.nil? # Allow nil values (will use provider defaults)
+ return if value >= min && value <= max
+
+ raise ArgumentError, "#{param_name} must be between #{min} and #{max}, got #{value}"
+ end
+
+ # Validates that the API key is configured
+ #
+ # @raise [Durable::Llm::AuthenticationError] If API key is not configured
+ # @return [void]
+ # @example Validate API key before making request
+ # validate_api_key
+ def validate_api_key
+ return unless @api_key.nil? || @api_key.to_s.strip.empty?
+
+ provider_name = self.class.name.split('::').last
+ raise Durable::Llm::AuthenticationError,
+ "API key not configured for #{provider_name}. " \
+ "Set it via Durable::Llm.configure or environment variable."
+ end
+
+ # Sanitizes and normalizes request options
+ #
+ # @param options [Hash] The raw options hash
+ # @return [Hash] The sanitized options with string keys converted to symbols
+ # @example Sanitize options
+ # sanitized = sanitize_options({ 'model' => 'gpt-4', 'temperature' => 0.7 })
+ # # => { model: 'gpt-4', temperature: 0.7 }
+ def sanitize_options(options)
+ return {} if options.nil?
+
+ options.transform_keys(&:to_sym)
+ end
  end
  end
  end
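The validation and sanitization helpers above are instance methods on Base, so concrete providers can call them at the top of their request paths. A minimal sketch of that wiring, not taken from the gem (MyProvider and its fake request are placeholders, mirroring the abstract-class example earlier in this diff):

    require 'durable/llm/providers/base' # require path as used elsewhere in this diff

    class MyProvider < Durable::Llm::Providers::Base
      def default_api_key
        ENV['MY_PROVIDER_API_KEY']
      end

      def completion(options)
        opts = sanitize_options(options)                    # 'model' => ... becomes model: ...
        validate_api_key                                    # AuthenticationError when unset
        validate_required_params(opts, %i[model messages])  # ArgumentError when missing/empty
        validate_range(opts[:temperature], :temperature, 0.0, 2.0)
        handle_response(fake_request(opts))                 # stand-in for a real HTTP call
      end

      def models
        ['my-model-1']
      end

      private

      # Placeholder transport layer so the sketch stays self-contained.
      def fake_request(opts)
        { 'choices' => [{ 'text' => "echo: #{opts[:messages].last[:content]}" }] }
      end

      def handle_response(response)
        response
      end
    end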
@@ -144,6 +144,10 @@ module Durable
  "#{response.status} Error: #{message}"
  end

+ # Response object for Cohere chat API responses.
+ #
+ # Wraps the raw response and provides a consistent interface for accessing
+ # message content and metadata.
  class CohereResponse
  attr_reader :raw_response

@@ -160,6 +164,9 @@ module Durable
  end
  end

+ # Represents a single choice in a Cohere response.
+ #
+ # Contains the generated text content.
  class CohereChoice
  attr_reader :text

@@ -172,6 +179,9 @@ module Durable
  end
  end

+ # Response object for Cohere embedding API responses.
+ #
+ # Wraps embedding data and provides array access to the vector representation.
  class CohereEmbeddingResponse
  attr_reader :embedding

@@ -184,6 +194,9 @@ module Durable
  end
  end

+ # Response object for streaming Cohere chat chunks.
+ #
+ # Wraps individual chunks from the Server-Sent Events stream.
  class CohereStreamResponse
  attr_reader :choices

@@ -196,6 +209,9 @@ module Durable
  end
  end

+ # Represents a single choice in a streaming Cohere response chunk.
+ #
+ # Contains the delta (incremental content) for the choice.
  class CohereStreamChoice
  attr_reader :delta

@@ -208,6 +224,9 @@ module Durable
  end
  end

+ # Represents the incremental content delta in a streaming response.
+ #
+ # Contains the text content of the delta.
  class CohereStreamDelta
  attr_reader :text

@@ -11,6 +11,10 @@ require 'durable/llm/providers/base'
  module Durable
  module Llm
  module Providers
+ # Fireworks AI provider for accessing Fireworks AI's language models.
+ #
+ # Provides completion, embedding, and streaming capabilities with proper
+ # error handling and response normalization.
  class Fireworks < Durable::Llm::Providers::Base
  BASE_URL = 'https://api.fireworks.ai/inference/v1'

@@ -174,6 +178,10 @@ module Durable
  "#{response.status} Error: #{message}"
  end

+ # Response object for Fireworks chat API responses.
+ #
+ # Wraps the raw response and provides a consistent interface for accessing
+ # message content and metadata.
  class FireworksResponse
  attr_reader :raw_response

@@ -194,6 +202,9 @@ module Durable
  end
  end

+ # Represents a single choice in a Fireworks response.
+ #
+ # Contains the message and finish reason for the choice.
  class FireworksChoice
  attr_reader :message, :finish_reason

@@ -207,6 +218,9 @@ module Durable
  end
  end

+ # Represents a message in a Fireworks conversation.
+ #
+ # Messages have a role (user, assistant) and text content.
  class FireworksMessage
  attr_reader :role, :content

@@ -220,6 +234,9 @@ module Durable
  end
  end

+ # Response object for streaming Fireworks chat chunks.
+ #
+ # Wraps individual chunks from the Server-Sent Events stream.
  class FireworksStreamResponse
  attr_reader :choices

@@ -232,6 +249,9 @@ module Durable
  end
  end

+ # Response object for Fireworks embedding API responses.
+ #
+ # Wraps embedding data and provides array access to the vector representation.
  class FireworksEmbeddingResponse
  attr_reader :embedding

@@ -244,6 +264,9 @@ module Durable
  end
  end

+ # Represents a single choice in a streaming Fireworks response chunk.
+ #
+ # Contains the delta (incremental content) and finish reason for the choice.
  class FireworksStreamChoice
  attr_reader :delta, :finish_reason

@@ -258,6 +281,9 @@ module Durable
  end
  end

+ # Represents the incremental content delta in a streaming response.
+ #
+ # Contains the role and text content of the delta.
  class FireworksStreamDelta
  attr_reader :role, :content

@@ -11,6 +11,10 @@ require 'event_stream_parser'
  module Durable
  module Llm
  module Providers
+ # Google Generative AI provider for accessing Gemini language models.
+ #
+ # Provides completion, embedding, and streaming capabilities with proper
+ # error handling and response normalization for Google's Generative Language API.
  class Google < Durable::Llm::Providers::Base
  BASE_URL = 'https://generativelanguage.googleapis.com'

@@ -206,6 +210,10 @@ module Durable
  "#{response.status} Error: #{message}"
  end

+ # Response object for Google Generative AI API responses.
+ #
+ # Wraps the raw response and provides a consistent interface for accessing
+ # candidate content and metadata.
  class GoogleResponse
  attr_reader :raw_response

@@ -222,6 +230,9 @@ module Durable
  end
  end

+ # Represents a single candidate choice in a Google response.
+ #
+ # Contains the message content from the candidate.
  class GoogleChoice
  attr_reader :message

@@ -234,6 +245,9 @@ module Durable
  end
  end

+ # Represents a message in a Google conversation.
+ #
+ # Messages contain text content extracted from parts.
  class GoogleMessage
  attr_reader :content

@@ -246,6 +260,9 @@ module Durable
  end
  end

+ # Response object for streaming Google Generative AI chunks.
+ #
+ # Wraps individual chunks from the streaming response.
  class GoogleStreamResponse
  attr_reader :choices

@@ -258,6 +275,9 @@ module Durable
  end
  end

+ # Represents a single choice in a streaming Google response chunk.
+ #
+ # Contains the delta (incremental content) for the choice.
  class GoogleStreamChoice
  attr_reader :delta

@@ -270,6 +290,9 @@ module Durable
  end
  end

+ # Represents the incremental content delta in a streaming response.
+ #
+ # Contains the text content of the delta.
  class GoogleStreamDelta
  attr_reader :content

@@ -282,6 +305,9 @@ module Durable
  end
  end

+ # Response object for Google embedding API responses.
+ #
+ # Wraps embedding data and provides array access to the vector representation.
  class GoogleEmbeddingResponse
  attr_reader :embedding

@@ -11,6 +11,10 @@ require 'event_stream_parser'
  module Durable
  module Llm
  module Providers
+ # Groq provider for accessing language models via an OpenAI-compatible API.
+ #
+ # Provides completion, embedding, and streaming capabilities with proper
+ # error handling and response normalization.
  class Groq < Durable::Llm::Providers::Base
  BASE_URL = 'https://api.groq.com/openai/v1'

@@ -144,6 +148,10 @@ module Durable
  end
  end

+ # Response object for Groq chat API responses.
+ #
+ # Wraps the raw response and provides a consistent interface for accessing
+ # message content, embeddings, and metadata.
  class GroqResponse
  attr_reader :raw_response

@@ -172,6 +180,9 @@ module Durable
  end
  end

+ # Represents a single choice in a Groq response.
+ #
+ # Contains the message and finish reason for the choice.
  class GroqChoice
  attr_reader :message, :finish_reason

@@ -185,6 +196,9 @@ module Durable
  end
  end

+ # Represents a message in a Groq conversation.
+ #
+ # Messages have a role (user, assistant, system) and text content.
  class GroqMessage
  attr_reader :role, :content

@@ -198,6 +212,9 @@ module Durable
  end
  end

+ # Response object for streaming Groq chat chunks.
+ #
+ # Wraps individual chunks from the Server-Sent Events stream.
  class GroqStreamResponse
  attr_reader :choices

@@ -210,6 +227,9 @@ module Durable
  end
  end

+ # Represents a single choice in a streaming Groq response chunk.
+ #
+ # Contains the delta (incremental content) and finish reason for the choice.
  class GroqStreamChoice
  attr_reader :delta, :finish_reason

@@ -224,6 +244,9 @@ module Durable
  end
  end

+ # Represents the incremental content delta in a streaming response.
+ #
+ # Contains the role and text content of the delta.
  class GroqStreamDelta
  attr_reader :role, :content

@@ -237,6 +260,9 @@ module Durable
  end
  end

+ # Response object for Groq embedding API responses.
+ #
+ # Wraps embedding data and provides array access to the vector representation.
  class GroqEmbeddingResponse
  attr_reader :embedding