durable-llm 0.1.4 → 0.1.6

This diff shows the changes between publicly released versions of the package as published to a supported registry. It is provided for informational purposes only.
Files changed (42)
  1. checksums.yaml +4 -4
  2. data/.envrc +7 -0
  3. data/CHANGELOG.md +5 -0
  4. data/CLI.md +0 -2
  5. data/Gemfile +7 -9
  6. data/README.md +564 -30
  7. data/Rakefile +16 -6
  8. data/devenv.lock +171 -0
  9. data/devenv.nix +12 -0
  10. data/devenv.yaml +8 -0
  11. data/durable-llm.gemspec +52 -0
  12. data/examples/openai_quick_complete.rb +4 -2
  13. data/lib/durable/llm/cli.rb +218 -22
  14. data/lib/durable/llm/client.rb +228 -8
  15. data/lib/durable/llm/configuration.rb +163 -10
  16. data/lib/durable/llm/convenience.rb +102 -0
  17. data/lib/durable/llm/errors.rb +185 -0
  18. data/lib/durable/llm/provider_utilities.rb +201 -0
  19. data/lib/durable/llm/providers/anthropic.rb +232 -24
  20. data/lib/durable/llm/providers/azure_openai.rb +347 -0
  21. data/lib/durable/llm/providers/base.rb +220 -11
  22. data/lib/durable/llm/providers/cohere.rb +157 -11
  23. data/lib/durable/llm/providers/deepseek.rb +233 -0
  24. data/lib/durable/llm/providers/fireworks.rb +304 -0
  25. data/lib/durable/llm/providers/google.rb +327 -0
  26. data/lib/durable/llm/providers/groq.rb +133 -25
  27. data/lib/durable/llm/providers/huggingface.rb +120 -17
  28. data/lib/durable/llm/providers/mistral.rb +431 -0
  29. data/lib/durable/llm/providers/openai.rb +150 -4
  30. data/lib/durable/llm/providers/opencode.rb +253 -0
  31. data/lib/durable/llm/providers/openrouter.rb +256 -0
  32. data/lib/durable/llm/providers/perplexity.rb +273 -0
  33. data/lib/durable/llm/providers/together.rb +346 -0
  34. data/lib/durable/llm/providers/xai.rb +355 -0
  35. data/lib/durable/llm/providers.rb +113 -13
  36. data/lib/durable/llm/response_helpers.rb +185 -0
  37. data/lib/durable/llm/version.rb +5 -1
  38. data/lib/durable/llm.rb +214 -1
  39. data/lib/durable.rb +29 -4
  40. data/sig/durable/llm.rbs +303 -1
  41. metadata +106 -28
  42. data/Gemfile.lock +0 -103
data/lib/durable/llm/providers/azure_openai.rb
@@ -0,0 +1,347 @@
+ # frozen_string_literal: true
+
+ # Azure OpenAI provider implementation for Durable LLM
+
+ require 'faraday'
+ require 'json'
+ require 'durable/llm/errors'
+ require 'durable/llm/providers/base'
+ require 'event_stream_parser'
+
+ module Durable
+   module Llm
+     module Providers
+       # Azure OpenAI provider for accessing Azure OpenAI's language models
+       #
+       # This provider implements the Azure OpenAI API for chat completions,
+       # embeddings, and streaming. It handles authentication via API keys,
+       # deployment-based routing, and response normalization.
+       class AzureOpenai < Durable::Llm::Providers::Base
+         BASE_URL_TEMPLATE = 'https://%s.openai.azure.com/openai/deployments/%s'
+
+         def default_api_key
+           begin
+             Durable::Llm.configuration.azure_openai&.api_key
+           rescue NoMethodError
+             nil
+           end || ENV['AZURE_OPENAI_API_KEY']
+         end
+
+         attr_accessor :api_key, :resource_name, :api_version
+
+         def initialize(api_key: nil, resource_name: nil, api_version: '2024-02-01')
+           super(api_key: api_key)
+           @resource_name = resource_name || ENV['AZURE_OPENAI_RESOURCE_NAME']
+           @api_version = api_version
+           # NOTE: BASE_URL will be constructed per request since deployment is in model
+         end
+
+         def completion(options)
+           model = options.delete(:model) || options.delete('model')
+           base_url = format(BASE_URL_TEMPLATE, @resource_name, model)
+           conn = build_connection(base_url)
+
+           response = conn.post('chat/completions') do |req|
+             req.headers['api-key'] = @api_key
+             req.params['api-version'] = @api_version
+             req.body = options
+           end
+
+           handle_response(response)
+         end
+
+         def embedding(model:, input:, **options)
+           base_url = format(BASE_URL_TEMPLATE, @resource_name, model)
+           conn = build_connection(base_url)
+
+           response = conn.post('embeddings') do |req|
+             req.headers['api-key'] = @api_key
+             req.params['api-version'] = @api_version
+             req.body = { input: input, **options }
+           end
+
+           handle_response(response, AzureOpenaiEmbeddingResponse)
+         end
+
+         def models
+           # Azure OpenAI doesn't have a public models endpoint, return hardcoded list
+           [
+             # GPT-5 series
+             'gpt-5',
+             'gpt-5-mini',
+             'gpt-5-nano',
+             'gpt-5-chat',
+             'gpt-5-codex',
+             'gpt-5-pro',
+             # GPT-4.1 series
+             'gpt-4.1',
+             'gpt-4.1-mini',
+             'gpt-4.1-nano',
+             # GPT-4o series
+             'gpt-4o',
+             'gpt-4o-mini',
+             'gpt-4o-audio-preview',
+             'gpt-4o-mini-audio-preview',
+             'gpt-4o-realtime-preview',
+             'gpt-4o-mini-realtime-preview',
+             'gpt-4o-transcribe',
+             'gpt-4o-mini-transcribe',
+             'gpt-4o-mini-tts',
+             # GPT-4 Turbo
+             'gpt-4-turbo',
+             # GPT-4
+             'gpt-4',
+             'gpt-4-32k',
+             # GPT-3.5
+             'gpt-3.5-turbo',
+             'gpt-35-turbo',
+             'gpt-35-turbo-instruct',
+             # O-series
+             'o3',
+             'o3-mini',
+             'o3-pro',
+             'o4-mini',
+             'o1',
+             'o1-mini',
+             'o1-preview',
+             'codex-mini',
+             # Embeddings
+             'text-embedding-ada-002',
+             'text-embedding-3-small',
+             'text-embedding-3-large',
+             # Audio
+             'whisper',
+             'gpt-4o-transcribe',
+             'gpt-4o-mini-transcribe',
+             'tts',
+             'tts-hd',
+             'gpt-4o-mini-tts',
+             # Image generation
+             'dall-e-3',
+             'gpt-image-1',
+             'gpt-image-1-mini',
+             # Video generation
+             'sora',
+             # Other
+             'model-router',
+             'computer-use-preview',
+             'gpt-oss-120b',
+             'gpt-oss-20b'
+           ]
+         end
+
+         def self.stream?
+           true
+         end
+
+         def stream(options)
+           model = options[:model] || options['model']
+           base_url = format(BASE_URL_TEMPLATE, @resource_name, model)
+           conn = build_connection(base_url)
+
+           options[:stream] = true
+           options['temperature'] = options['temperature'].to_f if options['temperature']
+
+           response = conn.post('chat/completions') do |req|
+             setup_stream_request(req, options) do |chunk|
+               yield AzureOpenaiStreamResponse.new(chunk)
+             end
+           end
+
+           handle_response(response)
+         end
+
+         def setup_stream_request(req, options)
+           req.headers['api-key'] = @api_key
+           req.params['api-version'] = @api_version
+           req.headers['Accept'] = 'text/event-stream'
+           req.body = options
+
+           user_proc = proc do |chunk, _size, _total|
+             yield chunk
+           end
+
+           req.options.on_data = to_json_stream(user_proc: user_proc)
+         end
+
+         private
+
+         def build_connection(base_url)
+           Faraday.new(url: base_url) do |faraday|
+             faraday.request :json
+             faraday.response :json
+             faraday.adapter Faraday.default_adapter
+           end
+         end
+
+         # CODE-FROM: ruby-openai @ https://github.com/alexrudall/ruby-openai/blob/main/lib/openai/http.rb
+         # MIT License: https://github.com/alexrudall/ruby-openai/blob/main/LICENSE.md
+         def to_json_stream(user_proc:)
+           parser = EventStreamParser::Parser.new
+
+           proc do |chunk, _bytes, env|
+             if env && env.status != 200
+               raise_error = Faraday::Response::RaiseError.new
+               raise_error.on_complete(env.merge(body: try_parse_json(chunk)))
+             end
+
+             parser.feed(chunk) do |_type, data|
+               user_proc.call(JSON.parse(data)) unless data == '[DONE]'
+             end
+           end
+         end
+
+         def try_parse_json(maybe_json)
+           JSON.parse(maybe_json)
+         rescue JSON::ParserError
+           maybe_json
+         end
+
+         # END-CODE-FROM
+
+         def handle_response(response, response_class = AzureOpenaiResponse)
+           case response.status
+           when 200..299
+             response_class.new(response.body)
+           else
+             raise_error(response)
+           end
+         end
+
+         def raise_error(response)
+           error_class = case response.status
+                         when 401 then Durable::Llm::AuthenticationError
+                         when 429 then Durable::Llm::RateLimitError
+                         when 400..499 then Durable::Llm::InvalidRequestError
+                         when 500..599 then Durable::Llm::ServerError
+                         else Durable::Llm::APIError
+                         end
+
+           message = if error_class == Durable::Llm::APIError
+                       "Unexpected response code: #{response.status}"
+                     else
+                       parse_error_message(response)
+                     end
+
+           raise error_class, message
+         end
+
+         def parse_error_message(response)
+           body = begin
+             JSON.parse(response.body)
+           rescue StandardError
+             nil
+           end
+           message = body&.dig('error', 'message') || response.body
+           "#{response.status} Error: #{message}"
+         end
+
+         # Response wrapper for Azure OpenAI completion API responses
+         class AzureOpenaiResponse
+           attr_reader :raw_response
+
+           def initialize(response)
+             @raw_response = response
+           end
+
+           def choices
+             @raw_response['choices'].map { |choice| AzureOpenaiChoice.new(choice) }
+           end
+
+           def data
+             @raw_response['data']
+           end
+
+           def to_s
+             choices.map(&:to_s).join(' ')
+           end
+         end
+
+         # Choice wrapper for Azure OpenAI API responses
+         class AzureOpenaiChoice
+           attr_reader :message, :finish_reason
+
+           def initialize(choice)
+             @message = AzureOpenaiMessage.new(choice['message'])
+             @finish_reason = choice['finish_reason']
+           end
+
+           def to_s
+             @message.to_s
+           end
+         end
+
+         # Message wrapper for Azure OpenAI API responses
+         class AzureOpenaiMessage
+           attr_reader :role, :content
+
+           def initialize(message)
+             @role = message['role']
+             @content = message['content']
+           end
+
+           def to_s
+             @content
+           end
+         end
+
+         # Stream response wrapper for Azure OpenAI streaming API
+         class AzureOpenaiStreamResponse
+           attr_reader :choices
+
+           def initialize(parsed)
+             @choices = AzureOpenaiStreamChoice.new(parsed['choices'])
+           end
+
+           def to_s
+             @choices.to_s
+           end
+         end
+
+         # Embedding response wrapper for Azure OpenAI embedding API
+         class AzureOpenaiEmbeddingResponse
+           attr_reader :embedding
+
+           def initialize(data)
+             @embedding = data.dig('data', 0, 'embedding')
+           end
+
+           def to_a
+             @embedding
+           end
+         end
+
+         # Stream choice wrapper for Azure OpenAI streaming responses
+         class AzureOpenaiStreamChoice
+           attr_reader :delta, :finish_reason
+
+           def initialize(choice)
+             @choice = [choice].flatten.first
+             @delta = AzureOpenaiStreamDelta.new(@choice['delta'])
+             @finish_reason = @choice['finish_reason']
+           end
+
+           def to_s
+             @delta.to_s
+           end
+         end
+
+         # Stream delta wrapper for Azure OpenAI streaming responses
+         class AzureOpenaiStreamDelta
+           attr_reader :role, :content
+
+           def initialize(delta)
+             @role = delta['role']
+             @content = delta['content']
+           end
+
+           def to_s
+             @content || ''
+           end
+         end
+       end
+     end
+   end
+ end
+
+ # Copyright (c) 2025 Durable Programming, LLC. All rights reserved.
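
For orientation, here is a minimal usage sketch of the new provider, based only on the constructor and methods shown in the diff above. The resource name 'my-resource' and deployment name 'gpt-4o' are placeholders; the :model option doubles as the Azure deployment name and must match a deployment that actually exists on the resource.

require 'durable/llm/providers/azure_openai'

# The resource name fills the first slot of BASE_URL_TEMPLATE:
#   https://my-resource.openai.azure.com/openai/deployments/<model>
provider = Durable::Llm::Providers::AzureOpenai.new(
  api_key: ENV['AZURE_OPENAI_API_KEY'],
  resource_name: 'my-resource' # placeholder Azure resource name
)

response = provider.completion(
  model: 'gpt-4o', # placeholder deployment name
  messages: [{ role: 'user', content: 'Say hello.' }]
)
puts response.to_s

# Streaming yields one AzureOpenaiStreamResponse per SSE chunk.
provider.stream(model: 'gpt-4o', messages: [{ role: 'user', content: 'Count to 3.' }]) do |chunk|
  print chunk.to_s
end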
data/lib/durable/llm/providers/base.rb
@@ -1,65 +1,274 @@
+ # frozen_string_literal: true
+
+ require 'json'
+ require 'fileutils'
+
+ # This file defines the abstract base class for all LLM providers in the Durable gem,
+ # establishing a common interface and shared functionality that all provider implementations
+ # must follow. It defines required methods like completion, models, and streaming capabilities,
+ # provides caching mechanisms for model lists, handles default API key resolution, and includes
+ # stub implementations for optional features like embeddings. The base class ensures consistency
+ # across different LLM providers while allowing each provider to implement their specific API
+ # communication patterns and response handling.
+
  module Durable
    module Llm
      module Providers
+       # Abstract base class for all LLM providers
+       #
+       # This class defines the common interface that all LLM provider implementations must follow.
+       # It provides default implementations for caching model lists, handling API keys, and stub
+       # implementations for optional features.
+       #
+       # Subclasses must implement the following methods:
+       # - {#default_api_key} - Returns the default API key from configuration
+       # - {#completion} - Performs a completion request
+       # - {#models} - Returns list of available models
+       # - {#handle_response} - Processes API responses
+       #
+       # Subclasses may optionally override:
+       # - {#stream?} - Check if streaming is supported
+       # - {#stream} - Perform streaming requests
+       # - {#embedding} - Generate embeddings
+       #
+       # @abstract Subclass and implement required methods
+       # @example Implementing a custom provider
+       #   class MyProvider < Durable::Llm::Providers::Base
+       #     def default_api_key
+       #       Durable::Llm.configuration.my_provider&.api_key ||
+       #         ENV['MY_PROVIDER_API_KEY']
+       #     end
+       #
+       #     def completion(options)
+       #       # Make API request
+       #       response = make_request(options)
+       #       handle_response(response)
+       #     end
+       #
+       #     def models
+       #       ['model-1', 'model-2']
+       #     end
+       #
+       #     private
+       #
+       #     def handle_response(response)
+       #       # Process and return response
+       #     end
+       #   end
        class Base
+         # @return [String, nil] The default API key for this provider, or nil if not configured
+         # @raise [NotImplementedError] Subclasses must implement this method
          def default_api_key
            raise NotImplementedError, 'Subclasses must implement default_api_key'
          end

+         # @!attribute [rw] api_key
+         #   @return [String, nil] The API key used for authentication
          attr_accessor :api_key

+         # Initializes a new provider instance
+         #
+         # @param api_key [String, nil] The API key to use for authentication. If nil, uses default_api_key
+         # @example Initialize with explicit API key
+         #   provider = Durable::Llm::Providers::OpenAI.new(api_key: 'sk-...')
+         # @example Initialize with default API key from configuration
+         #   provider = Durable::Llm::Providers::OpenAI.new
          def initialize(api_key: nil)
            @api_key = api_key || default_api_key
          end

+         # Performs a completion request
+         #
+         # @param options [Hash] The completion options including model, messages, etc.
+         # @return [Object] The completion response object
+         # @raise [NotImplementedError] Subclasses must implement this method
          def completion(options)
            raise NotImplementedError, 'Subclasses must implement completion'
          end

+         # Retrieves the list of available models, with caching
+         #
+         # Models are cached in `~/.local/durable-llm/cache/` for 1 hour to reduce
+         # API calls. The cache is automatically refreshed after expiration.
+         #
+         # @return [Array<String>] The list of available model names
+         # @example Get available models for OpenAI
+         #   models = Durable::Llm::Providers::OpenAI.models
+         #   # => ["gpt-4", "gpt-3.5-turbo", ...]
          def self.models
-           cache_dir = File.expand_path("#{Dir.home}/.local/durable-llm/cache")
+           cache_file = model_cache_file
+           return cached_models(cache_file) if cache_valid?(cache_file)

+           fetch_and_cache_models(cache_file)
+         end
+
+         # Returns the path to the model cache file
+         #
+         # @return [String] The cache file path
+         def self.model_cache_file
+           cache_dir = File.expand_path("#{Dir.home}/.local/durable-llm/cache")
            FileUtils.mkdir_p(cache_dir) unless File.directory?(cache_dir)
-           cache_file = File.join(cache_dir, "#{name.split('::').last}.json")
+           File.join(cache_dir, "#{name.split('::').last}.json")
+         end

-           file_exists = File.exist?(cache_file)
-           file_new_enough = file_exists && File.mtime(cache_file) > Time.now - 3600
+         # Checks if the cache file is valid (exists and not expired)
+         #
+         # @param cache_file [String] The cache file path
+         # @return [Boolean] True if cache is valid, false otherwise
+         def self.cache_valid?(cache_file)
+           File.exist?(cache_file) && File.mtime(cache_file) > Time.now - 3600
+         end

-           if file_exists && file_new_enough
-             JSON.parse(File.read(cache_file))
-           else
-             models = new.models
-             File.write(cache_file, JSON.generate(models)) if models.length > 0
-             models
-           end
+         # Reads models from cache file
+         #
+         # @param cache_file [String] The cache file path
+         # @return [Array<String>] The cached model names
+         def self.cached_models(cache_file)
+           JSON.parse(File.read(cache_file))
+         end
+
+         # Fetches models from API and caches them
+         #
+         # @param cache_file [String] The cache file path
+         # @return [Array<String>] The fetched model names
+         def self.fetch_and_cache_models(cache_file)
+           models = new.models
+           File.write(cache_file, JSON.generate(models)) if models.length.positive?
+           models
+         end
+
+         private_class_method :model_cache_file, :cache_valid?, :cached_models, :fetch_and_cache_models
+
+
+         # Returns the list of supported option names for completions
+         #
+         # @return [Array<String>] The supported option names
+         def self.options
+           %w[temperature max_tokens top_p frequency_penalty presence_penalty]
          end

+         # Retrieves the list of available models for this provider instance
+         #
+         # @return [Array<String>] The list of available model names
+         # @raise [NotImplementedError] Subclasses must implement this method
          def models
            raise NotImplementedError, 'Subclasses must implement models'
          end

+         # Checks if this provider class supports streaming
+         #
+         # @return [Boolean] True if streaming is supported, false otherwise
          def self.stream?
            false
          end

+         # Checks if this provider instance supports streaming
+         #
+         # @return [Boolean] True if streaming is supported, false otherwise
          def stream?
            self.class.stream?
          end

+         # Performs a streaming completion request
+         #
+         # @param options [Hash] The stream options including model, messages, etc.
+         # @yield [Object] Yields stream response chunks as they arrive
+         # @return [Object] The final response object
+         # @raise [NotImplementedError] Subclasses must implement this method
          def stream(options, &block)
            raise NotImplementedError, 'Subclasses must implement stream'
          end

+         # Performs an embedding request
+         #
+         # @param model [String] The model to use for generating embeddings
+         # @param input [String, Array<String>] The input text(s) to embed
+         # @param options [Hash] Additional options for the embedding request
+         # @return [Object] The embedding response object
+         # @raise [NotImplementedError] Subclasses must implement this method
          def embedding(model:, input:, **options)
            raise NotImplementedError, 'Subclasses must implement embedding'
          end

          private

+         # Handles the raw response from the API, processing errors and returning normalized response
+         #
+         # @param response [Object] The raw response from the API call
+         # @return [Object] The processed response object
+         # @raise [Durable::Llm::APIError] If the response indicates an API error
+         # @raise [NotImplementedError] Subclasses must implement this method
          def handle_response(response)
            raise NotImplementedError, 'Subclasses must implement handle_response'
          end
+
+         # Validates that required parameters are present in the options hash
+         #
+         # @param options [Hash] The options hash to validate
+         # @param required_params [Array<Symbol, String>] List of required parameter names
+         # @raise [ArgumentError] If any required parameters are missing or empty
+         # @return [void]
+         # @example Validate completion parameters
+         #   validate_required_params(options, [:model, :messages])
+         def validate_required_params(options, required_params)
+           missing = required_params.select do |param|
+             value = options[param] || options[param.to_s]
+             value.nil? || (value.respond_to?(:empty?) && value.empty?)
+           end
+
+           return if missing.empty?
+
+           raise ArgumentError, "Missing required parameters: #{missing.join(', ')}. " \
+                                "Please provide these parameters in your request."
+         end
+
+         # Validates that a parameter is within a specified range
+         #
+         # @param value [Numeric] The value to validate
+         # @param param_name [String, Symbol] The parameter name for error messages
+         # @param min [Numeric] The minimum allowed value (inclusive)
+         # @param max [Numeric] The maximum allowed value (inclusive)
+         # @raise [ArgumentError] If the value is outside the allowed range
+         # @return [void]
+         # @example Validate temperature parameter
+         #   validate_range(options[:temperature], :temperature, 0.0, 2.0)
+         def validate_range(value, param_name, min, max)
+           return if value.nil? # Allow nil values (will use provider defaults)
+           return if value >= min && value <= max
+
+           raise ArgumentError, "#{param_name} must be between #{min} and #{max}, got #{value}"
+         end
+
+         # Validates that the API key is configured
+         #
+         # @raise [Durable::Llm::AuthenticationError] If API key is not configured
+         # @return [void]
+         # @example Validate API key before making request
+         #   validate_api_key
+         def validate_api_key
+           return unless @api_key.nil? || @api_key.to_s.strip.empty?
+
+           provider_name = self.class.name.split('::').last
+           raise Durable::Llm::AuthenticationError,
+                 "API key not configured for #{provider_name}. " \
+                 "Set it via Durable::Llm.configure or environment variable."
+         end
+
+         # Sanitizes and normalizes request options
+         #
+         # @param options [Hash] The raw options hash
+         # @return [Hash] The sanitized options with string keys converted to symbols
+         # @example Sanitize options
+         #   sanitized = sanitize_options({ 'model' => 'gpt-4', 'temperature' => 0.7 })
+         #   # => { model: 'gpt-4', temperature: 0.7 }
+         def sanitize_options(options)
+           return {} if options.nil?
+
+           options.transform_keys(&:to_sym)
+         end
        end
      end
    end
  end
+
+ # Copyright (c) 2025 Durable Programming, LLC. All rights reserved.
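
The validation helpers added to Base are private instance methods, intended to be called at the top of a subclass's request methods. The sketch below shows that pattern under stated assumptions: MyProvider, its environment variable, and make_request are hypothetical, not part of the gem.

class MyProvider < Durable::Llm::Providers::Base
  def default_api_key
    ENV['MY_PROVIDER_API_KEY'] # hypothetical variable name
  end

  def completion(options)
    validate_api_key                                    # AuthenticationError if unset or blank
    opts = sanitize_options(options)                    # string keys normalized to symbols
    validate_required_params(opts, [:model, :messages]) # ArgumentError if missing or empty
    validate_range(opts[:temperature], :temperature, 0.0, 2.0)
    handle_response(make_request(opts))
  end

  def models
    ['my-model-1']
  end

  private

  def handle_response(response)
    response # a real provider would normalize the API payload here
  end

  def make_request(opts)
    # HTTP call elided
  end
end

Calling MyProvider.models at the class level then goes through the new cache helpers, reading and writing ~/.local/durable-llm/cache/MyProvider.json and refetching once the file is more than an hour old.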