durable-llm 0.1.4 → 0.1.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. checksums.yaml +4 -4
  2. data/.envrc +7 -0
  3. data/CHANGELOG.md +5 -0
  4. data/CLI.md +0 -2
  5. data/Gemfile +7 -9
  6. data/README.md +564 -30
  7. data/Rakefile +16 -6
  8. data/devenv.lock +171 -0
  9. data/devenv.nix +12 -0
  10. data/devenv.yaml +8 -0
  11. data/durable-llm.gemspec +52 -0
  12. data/examples/openai_quick_complete.rb +4 -2
  13. data/lib/durable/llm/cli.rb +218 -22
  14. data/lib/durable/llm/client.rb +228 -8
  15. data/lib/durable/llm/configuration.rb +163 -10
  16. data/lib/durable/llm/convenience.rb +102 -0
  17. data/lib/durable/llm/errors.rb +185 -0
  18. data/lib/durable/llm/provider_utilities.rb +201 -0
  19. data/lib/durable/llm/providers/anthropic.rb +232 -24
  20. data/lib/durable/llm/providers/azure_openai.rb +347 -0
  21. data/lib/durable/llm/providers/base.rb +220 -11
  22. data/lib/durable/llm/providers/cohere.rb +157 -11
  23. data/lib/durable/llm/providers/deepseek.rb +233 -0
  24. data/lib/durable/llm/providers/fireworks.rb +304 -0
  25. data/lib/durable/llm/providers/google.rb +327 -0
  26. data/lib/durable/llm/providers/groq.rb +133 -25
  27. data/lib/durable/llm/providers/huggingface.rb +120 -17
  28. data/lib/durable/llm/providers/mistral.rb +431 -0
  29. data/lib/durable/llm/providers/openai.rb +150 -4
  30. data/lib/durable/llm/providers/opencode.rb +253 -0
  31. data/lib/durable/llm/providers/openrouter.rb +256 -0
  32. data/lib/durable/llm/providers/perplexity.rb +273 -0
  33. data/lib/durable/llm/providers/together.rb +346 -0
  34. data/lib/durable/llm/providers/xai.rb +355 -0
  35. data/lib/durable/llm/providers.rb +113 -13
  36. data/lib/durable/llm/response_helpers.rb +185 -0
  37. data/lib/durable/llm/version.rb +5 -1
  38. data/lib/durable/llm.rb +214 -1
  39. data/lib/durable.rb +29 -4
  40. data/sig/durable/llm.rbs +303 -1
  41. metadata +106 -28
  42. data/Gemfile.lock +0 -103
data/lib/durable/llm/providers.rb CHANGED
@@ -1,34 +1,131 @@
+# frozen_string_literal: true
+
+# This file serves as the main registry and loader for LLM providers in the Durable gem,
+# providing a centralized interface to manage and discover available provider classes. It handles
+# automatic loading of provider modules, maintains a dynamic list of registered providers, offers
+# utility methods for model discovery and provider resolution based on model IDs, and includes
+# provider aliases for backwards compatibility and convenience access.
+
 require 'durable/llm/providers/openai'
 require 'durable/llm/providers/anthropic'
+require 'durable/llm/providers/cohere'
+require 'durable/llm/providers/groq'
+require 'durable/llm/providers/huggingface'
+require 'durable/llm/providers/azure_openai'
+require 'durable/llm/providers/deepseek'
+require 'durable/llm/providers/fireworks'
+require 'durable/llm/providers/google'
+require 'durable/llm/providers/mistral'
+require 'durable/llm/providers/opencode'
+require 'durable/llm/providers/openrouter'
+require 'durable/llm/providers/perplexity'
+require 'durable/llm/providers/together'
+require 'durable/llm/providers/xai'
 
 module Durable
   module Llm
+    # Main module for LLM providers, providing registry and utility methods
     module Providers
+      # Loads all provider files in the providers directory.
+      #
+      # This method dynamically requires all Ruby files in the providers subdirectory,
+      # ensuring that all provider classes are loaded and available for use.
+      #
+      # @return [void]
       def self.load_all
-        Dir[File.join(__dir__, 'providers', '*.rb')].each { |file| require file }
+        Dir[File.join(__dir__, 'providers', '*.rb')].sort.each { |file| require file }
+      end
+
+      # Returns the provider class for a given provider symbol.
+      #
+      # This method handles the mapping from provider symbols to their corresponding
+      # class constants, including special cases where the symbol doesn't directly
+      # map to a capitalized class name.
+      #
+      # @param provider_sym [Symbol] The provider symbol (e.g., :openai, :anthropic)
+      # @return [Class] The provider class
+      # @raise [NameError] If the provider class cannot be found
+      def self.provider_class_for(provider_sym)
+        # Handle special cases where capitalize doesn't match
+        case provider_sym
+        when :deepseek
+          DeepSeek
+        when :openrouter
+          OpenRouter
+        when :azureopenai
+          AzureOpenai
+        when :opencode
+          Opencode
+        else
+          const_get(provider_sym.to_s.capitalize)
+        end
       end
 
+      # Returns a list of all available provider symbols.
+      #
+      # This method dynamically discovers all provider classes by inspecting the
+      # module's constants and filtering for classes that inherit from Base,
+      # excluding the Base class itself.
+      #
+      # @return [Array<Symbol>] Array of provider symbols
       def self.providers
-        @provider_list ||= constants.select do |const_name|
-          const = const_get(const_name)
-          last_component = const.name.split('::').last
-          next if last_component == 'Base'
+        @providers ||= begin
+          provider_classes = constants.select do |const_name|
+            const = const_get(const_name)
+            next if const.name.split('::').last == 'Base'
 
-          const.is_a?(Class) && const.to_s.split('::').last.to_s == const_name.to_s
-        end.map(&:to_s).map(&:downcase).map(&:to_sym)
+            const.is_a?(Class) && const < Durable::Llm::Providers::Base
+          end
+
+          provider_classes.map do |const_name|
+            const_get(const_name).name.split('::').last.downcase.to_sym
+          end.uniq
+        end
       end
 
+      # Returns a list of all available provider names as strings.
+      #
+      # Alias for providers that returns strings instead of symbols, useful for
+      # display purposes in error messages and documentation.
+      #
+      # @return [Array<String>] Array of provider names
+      def self.available_providers
+        providers.map(&:to_s).sort
+      end
+
+      # Returns a flat list of all model IDs across all providers.
+      #
+      # This method aggregates model IDs from all available providers by calling
+      # their models method. If a provider fails to return models (e.g., due to
+      # missing API keys), it gracefully handles the error and continues.
+      #
+      # @return [Array<String>] Array of model IDs
       def self.model_ids
-        providers.flat_map do |provider|
-          provider_class = const_get(provider.to_s.capitalize)
-          provider_class.models
+        providers.flat_map do |provider_sym|
+          provider_class = provider_class_for(provider_sym)
+          begin
+            provider_class.models
+          rescue StandardError
+            []
+          end
         end
       end
 
+      # Finds the provider class that supports a given model ID.
+      #
+      # This method searches through all providers to find which one supports
+      # the specified model ID. Returns nil if no provider supports the model.
+      #
+      # @param model_id [String] The model ID to search for
+      # @return [Class, nil] The provider class that supports the model, or nil
       def self.model_id_to_provider(model_id)
-        providers.each do |provider|
-          provider_class = const_get(provider.to_s.capitalize)
-          return provider_class if provider_class.models.include?(model_id)
+        providers.each do |provider_sym|
+          provider_class = provider_class_for(provider_sym)
+          begin
+            return provider_class if provider_class.models.include?(model_id)
+          rescue StandardError
+            next
+          end
         end
         nil
       end
@@ -36,6 +133,9 @@ module Durable
       Openai = OpenAI
       Claude = Anthropic
       Claude3 = Anthropic
+      AzureOpenAI = AzureOpenai
     end
   end
 end
+
+# Copyright (c) 2025 Durable Programming, LLC. All rights reserved.
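The registry API above is small enough to exercise directly. A minimal sketch of how the new methods fit together, using only names visible in this diff (returned values will vary with which providers and API keys are configured):

```ruby
require 'durable/llm'

# Provider symbols are derived from class names, so irregular casings like
# DeepSeek resolve through provider_class_for's case statement.
Durable::Llm::Providers.providers                     # e.g. [:openai, :anthropic, :deepseek, ...]
Durable::Llm::Providers.available_providers           # sorted strings, handy for error messages
Durable::Llm::Providers.provider_class_for(:deepseek) # => Durable::Llm::Providers::DeepSeek

# model_ids and model_id_to_provider rescue StandardError per provider
# (e.g. a missing API key), so they are safe to call in a partially
# configured environment.
Durable::Llm::Providers.model_id_to_provider('gpt-4') # a provider class, or nil
```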
data/lib/durable/llm/response_helpers.rb ADDED
@@ -0,0 +1,185 @@
+# frozen_string_literal: true
+
+# This module provides helper methods for extracting and formatting responses from
+# LLM API calls. It offers convenient methods to work with response objects from
+# different providers, abstracting away the complexity of response structure variations.
+
+module Durable
+  module Llm
+    # Helper methods for working with LLM responses
+    #
+    # This module provides convenience methods for extracting content, messages,
+    # and metadata from LLM response objects. It handles the common patterns of
+    # response processing across different providers.
+    #
+    # @example Using response helpers
+    #   response = client.chat(messages: [...])
+    #   content = ResponseHelpers.extract_content(response)
+    #   tokens = ResponseHelpers.token_usage(response)
+    module ResponseHelpers
+      module_function
+
+      # Extracts the text content from a completion response
+      #
+      # @param response [Object] The API response object
+      # @return [String, nil] The extracted content or nil if not found
+      # @example Extract content from response
+      #   response = client.completion(messages: [...])
+      #   text = ResponseHelpers.extract_content(response)
+      #   puts text
+      def extract_content(response)
+        return nil unless response
+        return nil unless response.respond_to?(:choices)
+        return nil if response.choices.empty?
+
+        choice = response.choices.first
+        return nil unless choice.respond_to?(:message)
+
+        message = choice.message
+        return nil unless message.respond_to?(:content)
+
+        message.content
+      end
+
+      # Extracts all choice contents from a response
+      #
+      # @param response [Object] The API response object
+      # @return [Array<String>] Array of content strings from all choices
+      # @example Get all alternatives
+      #   response = client.completion(messages: [...], n: 3)
+      #   alternatives = ResponseHelpers.all_contents(response)
+      def all_contents(response)
+        return [] unless response&.respond_to?(:choices)
+
+        response.choices.map do |choice|
+          next unless choice.respond_to?(:message)
+
+          message = choice.message
+          message.content if message.respond_to?(:content)
+        end.compact
+      end
+
+      # Extracts token usage information from a response
+      #
+      # @param response [Object] The API response object
+      # @return [Hash, nil] Hash with :prompt_tokens, :completion_tokens, :total_tokens
+      # @example Get token usage
+      #   response = client.completion(messages: [...])
+      #   usage = ResponseHelpers.token_usage(response)
+      #   puts "Used #{usage[:total_tokens]} tokens"
+      def token_usage(response)
+        return nil unless response&.respond_to?(:usage)
+
+        usage = response.usage
+        return nil unless usage
+
+        {
+          prompt_tokens: usage.prompt_tokens,
+          completion_tokens: usage.completion_tokens,
+          total_tokens: usage.total_tokens
+        }
+      end
+
+      # Extracts the finish reason from a response
+      #
+      # @param response [Object] The API response object
+      # @return [String, nil] The finish reason (e.g., 'stop', 'length', 'content_filter')
+      # @example Check why completion finished
+      #   response = client.completion(messages: [...])
+      #   reason = ResponseHelpers.finish_reason(response)
+      #   puts "Finished because: #{reason}"
+      def finish_reason(response)
+        return nil unless response&.respond_to?(:choices)
+        return nil if response.choices.empty?
+
+        choice = response.choices.first
+        choice.finish_reason if choice.respond_to?(:finish_reason)
+      end
+
+      # Checks if a response was truncated due to length
+      #
+      # @param response [Object] The API response object
+      # @return [Boolean] True if response was truncated
+      # @example Check if truncated
+      #   response = client.completion(messages: [...])
+      #   if ResponseHelpers.truncated?(response)
+      #     puts "Response was cut off. Consider increasing max_tokens."
+      #   end
+      def truncated?(response)
+        finish_reason(response) == 'length'
+      end
+
+      # Formats a response as a simple hash with common fields
+      #
+      # @param response [Object] The API response object
+      # @return [Hash] Simplified response hash
+      # @example Format response
+      #   response = client.completion(messages: [...])
+      #   simple = ResponseHelpers.to_hash(response)
+      #   # => { content: "...", tokens: {...}, finish_reason: "stop" }
+      def to_hash(response)
+        {
+          content: extract_content(response),
+          tokens: token_usage(response),
+          finish_reason: finish_reason(response),
+          all_contents: all_contents(response)
+        }
+      end
+
+      # Extracts model information from response
+      #
+      # @param response [Object] The API response object
+      # @return [String, nil] The model used for the completion
+      # @example Get model name
+      #   response = client.completion(messages: [...])
+      #   model = ResponseHelpers.model_used(response)
+      #   puts "Model: #{model}"
+      def model_used(response)
+        return nil unless response&.respond_to?(:model)
+
+        response.model
+      end
+
+      # Calculates the cost of a response (approximate)
+      #
+      # This is a rough estimate based on common pricing. For accurate costs,
+      # consult your provider's pricing page.
+      #
+      # @param response [Object] The API response object
+      # @param model [String, nil] Optional model name for pricing lookup
+      # @return [Float, nil] Estimated cost in USD
+      # @example Estimate cost
+      #   response = client.completion(messages: [...])
+      #   cost = ResponseHelpers.estimate_cost(response)
+      #   puts "Estimated cost: $#{cost}"
+      def estimate_cost(response, model = nil)
+        usage = token_usage(response)
+        return nil unless usage
+
+        model ||= model_used(response)
+        return nil unless model
+
+        # Rough pricing estimates (as of 2025)
+        pricing = case model
+                  when /gpt-4-turbo/
+                    { prompt: 0.01 / 1000, completion: 0.03 / 1000 }
+                  when /gpt-4/
+                    { prompt: 0.03 / 1000, completion: 0.06 / 1000 }
+                  when /gpt-3.5-turbo/
+                    { prompt: 0.0015 / 1000, completion: 0.002 / 1000 }
+                  when /claude-3-opus/
+                    { prompt: 0.015 / 1000, completion: 0.075 / 1000 }
+                  when /claude-3-sonnet/
+                    { prompt: 0.003 / 1000, completion: 0.015 / 1000 }
+                  else
+                    return nil # Unknown model
+                  end
+
+        (usage[:prompt_tokens] * pricing[:prompt]) +
+          (usage[:completion_tokens] * pricing[:completion])
+      end
+    end
+  end
+end
+
+# Copyright (c) 2025 Durable Programming, LLC. All rights reserved.
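A sketch of the new helpers in use, assuming a response object shaped the way the `respond_to?` guards above expect (an OpenAI-style choices/usage structure):

```ruby
require 'durable/llm'

response = Durable::Llm.chat([{ role: 'user', content: 'Hi' }], model: 'gpt-4')

Durable::Llm::ResponseHelpers.extract_content(response) # first choice's text, or nil
Durable::Llm::ResponseHelpers.token_usage(response)     # { prompt_tokens: ..., completion_tokens: ..., total_tokens: ... }
Durable::Llm::ResponseHelpers.truncated?(response)      # true when finish_reason == 'length'

# estimate_cost only knows the hardcoded 2025 price table above and
# returns nil for any model name that doesn't match it.
Durable::Llm::ResponseHelpers.estimate_cost(response)
```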
data/lib/durable/llm/version.rb CHANGED
@@ -1,7 +1,11 @@
 # frozen_string_literal: true
 
+# Defines the version constant
+
 module Durable
   module Llm
-    VERSION = '0.1.4'
+    VERSION = '0.1.6'
   end
 end
+
+# Copyright (c) 2025 Durable Programming, LLC. All rights reserved.
data/lib/durable/llm.rb CHANGED
@@ -1,27 +1,240 @@
+# frozen_string_literal: true
+
+# Main entry point for the Durable::Llm module.
+#
+# This module provides a unified interface for interacting with multiple Large Language Model (LLM)
+# providers through a consistent API. It handles configuration management, provider instantiation,
+# and offers convenience methods for common LLM operations.
+#
+# The module uses Zeitwerk for efficient autoloading of its components and maintains a global
+# configuration that can be customized through environment variables or programmatic setup.
+#
+# ## Basic Usage
+#
+# ```ruby
+# require 'durable/llm'
+#
+# # Configure API keys
+# Durable::Llm.configure do |config|
+#   config.openai.api_key = 'your-openai-key'
+# end
+#
+# # Create a client and make a request
+# client = Durable::Llm.new(:openai, model: 'gpt-4')
+# response = client.complete('Hello, world!')
+# puts response # => "Hello! How can I help you today?"
+# ```
+#
+# ## Configuration
+#
+# Configuration can be done via environment variables using the `DLLM__` prefix:
+#
+# ```bash
+# export DLLM__OPENAI__API_KEY=your-key-here
+# export DLLM__ANTHROPIC__API_KEY=your-anthropic-key
+# ```
+#
+# Or programmatically:
+#
+# ```ruby
+# Durable::Llm.configure do |config|
+#   config.openai.api_key = 'your-key'
+#   config.anthropic.api_key = 'your-anthropic-key'
+#   config.default_provider = 'anthropic'
+# end
+# ```
+#
+# ## Supported Providers
+#
+# - OpenAI (GPT models)
+# - Anthropic (Claude models)
+# - Google (Gemini models)
+# - Cohere
+# - Mistral AI
+# - Groq
+# - Fireworks AI
+# - Together AI
+# - DeepSeek
+# - OpenRouter
+# - Perplexity
+# - xAI
+# - Azure OpenAI
+#
+# @see Durable::Llm::Client For the main client interface
+# @see Durable::Llm::Configuration For configuration options
+# @see Durable::Llm::Providers For available providers
+
 require 'zeitwerk'
 loader = Zeitwerk::Loader.new
 loader.tag = File.basename(__FILE__, '.rb')
 loader.inflector = Zeitwerk::GemInflector.new(__FILE__)
-loader.push_dir(File.dirname(__FILE__) + '/..')
+loader.push_dir("#{File.dirname(__FILE__)}/..")
 
 require 'durable/llm/configuration'
+require 'durable/llm/version'
 
 module Durable
+  # The Llm module provides a unified interface for Large Language Model operations.
+  #
+  # This module serves as the main entry point for the Durable LLM gem, offering:
+  # - Global configuration management
+  # - Provider-agnostic client creation
+  # - Convenience methods for common operations
+  # - Access to version information
+  #
+  # The module maintains a singleton configuration instance that can be customized
+  # to set API keys, default providers, and other global settings.
+  #
+  # @example Basic setup and usage
+  #   Durable::Llm.configure do |config|
+  #     config.openai.api_key = 'sk-...'
+  #   end
+  #
+  #   client = Durable::Llm.new(:openai)
+  #   response = client.complete('Hello!')
+  #
+  # @see Durable::Llm::Client
+  # @see Durable::Llm::Configuration
   module Llm
     class << self
+      # @return [Configuration] The global configuration instance
       attr_accessor :configuration
 
+      # Returns the current configuration instance.
+      #
+      # This is an alias for the configuration accessor, provided for convenience.
+      #
+      # @return [Configuration] The global configuration instance
+      # @see #configuration
       def config
         configuration
       end
+
+      # Creates a new LLM client for the specified provider.
+      #
+      # This is a convenience method that creates a new Client instance with the
+      # given provider and options. It's equivalent to calling
+      # `Durable::Llm::Client.new(provider, options)`.
+      #
+      # @param provider [Symbol, String] The provider name (e.g., :openai, :anthropic)
+      # @param options [Hash] Configuration options for the client
+      # @option options [String] :model The default model to use
+      # @option options [String] :api_key API key for authentication
+      # @return [Client] A new client instance
+      # @raise [NameError] If the provider is not found
+      # @example Create an OpenAI client
+      #   client = Durable::Llm.new(:openai, api_key: 'sk-...', model: 'gpt-4')
+      # @example Create an Anthropic client
+      #   client = Durable::Llm.new(:anthropic, api_key: 'sk-ant-...')
+      def new(provider, options = {})
+        Client.new(provider, options)
+      end
     end
 
+    # Configures the global LLM settings.
+    #
+    # This method initializes or yields the global configuration instance,
+    # allowing you to set API keys, default providers, and other global options.
+    #
+    # @yield [configuration] The configuration instance to modify
+    # @yieldparam configuration [Configuration] The global configuration object
+    # @return [void]
+    # @example Configure API keys
+    #   Durable::Llm.configure do |config|
+    #     config.openai.api_key = 'sk-...'
+    #     config.anthropic.api_key = 'sk-ant-...'
+    #     config.default_provider = 'openai'
+    #   end
+    # @example Configure from environment
+    #   # Environment variables are automatically loaded
+    #   ENV['DLLM__OPENAI__API_KEY'] = 'sk-...'
+    #   Durable::Llm.configure do |config|
+    #     # Additional programmatic configuration
+    #   end
     def self.configure
       self.configuration ||= Configuration.new
       yield(configuration)
     end
+
+    # Creates a quick completion with minimal setup.
+    #
+    # This is a convenience method for one-off completions that automatically
+    # creates a client, performs the completion, and returns the text result.
+    #
+    # @param text [String] The input text to complete
+    # @param provider [Symbol] The provider to use (default: :openai)
+    # @param model [String] The model to use (required)
+    # @param options [Hash] Additional options for the client
+    # @return [String] The completion text
+    # @raise [ArgumentError] If required parameters are missing
+    # @example Quick completion with OpenAI
+    #   result = Durable::Llm.complete('What is Ruby?', model: 'gpt-4')
+    #   puts result
+    # @example Quick completion with Anthropic
+    #   result = Durable::Llm.complete('Explain AI', provider: :anthropic, model: 'claude-3-opus-20240229')
+    #   puts result
+    def self.complete(text, provider: :openai, model: nil, **options)
+      raise ArgumentError, 'text is required' if text.nil? || text.to_s.strip.empty?
+      raise ArgumentError, 'model is required' if model.nil? || model.to_s.strip.empty?
+
+      client = new(provider, options.merge(model: model))
+      client.complete(text)
+    end
+
+    # Creates a chat completion with minimal setup.
+    #
+    # This is a convenience method for quick chat interactions that automatically
+    # creates a client and performs the chat completion.
+    #
+    # @param messages [Array<Hash>] Array of message hashes with :role and :content
+    # @param provider [Symbol] The provider to use (default: :openai)
+    # @param model [String] The model to use (required)
+    # @param options [Hash] Additional options for the client and request
+    # @return [Object] The chat response object
+    # @raise [ArgumentError] If required parameters are missing
+    # @example Simple chat
+    #   response = Durable::Llm.chat(
+    #     [{ role: 'user', content: 'Hello!' }],
+    #     model: 'gpt-4'
+    #   )
+    #   puts response.choices.first.message.content
+    def self.chat(messages, provider: :openai, model: nil, **options)
+      raise ArgumentError, 'messages are required' if messages.nil? || messages.empty?
+      raise ArgumentError, 'model is required' if model.nil? || model.to_s.strip.empty?
+
+      request_keys = %i[temperature max_tokens top_p frequency_penalty presence_penalty]
+      request_params = options.select { |k, _| request_keys.include?(k) }
+      client_options = options.reject { |k, _| request_keys.include?(k) }
+
+      client = new(provider, client_options.merge(model: model))
+      client.chat(messages: messages, **request_params)
+    end
+
+    # Lists available models for a provider.
+    #
+    # @param provider [Symbol] The provider name (default: :openai)
+    # @param options [Hash] Provider options (e.g., api_key)
+    # @return [Array<String>] List of available model IDs
+    # @example List OpenAI models
+    #   models = Durable::Llm.models(:openai)
+    #   puts models.inspect
+    def self.models(provider = :openai, **options)
+      client = new(provider, options)
+      client.provider.models
+    end
   end
 end
 
 Durable::Llm.configure do
 end
+
+require 'durable/llm/providers'
+require 'durable/llm/client'
+require 'durable/llm/response_helpers'
+require 'durable/llm/provider_utilities'
+
+# Load global convenience functions for easier access
+# Users can skip this by requiring 'durable/llm/core' instead of 'durable/llm'
+require 'durable/llm/convenience' unless ENV['DLLM_NO_CONVENIENCE']
+
+# Copyright (c) 2025 Durable Programming, LLC. All rights reserved.
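The new module-level helpers make one-off calls terse. A sketch based on the signatures above; note that per the code, `Durable::Llm.chat` splits the well-known request keys (`temperature`, `max_tokens`, `top_p`, `frequency_penalty`, `presence_penalty`) out of the options hash and treats everything else as client options:

```ruby
require 'durable/llm'

Durable::Llm.complete('What is Ruby?', model: 'gpt-4') # returns the completion text

Durable::Llm.chat(
  [{ role: 'user', content: 'Hello!' }],
  model: 'gpt-4',
  temperature: 0.2,               # forwarded as a request parameter
  api_key: ENV['OPENAI_API_KEY']  # not a request key, so it becomes a client option
)

Durable::Llm.models(:openai)      # delegates to client.provider.models
```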
data/lib/durable.rb CHANGED
@@ -1,7 +1,32 @@
-require 'durable/llm'
+# frozen_string_literal: true
 
+# Main entry point for the Durable gem.
+#
+# This module provides a namespace for Durable Programming LLC's Ruby gems.
+# It uses autoloading for efficient memory usage and lazy loading of components.
+#
+# Currently, it provides access to the LLM functionality through the Llm submodule.
+#
+# @example Basic usage
+#   require 'durable'
+#
+#   # Access LLM functionality
+#   Durable::Llm.configure do |config|
+#     config.openai.api_key = 'your-key'
+#   end
+#
+#   client = Durable::Llm.new(:openai)
+#   response = client.complete('Hello!')
+#
+# @see Durable::Llm
+
+# Namespace module for Durable Programming LLC's Ruby gems.
+#
+# This module serves as the root namespace for all Durable gems, providing
+# autoloaded access to various components and functionality.
 module Durable
-  # This module serves as a namespace for the Durable gem.
-  # It currently only requires the Llm module, but can be expanded
-  # in the future to include other Durable-related functionality.
+  # Autoload the Llm module for lazy loading
+  autoload :Llm, 'durable/llm'
 end
+
+# Copyright (c) 2025 Durable Programming, LLC. All rights reserved.
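Net effect of the `data/lib/durable.rb` change: `require 'durable'` no longer loads the LLM stack eagerly. A rough sketch of the observable behavior, using the configuration API shown earlier in this diff:

```ruby
require 'durable'

# Durable::Llm is registered via autoload, so durable/llm (Zeitwerk setup,
# providers, client, helpers) is only required on first constant reference:
Durable::Llm.configure { |config| config.openai.api_key = ENV['OPENAI_API_KEY'] }
```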