durable-llm 0.1.3 → 0.1.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. checksums.yaml +4 -4
  2. data/.envrc +7 -0
  3. data/CHANGELOG.md +5 -0
  4. data/CONFIGURE.md +132 -0
  5. data/Gemfile +7 -9
  6. data/Gemfile.lock +3 -3
  7. data/README.md +1 -0
  8. data/Rakefile +6 -6
  9. data/devenv.lock +103 -0
  10. data/devenv.nix +9 -0
  11. data/devenv.yaml +15 -0
  12. data/durable-llm.gemspec +44 -0
  13. data/examples/openai_quick_complete.rb +3 -1
  14. data/lib/durable/llm/cli.rb +247 -60
  15. data/lib/durable/llm/client.rb +92 -11
  16. data/lib/durable/llm/configuration.rb +174 -23
  17. data/lib/durable/llm/errors.rb +185 -0
  18. data/lib/durable/llm/providers/anthropic.rb +246 -36
  19. data/lib/durable/llm/providers/azure_openai.rb +347 -0
  20. data/lib/durable/llm/providers/base.rb +106 -9
  21. data/lib/durable/llm/providers/cohere.rb +227 -0
  22. data/lib/durable/llm/providers/deepseek.rb +233 -0
  23. data/lib/durable/llm/providers/fireworks.rb +278 -0
  24. data/lib/durable/llm/providers/google.rb +301 -0
  25. data/lib/durable/llm/providers/groq.rb +108 -29
  26. data/lib/durable/llm/providers/huggingface.rb +122 -18
  27. data/lib/durable/llm/providers/mistral.rb +431 -0
  28. data/lib/durable/llm/providers/openai.rb +162 -25
  29. data/lib/durable/llm/providers/opencode.rb +253 -0
  30. data/lib/durable/llm/providers/openrouter.rb +256 -0
  31. data/lib/durable/llm/providers/perplexity.rb +273 -0
  32. data/lib/durable/llm/providers/together.rb +346 -0
  33. data/lib/durable/llm/providers/xai.rb +355 -0
  34. data/lib/durable/llm/providers.rb +103 -15
  35. data/lib/durable/llm/version.rb +5 -1
  36. data/lib/durable/llm.rb +143 -3
  37. data/lib/durable.rb +29 -4
  38. data/sig/durable/llm.rbs +302 -1
  39. metadata +50 -36
@@ -1,64 +1,215 @@
1
+ # frozen_string_literal: true
2
+
3
+ # frozen_string_literal: true
4
+
1
5
  require 'ostruct'
2
6
 
3
7
  module Durable
4
8
  module Llm
9
+ # Configuration class for managing LLM provider settings and API keys.
10
+ #
11
+ # This class provides a centralized configuration management system for the Durable LLM gem.
12
+ # It supports dynamic provider configuration through method_missing, automatic loading from
13
+ # environment variables using the `DLLM__` prefix pattern, and optional integration with
14
+ # Datasette LLM configuration files.
15
+ #
16
+ # ## Basic Usage
17
+ #
18
+ # ```ruby
19
+ # config = Durable::Llm::Configuration.new
20
+ #
21
+ # # Configure providers dynamically
22
+ # config.openai = { api_key: 'sk-...', model: 'gpt-4' }
23
+ # config.anthropic.api_key = 'sk-ant-...'
24
+ #
25
+ # # Set default provider
26
+ # config.default_provider = 'anthropic'
27
+ # ```
28
+ #
29
+ # ## Environment Variable Configuration
30
+ #
31
+ # Configuration can be loaded from environment variables using the `DLLM__` prefix:
32
+ #
33
+ # ```bash
34
+ # export DLLM__OPENAI__API_KEY=sk-your-key
35
+ # export DLLM__ANTHROPIC__API_KEY=sk-ant-your-key
36
+ # export DLLM__OPENAI__MODEL=gpt-4
37
+ # ```
38
+ #
39
+ # ## Datasette LLM Integration
40
+ #
41
+ # The configuration automatically loads API keys from Datasette LLM's configuration file
42
+ # at `~/.config/io.datasette.llm/keys.json` when `load_from_datasette` is called.
43
+ #
44
+ # @example Dynamic provider configuration
45
+ # config = Durable::Llm::Configuration.new
46
+ # config.openai.api_key = 'sk-...'
47
+ # config.anthropic = { api_key: 'sk-ant-...', model: 'claude-3' }
48
+ #
49
+ # @example Environment variable loading
50
+ # ENV['DLLM__OPENAI__API_KEY'] = 'sk-...'
51
+ # config = Durable::Llm::Configuration.new # Automatically loads from env
52
+ #
53
+ # @example Datasette integration
54
+ # config.load_from_datasette # Loads from ~/.config/io.datasette.llm/keys.json
55
+ #
56
+ # @see Durable::Llm::Client
57
+ # @see Durable::Llm::Providers
5
58
  class Configuration
59
+ # @return [String] The default provider name to use when none is specified
6
60
  attr_accessor :default_provider
61
+
62
+ # @return [Hash<Symbol, OpenStruct>] Hash of provider configurations keyed by provider name
7
63
  attr_reader :providers
8
64
 
65
+ # Initializes a new Configuration instance.
66
+ #
67
+ # Creates an empty providers hash, sets the default provider to 'openai',
68
+ # and automatically loads configuration from environment variables.
69
+ #
70
+ # @return [Configuration] A new configuration instance
9
71
  def initialize
10
72
  @providers = {}
11
73
  @default_provider = 'openai'
12
74
  load_from_env
75
+ end
13
76
 
77
+ # Clears all provider configurations and resets to defaults.
78
+ #
79
+ # This method removes all configured providers, resets the default provider
80
+ # to 'openai', and reloads configuration from environment variables.
81
+ #
82
+ # @return [void]
83
+ def clear
84
+ @providers.clear
85
+ @default_provider = 'openai'
86
+ load_from_env
14
87
  end
15
88
 
89
+ # Loads API keys from Datasette LLM configuration file.
90
+ #
91
+ # This method attempts to load API keys from the Datasette LLM configuration
92
+ # file located at `~/.config/io.datasette.llm/keys.json`. If the file exists
93
+ # and contains valid JSON, it will populate the API keys for any configured
94
+ # providers that have matching entries in the file.
95
+ #
96
+ # The method gracefully handles missing files, invalid JSON, and other
97
+ # file system errors by issuing warnings and continuing execution.
98
+ #
99
+ # @return [void]
100
+ # @example Load Datasette configuration
101
+ # config = Durable::Llm::Configuration.new
102
+ # config.load_from_datasette # Loads keys from ~/.config/io.datasette.llm/keys.json
16
103
  def load_from_datasette
17
-
18
104
  config_file = File.expand_path('~/.config/io.datasette.llm/keys.json')
19
105
 
20
- if File.exist?(config_file)
106
+ return unless File.exist?(config_file)
107
+
108
+ begin
21
109
  config_data = JSON.parse(File.read(config_file))
22
110
 
23
111
  Durable::Llm::Providers.providers.each do |provider|
112
+ next unless config_data[provider.to_s]
24
113
 
25
114
  @providers[provider.to_sym] ||= OpenStruct.new
26
-
27
- if config_data[provider.to_s]
28
- @providers[provider.to_sym][:api_key] = config_data[provider.to_s]
29
- end
30
-
115
+ @providers[provider.to_sym].api_key = config_data[provider.to_s]
31
116
  end
117
+ rescue JSON::ParserError => e
118
+ warn "Error parsing Datasette LLM configuration file: #{e.message}"
119
+ rescue StandardError => e
120
+ warn "Error loading Datasette LLM configuration: #{e.message}"
32
121
  end
33
-
34
- rescue JSON::ParserError => e
35
- puts "Error parsing JSON file: #{e.message}"
36
122
  end
37
123
 
124
+ # Loads configuration from environment variables.
125
+ #
126
+ # This method scans all environment variables for those starting with the
127
+ # `DLLM__` prefix and automatically configures provider settings based on
128
+ # the variable names. The format is `DLLM__PROVIDER__SETTING=value`.
129
+ #
130
+ # For example:
131
+ # - `DLLM__OPENAI__API_KEY=sk-...` sets the API key for OpenAI
132
+ # - `DLLM__ANTHROPIC__MODEL=claude-3` sets the default model for Anthropic
133
+ #
134
+ # Provider and setting names are converted to lowercase symbols for consistency.
135
+ #
136
+ # @return [void]
137
+ # @example Environment variable configuration
138
+ # ENV['DLLM__OPENAI__API_KEY'] = 'sk-...'
139
+ # ENV['DLLM__ANTHROPIC__MODEL'] = 'claude-3'
140
+ # config = Durable::Llm::Configuration.new # Automatically loads these values
38
141
  def load_from_env
39
142
  ENV.each do |key, value|
40
- if key.start_with?('DLLM__')
41
- parts = key.split('__')
42
- provider = parts[1].downcase.to_sym
43
- setting = parts[2].downcase.to_sym
44
- @providers[provider] ||= OpenStruct.new
45
- @providers[provider][setting] = value
46
- end
143
+ next unless key.start_with?('DLLM__')
144
+
145
+ parts = key.split('__')
146
+ next unless parts.length >= 3 # Must have DLLM__PROVIDER__SETTING
147
+
148
+ provider = parts[1].downcase.to_sym
149
+ setting = parts[2].downcase.to_sym
150
+ @providers[provider] ||= OpenStruct.new
151
+ @providers[provider][setting] = value
47
152
  end
48
153
  end
49
154
 
50
- def method_missing(method_name, *args, &block)
155
+ # Provides dynamic access to provider configurations.
156
+ #
157
+ # This method implements dynamic method dispatch for provider configuration.
158
+ # It allows accessing and setting provider configurations using method calls
159
+ # like `config.openai` or `config.openai = { api_key: '...' }`.
160
+ #
161
+ # ## Getter Methods
162
+ #
163
+ # When called without an assignment (e.g., `config.openai`), it returns
164
+ # an OpenStruct for the specified provider, creating one if it doesn't exist.
165
+ #
166
+ # ## Setter Methods
167
+ #
168
+ # When called with an assignment (e.g., `config.openai = ...`), it sets
169
+ # the configuration for the provider:
170
+ #
171
+ # - If passed a Hash, merges the hash values into the provider's OpenStruct
172
+ # - If passed any other object, replaces the provider's configuration entirely
173
+ #
174
+ # @param method_name [Symbol] The method name being called
175
+ # @param args [Array] Arguments passed to the method
176
+ # @return [OpenStruct] For getter calls, returns the provider configuration
177
+ # @return [Object] For setter calls, returns the assigned value
178
+ # @example Dynamic getter
179
+ # config.openai # => #<OpenStruct>
180
+ # @example Hash setter (merges values)
181
+ # config.openai = { api_key: 'sk-...', model: 'gpt-4' }
182
+ # @example Object setter (replaces configuration)
183
+ # config.openai = OpenStruct.new(api_key: 'sk-...')
184
+ def method_missing(method_name, *args)
185
+ provider_name = method_name.to_s.chomp('=').to_sym
186
+
51
187
  if method_name.to_s.end_with?('=')
52
- provider = method_name.to_s.chomp('=').to_sym
53
- @providers[provider] = args.first
188
+ @providers[provider_name] ||= OpenStruct.new
189
+ if args.first.is_a?(Hash)
190
+ args.first.each { |k, v| @providers[provider_name][k] = v }
191
+ else
192
+ @providers[provider_name] = args.first
193
+ end
54
194
  else
55
- @providers[method_name]
195
+ @providers[provider_name] ||= OpenStruct.new
56
196
  end
57
197
  end
58
198
 
59
- def respond_to_missing?(method_name, include_private = false)
60
- method_name.to_s.end_with?('=') || @providers.key?(method_name) || super
199
+ # Indicates whether the configuration responds to the given method.
200
+ #
201
+ # This method always returns true to support dynamic provider configuration
202
+ # methods. Any method call on the configuration object is considered valid
203
+ # since providers are created dynamically as needed.
204
+ #
205
+ # @param method_name [Symbol] The method name to check
206
+ # @param include_private [Boolean] Whether to include private methods
207
+ # @return [Boolean] Always returns true
208
+ def respond_to_missing?(_method_name, _include_private = false)
209
+ true
61
210
  end
62
211
  end
63
212
  end
64
213
  end
214
+
215
+ # Copyright (c) 2025 Durable Programming, LLC. All rights reserved.
@@ -1,33 +1,218 @@
1
+ # frozen_string_literal: true
2
+
3
+ # This file defines a comprehensive hierarchy of custom exception classes for the Durable LLM gem,
4
+ # providing specific error types for different failure scenarios including API errors, rate limiting,
5
+ # authentication issues, network problems, and configuration errors. The error hierarchy extends
6
+ # from a base Error class and allows for precise error handling and user feedback throughout the
7
+ # gem's LLM provider interactions and operations.
8
+
1
9
  module Durable
2
10
  module Llm
11
+ # Base error class for all Durable LLM exceptions.
12
+ #
13
+ # All custom errors in the Durable LLM gem inherit from this class,
14
+ # allowing users to rescue all LLM-related errors with a single catch block.
15
+ #
16
+ # @example Rescuing all Durable LLM errors
17
+ # begin
18
+ # # LLM operation
19
+ # rescue Durable::Llm::Error => e
20
+ # puts "LLM operation failed: #{e.message}"
21
+ # end
3
22
  class Error < StandardError; end
4
23
 
24
+ # Error raised when an API request fails with an unexpected error.
25
+ #
26
+ # This error is raised for API errors that don't fit into more specific categories
27
+ # like authentication, rate limiting, or server errors.
28
+ #
29
+ # @example Handling API errors
30
+ # begin
31
+ # client.complete("Hello")
32
+ # rescue Durable::Llm::APIError => e
33
+ # puts "API request failed: #{e.message}"
34
+ # end
5
35
  class APIError < Error; end
6
36
 
37
+ # Error raised when the API rate limit has been exceeded.
38
+ #
39
+ # This typically occurs when too many requests are made within a short time period.
40
+ # Users should implement retry logic with exponential backoff when encountering this error.
41
+ #
42
+ # @example Handling rate limit errors with retry
43
+ # retries = 0
44
+ # begin
45
+ # client.complete("Hello")
46
+ # rescue Durable::Llm::RateLimitError => e
47
+ # if retries < 3
48
+ # sleep(2 ** retries)
49
+ # retries += 1
50
+ # retry
51
+ # else
52
+ # puts "Rate limit exceeded after retries: #{e.message}"
53
+ # end
54
+ # end
7
55
  class RateLimitError < Error; end
8
56
 
57
+ # Error raised when authentication with the LLM provider fails.
58
+ #
59
+ # This typically occurs when API keys are invalid, expired, or not provided.
60
+ # Users should check their API key configuration when encountering this error.
61
+ #
62
+ # @example Handling authentication errors
63
+ # begin
64
+ # client.complete("Hello")
65
+ # rescue Durable::Llm::AuthenticationError => e
66
+ # puts "Authentication failed. Please check your API key: #{e.message}"
67
+ # end
9
68
  class AuthenticationError < Error; end
10
69
 
70
+ # Error raised when the request parameters are invalid.
71
+ #
72
+ # This occurs when the request contains malformed data, invalid parameters,
73
+ # or violates the API's constraints.
74
+ #
75
+ # @example Handling invalid request errors
76
+ # begin
77
+ # client.complete("Hello", model: "invalid-model")
78
+ # rescue Durable::Llm::InvalidRequestError => e
79
+ # puts "Invalid request parameters: #{e.message}"
80
+ # end
11
81
  class InvalidRequestError < Error; end
12
82
 
83
+ # Error raised when a requested resource cannot be found.
84
+ #
85
+ # This typically occurs when requesting a model or resource that doesn't exist
86
+ # or is not available to the user.
87
+ #
88
+ # @example Handling resource not found errors
89
+ # begin
90
+ # client.complete("Hello", model: "nonexistent-model")
91
+ # rescue Durable::Llm::ResourceNotFoundError => e
92
+ # puts "Requested resource not found: #{e.message}"
93
+ # end
13
94
  class ResourceNotFoundError < Error; end
14
95
 
96
+ # Error raised when a request times out.
97
+ #
98
+ # This occurs when the API request takes longer than the configured timeout period.
99
+ # Users may want to increase timeout settings or retry the request.
100
+ #
101
+ # @example Handling timeout errors
102
+ # begin
103
+ # client.complete("Hello")
104
+ # rescue Durable::Llm::TimeoutError => e
105
+ # puts "Request timed out: #{e.message}"
106
+ # end
15
107
  class TimeoutError < Error; end
16
108
 
109
+ # Error raised when the LLM provider's server encounters an internal error.
110
+ #
111
+ # This indicates a problem on the provider's side, not with the user's request.
112
+ # Users should retry the request after a short delay.
113
+ #
114
+ # @example Handling server errors
115
+ # begin
116
+ # client.complete("Hello")
117
+ # rescue Durable::Llm::ServerError => e
118
+ # puts "Server error occurred: #{e.message}"
119
+ # # Consider retrying after a delay
120
+ # end
17
121
  class ServerError < Error; end
18
122
 
123
+ # Error raised when attempting to use an unsupported LLM provider.
124
+ #
125
+ # This occurs when the requested provider is not implemented or configured
126
+ # in the Durable LLM gem.
127
+ #
128
+ # @example Handling unsupported provider errors
129
+ # begin
130
+ # client = Durable::Llm::Client.new(provider: "unsupported-provider")
131
+ # rescue Durable::Llm::UnsupportedProviderError => e
132
+ # puts "Unsupported provider: #{e.message}"
133
+ # end
19
134
  class UnsupportedProviderError < Error; end
20
135
 
136
+ # Error raised when there is a configuration problem.
137
+ #
138
+ # This occurs when required configuration is missing, invalid, or inconsistent.
139
+ # Users should check their configuration settings.
140
+ #
141
+ # @example Handling configuration errors
142
+ # begin
143
+ # client = Durable::Llm::Client.new(api_key: nil)
144
+ # rescue Durable::Llm::ConfigurationError => e
145
+ # puts "Configuration error: #{e.message}"
146
+ # end
21
147
  class ConfigurationError < Error; end
22
148
 
149
+ # Error raised when the requested model is not found or not available.
150
+ #
151
+ # This is similar to ResourceNotFoundError but specifically for models.
152
+ # It occurs when the specified model doesn't exist or isn't accessible.
153
+ #
154
+ # @example Handling model not found errors
155
+ # begin
156
+ # client.complete("Hello", model: "unknown-model")
157
+ # rescue Durable::Llm::ModelNotFoundError => e
158
+ # puts "Model not found: #{e.message}"
159
+ # end
23
160
  class ModelNotFoundError < Error; end
24
161
 
162
+ # Error raised when the account has insufficient quota or credits.
163
+ #
164
+ # This occurs when the user's account has exhausted its usage limits
165
+ # or doesn't have enough credits for the requested operation.
166
+ #
167
+ # @example Handling insufficient quota errors
168
+ # begin
169
+ # client.complete("Hello")
170
+ # rescue Durable::Llm::InsufficientQuotaError => e
171
+ # puts "Insufficient quota: #{e.message}"
172
+ # end
25
173
  class InsufficientQuotaError < Error; end
26
174
 
175
+ # Error raised when the API response is invalid or malformed.
176
+ #
177
+ # This occurs when the provider returns a response that cannot be parsed
178
+ # or doesn't match the expected format.
179
+ #
180
+ # @example Handling invalid response errors
181
+ # begin
182
+ # client.complete("Hello")
183
+ # rescue Durable::Llm::InvalidResponseError => e
184
+ # puts "Invalid response received: #{e.message}"
185
+ # end
27
186
  class InvalidResponseError < Error; end
28
187
 
188
+ # Error raised when there is a network connectivity problem.
189
+ #
190
+ # This occurs when the request cannot reach the LLM provider due to
191
+ # network issues, DNS problems, or connectivity failures.
192
+ #
193
+ # @example Handling network errors
194
+ # begin
195
+ # client.complete("Hello")
196
+ # rescue Durable::Llm::NetworkError => e
197
+ # puts "Network error: #{e.message}"
198
+ # end
29
199
  class NetworkError < Error; end
30
200
 
201
+ # Error raised when there is a problem with streaming responses.
202
+ #
203
+ # This occurs during streaming operations when the connection is interrupted,
204
+ # the stream format is invalid, or other streaming-specific issues arise.
205
+ #
206
+ # @example Handling streaming errors
207
+ # begin
208
+ # client.stream("Hello") do |chunk|
209
+ # puts chunk
210
+ # end
211
+ # rescue Durable::Llm::StreamingError => e
212
+ # puts "Streaming error: #{e.message}"
213
+ # end
31
214
  class StreamingError < Error; end
32
215
  end
33
216
  end
217
+
218
+ # Copyright (c) 2025 Durable Programming, LLC. All rights reserved.