durable-llm 0.1.4 → 0.1.6
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
- checksums.yaml +4 -4
- data/.envrc +7 -0
- data/CHANGELOG.md +5 -0
- data/CLI.md +0 -2
- data/Gemfile +7 -9
- data/README.md +564 -30
- data/Rakefile +16 -6
- data/devenv.lock +171 -0
- data/devenv.nix +12 -0
- data/devenv.yaml +8 -0
- data/durable-llm.gemspec +52 -0
- data/examples/openai_quick_complete.rb +4 -2
- data/lib/durable/llm/cli.rb +218 -22
- data/lib/durable/llm/client.rb +228 -8
- data/lib/durable/llm/configuration.rb +163 -10
- data/lib/durable/llm/convenience.rb +102 -0
- data/lib/durable/llm/errors.rb +185 -0
- data/lib/durable/llm/provider_utilities.rb +201 -0
- data/lib/durable/llm/providers/anthropic.rb +232 -24
- data/lib/durable/llm/providers/azure_openai.rb +347 -0
- data/lib/durable/llm/providers/base.rb +220 -11
- data/lib/durable/llm/providers/cohere.rb +157 -11
- data/lib/durable/llm/providers/deepseek.rb +233 -0
- data/lib/durable/llm/providers/fireworks.rb +304 -0
- data/lib/durable/llm/providers/google.rb +327 -0
- data/lib/durable/llm/providers/groq.rb +133 -25
- data/lib/durable/llm/providers/huggingface.rb +120 -17
- data/lib/durable/llm/providers/mistral.rb +431 -0
- data/lib/durable/llm/providers/openai.rb +150 -4
- data/lib/durable/llm/providers/opencode.rb +253 -0
- data/lib/durable/llm/providers/openrouter.rb +256 -0
- data/lib/durable/llm/providers/perplexity.rb +273 -0
- data/lib/durable/llm/providers/together.rb +346 -0
- data/lib/durable/llm/providers/xai.rb +355 -0
- data/lib/durable/llm/providers.rb +113 -13
- data/lib/durable/llm/response_helpers.rb +185 -0
- data/lib/durable/llm/version.rb +5 -1
- data/lib/durable/llm.rb +214 -1
- data/lib/durable.rb +29 -4
- data/sig/durable/llm.rbs +303 -1
- metadata +106 -28
- data/Gemfile.lock +0 -103
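The headline change in this release is the much larger provider matrix: Azure OpenAI, DeepSeek, Fireworks, Google, Mistral, OpenCode, OpenRouter, Perplexity, Together, and xAI join the existing providers. As a rough sketch of what that enables — assuming each new provider file registers under a symbol matching its filename, which this diff does not show directly — switching providers becomes a one-argument change:

```ruby
require 'durable/llm'

# Hypothetical provider symbols and model id, assumed from the new provider
# filenames above; confirm against each provider's documented models.
%i[openai anthropic mistral openrouter].each do |provider|
  client = Durable::Llm::Client.new(provider, model: 'provider-specific-model-id')
  puts "#{provider}: streaming=#{client.stream?}"
end
```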
data/lib/durable/llm/client.rb
CHANGED
@@ -1,55 +1,275 @@
+# frozen_string_literal: true
+
+# This file implements the main Client class that provides a unified interface for interacting
+# with different LLM providers. It acts as a facade that delegates operations like completion,
+# chat, embedding, and streaming to the appropriate provider instance while handling parameter
+# processing, model configuration, and providing convenience methods for quick text completion.
+# The client automatically resolves provider classes based on the provider name and manages
+# default parameters including model selection.
+
 require 'zeitwerk'
 require 'durable/llm/providers'

 module Durable
   module Llm
+    # Unified interface for interacting with different LLM providers
+    #
+    # The Client class provides a facade that delegates operations like completion, chat,
+    # embedding, and streaming to the appropriate provider instance while handling parameter
+    # processing, model configuration, and providing convenience methods for quick text completion.
+    # The client automatically resolves provider classes based on the provider name and manages
+    # default parameters including model selection.
     class Client
+      # @return [Object] The underlying provider instance
       attr_reader :provider
+
+      # @return [String, nil] The default model to use for requests
       attr_accessor :model

+      # Initializes a new LLM client for the specified provider
+      #
+      # @param provider_name [Symbol, String] The name of the LLM provider (e.g., :openai, :anthropic)
+      # @param options [Hash] Configuration options for the provider and client
+      # @option options [String] :model The default model to use for requests
+      # @option options [String] 'model' Alternative string key for model
+      # @option options [String] :api_key API key for authentication (provider-specific)
+      # @raise [ArgumentError] If provider_name is nil or empty
+      # @raise [NameError] If the provider class cannot be found
+      # @example Initialize with OpenAI provider
+      #   client = Durable::Llm::Client.new(:openai, model: 'gpt-4', api_key: 'sk-...')
+      # @example Initialize with Anthropic provider
+      #   client = Durable::Llm::Client.new(:anthropic, model: 'claude-3-opus-20240229')
       def initialize(provider_name, options = {})
-
+        if provider_name.nil? || provider_name.to_s.strip.empty?
+          raise ArgumentError, 'provider_name cannot be nil or empty. Supported providers: ' \
+                               "#{Durable::Llm::Providers.available_providers.join(', ')}"
+        end
+        raise ArgumentError, 'options must be a Hash' unless options.is_a?(Hash)
+
+        @model = options.delete('model') || options.delete(:model) if options.key?('model') || options.key?(:model)

-        provider_class = Durable::Llm::Providers.
+        provider_class = Durable::Llm::Providers.provider_class_for(provider_name)

         @provider = provider_class.new(**options)
       end

+      # Returns the default parameters to merge with request options
+      #
+      # @return [Hash] Default parameters including model if set
       def default_params
-        { model: @model }
+        @model ? { model: @model } : {}
       end

-
+      # Performs a text completion with minimal configuration
+      #
+      # @param text [String] The input text to complete
+      # @param opts [Hash] Additional options (currently unused, reserved for future use)
+      # @return [String] The generated completion text
+      # @raise [ArgumentError] If text is nil or empty
+      # @raise [Durable::Llm::APIError] If the API request fails
+      # @raise [IndexError] If the response contains no choices
+      # @raise [NoMethodError] If the response structure is unexpected
+      # @example Text completion with OpenAI
+      #   client = Durable::Llm::Client.new(:openai, model: 'gpt-4')
+      #   response = client.complete('What is the capital of France?')
+      #   puts response # => "The capital of France is Paris."
+      def complete(text, _opts = {})
+        if text.nil? || text.to_s.strip.empty?
+          raise ArgumentError, 'text cannot be nil or empty. Provide a non-empty string for completion.'
+        end
+
         response = completion(process_params(messages: [{ role: 'user', content: text }]))

-        response.choices.first
+        choice = response.choices.first
+        unless choice
+          raise IndexError, 'No completion choices returned from the API. This may indicate an ' \
+                            'API error or invalid request parameters.'
+        end
+
+        message = choice.message
+        unless message
+          raise NoMethodError, 'Response choice has no message. The API response format may be ' \
+                               'unexpected or the provider may have changed their response structure.'
+        end
+
+        content = message.content
+        unless content
+          raise NoMethodError, 'Response message has no content. This may occur if the model ' \
+                               'refused to respond or if content filtering was applied.'
+        end
+
+        content
       end
+      alias quick_complete complete

+      # Performs a completion request
+      #
+      # @param params [Hash] The completion parameters
+      # @option params [String] :model The model to use (overrides default)
+      # @option params [Array<Hash>] :messages The conversation messages
+      # @option params [Float] :temperature Sampling temperature (0.0-2.0)
+      # @option params [Integer] :max_tokens Maximum tokens to generate
+      # @return [Object] The completion response object
+      # @raise [ArgumentError] If params is not a Hash
+      # @raise [Durable::Llm::APIError] If the API request fails
+      # @example Perform a completion
+      #   client = Durable::Llm::Client.new(:openai, model: 'gpt-4')
+      #   response = client.completion(
+      #     messages: [
+      #       { role: 'system', content: 'You are a helpful assistant.' },
+      #       { role: 'user', content: 'Hello!' }
+      #     ],
+      #     temperature: 0.7
+      #   )
       def completion(params = {})
+        raise ArgumentError, 'params must be a Hash' unless params.is_a?(Hash)
+
         @provider.completion(process_params(params))
       end

+      # Performs a chat completion request (alias for completion)
+      #
+      # @param params [Hash] The chat parameters
+      # @option params [String] :model The model to use (overrides default)
+      # @option params [Array<Hash>] :messages The conversation messages
+      # @option params [Float] :temperature Sampling temperature (0.0-2.0)
+      # @option params [Integer] :max_tokens Maximum tokens to generate
+      # @return [Object] The chat response object
+      # @raise [ArgumentError] If params is not a Hash
+      # @raise [Durable::Llm::APIError] If the API request fails
+      # @see #completion
       def chat(params = {})
-
+        raise ArgumentError, 'params must be a Hash' unless params.is_a?(Hash)
+
+        @provider.completion(process_params(params))
       end

+      # Performs an embedding request
+      #
+      # @param params [Hash] The embedding parameters including model and input
+      # @option params [String] :model The embedding model to use
+      # @option params [String, Array<String>] :input The text(s) to embed
+      # @return [Object] The embedding response object
+      # @raise [ArgumentError] If params is not a Hash or missing required fields
+      # @raise [NotImplementedError] If the provider doesn't support embeddings
+      # @raise [Durable::Llm::APIError] If the API request fails
+      # @example Generate embeddings
+      #   client = Durable::Llm::Client.new(:openai)
+      #   response = client.embed(
+      #     model: 'text-embedding-ada-002',
+      #     input: 'Hello, world!'
+      #   )
       def embed(params = {})
-
+        raise ArgumentError, 'params must be a Hash' unless params.is_a?(Hash)
+
+        @provider.embedding(**process_params(params))
+      rescue NotImplementedError
+        provider_name = @provider.class.name.split('::').last
+        raise NotImplementedError, "#{provider_name} does not support embeddings. " \
+                                   'Try using a provider like OpenAI that offers embedding models.'
       end

+      # Performs a streaming completion request
+      #
+      # @param params [Hash] The streaming parameters
+      # @option params [String] :model The model to use (overrides default)
+      # @option params [Array<Hash>] :messages The conversation messages
+      # @option params [Float] :temperature Sampling temperature (0.0-2.0)
+      # @option params [Integer] :max_tokens Maximum tokens to generate
+      # @yield [Object] Yields stream response chunks as they arrive
+      # @return [Object] The final response object
+      # @raise [ArgumentError] If params is not a Hash or no block is given
+      # @raise [NotImplementedError] If the provider doesn't support streaming
+      # @raise [Durable::Llm::APIError] If the API request fails
+      # @example Stream a completion
+      #   client = Durable::Llm::Client.new(:openai, model: 'gpt-4')
+      #   client.stream(messages: [{ role: 'user', content: 'Count to 10' }]) do |chunk|
+      #     print chunk.choices.first.delta.content
+      #   end
       def stream(params = {}, &block)
+        raise ArgumentError, 'params must be a Hash' unless params.is_a?(Hash)
+        unless block_given?
+          raise ArgumentError, 'block required for streaming. Use: client.stream(params) { |chunk| ... }'
+        end
+
         @provider.stream(process_params(params), &block)
+      rescue NotImplementedError
+        provider_name = @provider.class.name.split('::').last
+        raise NotImplementedError, "#{provider_name} does not support streaming. " \
+                                   'Try using completion() or chat() instead.'
       end

+      # Checks if the provider supports streaming
+      #
+      # @return [Boolean] True if streaming is supported, false otherwise
       def stream?
         @provider.stream?
       end

+      # Sets the model for subsequent requests (fluent interface)
+      #
+      # @param model_name [String] The model to use
+      # @return [Client] Returns self for method chaining
+      # @example Fluent API usage
+      #   client = Durable::Llm::Client.new(:openai)
+      #   client.with_model('gpt-4').complete('Hello!')
+      def with_model(model_name)
+        @model = model_name
+        self
+      end
+
+      # Sets temperature for the next request (fluent interface)
+      #
+      # @param temp [Float] The temperature value (0.0-2.0)
+      # @return [Client] Returns self for method chaining
+      # @example Fluent temperature setting
+      #   client.with_temperature(0.7).complete('Be creative!')
+      def with_temperature(temp)
+        @next_temperature = temp
+        self
+      end
+
+      # Sets max tokens for the next request (fluent interface)
+      #
+      # @param tokens [Integer] Maximum tokens to generate
+      # @return [Client] Returns self for method chaining
+      # @example Fluent max tokens setting
+      #   client.with_max_tokens(500).complete('Write a story')
+      def with_max_tokens(tokens)
+        @next_max_tokens = tokens
+        self
+      end
+
+      # Creates a copy of the client with different configuration
+      #
+      # @param options [Hash] New configuration options
+      # @option options [String] :model Override the model
+      # @return [Client] A new client instance with merged configuration
+      # @example Clone with different model
+      #   gpt4_client = client.clone_with(model: 'gpt-4')
+      #   gpt35_client = client.clone_with(model: 'gpt-3.5-turbo')
+      def clone_with(**options)
+        provider_name = @provider.class.name.split('::').last.downcase.to_sym
+        self.class.new(provider_name, options.merge(model: @model))
+      end
+
       private

       def process_params(opts = {})
-        default_params.dup.merge(opts)
+        params = default_params.dup.merge(opts)
+
+        # Apply fluent interface settings if present
+        params[:temperature] = @next_temperature if @next_temperature
+        params[:max_tokens] = @next_max_tokens if @next_max_tokens
+
+        # Clear one-time settings after use
+        @next_temperature = nil
+        @next_max_tokens = nil
+
+        params
       end
     end
   end
 end
+
+# Copyright (c) 2025 Durable Programming, LLC. All rights reserved.
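The new fluent setters and the reworked process_params combine into a chainable call style. A minimal sketch of the resulting behavior, based only on the methods added above (the API key is assumed to be configured via the environment):

```ruby
client = Durable::Llm::Client.new(:openai, model: 'gpt-3.5-turbo')

# with_model mutates @model, so it is sticky; with_temperature and
# with_max_tokens stash one-shot values that process_params merges in
# and then clears.
poem = client.with_model('gpt-4')
             .with_temperature(0.9)
             .with_max_tokens(200)
             .complete('Write a two-line poem about Ruby.')

# This call still sees model 'gpt-4' (sticky) but the default
# temperature and max_tokens again.
answer = client.quick_complete('What is 2 + 2?') # alias added in this release
```

The asymmetry is deliberate in the new code: the model persists on the client, while temperature and max_tokens are cleared after a single request.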
data/lib/durable/llm/configuration.rb
CHANGED
@@ -1,43 +1,150 @@
+# frozen_string_literal: true
+
+# frozen_string_literal: true
+
 require 'ostruct'

 module Durable
   module Llm
+    # Configuration class for managing LLM provider settings and API keys.
+    #
+    # This class provides a centralized configuration management system for the Durable LLM gem.
+    # It supports dynamic provider configuration through method_missing, automatic loading from
+    # environment variables using the `DLLM__` prefix pattern, and optional integration with
+    # Datasette LLM configuration files.
+    #
+    # ## Basic Usage
+    #
+    # ```ruby
+    # config = Durable::Llm::Configuration.new
+    #
+    # # Configure providers dynamically
+    # config.openai = { api_key: 'sk-...', model: 'gpt-4' }
+    # config.anthropic.api_key = 'sk-ant-...'
+    #
+    # # Set default provider
+    # config.default_provider = 'anthropic'
+    # ```
+    #
+    # ## Environment Variable Configuration
+    #
+    # Configuration can be loaded from environment variables using the `DLLM__` prefix:
+    #
+    # ```bash
+    # export DLLM__OPENAI__API_KEY=sk-your-key
+    # export DLLM__ANTHROPIC__API_KEY=sk-ant-your-key
+    # export DLLM__OPENAI__MODEL=gpt-4
+    # ```
+    #
+    # ## Datasette LLM Integration
+    #
+    # The configuration automatically loads API keys from Datasette LLM's configuration file
+    # at `~/.config/io.datasette.llm/keys.json` when `load_from_datasette` is called.
+    #
+    # @example Dynamic provider configuration
+    #   config = Durable::Llm::Configuration.new
+    #   config.openai.api_key = 'sk-...'
+    #   config.anthropic = { api_key: 'sk-ant-...', model: 'claude-3' }
+    #
+    # @example Environment variable loading
+    #   ENV['DLLM__OPENAI__API_KEY'] = 'sk-...'
+    #   config = Durable::Llm::Configuration.new # Automatically loads from env
+    #
+    # @example Datasette integration
+    #   config.load_from_datasette # Loads from ~/.config/io.datasette.llm/keys.json
+    #
+    # @see Durable::Llm::Client
+    # @see Durable::Llm::Providers
     class Configuration
+      # @return [String] The default provider name to use when none is specified
       attr_accessor :default_provider
+
+      # @return [Hash<Symbol, OpenStruct>] Hash of provider configurations keyed by provider name
       attr_reader :providers

+      # Initializes a new Configuration instance.
+      #
+      # Creates an empty providers hash, sets the default provider to 'openai',
+      # and automatically loads configuration from environment variables.
+      #
+      # @return [Configuration] A new configuration instance
       def initialize
         @providers = {}
         @default_provider = 'openai'
         load_from_env
       end

+      # Clears all provider configurations and resets to defaults.
+      #
+      # This method removes all configured providers, resets the default provider
+      # to 'openai', and reloads configuration from environment variables.
+      #
+      # @return [void]
       def clear
         @providers.clear
         @default_provider = 'openai'
+        load_from_env
       end

+      # Loads API keys from Datasette LLM configuration file.
+      #
+      # This method attempts to load API keys from the Datasette LLM configuration
+      # file located at `~/.config/io.datasette.llm/keys.json`. If the file exists
+      # and contains valid JSON, it will populate the API keys for any configured
+      # providers that have matching entries in the file.
+      #
+      # The method gracefully handles missing files, invalid JSON, and other
+      # file system errors by issuing warnings and continuing execution.
+      #
+      # @return [void]
+      # @example Load Datasette configuration
+      #   config = Durable::Llm::Configuration.new
+      #   config.load_from_datasette # Loads keys from ~/.config/io.datasette.llm/keys.json
       def load_from_datasette
         config_file = File.expand_path('~/.config/io.datasette.llm/keys.json')

-
+        return unless File.exist?(config_file)
+
+        begin
         config_data = JSON.parse(File.read(config_file))

         Durable::Llm::Providers.providers.each do |provider|
-
+          next unless config_data[provider.to_s]

-          @providers[provider.to_sym]
+          @providers[provider.to_sym] ||= OpenStruct.new
+          @providers[provider.to_sym].api_key = config_data[provider.to_s]
         end
+        rescue JSON::ParserError => e
+          warn "Error parsing Datasette LLM configuration file: #{e.message}"
+        rescue StandardError => e
+          warn "Error loading Datasette LLM configuration: #{e.message}"
         end
-      rescue JSON::ParserError => e
-        puts "Error parsing JSON file: #{e.message}"
       end

+      # Loads configuration from environment variables.
+      #
+      # This method scans all environment variables for those starting with the
+      # `DLLM__` prefix and automatically configures provider settings based on
+      # the variable names. The format is `DLLM__PROVIDER__SETTING=value`.
+      #
+      # For example:
+      # - `DLLM__OPENAI__API_KEY=sk-...` sets the API key for OpenAI
+      # - `DLLM__ANTHROPIC__MODEL=claude-3` sets the default model for Anthropic
+      #
+      # Provider and setting names are converted to lowercase symbols for consistency.
+      #
+      # @return [void]
+      # @example Environment variable configuration
+      #   ENV['DLLM__OPENAI__API_KEY'] = 'sk-...'
+      #   ENV['DLLM__ANTHROPIC__MODEL'] = 'claude-3'
+      #   config = Durable::Llm::Configuration.new # Automatically loads these values
       def load_from_env
         ENV.each do |key, value|
           next unless key.start_with?('DLLM__')

           parts = key.split('__')
+          next unless parts.length >= 3 # Must have DLLM__PROVIDER__SETTING
+
           provider = parts[1].downcase.to_sym
           setting = parts[2].downcase.to_sym
           @providers[provider] ||= OpenStruct.new
@@ -45,18 +152,64 @@ module Durable
         end
       end

+      # Provides dynamic access to provider configurations.
+      #
+      # This method implements dynamic method dispatch for provider configuration.
+      # It allows accessing and setting provider configurations using method calls
+      # like `config.openai` or `config.openai = { api_key: '...' }`.
+      #
+      # ## Getter Methods
+      #
+      # When called without an assignment (e.g., `config.openai`), it returns
+      # an OpenStruct for the specified provider, creating one if it doesn't exist.
+      #
+      # ## Setter Methods
+      #
+      # When called with an assignment (e.g., `config.openai = ...`), it sets
+      # the configuration for the provider:
+      #
+      # - If passed a Hash, merges the hash values into the provider's OpenStruct
+      # - If passed any other object, replaces the provider's configuration entirely
+      #
+      # @param method_name [Symbol] The method name being called
+      # @param args [Array] Arguments passed to the method
+      # @return [OpenStruct] For getter calls, returns the provider configuration
+      # @return [Object] For setter calls, returns the assigned value
+      # @example Dynamic getter
+      #   config.openai # => #<OpenStruct>
+      # @example Hash setter (merges values)
+      #   config.openai = { api_key: 'sk-...', model: 'gpt-4' }
+      # @example Object setter (replaces configuration)
+      #   config.openai = OpenStruct.new(api_key: 'sk-...')
       def method_missing(method_name, *args)
+        provider_name = method_name.to_s.chomp('=').to_sym
+
         if method_name.to_s.end_with?('=')
-
-
+          @providers[provider_name] ||= OpenStruct.new
+          if args.first.is_a?(Hash)
+            args.first.each { |k, v| @providers[provider_name][k] = v }
+          else
+            @providers[provider_name] = args.first
+          end
         else
-          @providers[
+          @providers[provider_name] ||= OpenStruct.new
         end
       end

-
-
+      # Indicates whether the configuration responds to the given method.
+      #
+      # This method always returns true to support dynamic provider configuration
+      # methods. Any method call on the configuration object is considered valid
+      # since providers are created dynamically as needed.
+      #
+      # @param method_name [Symbol] The method name to check
+      # @param include_private [Boolean] Whether to include private methods
+      # @return [Boolean] Always returns true
+      def respond_to_missing?(_method_name, _include_private = false)
+        true
       end
     end
   end
 end
+
+# Copyright (c) 2025 Durable Programming, LLC. All rights reserved.
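The `DLLM__` environment loader is easiest to see end to end. A small sketch exercising the rules above (key values are placeholders, and the require path is assumed):

```ruby
require 'durable/llm'

ENV['DLLM__OPENAI__API_KEY'] = 'sk-placeholder'
ENV['DLLM__OPENAI__MODEL']   = 'gpt-4'
ENV['DLLM__MALFORMED']       = 'skipped' # fewer than three __-parts: rejected by the new guard

config = Durable::Llm::Configuration.new # initialize runs load_from_env
config.openai.api_key # => "sk-placeholder" (getter served by method_missing)
config.openai.model   # => "gpt-4"
```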
data/lib/durable/llm/convenience.rb
ADDED
@@ -0,0 +1,102 @@
+# frozen_string_literal: true
+
+# This file provides global convenience functions for quick access to Durable LLM functionality
+# without requiring explicit module qualification. These functions follow Ruby conventions for
+# global helper methods and make the library more approachable for quick usage and scripting.
+# The functions delegate to the main Durable::Llm module methods while providing shorter names.
+
+# Creates a new Durable LLM client with the specified provider and options.
+#
+# This is a global convenience function that provides quick access to client creation
+# without requiring the full Durable::Llm module path. It's equivalent to calling
+# Durable::Llm.new(provider, options).
+#
+# @param provider [Symbol, String] The provider name (e.g., :openai, :anthropic)
+# @param options [Hash] Configuration options for the client
+# @option options [String] :model The default model to use
+# @option options [String] :api_key API key for authentication
+# @return [Durable::Llm::Client] A new client instance
+# @example Create an OpenAI client
+#   client = DurableLlm(:openai, model: 'gpt-4', api_key: 'sk-...')
+#   response = client.complete('Hello!')
+# @example Create an Anthropic client
+#   client = DurableLlm(:anthropic, model: 'claude-3-opus-20240229')
+def DurableLlm(provider, **options)
+  Durable::Llm.new(provider, options)
+end
+
+# Shorter alias for DurableLlm
+#
+# @param provider [Symbol, String] The provider name
+# @param options [Hash] Configuration options
+# @return [Durable::Llm::Client] A new client instance
+# @see DurableLlm
+def DLLM(provider, **options)
+  Durable::Llm.new(provider, options)
+end
+
+# Performs a quick text completion with minimal setup
+#
+# This global convenience function allows for one-line LLM completions without
+# explicit client creation. Perfect for scripts and REPL usage.
+#
+# @param text [String] The input text to complete
+# @param provider [Symbol] The provider to use (default: :openai)
+# @param model [String] The model to use (required)
+# @param options [Hash] Additional client options
+# @return [String] The completion text
+# @example Quick completion
+#   result = LlmComplete('What is Ruby?', model: 'gpt-4')
+#   puts result
+# @example With specific provider
+#   result = LlmComplete('Explain AI', provider: :anthropic, model: 'claude-3-opus-20240229')
def LlmComplete(text, provider: :openai, model: nil, **options)
+  Durable::Llm.complete(text, provider: provider, model: model, **options)
+end
+
+# Performs a chat completion with minimal setup
+#
+# This global convenience function allows for quick chat interactions without
+# explicit client creation.
+#
+# @param messages [Array<Hash>] Array of message hashes with :role and :content
+# @param provider [Symbol] The provider to use (default: :openai)
+# @param model [String] The model to use (required)
+# @param options [Hash] Additional options
+# @return [Object] The chat response object
+# @example Simple chat
+#   response = LlmChat([{ role: 'user', content: 'Hello!' }], model: 'gpt-4')
+#   puts response.choices.first.message.content
+def LlmChat(messages, provider: :openai, model: nil, **options)
+  Durable::Llm.chat(messages, provider: provider, model: model, **options)
+end
+
+# Lists available models for a provider
+#
+# @param provider [Symbol] The provider name (default: :openai)
+# @param options [Hash] Provider options
+# @return [Array<String>] List of available model IDs
+# @example List models
+#   models = LlmModels(:openai)
+#   puts models.inspect
+def LlmModels(provider = :openai, **options)
+  Durable::Llm.models(provider, **options)
+end
+
+# Configures Durable LLM with a block
+#
+# This global convenience function provides easy access to configuration.
+#
+# @yield [configuration] The configuration instance to modify
+# @yieldparam configuration [Durable::Llm::Configuration] The config object
+# @return [void]
+# @example Configure API keys
+#   LlmConfigure do |config|
+#     config.openai.api_key = 'sk-...'
+#     config.anthropic.api_key = 'sk-ant-...'
+#   end
+def LlmConfigure(&block)
+  Durable::Llm.configure(&block)
+end
+
+# Copyright (c) 2025 Durable Programming, LLC. All rights reserved.
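Since convenience.rb is entirely new, here is how the global helpers compose in a script, using only calls defined above (model names are placeholders and an API key is assumed to be available):

```ruby
LlmConfigure do |config|
  config.openai.api_key = ENV.fetch('OPENAI_API_KEY')
end

puts LlmComplete('What is Ruby?', model: 'gpt-4')           # one-line completion

response = LlmChat([{ role: 'user', content: 'Hello!' }], model: 'gpt-4')
puts response.choices.first.message.content

client = DLLM(:openai, model: 'gpt-4')                      # short client constructor
puts client.complete('And one more, via the client.')
```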