durable-llm 0.1.5 → 0.1.6

data/devenv.lock CHANGED
@@ -3,10 +3,10 @@
     "devenv": {
       "locked": {
         "dir": "src/modules",
-        "lastModified": 1760162706,
+        "lastModified": 1761922975,
         "owner": "cachix",
         "repo": "devenv",
-        "rev": "0d5ad578728fe4bce66eb4398b8b1e66deceb4e4",
+        "rev": "c9f0b47815a4895fadac87812de8a4de27e0ace1",
         "type": "github"
       },
       "original": {
@@ -19,10 +19,10 @@
     "flake-compat": {
       "flake": false,
       "locked": {
-        "lastModified": 1747046372,
+        "lastModified": 1761588595,
         "owner": "edolstra",
         "repo": "flake-compat",
-        "rev": "9100a0f413b0c601e0533d1d94ffd501ce2e7885",
+        "rev": "f387cd2afec9419c8ee37694406ca490c3f34ee5",
         "type": "github"
       },
       "original": {
@@ -31,6 +31,38 @@
         "type": "github"
       }
     },
+    "flake-compat_2": {
+      "flake": false,
+      "locked": {
+        "lastModified": 1761588595,
+        "owner": "edolstra",
+        "repo": "flake-compat",
+        "rev": "f387cd2afec9419c8ee37694406ca490c3f34ee5",
+        "type": "github"
+      },
+      "original": {
+        "owner": "edolstra",
+        "repo": "flake-compat",
+        "type": "github"
+      }
+    },
+    "flake-utils": {
+      "inputs": {
+        "systems": "systems"
+      },
+      "locked": {
+        "lastModified": 1731533236,
+        "owner": "numtide",
+        "repo": "flake-utils",
+        "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
+        "type": "github"
+      },
+      "original": {
+        "owner": "numtide",
+        "repo": "flake-utils",
+        "type": "github"
+      }
+    },
     "git-hooks": {
       "inputs": {
         "flake-compat": "flake-compat",
@@ -40,10 +72,10 @@
         ]
       },
       "locked": {
-        "lastModified": 1759523803,
+        "lastModified": 1760663237,
         "owner": "cachix",
         "repo": "git-hooks.nix",
-        "rev": "cfc9f7bb163ad8542029d303e599c0f7eee09835",
+        "rev": "ca5b894d3e3e151ffc1db040b6ce4dcc75d31c37",
         "type": "github"
       },
       "original": {
@@ -74,10 +106,10 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1758532697,
+        "lastModified": 1761313199,
         "owner": "cachix",
         "repo": "devenv-nixpkgs",
-        "rev": "207a4cb0e1253c7658c6736becc6eb9cace1f25f",
+        "rev": "d1c30452ebecfc55185ae6d1c983c09da0c274ff",
         "type": "github"
       },
       "original": {
@@ -87,15 +119,51 @@
         "type": "github"
       }
     },
+    "nixpkgs-ruby": {
+      "inputs": {
+        "flake-compat": "flake-compat_2",
+        "flake-utils": "flake-utils",
+        "nixpkgs": [
+          "nixpkgs"
+        ]
+      },
+      "locked": {
+        "lastModified": 1759902829,
+        "owner": "bobvanderlinden",
+        "repo": "nixpkgs-ruby",
+        "rev": "5fba6c022a63f1e76dee4da71edddad8959f088a",
+        "type": "github"
+      },
+      "original": {
+        "owner": "bobvanderlinden",
+        "repo": "nixpkgs-ruby",
+        "type": "github"
+      }
+    },
     "root": {
       "inputs": {
         "devenv": "devenv",
         "git-hooks": "git-hooks",
         "nixpkgs": "nixpkgs",
+        "nixpkgs-ruby": "nixpkgs-ruby",
         "pre-commit-hooks": [
           "git-hooks"
         ]
       }
+    },
+    "systems": {
+      "locked": {
+        "lastModified": 1681028828,
+        "owner": "nix-systems",
+        "repo": "default",
+        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
+        "type": "github"
+      },
+      "original": {
+        "owner": "nix-systems",
+        "repo": "default",
+        "type": "github"
+      }
     }
   },
   "root": "root",
data/devenv.nix CHANGED
@@ -4,6 +4,9 @@
 
   packages = [ pkgs.git ];
 
-  languages.ruby.enable = true;
+  languages.ruby = {
+    enable = true;
+    version = "3.4.7";
+  };
 
 }
data/devenv.yaml CHANGED
@@ -1,15 +1,8 @@
-# yaml-language-server: $schema=https://devenv.sh/devenv.schema.json
 inputs:
   nixpkgs:
     url: github:cachix/devenv-nixpkgs/rolling
-
-# If you're using non-OSS software, you can set allowUnfree to true.
-# allowUnfree: true
-
-# If you're willing to use a package that's vulnerable
-# permittedInsecurePackages:
-#   - "openssl-1.1.1w"
-
-# If you have more than one devenv you can merge them
-#imports:
-#  - ./backend
+  nixpkgs-ruby:
+    url: github:bobvanderlinden/nixpkgs-ruby
+    inputs:
+      nixpkgs:
+        follows: nixpkgs
data/durable-llm.gemspec CHANGED
@@ -21,24 +21,32 @@ Gem::Specification.new do |spec|
   spec.metadata['changelog_uri'] = 'https://github.com/durableprogramming/durable-llm/blob/main/CHANGELOG.md'
 
   spec.files = Dir.chdir(__dir__) do
-    `git ls-files -z`.split("\x0").reject do |f|
-      (File.expand_path(f) == __FILE__) || f.start_with?(*%w[bin/ test/ spec/ features/ .git .circleci appveyor])
+    if system('git rev-parse --git-dir > /dev/null 2>&1')
+      `git ls-files -z`.split("\x0").reject do |f|
+        (File.expand_path(f) == __FILE__) || f.start_with?(*%w[bin/ test/ spec/ features/ .git .circleci appveyor])
+      end
+    else
+      Dir.glob('**/*', File::FNM_DOTMATCH).reject do |f|
+        File.directory?(f) || (File.expand_path(f) == __FILE__) || f.start_with?(*%w[bin/ test/ spec/ features/ .git .circleci appveyor])
+      end
     end
   end
   spec.bindir = 'exe'
   spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) }
   spec.require_paths = ['lib']
 
-  spec.add_dependency 'event_stream_parser', '~> 1.0'
-  spec.add_dependency 'faraday', '> 1.0'
-  spec.add_dependency 'highline', '~> 3.1'
-  spec.add_dependency 'json', '~> 2.6'
-  spec.add_dependency 'thor', '~> 1.3'
-  spec.add_dependency 'zeitwerk', '~> 2.6'
+  spec.add_dependency 'event_stream_parser', '~> 1.0', '>= 1.0.0'
+  spec.add_dependency 'faraday', '>= 1.0', '< 3.0'
+  spec.add_dependency 'highline', '~> 3.1', '>= 3.1.0'
+  spec.add_dependency 'json', '~> 2.6', '>= 2.6.0'
+  spec.add_dependency 'ostruct', '~> 0.6.0'
+  spec.add_dependency 'thor', '~> 1.3', '>= 1.3.0'
+  spec.add_dependency 'zeitwerk', '~> 2.6', '>= 2.6.0'
 
   spec.add_development_dependency 'dotenv', '~> 2.8'
   spec.add_development_dependency 'minitest', '~> 5.0'
   spec.add_development_dependency 'mocha', '~> 2.1'
   spec.add_development_dependency 'rubocop', '~> 1.0'
   spec.add_development_dependency 'vcr', '~> 6.0'
+  spec.add_development_dependency 'yard', '~> 0.9'
 end
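
The reworked spec.files block is the main packaging fix here: the gemspec now evaluates outside a git checkout (for example, when the published gem is unpacked and rebuilt), where `git ls-files` would return nothing. A minimal standalone sketch of the same pattern — the prefix list is copied from the diff, and the __FILE__ check is dropped for brevity:

    # Sketch of the new file-listing fallback; not the gem's code verbatim.
    prefixes = %w[bin/ test/ spec/ features/ .git .circleci appveyor]

    files =
      if system('git rev-parse --git-dir > /dev/null 2>&1')
        # Inside a git checkout: the git index is authoritative.
        `git ls-files -z`.split("\x0").reject { |f| f.start_with?(*prefixes) }
      else
        # Outside git (e.g. an unpacked .gem): walk the tree, skipping directories.
        Dir.glob('**/*', File::FNM_DOTMATCH).reject do |f|
          File.directory?(f) || f.start_with?(*prefixes)
        end
      end

    puts files.first(10)
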
@@ -5,6 +5,6 @@ require 'durable/llm/client'
 
 client = Durable::Llm::Client.new(:openai, model: 'gpt-4')
 
-response = client.quick_complete("What's the capital of California?")
+response = client.complete("What's the capital of California?")
 
 puts response
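
This example tracks the rename of quick_complete to complete. As the client diff below shows, the old name is kept via `alias quick_complete complete`, so existing callers continue to work. A quick compatibility check (assumes an OpenAI API key is configured in the environment):

    require 'durable/llm/client'

    client = Durable::Llm::Client.new(:openai, model: 'gpt-4')

    # Both spellings dispatch to the same method as of 0.1.6:
    puts client.complete('Say hi in three words.')
    puts client.quick_complete('Say hi in three words.') # legacy alias
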
@@ -13,6 +13,9 @@ require 'durable/llm/providers'
 
 module Durable
   module Llm
+    # Command-line interface for Durable LLM gem.
+    #
+    # Provides Thor-based CLI commands for interacting with LLM providers.
     class CLI < Thor
       def self.exit_on_failure?
         true
@@ -21,35 +24,35 @@ module Durable
       CONVERSATIONS_DIR = File.expand_path('~/.durable_llm/conversations')
       LAST_CONVERSATION_FILE = File.join(CONVERSATIONS_DIR, 'last_conversation.txt')
 
-      def conversation_file_path(id)
-        File.join(CONVERSATIONS_DIR, "#{id}.json")
-      end
+      no_commands do
+        def conversation_file_path(id)
+          File.join(CONVERSATIONS_DIR, "#{id}.json")
+        end
 
-      def load_conversation(id)
-        path = conversation_file_path(id)
-        return nil unless File.exist?(path)
+        def load_conversation(id)
+          path = conversation_file_path(id)
+          return nil unless File.exist?(path)
 
-        JSON.parse(File.read(path))
-      end
+          JSON.parse(File.read(path))
+        end
 
-      def save_conversation(conversation)
-        FileUtils.mkdir_p(CONVERSATIONS_DIR) unless Dir.exist?(CONVERSATIONS_DIR)
-        id = conversation['id'] || SecureRandom.uuid
-        conversation['id'] = id
-        conversation['updated_at'] = Time.now.iso8601
-        File.write(conversation_file_path(id), JSON.generate(conversation))
-        File.write(LAST_CONVERSATION_FILE, id)
-        id
-      end
+        def save_conversation(conversation)
+          FileUtils.mkdir_p(CONVERSATIONS_DIR) unless Dir.exist?(CONVERSATIONS_DIR)
+          id = conversation['id'] || SecureRandom.uuid
+          conversation['id'] = id
+          conversation['updated_at'] = Time.now.iso8601
+          File.write(conversation_file_path(id), JSON.generate(conversation))
+          File.write(LAST_CONVERSATION_FILE, id)
+          id
+        end
 
-      def last_conversation_id
-        return nil unless File.exist?(LAST_CONVERSATION_FILE)
+        def last_conversation_id
+          return nil unless File.exist?(LAST_CONVERSATION_FILE)
 
-        File.read(LAST_CONVERSATION_FILE).strip
+          File.read(LAST_CONVERSATION_FILE).strip
+        end
       end
 
-      private :load_conversation, :save_conversation
-
       # Run a single prompt and get a response
       #
       # @param prompt [Array<String>] The prompt text to send to the model
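
The no_commands wrapper matters because Thor registers every public instance method of a Thor subclass as a CLI subcommand. The removed `private :load_conversation, :save_conversation` line covered only two of the four helpers, leaving conversation_file_path and last_conversation_id exposed as accidental commands. A minimal illustration of the Thor behavior, using a hypothetical Demo class rather than the gem's CLI:

    require 'thor'

    class Demo < Thor
      desc 'greet NAME', 'Say hello'
      def greet(name)
        puts "hello #{name}"
      end

      no_commands do
        # Not registered as a subcommand; callable only from other methods.
        def helper
          'internal'
        end
      end
    end

    Demo.start(%w[greet world]) # prints "hello world"
    Demo.start(%w[helper])      # Thor reports an undefined command
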
@@ -33,8 +33,19 @@ module Durable
       # @option options [String] :model The default model to use for requests
       # @option options [String] 'model' Alternative string key for model
       # @option options [String] :api_key API key for authentication (provider-specific)
+      # @raise [ArgumentError] If provider_name is nil or empty
       # @raise [NameError] If the provider class cannot be found
+      # @example Initialize with OpenAI provider
+      #   client = Durable::Llm::Client.new(:openai, model: 'gpt-4', api_key: 'sk-...')
+      # @example Initialize with Anthropic provider
+      #   client = Durable::Llm::Client.new(:anthropic, model: 'claude-3-opus-20240229')
       def initialize(provider_name, options = {})
+        if provider_name.nil? || provider_name.to_s.strip.empty?
+          raise ArgumentError, 'provider_name cannot be nil or empty. Supported providers: ' \
+                               "#{Durable::Llm::Providers.available_providers.join(', ')}"
+        end
+        raise ArgumentError, 'options must be a Hash' unless options.is_a?(Hash)
+
         @model = options.delete('model') || options.delete(:model) if options.key?('model') || options.key?(:model)
 
         provider_class = Durable::Llm::Providers.provider_class_for(provider_name)
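
With these guard clauses, a nil or empty provider now fails fast with the list of supported providers instead of surfacing later as an opaque NameError. What a caller sees, per the messages in the diff above:

    require 'durable/llm/client'

    begin
      Durable::Llm::Client.new(nil)
    rescue ArgumentError => e
      warn e.message # "provider_name cannot be nil or empty. Supported providers: ..."
    end
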
@@ -49,70 +60,143 @@
         @model ? { model: @model } : {}
       end
 
-      # Performs a quick text completion with minimal configuration
+      # Performs a text completion with minimal configuration
       #
       # @param text [String] The input text to complete
       # @param opts [Hash] Additional options (currently unused, reserved for future use)
       # @return [String] The generated completion text
+      # @raise [ArgumentError] If text is nil or empty
       # @raise [Durable::Llm::APIError] If the API request fails
       # @raise [IndexError] If the response contains no choices
       # @raise [NoMethodError] If the response structure is unexpected
-      def quick_complete(text, _opts = {})
+      # @example Text completion with OpenAI
+      #   client = Durable::Llm::Client.new(:openai, model: 'gpt-4')
+      #   response = client.complete('What is the capital of France?')
+      #   puts response # => "The capital of France is Paris."
+      def complete(text, _opts = {})
+        if text.nil? || text.to_s.strip.empty?
+          raise ArgumentError, 'text cannot be nil or empty. Provide a non-empty string for completion.'
+        end
+
         response = completion(process_params(messages: [{ role: 'user', content: text }]))
 
         choice = response.choices.first
-        raise IndexError, 'No completion choices returned' unless choice
+        unless choice
+          raise IndexError, 'No completion choices returned from the API. This may indicate an ' \
+                            'API error or invalid request parameters.'
+        end
 
         message = choice.message
-        raise NoMethodError, 'Response choice has no message' unless message
+        unless message
+          raise NoMethodError, 'Response choice has no message. The API response format may be ' \
+                               'unexpected or the provider may have changed their response structure.'
+        end
 
         content = message.content
-        raise NoMethodError, 'Response message has no content' unless content
+        unless content
+          raise NoMethodError, 'Response message has no content. This may occur if the model ' \
+                               'refused to respond or if content filtering was applied.'
+        end
 
         content
       end
+      alias quick_complete complete
 
       # Performs a completion request
       #
       # @param params [Hash] The completion parameters
+      # @option params [String] :model The model to use (overrides default)
+      # @option params [Array<Hash>] :messages The conversation messages
+      # @option params [Float] :temperature Sampling temperature (0.0-2.0)
+      # @option params [Integer] :max_tokens Maximum tokens to generate
       # @return [Object] The completion response object
+      # @raise [ArgumentError] If params is not a Hash
       # @raise [Durable::Llm::APIError] If the API request fails
+      # @example Perform a completion
+      #   client = Durable::Llm::Client.new(:openai, model: 'gpt-4')
+      #   response = client.completion(
+      #     messages: [
+      #       { role: 'system', content: 'You are a helpful assistant.' },
+      #       { role: 'user', content: 'Hello!' }
+      #     ],
+      #     temperature: 0.7
+      #   )
       def completion(params = {})
+        raise ArgumentError, 'params must be a Hash' unless params.is_a?(Hash)
+
         @provider.completion(process_params(params))
       end
 
       # Performs a chat completion request (alias for completion)
       #
       # @param params [Hash] The chat parameters
+      # @option params [String] :model The model to use (overrides default)
+      # @option params [Array<Hash>] :messages The conversation messages
+      # @option params [Float] :temperature Sampling temperature (0.0-2.0)
+      # @option params [Integer] :max_tokens Maximum tokens to generate
       # @return [Object] The chat response object
+      # @raise [ArgumentError] If params is not a Hash
       # @raise [Durable::Llm::APIError] If the API request fails
+      # @see #completion
       def chat(params = {})
+        raise ArgumentError, 'params must be a Hash' unless params.is_a?(Hash)
+
         @provider.completion(process_params(params))
       end
 
       # Performs an embedding request
       #
       # @param params [Hash] The embedding parameters including model and input
+      # @option params [String] :model The embedding model to use
+      # @option params [String, Array<String>] :input The text(s) to embed
       # @return [Object] The embedding response object
+      # @raise [ArgumentError] If params is not a Hash or missing required fields
       # @raise [NotImplementedError] If the provider doesn't support embeddings
       # @raise [Durable::Llm::APIError] If the API request fails
+      # @example Generate embeddings
+      #   client = Durable::Llm::Client.new(:openai)
+      #   response = client.embed(
+      #     model: 'text-embedding-ada-002',
+      #     input: 'Hello, world!'
+      #   )
       def embed(params = {})
+        raise ArgumentError, 'params must be a Hash' unless params.is_a?(Hash)
+
         @provider.embedding(**process_params(params))
       rescue NotImplementedError
-        raise NotImplementedError, "#{@provider.class.name} does not support embeddings"
+        provider_name = @provider.class.name.split('::').last
+        raise NotImplementedError, "#{provider_name} does not support embeddings. " \
+                                   'Try using a provider like OpenAI that offers embedding models.'
       end
 
       # Performs a streaming completion request
       #
       # @param params [Hash] The streaming parameters
+      # @option params [String] :model The model to use (overrides default)
+      # @option params [Array<Hash>] :messages The conversation messages
+      # @option params [Float] :temperature Sampling temperature (0.0-2.0)
+      # @option params [Integer] :max_tokens Maximum tokens to generate
       # @yield [Object] Yields stream response chunks as they arrive
       # @return [Object] The final response object
+      # @raise [ArgumentError] If params is not a Hash or no block is given
       # @raise [NotImplementedError] If the provider doesn't support streaming
       # @raise [Durable::Llm::APIError] If the API request fails
+      # @example Stream a completion
+      #   client = Durable::Llm::Client.new(:openai, model: 'gpt-4')
+      #   client.stream(messages: [{ role: 'user', content: 'Count to 10' }]) do |chunk|
+      #     print chunk.choices.first.delta.content
+      #   end
       def stream(params = {}, &block)
+        raise ArgumentError, 'params must be a Hash' unless params.is_a?(Hash)
+        unless block_given?
+          raise ArgumentError, 'block required for streaming. Use: client.stream(params) { |chunk| ... }'
+        end
+
         @provider.stream(process_params(params), &block)
       rescue NotImplementedError
-        raise NotImplementedError, "#{@provider.class.name} does not support streaming"
+        provider_name = @provider.class.name.split('::').last
+        raise NotImplementedError, "#{provider_name} does not support streaming. " \
+                                   'Try using completion() or chat() instead.'
       end
 
       # Checks if the provider supports streaming
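
Because stream now raises a NotImplementedError that points at completion()/chat(), and the client already exposes the stream? predicate (context in the next hunk), callers can branch on capability instead of rescuing. A sketch, assuming `client` was built as in the examples above:

    messages = [{ role: 'user', content: 'Count to 10' }]

    if client.stream?
      client.stream(messages: messages) do |chunk|
        print chunk.choices.first.delta.content
      end
    else
      # Provider without streaming: fall back to a single completion.
      response = client.completion(messages: messages)
      puts response.choices.first.message.content
    end
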
@@ -122,10 +206,67 @@ module Durable
         @provider.stream?
       end
 
+      # Sets the model for subsequent requests (fluent interface)
+      #
+      # @param model_name [String] The model to use
+      # @return [Client] Returns self for method chaining
+      # @example Fluent API usage
+      #   client = Durable::Llm::Client.new(:openai)
+      #   client.with_model('gpt-4').complete('Hello!')
+      def with_model(model_name)
+        @model = model_name
+        self
+      end
+
+      # Sets temperature for the next request (fluent interface)
+      #
+      # @param temp [Float] The temperature value (0.0-2.0)
+      # @return [Client] Returns self for method chaining
+      # @example Fluent temperature setting
+      #   client.with_temperature(0.7).complete('Be creative!')
+      def with_temperature(temp)
+        @next_temperature = temp
+        self
+      end
+
+      # Sets max tokens for the next request (fluent interface)
+      #
+      # @param tokens [Integer] Maximum tokens to generate
+      # @return [Client] Returns self for method chaining
+      # @example Fluent max tokens setting
+      #   client.with_max_tokens(500).complete('Write a story')
+      def with_max_tokens(tokens)
+        @next_max_tokens = tokens
+        self
+      end
+
+      # Creates a copy of the client with different configuration
+      #
+      # @param options [Hash] New configuration options
+      # @option options [String] :model Override the model
+      # @return [Client] A new client instance with merged configuration
+      # @example Clone with different model
+      #   gpt4_client = client.clone_with(model: 'gpt-4')
+      #   gpt35_client = client.clone_with(model: 'gpt-3.5-turbo')
+      def clone_with(**options)
+        provider_name = @provider.class.name.split('::').last.downcase.to_sym
+        self.class.new(provider_name, options.merge(model: @model))
+      end
+
       private
 
       def process_params(opts = {})
-        default_params.dup.merge(opts)
+        params = default_params.dup.merge(opts)
+
+        # Apply fluent interface settings if present
+        params[:temperature] = @next_temperature if @next_temperature
+        params[:max_tokens] = @next_max_tokens if @next_max_tokens
+
+        # Clear one-time settings after use
+        @next_temperature = nil
+        @next_max_tokens = nil
+
+        params
       end
     end
   end
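
Note the asymmetry in the new fluent interface: with_model mutates the client for all subsequent requests, while with_temperature and with_max_tokens are one-shot — process_params clears them after the next request, as the hunk above shows. Usage sketch, assuming a configured API key:

    client = Durable::Llm::Client.new(:openai, model: 'gpt-3.5-turbo')

    # Persistent: every subsequent request uses gpt-4.
    client.with_model('gpt-4')

    # One-shot: the temperature applies to this call only, then resets.
    client.with_temperature(1.2).complete('Improvise a haiku about diffs.')
    client.complete('Now answer plainly.') # back to provider defaults
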
@@ -0,0 +1,102 @@
+# frozen_string_literal: true
+
+# This file provides global convenience functions for quick access to Durable LLM functionality
+# without requiring explicit module qualification. These functions follow Ruby conventions for
+# global helper methods and make the library more approachable for quick usage and scripting.
+# The functions delegate to the main Durable::Llm module methods while providing shorter names.
+
+# Creates a new Durable LLM client with the specified provider and options.
+#
+# This is a global convenience function that provides quick access to client creation
+# without requiring the full Durable::Llm module path. It's equivalent to calling
+# Durable::Llm.new(provider, options).
+#
+# @param provider [Symbol, String] The provider name (e.g., :openai, :anthropic)
+# @param options [Hash] Configuration options for the client
+# @option options [String] :model The default model to use
+# @option options [String] :api_key API key for authentication
+# @return [Durable::Llm::Client] A new client instance
+# @example Create an OpenAI client
+#   client = DurableLlm(:openai, model: 'gpt-4', api_key: 'sk-...')
+#   response = client.complete('Hello!')
+# @example Create an Anthropic client
+#   client = DurableLlm(:anthropic, model: 'claude-3-opus-20240229')
+def DurableLlm(provider, **options)
+  Durable::Llm.new(provider, options)
+end
+
+# Shorter alias for DurableLlm
+#
+# @param provider [Symbol, String] The provider name
+# @param options [Hash] Configuration options
+# @return [Durable::Llm::Client] A new client instance
+# @see DurableLlm
+def DLLM(provider, **options)
+  Durable::Llm.new(provider, options)
+end
+
+# Performs a quick text completion with minimal setup
+#
+# This global convenience function allows for one-line LLM completions without
+# explicit client creation. Perfect for scripts and REPL usage.
+#
+# @param text [String] The input text to complete
+# @param provider [Symbol] The provider to use (default: :openai)
+# @param model [String] The model to use (required)
+# @param options [Hash] Additional client options
+# @return [String] The completion text
+# @example Quick completion
+#   result = LlmComplete('What is Ruby?', model: 'gpt-4')
+#   puts result
+# @example With specific provider
+#   result = LlmComplete('Explain AI', provider: :anthropic, model: 'claude-3-opus-20240229')
+def LlmComplete(text, provider: :openai, model: nil, **options)
+  Durable::Llm.complete(text, provider: provider, model: model, **options)
+end
+
+# Performs a chat completion with minimal setup
+#
+# This global convenience function allows for quick chat interactions without
+# explicit client creation.
+#
+# @param messages [Array<Hash>] Array of message hashes with :role and :content
+# @param provider [Symbol] The provider to use (default: :openai)
+# @param model [String] The model to use (required)
+# @param options [Hash] Additional options
+# @return [Object] The chat response object
+# @example Simple chat
+#   response = LlmChat([{ role: 'user', content: 'Hello!' }], model: 'gpt-4')
+#   puts response.choices.first.message.content
+def LlmChat(messages, provider: :openai, model: nil, **options)
+  Durable::Llm.chat(messages, provider: provider, model: model, **options)
+end
+
+# Lists available models for a provider
+#
+# @param provider [Symbol] The provider name (default: :openai)
+# @param options [Hash] Provider options
+# @return [Array<String>] List of available model IDs
+# @example List models
+#   models = LlmModels(:openai)
+#   puts models.inspect
+def LlmModels(provider = :openai, **options)
+  Durable::Llm.models(provider, **options)
+end
+
+# Configures Durable LLM with a block
+#
+# This global convenience function provides easy access to configuration.
+#
+# @yield [configuration] The configuration instance to modify
+# @yieldparam configuration [Durable::Llm::Configuration] The config object
+# @return [void]
+# @example Configure API keys
+#   LlmConfigure do |config|
+#     config.openai.api_key = 'sk-...'
+#     config.anthropic.api_key = 'sk-ant-...'
+#   end
+def LlmConfigure(&block)
+  Durable::Llm.configure(&block)
+end
+
+# Copyright (c) 2025 Durable Programming, LLC. All rights reserved.
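
Taken together, the new global helpers (defined in a new file whose path is not shown in this diff) reduce a scripted call to one line. A sketch based on the file's own @example blocks, assuming the gem's entry point loads the helpers and an OpenAI key is available:

    require 'durable/llm' # assumed entry point; must load the helper file

    LlmConfigure do |config|
      config.openai.api_key = ENV.fetch('OPENAI_API_KEY')
    end

    puts LlmComplete('What is Ruby?', model: 'gpt-4')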