durable-llm 0.1.3 → 0.1.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. checksums.yaml +4 -4
  2. data/.envrc +7 -0
  3. data/CHANGELOG.md +5 -0
  4. data/CONFIGURE.md +132 -0
  5. data/Gemfile +7 -9
  6. data/Gemfile.lock +3 -3
  7. data/README.md +1 -0
  8. data/Rakefile +6 -6
  9. data/devenv.lock +103 -0
  10. data/devenv.nix +9 -0
  11. data/devenv.yaml +15 -0
  12. data/durable-llm.gemspec +44 -0
  13. data/examples/openai_quick_complete.rb +3 -1
  14. data/lib/durable/llm/cli.rb +247 -60
  15. data/lib/durable/llm/client.rb +92 -11
  16. data/lib/durable/llm/configuration.rb +174 -23
  17. data/lib/durable/llm/errors.rb +185 -0
  18. data/lib/durable/llm/providers/anthropic.rb +246 -36
  19. data/lib/durable/llm/providers/azure_openai.rb +347 -0
  20. data/lib/durable/llm/providers/base.rb +106 -9
  21. data/lib/durable/llm/providers/cohere.rb +227 -0
  22. data/lib/durable/llm/providers/deepseek.rb +233 -0
  23. data/lib/durable/llm/providers/fireworks.rb +278 -0
  24. data/lib/durable/llm/providers/google.rb +301 -0
  25. data/lib/durable/llm/providers/groq.rb +108 -29
  26. data/lib/durable/llm/providers/huggingface.rb +122 -18
  27. data/lib/durable/llm/providers/mistral.rb +431 -0
  28. data/lib/durable/llm/providers/openai.rb +162 -25
  29. data/lib/durable/llm/providers/opencode.rb +253 -0
  30. data/lib/durable/llm/providers/openrouter.rb +256 -0
  31. data/lib/durable/llm/providers/perplexity.rb +273 -0
  32. data/lib/durable/llm/providers/together.rb +346 -0
  33. data/lib/durable/llm/providers/xai.rb +355 -0
  34. data/lib/durable/llm/providers.rb +103 -15
  35. data/lib/durable/llm/version.rb +5 -1
  36. data/lib/durable/llm.rb +143 -3
  37. data/lib/durable.rb +29 -4
  38. data/sig/durable/llm.rbs +302 -1
  39. metadata +50 -36

data/lib/durable/llm/cli.rb

@@ -1,7 +1,15 @@
+ # frozen_string_literal: true
+
+ # This file implements the command-line interface for the Durable LLM gem using Thor, providing commands for single prompts, interactive chat sessions, and listing available models. It handles provider resolution, streaming responses, model options, system prompts, and conversation management through a user-friendly CLI with support for both one-shot completions and multi-turn conversations.
+
  require 'thor'
- require 'durable/llm'
- require 'durable/llm/client'
  require 'highline'
+ require 'json'
+ require 'securerandom'
+ require 'fileutils'
+ require 'time'
+ require 'durable/llm/client'
+ require 'durable/llm/providers'

  module Durable
  module Llm
@@ -10,29 +18,72 @@ module Durable
  true
  end

- desc "prompt PROMPT", "Run a prompt"
- option :model, aliases: "-m", desc: "Specify the model to use"
- option :system, aliases: "-s", desc: "Set a system prompt"
- option :continue, aliases: "-c", type: :boolean, desc: "Continue the previous conversation"
- option :conversation, aliases: "--cid", desc: "Continue a specific conversation by ID"
- option :no_stream, type: :boolean, desc: "Disable streaming of tokens"
- option :option, aliases: "-o", type: :hash, desc: "Set model-specific options"
-
- def prompt(prompt)
- config = Durable::Llm.configuration
- model = options[:model] || "gpt-3.5-turbo"
+ CONVERSATIONS_DIR = File.expand_path('~/.durable_llm/conversations')
+ LAST_CONVERSATION_FILE = File.join(CONVERSATIONS_DIR, 'last_conversation.txt')
+
+ def conversation_file_path(id)
+ File.join(CONVERSATIONS_DIR, "#{id}.json")
+ end
+
+ def load_conversation(id)
+ path = conversation_file_path(id)
+ return nil unless File.exist?(path)
+
+ JSON.parse(File.read(path))
+ end
+
+ def save_conversation(conversation)
+ FileUtils.mkdir_p(CONVERSATIONS_DIR) unless Dir.exist?(CONVERSATIONS_DIR)
+ id = conversation['id'] || SecureRandom.uuid
+ conversation['id'] = id
+ conversation['updated_at'] = Time.now.iso8601
+ File.write(conversation_file_path(id), JSON.generate(conversation))
+ File.write(LAST_CONVERSATION_FILE, id)
+ id
+ end
+
+ def last_conversation_id
+ return nil unless File.exist?(LAST_CONVERSATION_FILE)
+
+ File.read(LAST_CONVERSATION_FILE).strip
+ end
+
+ private :load_conversation, :save_conversation
+
+ # Run a single prompt and get a response
+ #
+ # @param prompt [Array<String>] The prompt text to send to the model
+ # @option options :model [String] The model to use (default: gpt-3.5-turbo)
+ # @option options :system [String] System prompt to set context
+ # @option options :continue [Boolean] Continue the last conversation
+ # @option options :conversation [String] Continue a specific conversation by ID
+ # @option options :no_stream [Boolean] Disable streaming responses
+ # @option options :option [Hash] Additional model-specific options
+ # @return [void] Outputs the response to stdout
+ # @raise [RuntimeError] If no provider is found for the specified model
+ desc 'prompt PROMPT', 'Run a prompt'
+ option :model, aliases: '-m', desc: 'Specify the model to use'
+ option :system, aliases: '-s', desc: 'Set a system prompt'
+ option :continue, aliases: '-c', type: :boolean, desc: 'Continue the previous conversation'
+ option :conversation, aliases: '--cid', desc: 'Continue a specific conversation by ID'
+ option :no_stream, type: :boolean, desc: 'Disable streaming of tokens'
+ option :option, aliases: '-o', type: :hash, desc: 'Set model-specific options'
+
+ def prompt(*prompt)
+ model = options[:model] || 'gpt-3.5-turbo'
  provider_class = Durable::Llm::Providers.model_id_to_provider(model)

- if provider_class.nil?
- raise "no provider found for model '#{model}'"
- end
+ raise "no provider found for model '#{model}'" if provider_class.nil?

  provider_name = provider_class.name.split('::').last.downcase.to_sym
  client = Durable::Llm::Client.new(provider_name)
-
- messages = []
- messages << { role: "system", content: options[:system] } if options[:system]
- messages << { role: "user", content: prompt }
+
+ conversation_id = options[:conversation] || (options[:continue] ? last_conversation_id : nil)
+ conversation = conversation_id ? load_conversation(conversation_id) : nil
+
+ messages = conversation ? conversation['messages'].dup : []
+ messages << { role: 'system', content: options[:system] } if options[:system] && !conversation
+ messages << { role: 'user', content: prompt.join(' ') }

  params = {
  model: model,
@@ -40,37 +91,70 @@ module Durable
  }
  params.merge!(options[:option]) if options[:option]

- if options[:no_stream]
- response = client.completion(params)
- puts response.choices.first.to_s
- else
- client.stream(params) do |chunk|
- print chunk.to_s
- $stdout.flush
+ begin
+ if options[:no_stream] || !client.stream?
+ response = client.completion(**params)
+ assistant_message = response.choices.first.to_s
+ puts assistant_message
+ messages << { role: 'assistant', content: assistant_message }
+ else
+ assistant_content = ''
+ client.stream(**params) do |chunk|
+ print chunk
+ assistant_content += chunk
+ $stdout.flush
+ end
+ messages << { role: 'assistant', content: assistant_content }
  end
+
+ # Save conversation
+ conversation_data = {
+ 'id' => conversation_id,
+ 'model' => model,
+ 'messages' => messages,
+ 'created_at' => conversation ? conversation['created_at'] : Time.now.iso8601
+ }
+ save_conversation(conversation_data)
+ rescue Durable::Llm::Error => e
+ warn "API Error: #{e.message}"
+ exit 1
+ rescue StandardError => e
+ warn "Unexpected error: #{e.message}"
+ exit 1
  end
  end

- desc "chat", "Start an interactive chat"
- option :model, aliases: "-m", desc: "Specify the model to use"
- option :system, aliases: "-s", desc: "Set a system prompt"
- option :continue, aliases: "-c", type: :boolean, desc: "Continue the previous conversation"
- option :conversation, aliases: "--cid", desc: "Continue a specific conversation by ID"
- option :option, aliases: "-o", type: :hash, desc: "Set model-specific options"
+ # Start an interactive chat session with the model
+ #
+ # @option options :model [String] The model to use (default: gpt-3.5-turbo)
+ # @option options :system [String] System prompt to set context
+ # @option options :continue [Boolean] Continue the last conversation
+ # @option options :conversation [String] Continue a specific conversation by ID
+ # @option options :no_stream [Boolean] Disable streaming responses
+ # @option options :option [Hash] Additional model-specific options
+ # @return [void] Starts interactive chat session
+ # @raise [RuntimeError] If no provider is found for the specified model
+ desc 'chat', 'Start an interactive chat'
+ option :model, aliases: '-m', desc: 'Specify the model to use'
+ option :system, aliases: '-s', desc: 'Set a system prompt'
+ option :continue, aliases: '-c', type: :boolean, desc: 'Continue the previous conversation'
+ option :conversation, aliases: '--cid', desc: 'Continue a specific conversation by ID'
+ option :no_stream, type: :boolean, desc: 'Disable streaming of tokens'
+ option :option, aliases: '-o', type: :hash, desc: 'Set model-specific options'
  def chat
- config = Durable::Llm.configuration
- model = options[:model] || "gpt-3.5-turbo"
+ model = options[:model] || 'gpt-3.5-turbo'
  provider_class = Durable::Llm::Providers.model_id_to_provider(model)

- if provider_class.nil? || provider_class.name.nil?
- raise "no provider found for model '#{model}'"
- end
+ raise "no provider found for model '#{model}'" if provider_class.nil? || provider_class.name.nil?

  provider_name = provider_class.name.split('::').last.downcase.to_sym
  client = Durable::Llm::Client.new(provider_name)
-
- messages = []
- messages << { role: "system", content: options[:system] } if options[:system]
+
+ conversation_id = options[:conversation] || (options[:continue] ? last_conversation_id : nil)
+ conversation = conversation_id ? load_conversation(conversation_id) : nil
+
+ messages = conversation ? conversation['messages'].dup : []
+ messages << { role: 'system', content: options[:system] } if options[:system] && !conversation

  cli = HighLine.new

@@ -79,44 +163,147 @@ module Durable
  cli.say("Type '!multi' to enter multiple lines, then '!end' to finish")

  loop do
- input = cli.ask("> ")
- break if ['exit', 'quit'].include?(input.downcase)
+ input = cli.ask('> ')
+ break if %w[exit quit].include?(input.downcase)

- if input == "!multi"
+ if input == '!multi'
  input = cli.ask("Enter multiple lines. Type '!end' to finish:") do |q|
- q.gather = "!end"
+ q.gather = '!end'
  end
  end

- messages << { role: "user", content: input }
+ messages << { role: 'user', content: input }
  params = {
  model: model,
  messages: messages
  }
  params.merge!(options[:option]) if options[:option]

- response = client.completion(params)
- cli.say(response.choices.first.to_s)
- messages << { role: "assistant", content: response.choices.first.to_s }
+ begin
+ if options[:no_stream] || !client.stream?
+ response = client.completion(**params)
+ assistant_message = response.choices.first.to_s
+ cli.say(assistant_message)
+ messages << { role: 'assistant', content: assistant_message }
+ else
+ assistant_content = ''
+ client.stream(**params) do |chunk|
+ print chunk
+ assistant_content += chunk
+ $stdout.flush
+ end
+ puts # Add newline after streaming
+ messages << { role: 'assistant', content: assistant_content }
+ end
+
+ # Save conversation after each exchange
+ conversation_data = {
+ 'id' => conversation_id,
+ 'model' => model,
+ 'messages' => messages,
+ 'created_at' => conversation ? conversation['created_at'] : Time.now.iso8601
+ }
+ conversation_id = save_conversation(conversation_data)
+ rescue Durable::Llm::Error => e
+ cli.say("API Error: #{e.message}")
+ next
+ rescue StandardError => e
+ cli.say("Unexpected error: #{e.message}")
+ next
+ end
  end
  end

- desc "models", "List available models"
- option :options, type: :boolean, desc: "Show model options"
+ # List all available models from all providers
+ #
+ # @option options :options [Boolean] Show model-specific options for each model
+ # @return [void] Outputs available models to stdout
+ desc 'models', 'List available models'
+ option :options, type: :boolean, desc: 'Show model options'
  def models
  cli = HighLine.new
- cli.say("Available models:")
-
- Durable::Llm::Providers.providers.each do |provider_name|
- provider_class = Durable::Llm::Providers.const_get(provider_name.to_s.capitalize)
- provider_models = provider_class.new.models
-
- cli.say("#{provider_name.to_s.capitalize}:")
- provider_models.each do |model|
- cli.say(" #{model}")
+ cli.say('Available models:')
+
+ Durable::Llm::Providers.providers.each do |provider_sym|
+ provider_class = Durable::Llm::Providers.provider_class_for(provider_sym)
+ begin
+ provider_models = provider_class.models
+ cli.say("#{provider_sym.to_s.capitalize}:")
+ provider_models.each do |model|
+ cli.say(" #{model}")
+ if options[:options]
+ provider_options = provider_class.options
+ cli.say(" Options: #{provider_options.join(', ')}")
+ end
+ end
+ rescue StandardError => e
+ cli.say("#{provider_sym.to_s.capitalize}: Error loading models - #{e.message}")
  end
  end
  end
+
+ # List all saved conversations
+ #
+ # @return [void] Outputs list of saved conversations to stdout
+ desc 'conversations', 'List saved conversations'
+ def conversations
+ cli = HighLine.new
+
+ unless Dir.exist?(CONVERSATIONS_DIR)
+ cli.say('No conversations found.')
+ return
+ end
+
+ conversation_files = Dir.glob("#{CONVERSATIONS_DIR}/*.json").sort_by { |f| File.mtime(f) }.reverse
+
+ if conversation_files.empty?
+ cli.say('No conversations found.')
+ return
+ end
+
+ cli.say('Saved conversations:')
+ cli.say('')
+
+ conversation_files.each do |file|
+ id = File.basename(file, '.json')
+ begin
+ conversation = JSON.parse(File.read(file))
+ model = conversation['model'] || 'unknown'
+ message_count = conversation['messages']&.length || 0
+ updated_at = conversation['updated_at'] ? Time.parse(conversation['updated_at']).strftime('%Y-%m-%d %H:%M') : 'unknown'
+
+ marker = id == last_conversation_id ? ' *' : ''
+ cli.say("#{id}#{marker} - #{model} (#{message_count} messages, updated #{updated_at})")
+ rescue JSON::ParserError
+ cli.say("#{id} - [corrupted conversation file]")
+ end
+ end
+
+ cli.say('')
+ cli.say('* indicates the last active conversation')
+ end
+
+ # Delete a saved conversation by ID
+ #
+ # @param id [String] The conversation ID to delete
+ # @return [void] Outputs confirmation message to stdout
+ desc 'delete_conversation ID', 'Delete a saved conversation'
+ def delete_conversation(id)
+ cli = HighLine.new
+
+ path = conversation_file_path(id)
+ if File.exist?(path)
+ File.delete(path)
+ cli.say("Deleted conversation #{id}")
+
+ # Remove from last conversation if it was the last one
+ File.delete(LAST_CONVERSATION_FILE) if last_conversation_id == id && File.exist?(LAST_CONVERSATION_FILE)
+ else
+ cli.say("Conversation #{id} not found")
+ end
+ end
  end
  end
  end
+
+ # Copyright (c) 2025 Durable Programming, LLC. All rights reserved.
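
The conversation persistence added above stores each exchange as a JSON file under ~/.durable_llm/conversations/, with last_conversation.txt holding the most recent ID. As a rough, hypothetical illustration of what save_conversation writes (the field names come from the diff; the id, timestamps, and message text below are invented for the example):

  require 'json'

  # Hypothetical illustration only: a conversation file as written by
  # save_conversation in the diff above. Keys mirror the real code; the
  # values here are made up.
  example = {
    'id'         => 'f3b1c2d4-0000-0000-0000-000000000000', # SecureRandom.uuid in the real code
    'model'      => 'gpt-3.5-turbo',
    'messages'   => [
      { 'role' => 'user',      'content' => 'Hello' },
      { 'role' => 'assistant', 'content' => 'Hi there!' }
    ],
    'created_at' => '2025-01-01T12:00:00+00:00',
    'updated_at' => '2025-01-01T12:00:05+00:00'
  }

  # `prompt --continue` re-reads such a file via load_conversation and
  # appends to its 'messages' array before calling the provider.
  puts JSON.pretty_generate(example)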

data/lib/durable/llm/client.rb

@@ -1,46 +1,125 @@
+ # frozen_string_literal: true
+
+ # This file implements the main Client class that provides a unified interface for interacting
+ # with different LLM providers. It acts as a facade that delegates operations like completion,
+ # chat, embedding, and streaming to the appropriate provider instance while handling parameter
+ # processing, model configuration, and providing convenience methods for quick text completion.
+ # The client automatically resolves provider classes based on the provider name and manages
+ # default parameters including model selection.
+
  require 'zeitwerk'
  require 'durable/llm/providers'

  module Durable
  module Llm
+ # Unified interface for interacting with different LLM providers
+ #
+ # The Client class provides a facade that delegates operations like completion, chat,
+ # embedding, and streaming to the appropriate provider instance while handling parameter
+ # processing, model configuration, and providing convenience methods for quick text completion.
+ # The client automatically resolves provider classes based on the provider name and manages
+ # default parameters including model selection.
  class Client
+ # @return [Object] The underlying provider instance
  attr_reader :provider
+
+ # @return [String, nil] The default model to use for requests
  attr_accessor :model

+ # Initializes a new LLM client for the specified provider
+ #
+ # @param provider_name [Symbol, String] The name of the LLM provider (e.g., :openai, :anthropic)
+ # @param options [Hash] Configuration options for the provider and client
+ # @option options [String] :model The default model to use for requests
+ # @option options [String] 'model' Alternative string key for model
+ # @option options [String] :api_key API key for authentication (provider-specific)
+ # @raise [NameError] If the provider class cannot be found
  def initialize(provider_name, options = {})
+ @model = options.delete('model') || options.delete(:model) if options.key?('model') || options.key?(:model)

- if options['model'] || options[:model]
- @model = options.delete('model') || options.delete(:model)
- end
-
- provider_class = Durable::Llm::Providers.const_get(provider_name.to_s.capitalize)
+ provider_class = Durable::Llm::Providers.provider_class_for(provider_name)

  @provider = provider_class.new(**options)
  end

+ # Returns the default parameters to merge with request options
+ #
+ # @return [Hash] Default parameters including model if set
  def default_params
- { model: @model }
+ @model ? { model: @model } : {}
  end
- def quick_complete(text, opts = {})

- response = completion(process_params(messages:[{role: 'user', content: text}]))
+ # Performs a quick text completion with minimal configuration
+ #
+ # @param text [String] The input text to complete
+ # @param opts [Hash] Additional options (currently unused, reserved for future use)
+ # @return [String] The generated completion text
+ # @raise [Durable::Llm::APIError] If the API request fails
+ # @raise [IndexError] If the response contains no choices
+ # @raise [NoMethodError] If the response structure is unexpected
+ def quick_complete(text, _opts = {})
+ response = completion(process_params(messages: [{ role: 'user', content: text }]))
+
+ choice = response.choices.first
+ raise IndexError, 'No completion choices returned' unless choice
+
+ message = choice.message
+ raise NoMethodError, 'Response choice has no message' unless message

- response.choices.first.message.content
+ content = message.content
+ raise NoMethodError, 'Response message has no content' unless content
+
+ content
  end
+
+ # Performs a completion request
+ #
+ # @param params [Hash] The completion parameters
+ # @return [Object] The completion response object
+ # @raise [Durable::Llm::APIError] If the API request fails
  def completion(params = {})
  @provider.completion(process_params(params))
  end

+ # Performs a chat completion request (alias for completion)
+ #
+ # @param params [Hash] The chat parameters
+ # @return [Object] The chat response object
+ # @raise [Durable::Llm::APIError] If the API request fails
  def chat(params = {})
- @provider.chat(process_params(params))
+ @provider.completion(process_params(params))
  end

+ # Performs an embedding request
+ #
+ # @param params [Hash] The embedding parameters including model and input
+ # @return [Object] The embedding response object
+ # @raise [NotImplementedError] If the provider doesn't support embeddings
+ # @raise [Durable::Llm::APIError] If the API request fails
  def embed(params = {})
- @provider.embed(process_params(params))
+ @provider.embedding(**process_params(params))
+ rescue NotImplementedError
+ raise NotImplementedError, "#{@provider.class.name} does not support embeddings"
  end

+ # Performs a streaming completion request
+ #
+ # @param params [Hash] The streaming parameters
+ # @yield [Object] Yields stream response chunks as they arrive
+ # @return [Object] The final response object
+ # @raise [NotImplementedError] If the provider doesn't support streaming
+ # @raise [Durable::Llm::APIError] If the API request fails
  def stream(params = {}, &block)
  @provider.stream(process_params(params), &block)
+ rescue NotImplementedError
+ raise NotImplementedError, "#{@provider.class.name} does not support streaming"
+ end
+
+ # Checks if the provider supports streaming
+ #
+ # @return [Boolean] True if streaming is supported, false otherwise
+ def stream?
+ @provider.stream?
  end

  private
@@ -51,3 +130,5 @@ module Durable
  end
  end
  end
+
+ # Copyright (c) 2025 Durable Programming, LLC. All rights reserved.
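
For context, a minimal usage sketch of the updated Client facade. This is not from the package itself: the provider choice, model name, and prompt text are illustrative, and it assumes credentials for the chosen provider are already configured.

  require 'durable/llm/client'

  # Minimal sketch (assumes an OpenAI API key is configured for the provider).
  client = Durable::Llm::Client.new(:openai, model: 'gpt-3.5-turbo')

  # One-shot helper: returns the first choice's message content, or raises
  # if the response has no choices / message / content (see quick_complete above).
  puts client.quick_complete('Say hello in one sentence.')

  # Streaming is delegated to the provider; stream? reports whether it is supported.
  if client.stream?
    client.stream(messages: [{ role: 'user', content: 'Count to three.' }]) do |chunk|
      print chunk
    end
  end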