durable-llm 0.1.3 → 0.1.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. checksums.yaml +4 -4
  2. data/.envrc +7 -0
  3. data/CHANGELOG.md +5 -0
  4. data/CONFIGURE.md +132 -0
  5. data/Gemfile +7 -9
  6. data/Gemfile.lock +3 -3
  7. data/README.md +1 -0
  8. data/Rakefile +6 -6
  9. data/devenv.lock +103 -0
  10. data/devenv.nix +9 -0
  11. data/devenv.yaml +15 -0
  12. data/durable-llm.gemspec +44 -0
  13. data/examples/openai_quick_complete.rb +3 -1
  14. data/lib/durable/llm/cli.rb +247 -60
  15. data/lib/durable/llm/client.rb +92 -11
  16. data/lib/durable/llm/configuration.rb +174 -23
  17. data/lib/durable/llm/errors.rb +185 -0
  18. data/lib/durable/llm/providers/anthropic.rb +246 -36
  19. data/lib/durable/llm/providers/azure_openai.rb +347 -0
  20. data/lib/durable/llm/providers/base.rb +106 -9
  21. data/lib/durable/llm/providers/cohere.rb +227 -0
  22. data/lib/durable/llm/providers/deepseek.rb +233 -0
  23. data/lib/durable/llm/providers/fireworks.rb +278 -0
  24. data/lib/durable/llm/providers/google.rb +301 -0
  25. data/lib/durable/llm/providers/groq.rb +108 -29
  26. data/lib/durable/llm/providers/huggingface.rb +122 -18
  27. data/lib/durable/llm/providers/mistral.rb +431 -0
  28. data/lib/durable/llm/providers/openai.rb +162 -25
  29. data/lib/durable/llm/providers/opencode.rb +253 -0
  30. data/lib/durable/llm/providers/openrouter.rb +256 -0
  31. data/lib/durable/llm/providers/perplexity.rb +273 -0
  32. data/lib/durable/llm/providers/together.rb +346 -0
  33. data/lib/durable/llm/providers/xai.rb +355 -0
  34. data/lib/durable/llm/providers.rb +103 -15
  35. data/lib/durable/llm/version.rb +5 -1
  36. data/lib/durable/llm.rb +143 -3
  37. data/lib/durable.rb +29 -4
  38. data/sig/durable/llm.rbs +302 -1
  39. metadata +50 -36
@@ -1,36 +1,121 @@
1
+ # frozen_string_literal: true
2
+
3
+ # This file serves as the main registry and loader for LLM providers in the Durable gem,
4
+ # providing a centralized interface to manage and discover available provider classes. It handles
5
+ # automatic loading of provider modules, maintains a dynamic list of registered providers, offers
6
+ # utility methods for model discovery and provider resolution based on model IDs, and includes
7
+ # provider aliases for backwards compatibility and convenience access.
8
+
1
9
  require 'durable/llm/providers/openai'
2
10
  require 'durable/llm/providers/anthropic'
11
+ require 'durable/llm/providers/cohere'
12
+ require 'durable/llm/providers/groq'
13
+ require 'durable/llm/providers/huggingface'
14
+ require 'durable/llm/providers/azure_openai'
15
+ require 'durable/llm/providers/deepseek'
16
+ require 'durable/llm/providers/fireworks'
17
+ require 'durable/llm/providers/google'
18
+ require 'durable/llm/providers/mistral'
19
+ require 'durable/llm/providers/opencode'
20
+ require 'durable/llm/providers/openrouter'
21
+ require 'durable/llm/providers/perplexity'
22
+ require 'durable/llm/providers/together'
23
+ require 'durable/llm/providers/xai'
3
24
 
4
25
  module Durable
5
26
  module Llm
27
+ # Main module for LLM providers, providing registry and utility methods
6
28
  module Providers
29
+ # Loads all provider files in the providers directory.
30
+ #
31
+ # This method dynamically requires all Ruby files in the providers subdirectory,
32
+ # ensuring that all provider classes are loaded and available for use.
33
+ #
34
+ # @return [void]
7
35
  def self.load_all
8
- Dir[File.join(__dir__, 'providers', '*.rb')].each { |file| require file }
36
+ Dir[File.join(__dir__, 'providers', '*.rb')].sort.each { |file| require file }
37
+ end
38
+
39
+ # Returns the provider class for a given provider symbol.
40
+ #
41
+ # This method handles the mapping from provider symbols to their corresponding
42
+ # class constants, including special cases where the symbol doesn't directly
43
+ # map to a capitalized class name.
44
+ #
45
+ # @param provider_sym [Symbol] The provider symbol (e.g., :openai, :anthropic)
46
+ # @return [Class] The provider class
47
+ # @raise [NameError] If the provider class cannot be found
48
+ def self.provider_class_for(provider_sym)
49
+ # Handle special cases where capitalize doesn't match
50
+ case provider_sym
51
+ when :deepseek
52
+ DeepSeek
53
+ when :openrouter
54
+ OpenRouter
55
+ when :azureopenai
56
+ AzureOpenai
57
+ when :opencode
58
+ Opencode
59
+ else
60
+ const_get(provider_sym.to_s.capitalize)
61
+ end
9
62
  end
10
63
 
64
+ # Returns a list of all available provider symbols.
65
+ #
66
+ # This method dynamically discovers all provider classes by inspecting the
67
+ # module's constants and filtering for classes that inherit from Base,
68
+ # excluding the Base class itself.
69
+ #
70
+ # @return [Array<Symbol>] Array of provider symbols
11
71
  def self.providers
72
+ @providers ||= begin
73
+ provider_classes = constants.select do |const_name|
74
+ const = const_get(const_name)
75
+ next if const.name.split('::').last == 'Base'
76
+
77
+ const.is_a?(Class) && const < Durable::Llm::Providers::Base
78
+ end
12
79
 
13
- @provider_list ||= begin
14
- constants.select do |const_name|
15
- const = const_get(const_name)
16
- last_component = const.name.split('::').last
17
- next if last_component == 'Base'
18
- const.is_a?(Class) && const.to_s.split('::').last.to_s == const_name.to_s
19
- end.map(&:to_s).map(&:downcase).map(&:to_sym)
20
- end
80
+ provider_classes.map do |const_name|
81
+ const_get(const_name).name.split('::').last.downcase.to_sym
82
+ end.uniq
83
+ end
21
84
  end
22
85
 
86
+ # Returns a flat list of all model IDs across all providers.
87
+ #
88
+ # This method aggregates model IDs from all available providers by calling
89
+ # their models method. If a provider fails to return models (e.g., due to
90
+ # missing API keys), it gracefully handles the error and continues.
91
+ #
92
+ # @return [Array<String>] Array of model IDs
23
93
  def self.model_ids
24
- providers.flat_map do |provider|
25
- provider_class = const_get(provider.to_s.capitalize)
26
- provider_class.models
94
+ providers.flat_map do |provider_sym|
95
+ provider_class = provider_class_for(provider_sym)
96
+ begin
97
+ provider_class.models
98
+ rescue StandardError
99
+ []
100
+ end
27
101
  end
28
102
  end
29
103
 
104
+ # Finds the provider class that supports a given model ID.
105
+ #
106
+ # This method searches through all providers to find which one supports
107
+ # the specified model ID. Returns nil if no provider supports the model.
108
+ #
109
+ # @param model_id [String] The model ID to search for
110
+ # @return [Class, nil] The provider class that supports the model, or nil
30
111
  def self.model_id_to_provider(model_id)
31
- providers.each do |provider|
32
- provider_class = const_get(provider.to_s.capitalize)
33
- return provider_class if provider_class.models.include?(model_id)
112
+ providers.each do |provider_sym|
113
+ provider_class = provider_class_for(provider_sym)
114
+ begin
115
+ return provider_class if provider_class.models.include?(model_id)
116
+ rescue StandardError
117
+ next
118
+ end
34
119
  end
35
120
  nil
36
121
  end
@@ -38,6 +123,9 @@ module Durable
38
123
  Openai = OpenAI
39
124
  Claude = Anthropic
40
125
  Claude3 = Anthropic
126
+ AzureOpenAI = AzureOpenai
41
127
  end
42
128
  end
43
129
  end
130
+
131
+ # Copyright (c) 2025 Durable Programming, LLC. All rights reserved.
@@ -1,7 +1,11 @@
1
1
  # frozen_string_literal: true
2
2
 
3
+ # Defines the version constant
4
+
3
5
  module Durable
4
6
  module Llm
5
- VERSION = "0.1.3"
7
+ VERSION = '0.1.5'
6
8
  end
7
9
  end
10
+
11
+ # Copyright (c) 2025 Durable Programming, LLC. All rights reserved.
data/lib/durable/llm.rb CHANGED
@@ -1,20 +1,156 @@
1
- require "zeitwerk"
1
+ # frozen_string_literal: true
2
+
3
+ # Main entry point for the Durable::Llm module.
4
+ #
5
+ # This module provides a unified interface for interacting with multiple Large Language Model (LLM)
6
+ # providers through a consistent API. It handles configuration management, provider instantiation,
7
+ # and offers convenience methods for common LLM operations.
8
+ #
9
+ # The module uses Zeitwerk for efficient autoloading of its components and maintains a global
10
+ # configuration that can be customized through environment variables or programmatic setup.
11
+ #
12
+ # ## Basic Usage
13
+ #
14
+ # ```ruby
15
+ # require 'durable/llm'
16
+ #
17
+ # # Configure API keys
18
+ # Durable::Llm.configure do |config|
19
+ # config.openai.api_key = 'your-openai-key'
20
+ # end
21
+ #
22
+ # # Create a client and make a request
23
+ # client = Durable::Llm.new(:openai, model: 'gpt-4')
24
+ # response = client.quick_complete('Hello, world!')
25
+ # puts response # => "Hello! How can I help you today?"
26
+ # ```
27
+ #
28
+ # ## Configuration
29
+ #
30
+ # Configuration can be done via environment variables using the `DLLM__` prefix:
31
+ #
32
+ # ```bash
33
+ # export DLLM__OPENAI__API_KEY=your-key-here
34
+ # export DLLM__ANTHROPIC__API_KEY=your-anthropic-key
35
+ # ```
36
+ #
37
+ # Or programmatically:
38
+ #
39
+ # ```ruby
40
+ # Durable::Llm.configure do |config|
41
+ # config.openai.api_key = 'your-key'
42
+ # config.anthropic.api_key = 'your-anthropic-key'
43
+ # config.default_provider = 'anthropic'
44
+ # end
45
+ # ```
46
+ #
47
+ # ## Supported Providers
48
+ #
49
+ # - OpenAI (GPT models)
50
+ # - Anthropic (Claude models)
51
+ # - Google (Gemini models)
52
+ # - Cohere
53
+ # - Mistral AI
54
+ # - Groq
55
+ # - Fireworks AI
56
+ # - Together AI
57
+ # - DeepSeek
58
+ # - OpenRouter
59
+ # - Perplexity
60
+ # - xAI
61
+ # - Azure OpenAI
62
+ #
63
+ # @see Durable::Llm::Client For the main client interface
64
+ # @see Durable::Llm::Configuration For configuration options
65
+ # @see Durable::Llm::Providers For available providers
66
+
67
+ require 'zeitwerk'
2
68
  loader = Zeitwerk::Loader.new
3
- loader.tag = File.basename(__FILE__, ".rb")
69
+ loader.tag = File.basename(__FILE__, '.rb')
4
70
  loader.inflector = Zeitwerk::GemInflector.new(__FILE__)
5
- loader.push_dir(File.dirname(__FILE__) + '/..' )
71
+ loader.push_dir("#{File.dirname(__FILE__)}/..")
6
72
 
7
73
  require 'durable/llm/configuration'
74
+ require 'durable/llm/version'
8
75
 
9
76
  module Durable
77
+ # The Llm module provides a unified interface for Large Language Model operations.
78
+ #
79
+ # This module serves as the main entry point for the Durable LLM gem, offering:
80
+ # - Global configuration management
81
+ # - Provider-agnostic client creation
82
+ # - Convenience methods for common operations
83
+ # - Access to version information
84
+ #
85
+ # The module maintains a singleton configuration instance that can be customized
86
+ # to set API keys, default providers, and other global settings.
87
+ #
88
+ # @example Basic setup and usage
89
+ # Durable::Llm.configure do |config|
90
+ # config.openai.api_key = 'sk-...'
91
+ # end
92
+ #
93
+ # client = Durable::Llm.new(:openai)
94
+ # response = client.quick_complete('Hello!')
95
+ #
96
+ # @see Durable::Llm::Client
97
+ # @see Durable::Llm::Configuration
10
98
  module Llm
11
99
  class << self
100
+ # @return [Configuration] The global configuration instance
12
101
  attr_accessor :configuration
102
+
103
+ # Returns the current configuration instance.
104
+ #
105
+ # This is an alias for the configuration accessor, provided for convenience.
106
+ #
107
+ # @return [Configuration] The global configuration instance
108
+ # @see #configuration
13
109
  def config
14
110
  configuration
15
111
  end
112
+
113
+ # Creates a new LLM client for the specified provider.
114
+ #
115
+ # This is a convenience method that creates a new Client instance with the
116
+ # given provider and options. It's equivalent to calling
117
+ # `Durable::Llm::Client.new(provider, options)`.
118
+ #
119
+ # @param provider [Symbol, String] The provider name (e.g., :openai, :anthropic)
120
+ # @param options [Hash] Configuration options for the client
121
+ # @option options [String] :model The default model to use
122
+ # @option options [String] :api_key API key for authentication
123
+ # @return [Client] A new client instance
124
+ # @raise [NameError] If the provider is not found
125
+ # @example Create an OpenAI client
126
+ # client = Durable::Llm.new(:openai, api_key: 'sk-...', model: 'gpt-4')
127
+ # @example Create an Anthropic client
128
+ # client = Durable::Llm.new(:anthropic, api_key: 'sk-ant-...')
129
+ def new(provider, options = {})
130
+ Client.new(provider, options)
131
+ end
16
132
  end
17
133
 
134
+ # Configures the global LLM settings.
135
+ #
136
+ # This method initializes or yields the global configuration instance,
137
+ # allowing you to set API keys, default providers, and other global options.
138
+ #
139
+ # @yield [configuration] The configuration instance to modify
140
+ # @yieldparam configuration [Configuration] The global configuration object
141
+ # @return [void]
142
+ # @example Configure API keys
143
+ # Durable::Llm.configure do |config|
144
+ # config.openai.api_key = 'sk-...'
145
+ # config.anthropic.api_key = 'sk-ant-...'
146
+ # config.default_provider = 'openai'
147
+ # end
148
+ # @example Configure from environment
149
+ # # Environment variables are automatically loaded
150
+ # ENV['DLLM__OPENAI__API_KEY'] = 'sk-...'
151
+ # Durable::Llm.configure do |config|
152
+ # # Additional programmatic configuration
153
+ # end
18
154
  def self.configure
19
155
  self.configuration ||= Configuration.new
20
156
  yield(configuration)
@@ -25,3 +161,7 @@ end
25
161
  Durable::Llm.configure do
26
162
  end
27
163
 
164
+ require 'durable/llm/providers'
165
+ require 'durable/llm/client'
166
+
167
+ # Copyright (c) 2025 Durable Programming, LLC. All rights reserved.
data/lib/durable.rb CHANGED
@@ -1,7 +1,32 @@
1
- require 'durable/llm'
1
+ # frozen_string_literal: true
2
2
 
3
+ # Main entry point for the Durable gem.
4
+ #
5
+ # This module provides a namespace for Durable Programming LLC's Ruby gems.
6
+ # It uses autoloading for efficient memory usage and lazy loading of components.
7
+ #
8
+ # Currently, it provides access to the LLM functionality through the Llm submodule.
9
+ #
10
+ # @example Basic usage
11
+ # require 'durable'
12
+ #
13
+ # # Access LLM functionality
14
+ # Durable::Llm.configure do |config|
15
+ # config.openai.api_key = 'your-key'
16
+ # end
17
+ #
18
+ # client = Durable::Llm.new(:openai)
19
+ # response = client.quick_complete('Hello!')
20
+ #
21
+ # @see Durable::Llm
22
+
23
+ # Namespace module for Durable Programming LLC's Ruby gems.
24
+ #
25
+ # This module serves as the root namespace for all Durable gems, providing
26
+ # autoloaded access to various components and functionality.
3
27
  module Durable
4
- # This module serves as a namespace for the Durable gem.
5
- # It currently only requires the Llm module, but can be expanded
6
- # in the future to include other Durable-related functionality.
28
+ # Autoload the Llm module for lazy loading
29
+ autoload :Llm, 'durable/llm'
7
30
  end
31
+
32
+ # Copyright (c) 2025 Durable Programming, LLC. All rights reserved.
data/sig/durable/llm.rbs CHANGED
@@ -1,6 +1,307 @@
1
1
  module Durable
2
2
  module Llm
3
3
  VERSION: String
4
- # See the writing guide of rbs: https://github.com/ruby/rbs#guides
4
+
5
+ def self.configure: () { (Configuration) -> void } -> void
6
+ def self.configuration: () -> Configuration
7
+ def self.config: () -> Configuration
8
+
9
+ class Error < StandardError
10
+ end
11
+
12
+ class APIError < Error
13
+ end
14
+
15
+ class RateLimitError < Error
16
+ end
17
+
18
+ class AuthenticationError < Error
19
+ end
20
+
21
+ class InvalidRequestError < Error
22
+ end
23
+
24
+ class ResourceNotFoundError < Error
25
+ end
26
+
27
+ class TimeoutError < Error
28
+ end
29
+
30
+ class ServerError < Error
31
+ end
32
+
33
+ class UnsupportedProviderError < Error
34
+ end
35
+
36
+ class ConfigurationError < Error
37
+ end
38
+
39
+ class ModelNotFoundError < Error
40
+ end
41
+
42
+ class InsufficientQuotaError < Error
43
+ end
44
+
45
+ class InvalidResponseError < Error
46
+ end
47
+
48
+ class NetworkError < Error
49
+ end
50
+
51
+ class StreamingError < Error
52
+ end
53
+
54
+ class Configuration
55
+ attr_accessor default_provider: String
56
+ attr_reader providers: Hash[Symbol, untyped]
57
+
58
+ def initialize: () -> void
59
+ def clear: () -> void
60
+ def load_from_datasette: () -> void
61
+ def load_from_env: () -> void
62
+ def method_missing: (Symbol method_name, *untyped args) -> untyped
63
+ def respond_to_missing?: (Symbol method_name, ?bool include_private) -> bool
64
+ end
65
+
66
+ class Client
67
+ attr_reader provider: untyped
68
+ attr_accessor model: String?
69
+
70
+ def initialize: (Symbol | String provider_name, ?Hash[Symbol | String, untyped] options) -> void
71
+ def default_params: () -> Hash[Symbol, String?]
72
+ def quick_complete: (String text, ?Hash[Symbol, untyped] _opts) -> String
73
+ def completion: (?Hash[Symbol, untyped] params) -> untyped
74
+ def chat: (?Hash[Symbol, untyped] params) -> untyped
75
+ def embed: (?Hash[Symbol, untyped] params) -> untyped
76
+ def stream: (?Hash[Symbol, untyped] params) { (untyped) -> void } -> untyped
77
+ def stream?: () -> bool
78
+
79
+ private
80
+ def process_params: (?Hash[Symbol, untyped] opts) -> Hash[Symbol, untyped]
81
+ end
82
+
83
+ module Providers
84
+ def self.load_all: () -> void
85
+ def self.providers: () -> Array[Symbol]
86
+ def self.model_ids: () -> Array[String]
87
+ def self.model_id_to_provider: (String model_id) -> Class?
88
+
89
+ class Base
90
+ attr_accessor api_key: String?
91
+
92
+ def initialize: (?api_key: String?) -> void
93
+ def default_api_key: () -> String?
94
+ def completion: (Hash[Symbol | String, untyped] options) -> untyped
95
+ def self.models: () -> Array[String]
96
+ def models: () -> Array[String]
97
+ def self.stream?: () -> bool
98
+ def stream?: () -> bool
99
+ def stream: (Hash[Symbol | String, untyped] options) { (untyped) -> void } -> untyped
100
+ def embedding: (model: String, input: String | Array[String], **untyped options) -> untyped
101
+
102
+ private
103
+ def handle_response: (untyped response) -> untyped
104
+ end
105
+
106
+ class OpenAI < Base
107
+ BASE_URL: String
108
+ attr_accessor organization: String?
109
+
110
+ def initialize: (?api_key: String?, ?organization: String?) -> void
111
+ def completion: (Hash[Symbol | String, untyped] options) -> OpenAIResponse
112
+ def embedding: (model: String, input: String | Array[String], **untyped options) -> OpenAIEmbeddingResponse
113
+ def models: () -> Array[String]
114
+ def self.stream?: () -> true
115
+ def stream: (Hash[Symbol | String, untyped] options) { (OpenAIStreamResponse) -> void } -> void
116
+
117
+ class OpenAIResponse
118
+ attr_reader raw_response: Hash[String, untyped]
119
+ def initialize: (Hash[String, untyped] response) -> void
120
+ def choices: () -> Array[OpenAIChoice]
121
+ def data: () -> untyped
122
+ def embedding: () -> untyped
123
+ def to_s: () -> String
124
+ end
125
+
126
+ class OpenAIChoice
127
+ attr_reader message: OpenAIMessage
128
+ attr_reader finish_reason: String?
129
+ def initialize: (Hash[String, untyped] choice) -> void
130
+ def to_s: () -> String
131
+ end
132
+
133
+ class OpenAIMessage
134
+ attr_reader role: String
135
+ attr_reader content: String
136
+ def initialize: (Hash[String, untyped] message) -> void
137
+ def to_s: () -> String
138
+ end
139
+
140
+ class OpenAIStreamResponse
141
+ attr_reader choices: OpenAIStreamChoice
142
+ def initialize: (Hash[String, untyped] parsed) -> void
143
+ def to_s: () -> String
144
+ end
145
+
146
+ class OpenAIStreamChoice
147
+ attr_reader delta: OpenAIStreamDelta
148
+ attr_reader finish_reason: String?
149
+ def initialize: (Array[Hash[String, untyped]] | Hash[String, untyped] choice) -> void
150
+ def to_s: () -> String
151
+ end
152
+
153
+ class OpenAIStreamDelta
154
+ attr_reader role: String?
155
+ attr_reader content: String?
156
+ def initialize: (Hash[String, untyped] delta) -> void
157
+ def to_s: () -> String
158
+ end
159
+
160
+ class OpenAIEmbeddingResponse
161
+ attr_reader embedding: Array[Float]
162
+ def initialize: (Hash[String, untyped] data) -> void
163
+ def to_a: () -> Array[Float]
164
+ end
165
+ end
166
+
167
+ class Anthropic < Base
168
+ BASE_URL: String
169
+
170
+ def initialize: (?api_key: String?) -> void
171
+ def completion: (Hash[Symbol | String, untyped] options) -> AnthropicResponse
172
+ def models: () -> Array[String]
173
+ def self.models: () -> Array[String]
174
+ def self.stream?: () -> true
175
+ def stream: (Hash[Symbol | String, untyped] options) { (AnthropicStreamResponse) -> void } -> void
176
+
177
+ class AnthropicResponse
178
+ attr_reader raw_response: Hash[String, untyped]
179
+ def initialize: (Hash[String, untyped] response) -> void
180
+ def choices: () -> Array[AnthropicChoice]
181
+ def to_s: () -> String
182
+ end
183
+
184
+ class AnthropicChoice
185
+ attr_reader message: AnthropicMessage
186
+ def initialize: (untyped content) -> void
187
+ def to_s: () -> String
188
+ end
189
+
190
+ class AnthropicMessage
191
+ attr_reader role: String
192
+ attr_reader content: String
193
+ def initialize: (untyped content) -> void
194
+ def to_s: () -> String
195
+ end
196
+
197
+ class AnthropicStreamResponse
198
+ attr_reader choices: Array[AnthropicStreamChoice]
199
+ def initialize: (String fragment) -> void
200
+ def to_s: () -> String
201
+ end
202
+
203
+ class AnthropicStreamChoice
204
+ attr_reader delta: AnthropicStreamDelta
205
+ def initialize: (Hash[String, untyped] delta) -> void
206
+ def to_s: () -> String
207
+ end
208
+
209
+ class AnthropicStreamDelta
210
+ attr_reader type: String?
211
+ attr_reader text: String?
212
+ def initialize: (Hash[String, untyped] delta) -> void
213
+ def to_s: () -> String
214
+ end
215
+ end
216
+
217
+ class Groq < Base
218
+ BASE_URL: String
219
+
220
+ def self.conn: () -> untyped
221
+ def conn: () -> untyped
222
+ def initialize: (?api_key: String?) -> void
223
+ def completion: (Hash[Symbol | String, untyped] options) -> GroqResponse
224
+ def embedding: (model: String, input: String | Array[String], **untyped options) -> GroqResponse
225
+ def models: () -> Array[String]
226
+ def self.stream?: () -> false
227
+
228
+ class GroqResponse
229
+ attr_reader raw_response: Hash[String, untyped]
230
+ def initialize: (Hash[String, untyped] response) -> void
231
+ def choices: () -> Array[GroqChoice]
232
+ def to_s: () -> String
233
+ def to_h: () -> Hash[String, untyped]
234
+ end
235
+
236
+ class GroqChoice
237
+ attr_reader message: GroqMessage
238
+ attr_reader finish_reason: String?
239
+ def initialize: (Hash[String, untyped] choice) -> void
240
+ def to_s: () -> String
241
+ end
242
+
243
+ class GroqMessage
244
+ attr_reader role: String
245
+ attr_reader content: String
246
+ def initialize: (Hash[String, untyped] message) -> void
247
+ def to_s: () -> String
248
+ end
249
+ end
250
+
251
+ class Huggingface < Base
252
+ BASE_URL: String
253
+
254
+ def initialize: (?api_key: String?) -> void
255
+ def completion: (Hash[Symbol | String, untyped] options) -> HuggingfaceResponse
256
+ def models: () -> Array[String]
257
+ def self.models: () -> Array[String]
258
+
259
+ class HuggingfaceResponse
260
+ attr_reader raw_response: untyped
261
+ def initialize: (untyped response) -> void
262
+ def choices: () -> Array[HuggingfaceChoice]
263
+ def to_s: () -> String
264
+ end
265
+
266
+ class HuggingfaceChoice
267
+ attr_reader text: String
268
+ def initialize: (Hash[String, untyped] choice) -> void
269
+ def to_s: () -> String
270
+ end
271
+ end
272
+
273
+ class Cohere < Base
274
+ BASE_URL: String
275
+
276
+ def initialize: (?api_key: String?) -> void
277
+ def completion: (Hash[Symbol | String, untyped] options) -> CohereResponse
278
+ def models: () -> Array[String]
279
+ def self.stream?: () -> false
280
+
281
+ class CohereResponse
282
+ attr_reader raw_response: Hash[String, untyped]
283
+ def initialize: (Hash[String, untyped] response) -> void
284
+ def choices: () -> Array[CohereChoice]
285
+ def to_s: () -> String
286
+ end
287
+
288
+ class CohereChoice
289
+ attr_reader text: String
290
+ def initialize: (Hash[String, untyped] generation) -> void
291
+ def to_s: () -> String
292
+ end
293
+ end
294
+
295
+ Openai: singleton(OpenAI)
296
+ Claude: singleton(Anthropic)
297
+ Claude3: singleton(Anthropic)
298
+ end
299
+
300
+ class CLI < Thor
301
+ def self.exit_on_failure?: () -> true
302
+ def prompt: (*String prompt) -> void
303
+ def chat: () -> void
304
+ def models: () -> void
305
+ end
5
306
  end
6
307
  end