ai_client 0.2.4 → 0.2.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 7b12150b0f3f29804d520aa319a99cc71adc96f023ec99bccc0a9a6682bdba6e
4
- data.tar.gz: 59d811a6b9de974f76672a9a1386d6efd3e4547d53ae7b4d24c532a250c00c86
3
+ metadata.gz: f2b330f59ff7f380306ec354ce5f7197adce8dd101a4b506ab137568f098242a
4
+ data.tar.gz: f169296b9f20479b1c608f2202d9ab8a8eb416aa80b07f4308ec4648b9c86fcc
5
5
  SHA512:
6
- metadata.gz: 0beacebae38d5c2101498a59245c5112469ff8fddda2723b5d916ae6d5f731fa6e9e55de46422a6daf463242356e1575c0bbdf77612f9edf9fd05d6b4f885407
7
- data.tar.gz: 36b2b5f5fd3d51577b31ac8cbcd57980cb7d0a912428f37e8f345764620aa02198a65b905505136ea40ea2866e274b1f8d6b9b1c9390ccd3608eaad138c37ebb
6
+ metadata.gz: ce0e2cddd8ec6935a5bea24125358fbcc8fda902340d7767972142505af41eaeef7d7f6f5791bd7d26469fd45e59020237fd0061e75d6a987d8a4181597cdbe9
7
+ data.tar.gz: 4b2513fba82802eda13cf26c1af0e73f0903a2140765401ed686540d80480d887f2152e342a5fe3546c89482320f61cc6bd04b4e41b8851668bbda31b2496fc9
data/CHANGELOG.md CHANGED
@@ -1,6 +1,10 @@
1
1
  ## [Unreleased]
2
2
 
3
3
  ## Released
4
+
5
+ ### [0.2.5] - 2024-10-11
6
+ - Added examples/tool.rb to demonstrate use of function callbacks to provide information to the LLM when it needs it.
7
+
4
8
  ### [0.2.4] - 2024-10-10
5
9
  - constrained gem omniai-openai to version 1.8.3+ for access to open_router.ai
6
10
  - caching models database from open_router.ai
data/README.md CHANGED
@@ -284,8 +284,7 @@ One of the latest innovations in LLMs is the ability to use functions (aka tools
284
284
 
285
285
  See [blog post](https://ksylvest.com/posts/2024-08-16/using-omniai-to-leverage-tools-with-llms) by Kevin Sylvestre, author of the OmniAI gem.
286
286
 
287
-
288
- TODO: Need to create an example RAG that does not need another access token to a service
287
+ Take a look at the [examples/tools.rb](examples/tools.rb) file to see different ways in which these callable processes can be defined.
289
288
 
290
289
 
291
290
  ## Best ?? Practices
data/examples/README.md CHANGED
@@ -7,6 +7,7 @@
7
7
  | embed.rb | Demonstrates using Ollama locally to vectorize text for embeddings|
8
8
  | speak.rb | Demonstrates using OpenAI's text to speech models |
9
9
  | text.rb | Demonstrates text-to-text transformers "chat" |
10
+ | tools.rb | Demonstrates usage of function callbacks (i.e. tools) |
10
11
  | transcribe.rb | Uses OpenAI's audio-to-text model |
11
12
 
12
13
  Many of these example programs show both the raw response object as well as just the
data/examples/tools.rb ADDED
@@ -0,0 +1,90 @@
1
+ #!/usr/bin/env ruby
2
+ # examples/tools.rb
3
+ # See: https://ksylvest.com/posts/2024-08-16/using-omniai-to-leverage-tools-with-llms
4
+
5
+ require_relative 'common'
6
+
7
+ AI = AiClient.new('gpt-4o')
8
+
9
+ box "omniai-openai's random temp example"
10
+
11
+ my_weather_function = Proc.new do |location:, unit: 'Celsius'|
12
+ "#{rand(20..50)}° #{unit} in #{location}"
13
+ end
14
+
15
+ weather = AiClient::Tool.new(
16
+ my_weather_function,
17
+ name: 'weather',
18
+ description: 'Lookup the weather in a location',
19
+ parameters: AiClient::Tool::Parameters.new(
20
+ properties: {
21
+ location: AiClient::Tool::Property.string(description: 'e.g. Toronto'),
22
+ unit: AiClient::Tool::Property.string(enum: %w[Celsius Fahrenheit]),
23
+ },
24
+ required: %i[location]
25
+ )
26
+ )
27
+
28
+ simple_prompt = <<~TEXT
29
+ What is the weather in "London" in Celsius and "Paris" in Fahrenheit?
30
+ Also what are some ideas for activities in both cities given the weather?
31
+ TEXT
32
+
33
+ response = AI.chat(simple_prompt, tools: [weather])
34
+ puts response
35
+
36
+ ##########################################
37
+ box "Accessing a database to get information"
38
+
39
+ llm_db_function = Proc.new do |params|
40
+ records = AiClient::LLM.where(id: /#{params[:model_name]}/i)
41
+ records.inspect
42
+ end
43
+
44
+
45
+ llm_db = AiClient::Tool.new(
46
+ llm_db_function,
47
+ name: 'llm_db',
48
+ description: 'lookup details about an LLM model name',
49
+ parameters: AiClient::Tool::Parameters.new(
50
+ properties: {
51
+ model_name: AiClient::Tool::Property.string
52
+ },
53
+ required: %i[model_name]
54
+ )
55
+ )
56
+
57
+ response = AI.chat("Get details on an LLM model named bison. Which one is the cheapest per prompt token.", tools: [llm_db])
58
+ puts response
59
+
60
+ ##########################################
61
+
62
+ # TODO: Look at creating a better function
63
+ # process such that the tools parameter
64
+ # is an Array of Symbols which is
65
+ # maintained as a class variable.
66
+ # The symbols are looked up and the
67
+ # proper instance is inserted in its
68
+ # place.
69
+
70
+ box "Using a function class and multiple tools"
71
+
72
+ class FunctionClass
73
+ def self.call
74
+ "April 25th its not to hot nor too cold."
75
+ end
76
+
77
+ def function(my_name)
78
+ AiClient::Tool.new(
79
+ self.class, # with a self.call method
80
+ name: my_name,
81
+ description: 'what is the perfect date'
82
+ )
83
+ end
84
+ end
85
+
86
+ perfect_date = FunctionClass.new.function('perfect_date')
87
+
88
+ response = AI.chat("what is the perfect date for paris weather?", tools: [weather, perfect_date])
89
+ puts response
90
+ puts
@@ -92,7 +92,11 @@ class AiClient
92
92
  include Hashie::Extensions::Mash::SymbolizeKeys
93
93
  include Hashie::Extensions::Mash::DefineAccessors
94
94
 
95
-
95
+ # Saves the current configuration to the specified file.
96
+ #
97
+ # @param filepath [String] The path to the file where the configuration will be saved.
98
+ # Defaults to '~/aiclient_config.yml' if not provided.
99
+ #
96
100
  def save(filepath=ENV['HOME']+'/aiclient_config.yml')
97
101
  filepath = Pathname.new(filepath) unless filepath.is_a? Pathname
98
102
 
@@ -101,6 +105,13 @@ class AiClient
101
105
 
102
106
 
103
107
  class << self
108
+ # Loads configuration from the specified YAML file.
109
+ #
110
+ # @param filepath [String] The path to the configuration file.
111
+ # Defaults to 'config.yml' if not provided.
112
+ # @return [AiClient::Config] The loaded configuration.
113
+ # @raise [ArgumentError] If the specified file does not exist.
114
+ #
104
115
  def load(filepath=DEFAULT_CONFIG_FILEPATH)
105
116
  filepath = Pathname.new(filepath) unless Pathname == filepath.class
106
117
  if filepath.exist?
@@ -115,17 +126,30 @@ class AiClient
115
126
  class << self
116
127
  attr_accessor :class_config, :default_config
117
128
 
129
+ # Configures the AiClient with a given block.
130
+ #
131
+ # @yieldparam config [AiClient::Config] The configuration instance.
132
+ # @return [void]
133
+ #
118
134
  def configure(&block)
119
135
  yield(class_config)
120
136
  end
121
137
 
138
+ # Resets the default configuration to the value defined in the class.
139
+ #
140
+ # @return [void]
141
+ #
122
142
  def reset_default_config
123
143
  initialize_defaults
124
144
  .save(Config::DEFAULT_CONFIG_FILEPATH)
125
145
  end
126
146
 
127
147
  private
128
-
148
+
149
+ # Initializes the default configuration.
150
+ #
151
+ # @return [void]
152
+ #
129
153
  def initialize_defaults
130
154
  @default_config = Config.new(
131
155
  logger: Logger.new(STDOUT),
data/lib/ai_client/llm.rb CHANGED
@@ -9,11 +9,26 @@ class AiClient
9
9
  DATA_PATH = Pathname.new( __dir__ + '/models.yml')
10
10
  self.data = YAML.parse(DATA_PATH.read).to_ruby
11
11
 
12
+ # Extracts the model name from the LLM ID.
13
+ #
14
+ # @return [String] the model name.
15
+ #
12
16
  def model = id.split('/')[1]
17
+
18
+ # Extracts the provider name from the LLM ID.
19
+ #
20
+ # @return [String] the provider name.
21
+ #
13
22
  def provider = id.split('/')[0]
14
23
  end
15
24
 
16
25
  class << self
26
+
27
+ # Resets the LLM data by fetching models from the Orc client
28
+ # and writing it to the models.yml file.
29
+ #
30
+ # @return [void]
31
+ #
17
32
  def reset_llm_data
18
33
  orc_models = AiClient.orc_client.models
19
34
  AiClient::LLM.data = orc_models
@@ -16,10 +16,23 @@ class AiClient
16
16
  # )
17
17
 
18
18
  class LoggingMiddleware
19
+
20
+ # Initializes the LoggingMiddleware with a logger.
21
+ #
22
+ # @param logger [Logger] The logger used for logging middleware actions.
23
+ #
19
24
  def initialize(logger)
20
25
  @logger = logger
21
26
  end
22
27
 
28
+ # Calls the next middleware in the stack while logging the start and finish times.
29
+ #
30
+ # @param client [Object] The client instance.
31
+ # @param next_middleware [Proc] The next middleware to call.
32
+ # @param args [Array] The arguments passed to the middleware call, with the first being the method name.
33
+ #
34
+ # @return [Object] The result of the next middleware call.
35
+ #
23
36
  def call(client, next_middleware, *args)
24
37
  method_name = args.first.is_a?(Symbol) ? args.first : 'unknown method'
25
38
  @logger.info("Starting #{method_name} call")
@@ -8,9 +8,19 @@
8
8
  # Change this so that middleware can be added
9
9
  # and removed from an existing client.
10
10
 
11
-
11
+ # AiClient class that handles middleware functionality
12
+ # for API calls.
12
13
  class AiClient
13
14
 
15
+ # Calls the specified method with middlewares applied.
16
+ #
17
+ # @param method [Symbol] the name of the method to be called
18
+ # @param args [Array] additional arguments for the method
19
+ # @param kwargs [Hash] named parameters for the method
20
+ # @param block [Proc] optional block to be passed to the method
21
+ #
22
+ # @return [Object] result of the method call after applying middlewares
23
+ #
14
24
  def call_with_middlewares(method, *args, **kwargs, &block)
15
25
  stack = self.class.middlewares.reverse.reduce(-> { send(method, *args, **kwargs, &block) }) do |next_middleware, middleware|
16
26
  -> { middleware.call(self, next_middleware, *args, **kwargs) }
@@ -21,17 +31,30 @@ class AiClient
21
31
 
22
32
  class << self
23
33
 
34
+ # Returns the list of middlewares applied to the client.
35
+ #
36
+ # @return [Array] list of middlewares
37
+ #
24
38
  def middlewares
25
39
  @middlewares ||= []
26
40
  end
27
41
 
42
+ # Adds a middleware to the stack.
43
+ #
44
+ # @param middleware [Proc] the middleware to be added
45
+ #
46
+ # @return [void]
47
+ #
28
48
  def use(middleware)
29
49
  middlewares << middleware
30
50
  end
31
51
 
52
+ # Clears all middlewares from the client.
53
+ #
54
+ # @return [void]
55
+ #
32
56
  def clear_middlewares
33
57
  @middlewares = []
34
58
  end
35
59
  end
36
-
37
60
  end
@@ -8,14 +8,53 @@ require 'open_router'
8
8
  require 'yaml'
9
9
 
10
10
  class AiClient
11
+
12
+ # Retrieves the available models.
13
+ #
14
+ # @return [Array<String>] List of model IDs.
15
+ #
16
+ def models
17
+ self.class.models
18
+ end
19
+
20
+ # Retrieves the available providers.
21
+ #
22
+ # @return [Array<String>] List of provider names.
23
+ def providers
24
+ self.class.providers
25
+ end
26
+
27
+ # Retrieves model names, optionally filtered by provider.
28
+ #
29
+ # @param provider [String, nil] The provider to filter models by.
30
+ # @return [Array<String>] List of model names.
31
+ def model_names(provider = nil)
32
+ self.class.model_names(provider)
33
+ end
34
+
35
+ # Retrieves details for a specific model.
36
+ #
37
+ # @param a_model [String] The model ID to retrieve details for.
38
+ # @return [Hash, nil] Details of the model or nil if not found.
39
+ def model_details(a_model)
40
+ self.class.model_details(a_model)
41
+ end
42
+
43
+ # Finds models matching a given substring.
44
+ #
45
+ # @param a_model_substring [String] The substring to search for.
46
+ # @return [Array<String>] List of matching model names.
47
+ def find_model(a_model_substring)
48
+ self.class.find_model(a_model_substring)
49
+ end
11
50
 
12
- def models = self.class.models
13
- def providers = self.class.providers
14
- def model_names(a_provider=nil) = self.class.model_names(a_provider)
15
- def model_details(a_model) = self.class.model_details(a_model)
16
- def find_model(a_model_substring) = self.class.find_model(a_model_substring)
17
51
 
18
52
  class << self
53
+
54
+ # Adds OpenRouter extensions to AiClient.
55
+ #
56
+ # @return [void]
57
+ #
19
58
  def add_open_router_extensions
20
59
  access_token = fetch_access_token
21
60
 
@@ -25,10 +64,18 @@ class AiClient
25
64
  initialize_orc_client
26
65
  end
27
66
 
67
+ # Retrieves ORC client instance.
68
+ #
69
+ # @return [OpenRouter::Client] Instance of the OpenRouter client.
70
+ #
28
71
  def orc_client
29
72
  @orc_client ||= add_open_router_extensions || raise("OpenRouter extensions are not available")
30
73
  end
31
74
 
75
+ # Retrieves models from the ORC client.
76
+ #
77
+ # @return [Array<Hash>] List of models.
78
+ #
32
79
  def orc_models
33
80
  @orc_models ||= orc_client.models
34
81
  end
@@ -36,6 +83,11 @@ class AiClient
36
83
  # TODO: Refactor these DB like methods to take
37
84
  # advantage of AiClient::LLM
38
85
 
86
+ # Retrieves model names associated with a provider.
87
+ #
88
+ # @param provider [String, nil] The provider to filter models by.
89
+ # @return [Array<String>] List of model names.
90
+ #
39
91
  def model_names(provider=nil)
40
92
  model_ids = models.map { _1['id'] }
41
93
 
@@ -44,18 +96,36 @@ class AiClient
44
96
  model_ids.filter_map { _1.split('/')[1] if _1.start_with?(provider.to_s.downcase) }
45
97
  end
46
98
 
99
+ # Retrieves details of a specific model.
100
+ #
101
+ # @param model [String] The model ID to retrieve details for.
102
+ # @return [Hash, nil] Details of the model or nil if not found.
103
+ #
47
104
  def model_details(model)
48
105
  orc_models.find { _1['id'].include?(model) }
49
106
  end
50
107
 
108
+ # Retrieves the available providers.
109
+ #
110
+ # @return [Array<String>] List of unique provider names.
111
+ #
51
112
  def providers
52
113
  @providers ||= models.map{ _1['id'].split('/')[0] }.sort.uniq
53
114
  end
54
115
 
116
+ # Finds models matching a given substring.
117
+ #
118
+ # @param a_model_substring [String] The substring to search for.
119
+ # @return [Array<String>] List of matching model names.
120
+ #
55
121
  def find_model(a_model_substring)
56
122
  model_names.select{ _1.include?(a_model_substring) }
57
123
  end
58
124
 
125
+ # Resets LLM data with the available ORC models.
126
+ #
127
+ # @return [void]
128
+ #
59
129
  def reset_llm_data
60
130
  LLM.data = orc_models
61
131
  LLM::DATA_PATH.write(orc_models.to_yaml)
@@ -64,23 +134,34 @@ class AiClient
64
134
 
65
135
  private
66
136
 
67
- # Similar to fetch_api_key but for the class_config
137
+ # Fetches the access token from environment variables.
138
+ #
139
+ # @return [String, nil] The access token or nil if not found.
140
+ #
68
141
  def fetch_access_token
69
142
  class_config.envar_api_key_names.open_router
70
143
  .map { |key| ENV[key] }
71
144
  .compact
72
145
  .first
73
146
  end
74
-
147
+
148
+ # Configures the OpenRouter client with the access token.
149
+ #
150
+ # @param access_token [String] The access token to configure.
151
+ # @return [void]
152
+ #
75
153
  def configure_open_router(access_token)
76
154
  OpenRouter.configure { |config| config.access_token = access_token }
77
155
  end
78
156
 
157
+ # Initializes the ORC client.
158
+ #
159
+ # @return [void]
160
+ #
79
161
  def initialize_orc_client
80
162
  @orc_client ||= OpenRouter::Client.new
81
163
  end
82
164
  end
83
165
  end
84
166
 
85
-
86
167
  AiClient.add_open_router_extensions
@@ -11,12 +11,27 @@ class AiClient
11
11
  # )
12
12
  #
13
13
  class RetryMiddleware
14
+
15
+ # Initializes a new instance of RetryMiddleware.
16
+ #
17
+ # @param max_retries [Integer] The maximum number of retries to attempt (default: 3).
18
+ # @param base_delay [Integer] The base delay in seconds before retrying (default: 2).
19
+ # @param max_delay [Integer] The maximum delay in seconds between retries (default: 16).
20
+ #
14
21
  def initialize(max_retries: 3, base_delay: 2, max_delay: 16)
15
22
  @max_retries = max_retries
16
23
  @base_delay = base_delay
17
24
  @max_delay = max_delay
18
25
  end
19
26
 
27
+ # Calls the next middleware, retrying on specific errors.
28
+ #
29
+ # @param client [AiClient] The client instance that invokes the middleware.
30
+ # @param next_middleware [Proc] The next middleware in the chain to call.
31
+ # @param args [Array] Any additional arguments to pass to the next middleware.
32
+ #
33
+ # @raise [StandardError] Reraise the error if max retries are exceeded.
34
+ #
20
35
  def call(client, next_middleware, *args)
21
36
  retries = 0
22
37
  begin
@@ -0,0 +1,18 @@
1
+ # lib/ai_client/tool.rb
2
+
3
+ # TODO: Turn this into a Function class using the pattern
4
+ # in examples/tools.rb
5
+ # put the function names as symbols into a class Array
6
+ # In the AiClient class transform the tools: []
7
+ # parameter from an Array of Symbols into an Array
8
+ # of Function instances.
9
+
10
+ class AiClient::Tool < OmniAI::Tool
11
+
12
+ def xyzzy = self.class.xyzzy
13
+
14
+ class << self
15
+ def xyzzy = puts "Magic"
16
+ end
17
+ end
18
+
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  class AiClient
4
- VERSION = "0.2.4"
4
+ VERSION = "0.2.5"
5
5
  end
data/lib/ai_client.rb CHANGED
@@ -16,6 +16,8 @@ require 'omniai/openai'
16
16
 
17
17
  require 'open_router'
18
18
 
19
+ require_relative 'ai_client/version'
20
+
19
21
  require_relative 'ai_client/chat'
20
22
  require_relative 'ai_client/embed'
21
23
  require_relative 'ai_client/speak'
@@ -23,10 +25,10 @@ require_relative 'ai_client/transcribe'
23
25
 
24
26
  require_relative 'ai_client/configuration'
25
27
  require_relative 'ai_client/middleware'
26
- require_relative 'ai_client/version'
27
28
 
28
29
  require_relative 'ai_client/open_router_extensions'
29
30
  require_relative 'ai_client/llm' # SMELL: must come after the open router stuff
31
+ require_relative 'ai_client/tool'
30
32
 
31
33
  # Create a generic client instance using only model name
32
34
  # client = AiClient.new('gpt-3.5-turbo')
@@ -76,6 +78,8 @@ class AiClient
76
78
  :timeout,
77
79
  :config # Instance configuration
78
80
 
81
+ # Initializes a new AiClient instance.
82
+ #
79
83
  # You can over-ride the class config by providing a block like this
80
84
  # c = AiClient.new(...) do |config|
81
85
  # config.logger = nil
@@ -90,6 +94,14 @@ class AiClient
90
94
  # The options object is basically those things that the
91
95
  # OmniAI clients want to see.
92
96
  #
97
+ # @param model [String] The model name to use for the client.
98
+ # @param options [Hash] Optional named parameters:
99
+ # - :provider [Symbol] Specify the provider.
100
+ # - :config [String] Path to a YAML configuration file.
101
+ # - :logger [Logger] Logger instance for the client.
102
+ # - :timeout [Integer] Timeout value for requests.
103
+ # @yield [config] An optional block to configure the instance.
104
+ #
93
105
  def initialize(model, **options, &block)
94
106
  # Assign the instance variable @config from the class variable @@config
95
107
  @config = self.class.class_config.dup
@@ -121,13 +133,34 @@ class AiClient
121
133
  @last_response = nil
122
134
  end
123
135
 
136
+ # TODO: Review these raw-ish methods are they really needed?
137
+ # raw? should be a private method ??
138
+
139
+ # Returns the last response received from the client.
140
+ #
141
+ # @return [OmniAI::Response] The last response.
142
+ #
124
143
  def response = last_response
144
+
145
+ # Checks if the client is set to return raw responses.
146
+ #
147
+ # @return [Boolean] True if raw responses are to be returned.
125
148
  def raw? = config.return_raw
126
149
 
150
+
151
+ # Sets whether to return raw responses.
152
+ #
153
+ # @param value [Boolean] The value to set for raw responses return.
154
+ #
127
155
  def raw=(value)
128
156
  config.return_raw = value
129
157
  end
130
158
 
159
+ # Extracts the content from the last response based on the provider.
160
+ #
161
+ # @return [String] The extracted content.
162
+ # @raise [NotImplementedError] If content extraction is not implemented for the provider.
163
+ #
131
164
  def content
132
165
  case @provider
133
166
  when :localai, :mistral, :ollama, :open_router, :openai
@@ -142,6 +175,13 @@ class AiClient
142
175
  end
143
176
  alias_method :text, :content
144
177
 
178
+ # Handles calls to methods that are missing on the AiClient instance.
179
+ #
180
+ # @param method_name [Symbol] The name of the method called.
181
+ # @param args [Array] Arguments passed to the method.
182
+ # @param block [Proc] Optional block associated with the method call.
183
+ # @return [Object] The result from the underlying client or raises NoMethodError.
184
+ #
145
185
  def method_missing(method_name, *args, &block)
146
186
  if @client.respond_to?(method_name)
147
187
  result = @client.send(method_name, *args, &block)
@@ -152,6 +192,12 @@ class AiClient
152
192
  end
153
193
  end
154
194
 
195
+ # Checks if the instance responds to the missing method.
196
+ #
197
+ # @param method_name [Symbol] The name of the method to check.
198
+ # @param include_private [Boolean] Whether to include private methods in the check.
199
+ # @return [Boolean] True if the method is supported by the client, false otherwise.
200
+ #
155
201
  def respond_to_missing?(method_name, include_private = false)
156
202
  @client.respond_to?(method_name) || super
157
203
  end
@@ -160,6 +206,12 @@ class AiClient
160
206
  ##############################################
161
207
  private
162
208
 
209
+ # Validates the specified provider.
210
+ #
211
+ # @param provider [Symbol] The provider to validate.
212
+ # @return [Symbol, nil] Returns the validated provider or nil.
213
+ # @raise [ArgumentError] If the provider is unsupported.
214
+ #
163
215
  def validate_provider(provider)
164
216
  return nil if provider.nil?
165
217
 
@@ -171,7 +223,11 @@ class AiClient
171
223
  provider
172
224
  end
173
225
 
174
-
226
+ # Creates an instance of the appropriate OmniAI client based on the provider.
227
+ #
228
+ # @return [OmniAI::Client] An instance of the configured OmniAI client.
229
+ # @raise [ArgumentError] If the provider is unsupported.
230
+ #
175
231
  def create_client
176
232
  client_options = {
177
233
  api_key: fetch_api_key,
@@ -209,7 +265,10 @@ class AiClient
209
265
  end
210
266
 
211
267
 
212
- # Similar to fetch_access_tokne but for the instance config
268
+ # Similar to fetch_access_token but for the instance config
269
+ #
270
+ # @return [String, nil] The retrieved API key or nil if not found.
271
+ #
213
272
  def fetch_api_key
214
273
  config.envar_api_key_names[@provider]
215
274
  &.map { |key| ENV[key] }
@@ -217,6 +276,12 @@ class AiClient
217
276
  &.first
218
277
  end
219
278
 
279
+ # Determines the provider based on the provided model.
280
+ #
281
+ # @param model [String] The model name.
282
+ # @return [Symbol] The corresponding provider.
283
+ # @raise [ArgumentError] If the model is unsupported.
284
+ #
220
285
  def determine_provider(model)
221
286
  config.provider_patterns.find { |provider, pattern| model.match?(pattern) }&.first ||
222
287
  raise(ArgumentError, "Unsupported model: #{model}")
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: ai_client
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.2.4
4
+ version: 0.2.5
5
5
  platform: ruby
6
6
  authors:
7
7
  - Dewayne VanHoozer
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2024-10-10 00:00:00.000000000 Z
11
+ date: 2024-10-11 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: active_hash
@@ -219,6 +219,7 @@ files:
219
219
  - examples/embed.rb
220
220
  - examples/speak.rb
221
221
  - examples/text.rb
222
+ - examples/tools.rb
222
223
  - examples/transcribe.rb
223
224
  - lib/ai_client.rb
224
225
  - lib/ai_client/chat.rb
@@ -232,6 +233,7 @@ files:
232
233
  - lib/ai_client/open_router_extensions.rb
233
234
  - lib/ai_client/retry_middleware.rb
234
235
  - lib/ai_client/speak.rb
236
+ - lib/ai_client/tool.rb
235
237
  - lib/ai_client/transcribe.rb
236
238
  - lib/ai_client/version.rb
237
239
  - sig/ai_client.rbs