ai_client 0.3.0 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +15 -0
- data/README.md +278 -9
- data/lib/ai_client/chat.rb +64 -7
- data/lib/ai_client/config.yml +11 -17
- data/lib/ai_client/configuration.rb +12 -1
- data/lib/ai_client/llm.rb +13 -2
- data/lib/ai_client/middleware.rb +2 -2
- data/lib/ai_client/models.yml +526 -416
- data/lib/ai_client/open_router_extensions.rb +63 -94
- data/lib/ai_client/tool.rb +4 -7
- data/lib/ai_client/version.rb +4 -1
- data/lib/ai_client.rb +83 -47
- metadata +3 -3
data/lib/ai_client/open_router_extensions.rb
CHANGED
@@ -1,60 +1,76 @@
 # lib/ai_client/open_router_extensions.rb
-# frozen_string_literal: true

-#
-#
+# OpenRouter Extensions for AiClient
+#
+# This file adds several public instance and class methods to the AiClient class
+# to provide information about AI models and providers.
+#
+# Instance Methods:
+# - model_details: Retrieves details for the current model.
+# - models: Retrieves model names for the current provider.
+#
+# Class Methods:
+# - providers: Retrieves all available providers.
+# - models: Retrieves model names, optionally filtered by provider.
+# - model_details: Retrieves details for a specific model.
+#
+# These methods utilize the AiClient::LLM class and the models.yml file
+# for model information.

 require 'open_router'
 require 'yaml'

 class AiClient
-
-  # Retrieves the available models.
-  #
-  # @return [Array<String>] List of model IDs.
-  #
-  def models
-    self.class.models
-  end

-  # Retrieves the
+  # Retrieves details for the current model.
   #
-  # @return [
-  def
-
+  # @return [Hash, nil] Details of the current model or nil if not found.
+  def model_details
+    id = "#{@provider}/#{@model}"
+    LLM.find(id.downcase)
   end

-  # Retrieves model names
+  # Retrieves model names for the current provider.
   #
-  # @
-
-  def model_names(provider = nil)
-    self.class.model_names(provider)
-  end
+  # @return [Array<String>] List of model names for the current provider.
+  def models = LLM.models(@provider)

-  # Retrieves details for a specific model.
-  #
-  # @param a_model [String] The model ID to retrieve details for.
-  # @return [Hash, nil] Details of the model or nil if not found.
-  def model_details(a_model)
-    self.class.model_details(a_model)
-  end

-
-
-
-
-
-
-  end
+  class << self
+
+    # Retrieves all available providers.
+    #
+    # @return [Array<Symbol>] List of all provider names.
+    def providers = LLM.providers


-
-
-    #
+    # Retrieves model names, optionally filtered by provider.
+    #
+    # @param substring [String, nil] Optional substring to filter models by.
+    # @return [Array<String>] List of model names.
+    def models(substring = nil) = LLM.models(substring)
+
+    # Retrieves details for a specific model.
+    #
+    # @param model_id [String] The model ID to retrieve details for,
+    #   in the pattern "provider/model".downcase
+    # @return [AiClient::LLM, nil] Details of the model or nil if not found.
+    def model_details(model_id) = LLM.find(model_id.downcase)
+
+
+    # Resets LLM data with the available ORC models.
     #
     # @return [void]
     #
+    def reset_llm_data = LLM.reset_llm_data
+
+
+    # Initializes OpenRouter extensions for AiClient.
+    #
+    # This sets up the access token and initializes the ORC client.
+    #
+    # @return [void]
+    #
     def add_open_router_extensions
       access_token = fetch_access_token

@@ -64,7 +80,9 @@ class AiClient
       initialize_orc_client
     end

-
+
+
+    # Retrieves the ORC client instance.
     #
     # @return [OpenRouter::Client] Instance of the OpenRouter client.
     #
@@ -72,6 +90,10 @@ class AiClient
       @orc_client ||= add_open_router_extensions || raise("OpenRouter extensions are not available")
     end

+
+    private
+
+
     # Retrieves models from the ORC client.
     #
     # @return [Array<Hash>] List of models.
@@ -80,59 +102,7 @@ class AiClient
       @orc_models ||= orc_client.models
     end

-    # TODO: Refactor these DB like methods to take
-    # advantage of AiClient::LLM

-    # Retrieves model names associated with a provider.
-    #
-    # @param provider [String, nil] The provider to filter models by.
-    # @return [Array<String>] List of model names.
-    #
-    def model_names(provider=nil)
-      model_ids = models.map { _1['id'] }
-
-      return model_ids unless provider
-
-      model_ids.filter_map { _1.split('/')[1] if _1.start_with?(provider.to_s.downcase) }
-    end
-
-    # Retrieves details of a specific model.
-    #
-    # @param model [String] The model ID to retrieve details for.
-    # @return [Hash, nil] Details of the model or nil if not found.
-    #
-    def model_details(model)
-      orc_models.find { _1['id'].include?(model) }
-    end
-
-    # Retrieves the available providers.
-    #
-    # @return [Array<String>] List of unique provider names.
-    #
-    def providers
-      @providers ||= models.map{ _1['id'].split('/')[0] }.sort.uniq
-    end
-
-    # Finds models matching a given substring.
-    #
-    # @param a_model_substring [String] The substring to search for.
-    # @return [Array<String>] List of matching model names.
-    #
-    def find_model(a_model_substring)
-      model_names.select{ _1.include?(a_model_substring) }
-    end
-
-    # Resets LLM data with the available ORC models.
-    #
-    # @return [void]
-    #
-    def reset_llm_data
-      LLM.data = orc_models
-      LLM::DATA_PATH.write(orc_models.to_yaml)
-    end
-
-
-    private

     # Fetches the access token from environment variables.
     #
@@ -154,10 +124,9 @@ class AiClient
       OpenRouter.configure { |config| config.access_token = access_token }
     end

-    # Initializes the ORC client.
-    #
-    # @return [void]
+    # Initializes the ORC client instance.
     #
+    # @return [OpenRouter::Client] Instance of the OpenRouter client.
     def initialize_orc_client
       @orc_client ||= OpenRouter::Client.new
     end
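The comment block added at the top of this file doubles as the public surface of the new lookup API. A minimal sketch of how those methods might be called in 0.4.0 (the model name and substring are illustrative, not taken from this diff):

  require 'ai_client'

  # Class-level queries, backed by AiClient::LLM and models.yml
  AiClient.providers                               # => Array of provider Symbols
  AiClient.models('mini')                          # => model names matching the substring
  AiClient.model_details('openai/gpt-3.5-turbo')   # => AiClient::LLM record or nil; id is "provider/model", lowercased

  # Instance-level queries scoped to the client's own provider and model
  client = AiClient.new('gpt-3.5-turbo')
  client.models                                    # => model names for the client's provider
  client.model_details                             # => details for "#{provider}/#{model}"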
data/lib/ai_client/tool.rb
CHANGED
@@ -1,14 +1,11 @@
 # lib/ai_client/tool.rb

-# TODO: Turn this into a Function class using the pattern
-# in examples/tools.rb
-# put the function names as symbols into a class Array
-# In the AiClient class transform the tools: []
-# parameter from an Array of Symbols into an Array
-# of FUnction instances.
-
 class AiClient::Tool < OmniAI::Tool

+  # TODO: Is there any additional functionality that
+  # needs to be added to the Rool class that would
+  # be helpful?
+
   def xyzzy = self.class.xyzzy

   class << self
data/lib/ai_client/version.rb
CHANGED
data/lib/ai_client.rb
CHANGED
@@ -1,6 +1,17 @@
 # ai_client.rb
-
-#
+
+# A generic client to access various LLM providers
+# Inspired by the SaaS "open router" concept
+
+
+# AiClient: A unified interface for interacting with various LLM providers
+#
+# Usage:
+# client = AiClient.new('gpt-3.5-turbo')
+#
+# Add middlewares:
+# AiClient.use(RetryMiddleware.new(max_retries: 5, base_delay: 2, max_delay: 30))
+# AiClient.use(LoggingMiddleware.new(AiClient.configuration.logger))
 #

 unless defined?(DebugMe)
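Together with the usage notes in this new header comment, a minimal quick-start might look like the sketch below; the chat call and prompt are assumptions based on the gem's README (chat itself lives in data/lib/ai_client/chat.rb), not on this hunk.

  require 'ai_client'

  AiClient.use(RetryMiddleware.new(max_retries: 5, base_delay: 2, max_delay: 30))
  AiClient.use(LoggingMiddleware.new(AiClient.configuration.logger))

  client = AiClient.new('gpt-3.5-turbo')   # provider is inferred from the model name
  client.chat('Hello, world!')             # assumed entry point; see data/lib/ai_client/chat.rb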
@@ -74,10 +85,12 @@ class AiClient
   attr_reader :client, # OmniAI's client instance
               :provider, # [Symbol]
               :model, # [String]
-              :logger,
+              :logger,
+              :last_message,
               :last_response,
               :timeout,
-              :config
+              :config, # Instance configuration
+              :context # chat-bot context

   # Initializes a new AiClient instance.
   #
@@ -103,72 +116,54 @@ class AiClient
   # - :timeout [Integer] Timeout value for requests.
   # @yield [config] An optional block to configure the instance.
   #
-  def initialize(model, **options, &block)
-
-    @
-
-    # Yield the @config to a block if given
-    yield(@config) if block_given?
-
-    # Merge in an instance-specific YAML file
-    if options.has_key?(:config)
-      @config.merge! Config.load(options[:config])
-      options.delete(:config) # Lconfig not supported by OmniAI
-    end
-
-    @model = model
-    explicit_provider = options.fetch(:provider, config.provider)
-
-    @provider = validate_provider(explicit_provider) || determine_provider(model)
-
-    provider_config = @config.providers[@provider] || {}
-
-    @logger = options[:logger] || @config.logger
-    @timeout = options[:timeout] || @config.timeout
-    @base_url = options[:base_url] || provider_config[:base_url]
-    @options = options.merge(provider_config)
+  def initialize(model = nil, **options, &block)
+    @context = [] # An Array of String or response objects
+    @last_messages = nil
+    @last_response = nil

-
-
+    setup_config(options, &block)
+    set_provider_and_model(model, options[:provider])
+    setup_instance_variables(options)

-    @
+    @client = create_client
   end

-  # TODO: Review these raw-ish methods are they really needed?
-  # raw? should be a private method ??
-
-  # Returns the last response received from the client.
-  #
-  # @return [OmniAI::Response] The last response.
-  #
-  def response = last_response

   # Checks if the client is set to return raw responses.
   #
   # @return [Boolean] True if raw responses are to be returned.
-  def raw?
-
+  def raw?
+    config.return_raw
+  end
+

   # Sets whether to return raw responses.
   #
   # @param value [Boolean] The value to set for raw responses return.
-  #
   def raw=(value)
     config.return_raw = value
   end

+
+  # Returns the last response received from the client.
+  #
+  # @return [OmniAI::Response] The last response.
+  #
+  def response = last_response
+
+
   # Extracts the content from the last response based on the provider.
   #
   # @return [String] The extracted content.
   # @raise [NotImplementedError] If content extraction is not implemented for the provider.
   #
-  def content
+  def content(response=last_response)
     case @provider
     when :localai, :mistral, :ollama, :open_router, :openai
-
+      response.data.tunnel 'content'

     when :anthropic, :google
-
+      response.data.tunnel 'text'

     else
       raise NotImplementedError, "Content extraction not implemented for provider: #{@provider}"
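A short sketch of the response helpers reworked in this hunk: raw? now reads config.return_raw, response returns last_response, and content can extract text from an explicit response instead of only the last one. The chat call is an assumption based on the gem's README, not on this diff.

  client = AiClient.new('gpt-3.5-turbo')
  client.raw = false                 # sets config.return_raw

  client.chat('Say hi')              # assumed entry point (data/lib/ai_client/chat.rb)
  client.content                     # text pulled from last_response via response.data.tunnel
  client.content(client.response)    # 0.4.0: any response object may be passed in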
@@ -207,6 +202,47 @@ class AiClient
   ##############################################
   private

+  def setup_config(options, &block)
+    @config = self.class.class_config.dup
+
+    yield(@config) if block_given?
+
+    if options.key?(:config)
+      @config.merge!(Config.load(options[:config]))
+      options.delete(:config) # config not supported by OmniAI
+    end
+  end
+
+
+  def set_provider_and_model(my_model, my_provider)
+    if my_model.nil?
+      if my_provider.nil?
+        @provider = @config.default_provider.to_sym
+      else
+        @provider = validate_provider(my_provider)
+      end
+      @model = @config.default_model[@provider]
+    else
+      @model = my_model
+      if my_provider.nil?
+        @provider = determine_provider(my_model)
+      else
+        @provider = validate_provider(my_provider)
+      end
+    end
+  end
+
+
+  def setup_instance_variables(options)
+    provider_config = @config.providers[@provider] || {}
+
+    @logger = options[:logger] || @config.logger
+    @timeout = options[:timeout] || @config.timeout
+    @base_url = options[:base_url] || provider_config[:base_url]
+    @options = options.merge(provider_config)
+  end
+
+
   # Validates the specified provider.
   #
   # @param provider [Symbol] The provider to validate.
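The set_provider_and_model helper added above resolves the provider/model pair along four paths; a sketch of each (provider and model names are illustrative):

  AiClient.new                                      # default_provider from config, then its default_model
  AiClient.new(provider: :ollama)                   # explicit provider (validated), then its default_model
  AiClient.new('gpt-3.5-turbo')                     # model only: provider inferred via determine_provider
  AiClient.new('gpt-3.5-turbo', provider: :openai)  # both given; provider still validated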
@@ -284,9 +320,9 @@ class AiClient
   # @raise [ArgumentError] If the model is unsupported.
   #
   def determine_provider(model)
+    return nil if model.nil? || model.empty?
+
     config.provider_patterns.find { |provider, pattern| model.match?(pattern) }&.first ||
       raise(ArgumentError, "Unsupported model: #{model}")
   end
 end
-
-
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: ai_client
 version: !ruby/object:Gem::Version
-  version: 0.3.0
+  version: 0.4.0
 platform: ruby
 authors:
 - Dewayne VanHoozer
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2024-10-
+date: 2024-10-21 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: active_hash
@@ -262,7 +262,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     - !ruby/object:Gem::Version
       version: '0'
 requirements: []
-rubygems_version: 3.5.
+rubygems_version: 3.5.22
 signing_key:
 specification_version: 4
 summary: A generic AI Client for many providers