ai_client 0.2.2 → 0.2.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,86 @@
1
+ # lib/ai_client/open_router_extensions.rb
2
+ # frozen_string_literal: true
3
+
4
+ # These extensions to AiClient are only available with
5
+ # a valid API Key for the open_router.ai web-service
6
+
7
+ require 'open_router'
8
+ require 'yaml'
9
+
10
+ class AiClient
11
+
12
+ def models = self.class.models
13
+ def providers = self.class.providers
14
+ def model_names(a_provider=nil) = self.class.model_names(a_provider)
15
+ def model_details(a_model) = self.class.model_details(a_model)
16
+ def find_model(a_model_substring) = self.class.find_model(a_model_substring)
17
+
18
+ class << self
19
+ def add_open_router_extensions
20
+ access_token = fetch_access_token
21
+
22
+ return unless access_token
23
+
24
+ configure_open_router(access_token)
25
+ initialize_orc_client
26
+ end
27
+
28
+ def orc_client
29
+ @orc_client ||= add_open_router_extensions || raise("OpenRouter extensions are not available")
30
+ end
31
+
32
+ def orc_models
33
+ @orc_models ||= orc_client.models
34
+ end
35
+
36
+ # TODO: Refactor these DB like methods to take
37
+ # advantage of AiClient::LLM
38
+
39
+ def model_names(provider=nil)
40
+ model_ids = models.map { _1['id'] }
41
+
42
+ return model_ids unless provider
43
+
44
+ model_ids.filter_map { _1.split('/')[1] if _1.start_with?(provider.to_s.downcase) }
45
+ end
46
+
47
+ def model_details(model)
48
+ orc_models.find { _1['id'].include?(model) }
49
+ end
50
+
51
+ def providers
52
+ @providers ||= models.map{ _1['id'].split('/')[0] }.sort.uniq
53
+ end
54
+
55
+ def find_model(a_model_substring)
56
+ model_names.select{ _1.include?(a_model_substring) }
57
+ end
58
+
59
+ def reset_llm_data
60
+ LLM.data = orc_models
61
+ LLM::DATA_PATH.write(orc_models.to_yaml)
62
+ end
63
+
64
+
65
+ private
66
+
67
+ # Similar to fetch_api_key but for the class_config
68
+ def fetch_access_token
69
+ class_config.envar_api_key_names.open_router
70
+ .map { |key| ENV[key] }
71
+ .compact
72
+ .first
73
+ end
74
+
75
+ def configure_open_router(access_token)
76
+ OpenRouter.configure { |config| config.access_token = access_token }
77
+ end
78
+
79
+ def initialize_orc_client
80
+ @orc_client ||= OpenRouter::Client.new
81
+ end
82
+ end
83
+ end
84
+
85
+
86
+ AiClient.add_open_router_extensions
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  class AiClient
4
- VERSION = "0.2.2"
4
+ VERSION = "0.2.4"
5
5
  end
data/lib/ai_client.rb CHANGED
@@ -16,10 +16,6 @@ require 'omniai/openai'
16
16
 
17
17
  require 'open_router'
18
18
 
19
- require_relative 'extensions/omniai-localai'
20
- require_relative 'extensions/omniai-ollama'
21
- require_relative 'extensions/omniai-open_router'
22
-
23
19
  require_relative 'ai_client/chat'
24
20
  require_relative 'ai_client/embed'
25
21
  require_relative 'ai_client/speak'
@@ -29,6 +25,9 @@ require_relative 'ai_client/configuration'
29
25
  require_relative 'ai_client/middleware'
30
26
  require_relative 'ai_client/version'
31
27
 
28
+ require_relative 'ai_client/open_router_extensions'
29
+ require_relative 'ai_client/llm' # SMELL: must come after the open router stuff
30
+
32
31
  # Create a generic client instance using only model name
33
32
  # client = AiClient.new('gpt-3.5-turbo')
34
33
  #
@@ -131,25 +130,12 @@ class AiClient
131
130
 
132
131
  def content
133
132
  case @provider
134
- when :openai, :localai, :ollama
135
- # last_response.data.dig('choices', 0, 'message', 'content')
133
+ when :localai, :mistral, :ollama, :open_router, :openai
136
134
  last_response.data.tunnel 'content'
137
135
 
138
- when :anthropic
139
- # last_response.data.dig('content',0,'text')
136
+ when :anthropic, :google
140
137
  last_response.data.tunnel 'text'
141
138
 
142
- when :google
143
- # last_response.data.dig('candidates', 0, 'content', 'parts', 0, 'text')
144
- last_response.data.tunnel 'text'
145
-
146
- when :mistral
147
- # last_response.data.dig('choices', 0, 'message', 'content')
148
- last_response.data.tunnel 'content'
149
-
150
- when :open_router
151
- last_response.data.tunnel 'content'
152
-
153
139
  else
154
140
  raise NotImplementedError, "Content extraction not implemented for provider: #{@provider}"
155
141
  end
@@ -187,9 +173,8 @@ class AiClient
187
173
 
188
174
 
189
175
  def create_client
190
- api_key = fetch_api_key # Fetching the API key should only happen for valid providers
191
176
  client_options = {
192
- api_key: api_key,
177
+ api_key: fetch_api_key,
193
178
  logger: @logger,
194
179
  timeout: @timeout
195
180
  }
@@ -210,13 +195,13 @@ class AiClient
210
195
  OmniAI::Mistral::Client.new(**client_options)
211
196
 
212
197
  when :ollama
213
- OmniAI::Ollama::Client.new(**client_options)
198
+ OmniAI::OpenAI::Client.new(host: 'http://localhost:11434', api_key: nil, **client_options)
214
199
 
215
200
  when :localai
216
- OmniAI::LocalAI::Client.new(**client_options)
201
+ OmniAI::OpenAI::Client.new(host: 'http://localhost:8080', api_key: nil, **client_options)
217
202
 
218
203
  when :open_router
219
- OmniAI::OpenRouter::Client.new(**client_options)
204
+ OmniAI::OpenAI::Client.new(host: 'https://openrouter.ai', api_prefix: 'api', **client_options)
220
205
 
221
206
  else
222
207
  raise ArgumentError, "Unsupported provider: #{@provider}"
@@ -224,20 +209,14 @@ class AiClient
224
209
  end
225
210
 
226
211
 
212
+ # Similar to fetch_access_token but for the instance config
227
213
  def fetch_api_key
228
- env_var_name = "#{@provider.upcase}_API_KEY"
229
- api_key = ENV[env_var_name]
230
-
231
- if api_key.nil? || api_key.empty?
232
- unless [:localai, :ollama].include? provider
233
- raise ArgumentError, "API key not found in environment variable #{env_var_name}"
234
- end
235
- end
236
-
237
- api_key
214
+ config.envar_api_key_names[@provider]
215
+ &.map { |key| ENV[key] }
216
+ &.compact
217
+ &.first
238
218
  end
239
219
 
240
-
241
220
  def determine_provider(model)
242
221
  config.provider_patterns.find { |provider, pattern| model.match?(pattern) }&.first ||
243
222
  raise(ArgumentError, "Unsupported model: #{model}")
metadata CHANGED
@@ -1,15 +1,29 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: ai_client
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.2.2
4
+ version: 0.2.4
5
5
  platform: ruby
6
6
  authors:
7
7
  - Dewayne VanHoozer
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2024-10-08 00:00:00.000000000 Z
11
+ date: 2024-10-10 00:00:00.000000000 Z
12
12
  dependencies:
13
+ - !ruby/object:Gem::Dependency
14
+ name: active_hash
15
+ requirement: !ruby/object:Gem::Requirement
16
+ requirements:
17
+ - - ">="
18
+ - !ruby/object:Gem::Version
19
+ version: '0'
20
+ type: :runtime
21
+ prerelease: false
22
+ version_requirements: !ruby/object:Gem::Requirement
23
+ requirements:
24
+ - - ">="
25
+ - !ruby/object:Gem::Version
26
+ version: '0'
13
27
  - !ruby/object:Gem::Dependency
14
28
  name: hashie
15
29
  requirement: !ruby/object:Gem::Requirement
@@ -86,14 +100,14 @@ dependencies:
86
100
  requirements:
87
101
  - - ">="
88
102
  - !ruby/object:Gem::Version
89
- version: '0'
103
+ version: 1.8.3
90
104
  type: :runtime
91
105
  prerelease: false
92
106
  version_requirements: !ruby/object:Gem::Requirement
93
107
  requirements:
94
108
  - - ">="
95
109
  - !ruby/object:Gem::Version
96
- version: '0'
110
+ version: 1.8.3
97
111
  - !ruby/object:Gem::Dependency
98
112
  name: open_router
99
113
  requirement: !ruby/object:Gem::Requirement
@@ -164,6 +178,20 @@ dependencies:
164
178
  - - ">="
165
179
  - !ruby/object:Gem::Version
166
180
  version: '0'
181
+ - !ruby/object:Gem::Dependency
182
+ name: tocer
183
+ requirement: !ruby/object:Gem::Requirement
184
+ requirements:
185
+ - - ">="
186
+ - !ruby/object:Gem::Version
187
+ version: '0'
188
+ type: :development
189
+ prerelease: false
190
+ version_requirements: !ruby/object:Gem::Requirement
191
+ requirements:
192
+ - - ">="
193
+ - !ruby/object:Gem::Version
194
+ version: '0'
167
195
  description: "`ai_client` is a versatile Ruby gem that offers a seamless interface
168
196
  \nfor integrating a wide range of AI service providers through a single, \nunified
169
197
  API. With `ai_client`, you can simply specify the model name \nand quickly leverage
@@ -197,16 +225,15 @@ files:
197
225
  - lib/ai_client/config.yml
198
226
  - lib/ai_client/configuration.rb
199
227
  - lib/ai_client/embed.rb
228
+ - lib/ai_client/llm.rb
200
229
  - lib/ai_client/logger_middleware.rb
201
230
  - lib/ai_client/middleware.rb
231
+ - lib/ai_client/models.yml
232
+ - lib/ai_client/open_router_extensions.rb
202
233
  - lib/ai_client/retry_middleware.rb
203
234
  - lib/ai_client/speak.rb
204
235
  - lib/ai_client/transcribe.rb
205
236
  - lib/ai_client/version.rb
206
- - lib/extensions/omniai-localai.rb
207
- - lib/extensions/omniai-ollama.rb
208
- - lib/extensions/omniai-open_router.rb
209
- - lib/extensions/open_router.md
210
237
  - sig/ai_client.rbs
211
238
  - the_ollama_model_problem.md
212
239
  homepage: https://github.com/MadBomber/ai_client
@@ -1,31 +0,0 @@
1
- # extensions/omniai-localai.rb
2
- # frozen_string_literal: true
3
-
4
- require 'omniai'
5
- require 'omniai/openai'
6
-
7
- module OmniAI
8
-
9
- # Create an alias for OmniAI::OpenAI module
10
- module LocalAI
11
- extend OmniAI::OpenAI
12
-
13
- # Alias classes from OmniAI::OpenAI
14
- class Client < OmniAI::OpenAI::Client
15
- def initialize(**options)
16
- options[:host] = 'http://localhost:8080' unless options.has_key?(:host)
17
- super(**options)
18
- end
19
- end
20
-
21
-
22
- Config = OmniAI::OpenAI::Config
23
-
24
- # Alias the Thread class and its nested classes
25
- Thread = OmniAI::OpenAI::Thread
26
- Annotation = OmniAI::OpenAI::Thread::Annotation
27
- Attachment = OmniAI::OpenAI::Thread::Attachment
28
- Message = OmniAI::OpenAI::Thread::Message
29
- Run = OmniAI::OpenAI::Thread::Run
30
- end
31
- end
@@ -1,30 +0,0 @@
1
- # extensions/omniai-ollama.rb
2
- # frozen_string_literal: true
3
-
4
- require 'omniai'
5
- require 'omniai/openai'
6
-
7
- module OmniAI
8
-
9
- # Create an alias for OmniAI::OpenAI module
10
- module Ollama
11
- extend OmniAI::OpenAI
12
-
13
- # Alias classes from OmniAI::OpenAI
14
- class Client < OmniAI::OpenAI::Client
15
- def initialize(**options)
16
- options[:host] = 'http://localhost:11434' unless options.has_key?(:host)
17
- super(**options)
18
- end
19
- end
20
-
21
- Config = OmniAI::OpenAI::Config
22
-
23
- # Alias the Thread class and its nested classes
24
- Thread = OmniAI::OpenAI::Thread
25
- Annotation = OmniAI::OpenAI::Thread::Annotation
26
- Attachment = OmniAI::OpenAI::Thread::Attachment
27
- Message = OmniAI::OpenAI::Thread::Message
28
- Run = OmniAI::OpenAI::Thread::Run
29
- end
30
- end
@@ -1,92 +0,0 @@
1
- # lib/extensions/omniai-open_router.rb
2
- # frozen_string_literal: true
3
-
4
- require 'omniai'
5
- require 'omniai/openai'
6
-
7
- module OmniAI
8
-
9
- # Create an alias for OmniAI::OpenAI module
10
- module OpenRouter
11
- extend OmniAI::OpenAI
12
-
13
- # Alias classes from OmniAI::OpenAI
14
- class Client < OmniAI::OpenAI::Client
15
- def initialize(**options)
16
- options[:host] = 'https://openrouter.ai/api/v1' unless options.has_key?(:host)
17
- super(**options)
18
- end
19
-
20
- def self.openrouter
21
- OmniAI::OpenRouter::Client
22
- end
23
-
24
- def self.open_router
25
- OmniAI::OpenRouter::Client
26
- end
27
-
28
- def self.find(provider:, **)
29
- return OmniAI.open_router.new(**) if :open_reouter == provider
30
-
31
- super(provider: provider.to_s, **)
32
- end
33
- end
34
-
35
- Chat = OmniAI::OpenAI::Chat
36
-
37
- class Chat
38
- def path
39
- "/api/v1/chat/completions"
40
- end
41
- end
42
-
43
- Config = OmniAI::OpenAI::Config
44
-
45
- # Alias the Thread class and its nested classes
46
- Thread = OmniAI::OpenAI::Thread
47
- Thread::Annotation = OmniAI::OpenAI::Thread::Annotation
48
- Thread::Attachment = OmniAI::OpenAI::Thread::Attachment
49
- Thread::Message = OmniAI::OpenAI::Thread::Message
50
- Thread::Run = OmniAI::OpenAI::Thread::Run
51
- end
52
- end
53
-
54
- ######################################################
55
- ## Extend Capabilities Using OpenRouter
56
- #
57
- # TODO: catch the models db
58
- # TODO: consider wrapping the models database in an ActiveModel
59
- #
60
- class AiClient
61
- class << self
62
- def orc_models
63
- @orc_models ||= ORC.models if defined?(ORC)
64
- end
65
-
66
- def orc_model_names(provider=nil)
67
- if provider.nil?
68
- orc_models.map{|e| e['id']}
69
- else
70
- orc_models
71
- .map{|e| e['id']}
72
- .select{|name| name.start_with? provider.to_s.downcase}
73
- .map{|e| e.split('/')[1]}
74
- end
75
- end
76
-
77
- def orc_model_details(model)
78
- orc_models.select{|e| e['id'].include?(model)}
79
- end
80
- end
81
- end
82
-
83
- if ENV.fetch('OPEN_ROUTER_API_KEY', nil)
84
- OpenRouter.configure do |config|
85
- config.access_token = ENV.fetch('OPEN_ROUTER_API_KEY', nil)
86
- end
87
-
88
- # Use a default provider/model
89
- AiClient::ORC = OpenRouter::Client.new
90
- end
91
-
92
-
@@ -1,97 +0,0 @@
1
- # Notes on OpenRouter
2
-
3
- OpenRouter is a web service that has a common API to many
4
- back-end LLM processors. Its goal is basically the same as the
5
- OmniAI gem - provide the flexibility of using multiple models
6
- processed by multiple providers.
7
-
8
- ```ruby
9
- OpenRouter.configure do |config|
10
- config.access_token = ENV.fetch('OPEN_ROUTER_API_KEY', nil)
11
- end
12
-
13
- # Use a default provider/model
14
- AI = OpenRouter::Client.new
15
-
16
- # Returns an Array of Hash for supported
17
- # models/providers
18
- Models = AI.models
19
- ```
20
-
21
- models with a "/" are targeted to open router.
22
- before the "/" is the provider after it is the model name
23
-
24
- Will need to add this entry to the AiClient::Config `provider_patterns` Hash:
25
-
26
- ```ruby
27
- open_router: /\//, # /(.*)\/(.*)/ provider / model name
28
- ```
29
-
30
- models can be an Array of Strings. The first is the primary while
31
- the rest are fallbacks in case the one before fails
32
-
33
- ```ruby
34
- {
35
- "models": ["anthropic/claude-2.1", "gryphe/mythomax-l2-13b"],
36
- "route": "fallback",
37
- ... // Other params
38
- }
39
- ```
40
-
41
- You can have OpenRouter send your prompt to the best
42
- provider/model for the prompt like this:
43
-
44
- ```ruby
45
- require "open_router"
46
-
47
- OpenRouter.configure do |config|
48
- config.access_token = ENV["ACCESS_TOKEN"]
49
- config.site_name = "YOUR_APP_NAME"
50
- config.site_url = "YOUR_SITE_URL"
51
- end
52
-
53
- OpenRouter::Client.new.complete(
54
- model: "openrouter/auto",
55
- messages: [
56
- {
57
- "role": "user",
58
- "content": "What is the meaning of life?"
59
- }
60
- ]
61
- ).then do |response|
62
- puts response.dig("choices", 0, "message", "content")
63
- end
64
- ```
65
-
66
- OpenRouter can also support OpenAI's API by using this
67
- base_url: "https://openrouter.ai/api/v1",
68
-
69
- Request Format Documentation
70
- https://openrouter.ai/docs/requests
71
-
72
- Simple Quick Start ...
73
-
74
- ```ruby
75
- OpenRouter::Client.new.complete(
76
- model: "openai/gpt-3.5-turbo",
77
- messages: [
78
- {
79
- "role": "user",
80
- "content": "What is the meaning of life?"
81
- }
82
- ]
83
- ).then do |response|
84
- puts response.dig("choices", 0, "message", "content")
85
- end
86
- ```
87
-
88
- ## Design Approaches
89
-
90
- There are at least two different approaches to
91
- integrate the OpenRouter capability. 1) Use the open_router gem
92
- and forget about using the same common-ish
93
- API established by OmniAI; or 2) Take advantage
94
- or OpenRouter's OpenAI's API and do for it
95
- the same thing that was done for Ollama and LocalAI.
96
-
97
-