ai_client 0.4.3 → 0.4.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/lib/ai_client/ollama_extensions.rb ADDED
@@ -0,0 +1,191 @@
+ # lib/ai_client/ollama_extensions.rb
+
+ # Ollama Extensions for AiClient
+ #
+ # This file adds several public instance and class methods to the AiClient class
+ # to provide information about AI models and providers.
+ #
+ # Instance Methods:
+ # - model_details: Retrieves details for the current model.
+ # - models: Retrieves model names for the current provider.
+ #
+ # Class Methods:
+ # - providers: Retrieves all available providers.
+ # - models: Retrieves model names, optionally filtered by provider.
+ # - model_details: Retrieves details for a specific model.
+ #
+ # These methods utilize the AiClient::LLM class and the models.yml file
+ # for model information.
+
+ # TODO: consider incorporating Ollama AI extensions
+ # require 'ollama-ai'
+
+ require 'yaml'
+ require 'uri'
+ require 'json'
+ require 'net/http'
+
+ class AiClient
+
+   # Retrieves details for the current model.
+   #
+   # @return [Hash, nil] Details of the current model or nil if not found.
+   def model_details
+     id = "#{@provider}/#{@model}"
+     LLM.find(id.downcase)
+   end
+
+   # Retrieves model names for the current provider.
+   #
+   # @return [Array<String>] List of model names for the current provider.
+   def models = LLM.models(@provider)
+
+
+   class << self
+
+     # Retrieves all available providers.
+     #
+     # @return [Array<Symbol>] List of all provider names.
+     def providers = LLM.providers
+
+
+     # Retrieves model names, optionally filtered by provider.
+     #
+     # @param substring [String, nil] Optional substring to filter models by.
+     # @return [Array<String>] List of model names.
+     def models(substring = nil) = LLM.models(substring)
+
+     # Retrieves details for a specific model.
+     #
+     # @param model_id [String] The model ID to retrieve details for,
+     #   in the pattern "provider/model".downcase
+     # @return [AiClient::LLM, nil] Details of the model or nil if not found.
+     def model_details(model_id) = LLM.find(model_id.downcase)
+
+
+     # Resets LLM data with the available ORC models.
+     #
+     # @return [void]
+     #
+     def reset_llm_data
+       # Simply delegate to LLM class if it has the method
+       if LLM.respond_to?(:reset_llm_data)
+         LLM.reset_llm_data
+       end
+     end
+
+
+     # Initializes Ollama extensions for AiClient.
+     #
+     # This sets up the access token and initializes the ORC client.
+     #
+     # @return [void]
+     #
+     def add_ollama_extensions
+       access_token = fetch_access_token
+
+       return unless access_token
+
+       configure_ollama(access_token)
+       initialize_ollama_client
+     end
+
+
+     # Retrieves the ORC client instance.
+     #
+     # @return [Ollama::Client] Instance of the Ollama client.
+     #
+     def ollama_client
+       @ollama_client ||= initialize_ollama_client
+     end
+
+
+     # Retrieves the available models from the Ollama server.
+     #
+     # @param host [String] Optional host URL for the Ollama server.
+     #   Defaults to the configured host or http://localhost:11434 if not specified.
+     # @return [Array<Hash>] List of available models with their details.
+     #
+     def ollama_available_models(host = nil)
+       host ||= ollama_host
+
+       uri = URI("#{host}/api/tags")
+       response = Net::HTTP.get_response(uri)
+
+       if response.is_a?(Net::HTTPSuccess)
+         JSON.parse(response.body)["models"] rescue []
+       else
+         []
+       end
+     end
+
+     # Gets the configured Ollama host URL.
+     #
+     # @return [String] The configured Ollama host URL
+     def ollama_host
+       class_config.providers[:ollama]&.dig(:host) || 'http://localhost:11434'
+     end
+
+     # Checks if a specific model exists on the Ollama server.
+     #
+     # @param model_name [String] The name of the model to check.
+     # @param host [String] Optional host URL for the Ollama server.
+     #   Defaults to the configured host or http://localhost:11434 if not specified.
+     # @return [Boolean] True if the model exists, false otherwise.
+     #
+     def ollama_model_exists?(model_name, host = nil)
+       models = ollama_available_models(host)
+       models.any? { |m| m['name'] == model_name }
+     end
+
+
+     private
+
+
+     # Retrieves models from the ORC client.
+     #
+     # @return [Array<Hash>] List of models.
+     #
+     def ollama_models
+       [] # Simply return an empty array since we're not using the actual Ollama gem
+     end
+
+
+     # Fetches the access token from environment variables.
+     #
+     # @return [String, nil] The access token or nil if not found.
+     #
+     def fetch_access_token
+       # Check if the key exists in the configuration
+       return nil unless class_config.envar_api_key_names &&
+                         class_config.envar_api_key_names[:ollama]
+
+       # Now safely access the array
+       class_config.envar_api_key_names[:ollama]
+         .map { |key| ENV[key] }
+         .compact
+         .first
+     end
+
+     # Configures the Ollama client with the access token.
+     #
+     # @param access_token [String] The access token to configure.
+     # @return [void]
+     #
+     def configure_ollama(access_token)
+       # No-op since we're not using the actual Ollama gem
+     end
+
+     # Initializes the ORC client instance.
+     #
+     # @return [OmniAI::Ollama::Client] Instance of the Ollama client.
+     def initialize_ollama_client
+       # Return a dummy object that won't raise errors
+       Object.new
+     end
+   end
+ end
+
+ # Don't try to initialize the Ollama extensions at load time
+ # because we're not requiring the Ollama gem
+ # AiClient.add_ollama_extensions
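
Taken together, these helpers let callers introspect a live Ollama server before building a client. A minimal usage sketch, assuming a local Ollama install; the model name is illustrative, and the provider: keyword is assumed from the gem's constructor:

require 'ai_client'

# List models installed on the local Ollama server (GET /api/tags).
AiClient.ollama_available_models.each do |m|
  puts m['name']
end

# Guard client creation on model availability.
if AiClient.ollama_model_exists?('llama3.1:latest')
  client = AiClient.new('llama3.1:latest', provider: :ollama)
  puts client.models.inspect   # model names known for the :ollama provider
end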
data/lib/ai_client/version.rb CHANGED
@@ -1,7 +1,7 @@
  # frozen_string_literal: true
 
  class AiClient
-   VERSION = "0.4.3"
+   VERSION = "0.4.4"
 
    def version = VERSION
    def self.version = VERSION
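
After the bump, the class-level helper shown above reports the new release:

AiClient.version   # => "0.4.4"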
data/lib/ai_client/xai.rb ADDED
@@ -0,0 +1,35 @@
+ # lib/ai_client/xai.rb
+
+ module AiClient
+   module XAI
+     class Client
+       BASE_URI = 'https://api.x.ai/v1' # Replace with actual xAI API endpoint
+
+       def initialize(api_key: ENV['XAI_API_KEY'])
+         @api_key = api_key
+         @connection = OmniAI::HTTP::Connection.new(BASE_URI)
+       end
+
+       def chat(prompt:, model: 'grok3', **options)
+         response = @connection.post(
+           '/chat/completions', # Adjust endpoint based on xAI API docs
+           headers: { 'Authorization' => "Bearer #{@api_key}", 'Content-Type' => 'application/json' },
+           body: {
+             model: model,
+             messages: [{ role: 'user', content: prompt }],
+             temperature: options[:temperature] || 0.7,
+             max_tokens: options[:max_tokens] || 1024
+           }.to_json
+         )
+         parse_response(response)
+       end
+
+       private
+
+       def parse_response(response)
+         json = JSON.parse(response.body)
+         json['choices'][0]['message']['content'] # Adjust based on actual response structure
+       end
+     end
+   end
+ end
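
A usage sketch for the new client; it assumes XAI_API_KEY is exported and that OmniAI::HTTP::Connection, which the published code references, resolves at runtime:

xai = AiClient::XAI::Client.new
reply = xai.chat(prompt: 'Summarize RFC 2119 in one sentence.',
                 model: 'grok3',     # default shown in the code above
                 temperature: 0.2,
                 max_tokens: 256)
puts reply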
data/lib/ai_client.rb CHANGED
@@ -41,6 +41,7 @@ require_relative 'ai_client/open_router_extensions'
  require_relative 'ai_client/llm' # SMELL: must come after the open router stuff
  require_relative 'ai_client/tool'
  require_relative 'ai_client/function'
+ require_relative 'ai_client/ollama_extensions' # Added ollama extensions
 
  # Create a generic client instance using only model name
  # client = AiClient.new('gpt-3.5-turbo')
@@ -288,16 +289,23 @@ class AiClient
        OmniAI::Mistral::Client.new(**client_options)
 
      when :ollama
-       # SMELL: what if ollama is not running on localhost?
-       OmniAI::OpenAI::Client.new(host: 'http://localhost:11434', api_key: nil, **client_options)
+       provider_config = @config.providers[:ollama] || {}
+       host = provider_config[:host] || 'http://localhost:11434'
+       OmniAI::OpenAI::Client.new(host: host, api_key: nil, **client_options)
 
      when :localai
-       # SMELL: what if localai is not running on localhost?
-       OmniAI::OpenAI::Client.new(host: 'http://localhost:8080', api_key: nil, **client_options)
+       provider_config = @config.providers[:localai] || {}
+       host = provider_config[:host] || 'http://localhost:8080'
+       OmniAI::OpenAI::Client.new(host: host, api_key: nil, **client_options)
 
      when :open_router
        OmniAI::OpenAI::Client.new(host: 'https://openrouter.ai', api_prefix: 'api', **client_options)
 
+     when :xai
+       # SMELL: may want to make this an
+       #        extension to OmniAI
+       AiClient::XAI::Client.new
+
      else
        raise ArgumentError, "Unsupported provider: #{@provider}"
      end
@@ -324,11 +332,9 @@ class AiClient
    def determine_provider(model)
      return nil if model.nil? || model.empty?
 
-     # SMELL: ollama has many open-source models. Its hard to keep
-     #        the patterns updated; hgwever, ollama has an API
-     #        to list the currently installed models.
-     #        http://localhost:11434/api/tags
-     #
+     # Ollama has many open-source models.
+     # We can use the Ollama API to list the currently installed models.
+     # http://localhost:11434/api/tags
      config.provider_patterns.find { |provider, pattern| model.match?(pattern) }&.first ||
        raise(ArgumentError, "Unsupported model: #{model}")
    end
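
The :ollama and :localai branches now honor a per-provider host, so pointing at a remote server no longer requires a code change. A sketch, assuming the gem's AiClient.configure block; the diff above only shows that @config.providers[:ollama][:host] is read:

AiClient.configure do |config|
  config.providers[:ollama] = { host: 'http://gpu-box.internal:11434' }
end

client = AiClient.new('mistral', provider: :ollama)   # connects to the remote host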
metadata CHANGED
@@ -1,13 +1,13 @@
  --- !ruby/object:Gem::Specification
  name: ai_client
  version: !ruby/object:Gem::Version
-   version: 0.4.3
+   version: 0.4.4
  platform: ruby
  authors:
  - Dewayne VanHoozer
  bindir: bin
  cert_chain: []
- date: 2025-02-01 00:00:00.000000000 Z
+ date: 2025-03-02 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: active_hash
@@ -230,12 +230,14 @@ files:
  - lib/ai_client/logger_middleware.rb
  - lib/ai_client/middleware.rb
  - lib/ai_client/models.yml
+ - lib/ai_client/ollama_extensions.rb
  - lib/ai_client/open_router_extensions.rb
  - lib/ai_client/retry_middleware.rb
  - lib/ai_client/speak.rb
  - lib/ai_client/tool.rb
  - lib/ai_client/transcribe.rb
  - lib/ai_client/version.rb
+ - lib/ai_client/xai.rb
  - sig/ai_client.rbs
  - the_ollama_model_problem.md
  homepage: https://github.com/MadBomber/ai_client
@@ -260,7 +262,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
    version: '0'
  requirements: []
- rubygems_version: 3.6.3
+ rubygems_version: 3.6.5
  specification_version: 4
  summary: A generic AI Client for many providers
  test_files: []