ai_client 0.1.0 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: b12bef21b23a46a47cc409042afd5eaae36d6cf1cdd2fae47f12fd0b958ed961
-  data.tar.gz: 0c70c7f7e562da50876ee65c1f1de81f6ee7d37e97a2a7c114bb692ce5d6155d
+  metadata.gz: 107e9d57f175adc72178f48365a833a4dab7fbe71088169774429f43624d7d9c
+  data.tar.gz: d308008540c4fbf309235c31d60b1a98b14a608a0f69d7ee1bac6911f34dad47
 SHA512:
-  metadata.gz: 56d14adae8ab29719083dc7bb63edf33fa625237274edef043b3738f39757fa0c15983232c9aab0a2afbf8db33cbcafc89ed0cb39d0180d045e3c2f6f467a5fe
-  data.tar.gz: 5c3d314a10a6aa6f10a51b6f8eaf574dea6d108feb5b5ec64f47fb0e494cc1ebe865a7b2fbc9401aada23c692cae2346a48dbcf062e84a202e080f25f0790d9d
+  metadata.gz: b001c640758c846f2372354660d4a4f4c7ef4a6c4671bbd743fcc713d543faff1fe54ff2e341285ec57dbaf284db3e3a18a50cbd6b420fd01ac911112a93ae8a
+  data.tar.gz: 0055623e374bb4cf111a234712e9ac1441d6ddb541f2f7ebadc604c255e27bdd6ad14d1b2a81d56cb6e2797e01f3d3398eb4b4b3e6c7d04802dadf620ecb0390
data/.irbrc ADDED
@@ -0,0 +1,3 @@
+# .irbrc
+
+require_relative 'lib/ai_client'
data/CHANGELOG.md CHANGED
@@ -1,5 +1,9 @@
 ## [Unreleased]
 
-## [0.1.0] - 2024-10-02
+## Released
+### [0.2.0] - 2024-10-04
+- Configuration is more robust. Still room for improvement.
+
+### [0.1.0] - 2024-10-02
 
 - Initial working release
data/README.md CHANGED
@@ -41,6 +41,30 @@ c1 = AiClient.new('nomic-embeddings-text')
 c2 = AiClient.new('gpt-4o-mini')
 ```
 
+### Configuration
+
+There is an internal hard-coded default configuration. That default is duplicated into a class-level configuration, which can be overridden with a class-level config block like this ...
+
+```ruby
+AiClient.configure do |config|
+  config.some_item = some_value
+end
+```
+
+Every instance of AiClient inherits the class-level configuration; however, the instance configuration can also be overridden with a block like this ...
+
+```ruby
+client = AiClient.new('super-ai-overlord-model') do |config|
+  config.some_item = some_value
+end
+```
+
+But wait, there's more. You can also load an instance's configuration from a YAML file like this ...
+
+```ruby
+client = AiClient.new('baby-model', config: 'path/to/file.yml')
+```
+
 ### What Now?
 
 TODO: Document the methods and their options.
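A minimal sketch of how the configuration layers described in the new Configuration section combine in practice; the model name, the `my_config.yml` path, and the config keys are placeholders, not part of the package:

```ruby
require 'ai_client'

# Class-level overrides of the built-in defaults
AiClient.configure do |config|
  config.return_raw = false
end

# Instance-level: start from the class config, merge a (hypothetical) YAML file,
# then override a single item with a block
client = AiClient.new('gpt-4o-mini', config: 'my_config.yml') do |config|
  config.timeout = 30
end
```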
data/lib/ai_client/configuration.rb CHANGED
@@ -1,84 +1,83 @@
 # ai_client/configuration.rb
+#
+# Design Objective:
+#  AiClient.configure do |config|
+#    # global config items that over-ride the defaults
+#  end
+#
+#  client = AiClient.new(...) do
+#    # client specific config items that over-ride the global config
+#  end
 
+require 'hashie'
 require 'logger'
 
 class AiClient
-  # TODO: Need a centralized service where
-  #       metadata about LLMs are available
-  #       via and API call. Would hope that
-  #       the providers would add a "list"
-  #       endpoint to their API which would
-  #       return the metadata for all of their
-  #       models.
-
-  PROVIDER_PATTERNS = {
-    anthropic: /^claude/i,
-    openai: /^(gpt|davinci|curie|babbage|ada|whisper|tts|dall-e)/i,
-    google: /^(gemini|palm)/i,
-    mistral: /^(mistral|codestral)/i,
-    localai: /^local-/i,
-    ollama: /(llama-|nomic)/i
-  }
-
-  MODEL_TYPES = {
-    text_to_text: /^(nomic|gpt|davinci|curie|babbage|ada|claude|gemini|palm|command|generate|j2-|mistral|codestral)/i,
-    speech_to_text: /^whisper/i,
-    text_to_speech: /^tts/i,
-    text_to_image: /^dall-e/i
-  }
-
-  class << self
+  # TODO: Use system environment variables
+  #       AI_CLIENT_CONFIG_FILE
+  #
+  # TODO: Config.load('path/to/some_file.yml')
+  #       @@default_config (on require from lib/config.yml)
+  #       @@config (if the envar exists ?? merge with default)
+  #       @config ... done
 
-    def configure
-      yield(configuration)
-    end
+  class Config < Hashie::Mash
+    include Hashie::Extensions::Mash::PermissiveRespondTo
+    include Hashie::Extensions::Mash::SymbolizeKeys
+    include Hashie::Extensions::Mash::DefineAccessors
+
 
-    def configuration
-      @configuration ||= Configuration.new
-    end
+    # I'm not sure about this ...
+    # def provider(name, &block)
+    #   if block_given?
+    #     providers[name] = block.call
+    #   else
+    #     providers[name] || {}
+    #   end
+    # end
 
   end
 
 
+  # Class variables to hold default and current config
+  @@default_config = Config.new(
+    logger: Logger.new(STDOUT),
+    timeout: nil,
+    return_raw: false,
+    providers: {},
+    provider_patterns: {
+      anthropic: /^claude/i,
+      openai: /^(gpt|davinci|curie|babbage|ada|whisper|tts|dall-e)/i,
+      google: /^(gemini|palm)/i,
+      mistral: /^(mistral|codestral)/i,
+      localai: /^local-/i,
+      ollama: /(llama|nomic)/i
+    },
+    model_types: {
+      text_to_text: /^(nomic|gpt|davinci|curie|babbage|ada|claude|gemini|palm|command|generate|j2-|mistral|codestral)/i,
+      speech_to_text: /^whisper/i,
+      text_to_speech: /^tts/i,
+      text_to_image: /^dall-e/i
+    }
+  )
 
+  @@class_config = @@default_config.dup
 
-  # Usage example:
-  #   Configure general settings
-  #   AiClient.configure do |config|
-  #     config.logger = Logger.new('ai_client.log')
-  #     config.return_raw = true
-  #   end
-  #
-  #   Configure provider-specific settings
-  #   AiClient.configure do |config|
-  #     config.configure_provider(:openai) do
-  #       {
-  #         organization: 'org-123',
-  #         api_version: 'v1'
-  #       }
-  #     end
-  #   end
-  #
-
-  class Configuration
-    attr_accessor :logger, :timeout, :return_raw
-    attr_reader :providers, :provider_patterns, :model_types
+  class << self
+    def configure(&block)
+      yield(class_config)
+    end
 
-    def initialize
-      @logger = Logger.new(STDOUT)
-      @timeout = nil
-      @return_raw = false
-      @providers = {}
-      @provider_patterns = AiClient::PROVIDER_PATTERNS.dup
-      @model_types = AiClient::MODEL_TYPES.dup
+    def class_config
+      @@class_config
+    end
+
+    def class_config=(value)
+      @@class_config = value
     end
 
-    def provider(name, &block)
-      if block_given?
-        @providers[name] = block
-      else
-        @providers[name]&.call || {}
-      end
+    def default_config
+      @@default_config
     end
   end
-end
+end
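Since `Config` is a `Hashie::Mash` with permissive accessors, arbitrary keys can be read and written as methods. A minimal sketch of the resulting class-level API, assuming the defaults shown above; the key values are placeholders:

```ruby
require 'ai_client'

# Config keys behave like ordinary accessors on a Hashie::Mash
AiClient.configure do |config|
  config.return_raw = true
  config.timeout    = 30
end

AiClient.class_config.return_raw    #=> true
AiClient.default_config.return_raw  #=> false  (the built-in defaults are left untouched)
```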
data/lib/ai_client/middleware.rb ADDED
@@ -0,0 +1,37 @@
+# lib/ai_client/middleware.rb
+
+# TODO: As currently designed the middleware must
+#       be set before an instance of AiClient is created.
+#       Any `use` commands for middleware made after
+#       the instance is created will not be available
+#       to that instance.
+#       Change this so that middleware can be added
+#       and removed from an existing client.
+
+
+class AiClient
+
+  def call_with_middlewares(method, *args, **kwargs, &block)
+    stack = self.class.middlewares.reverse.reduce(-> { send(method, *args, **kwargs, &block) }) do |next_middleware, middleware|
+      -> { middleware.call(self, next_middleware, *args, **kwargs) }
+    end
+    stack.call
+  end
+
+
+  class << self
+
+    def middlewares
+      @middlewares ||= []
+    end
+
+    def use(middleware)
+      middlewares << middleware
+    end
+
+    def clear_middlewares
+      @middlewares = []
+    end
+  end
+
+end
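Each middleware in the stack is invoked as `middleware.call(client, next_middleware, *args, **kwargs)`, where `next_middleware` is a zero-argument callable wrapping the rest of the chain. A minimal sketch using a hypothetical `TimingMiddleware` (not part of the gem):

```ruby
# Hypothetical middleware: logs how long each wrapped call takes.
class TimingMiddleware
  def call(client, next_middleware, *_args, **_kwargs)
    started = Time.now
    result  = next_middleware.call                 # run the rest of the stack
    client.logger&.info("call took #{Time.now - started}s")
    result
  end
end

# Per the TODO above, register middleware before creating client instances.
AiClient.use(TimingMiddleware.new)
```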
data/lib/ai_client/version.rb CHANGED
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 class AiClient
-  VERSION = "0.1.0"
+  VERSION = "0.2.0"
 end
data/lib/ai_client.rb CHANGED
@@ -17,6 +17,7 @@ require_relative 'extensions/omniai-ollama'
 require_relative 'extensions/omniai-localai'
 
 require_relative 'ai_client/configuration'
+require_relative 'ai_client/middleware'
 require_relative 'ai_client/version'
 
 # Create a generic client instance using only model name
@@ -26,25 +27,53 @@ require_relative 'ai_client/version'
 # AiClient.use(RetryMiddleware.new(max_retries: 5, base_delay: 2, max_delay: 30))
 # AiClient.use(LoggingMiddleware.new(AiClient.configuration.logger))
 #
-# TODO: As concurrently designed the middleware must
-#       be set before an instance of AiClient is created.
-#       Any `use` commands for middleware made after
-#       the instance is created will not be available
-#       to that instance.
-#       Change this so that middleware can be added
-#       and removed from an existing client.
+
 
 class AiClient
 
-  attr_reader :client, :provider, :model, :model_type, :logger, :last_response, :config
+  attr_reader :client,        # OmniAI's client instance
+              :provider,      # [Symbol]
+              :model,         # [String]
+              :model_type,    # [Symbol]
+              :logger,
+              :last_response,
+              :timeout,
+              :config         # Instance configuration
+
+  # You can over-ride the class config by providing a block like this
+  #   c = AiClient.new(...) do |config|
+  #     config.logger = nil
+  #   end
+  #
+  # You can also load an instance's config from a YAML file.
+  #   c = AiClient.new('model_name', config: 'path/to/file.yml', ...)
+  #
+  # ... and you can do both: load from a file and
+  #     over-ride with a config block
+  #
+  # The options object is basically those things that the
+  # OmniAI clients want to see.
+  #
+  def initialize(model, **options, &block)
+    # Assign the instance variable @config from the class variable @@class_config
+    @config = self.class.class_config.dup
+
+    # Yield the @config to a block if given
+    yield(@config) if block_given?
+
+    # Merge in an instance-specific YAML file
+    if options.has_key?(:config)
+      @config.merge! Config.load(options[:config])
+      options.delete(:config) # :config not supported by OmniAI
+    end
+
+    @model = model
+    explicit_provider = options.fetch(:provider, config.provider)
 
-  def initialize(model, config: Configuration.new, **options)
-    @model = model
-    @config = config
-    @provider = validate_provider(options[:provider]) || determine_provider(model)
+    @provider   = validate_provider(explicit_provider) || determine_provider(model)
     @model_type = determine_model_type(model)
 
-    provider_config = @config.provider(@provider)
+    provider_config = @config.providers[@provider] || {}
 
     @logger  = options[:logger]  || @config.logger
     @timeout = options[:timeout] || @config.timeout
@@ -58,6 +87,7 @@ class AiClient
   end
 
 
+
   def response = last_response
   def raw?     = config.return_raw
 
@@ -71,7 +101,6 @@ class AiClient
   def chat(messages, **params)
     result = call_with_middlewares(:chat_without_middlewares, messages, **params)
     @last_response = result
-    # debug_me print " (raw: #{raw?}) "
     raw? ? result : content
   end
 
@@ -114,14 +143,6 @@ class AiClient
   ######################################
   ## Utilities
 
-  def call_with_middlewares(method, *args, **kwargs, &block)
-    stack = self.class.middlewares.reverse.reduce(-> { send(method, *args, **kwargs, &block) }) do |next_middleware, middleware|
-      -> { middleware.call(self, next_middleware, *args, **kwargs) }
-    end
-    stack.call
-  end
-
-
   def content
     case @provider
     when :openai, :localai, :ollama
@@ -141,21 +162,6 @@ class AiClient
   ##############################################
   ## Public Class Methods
 
-  class << self
-
-    def middlewares
-      @middlewares ||= []
-    end
-
-    def use(middleware)
-      middlewares << middleware
-    end
-
-    def clear_middlewares
-      @middlewares = []
-    end
-  end
-
   def method_missing(method_name, *args, &block)
     if @client.respond_to?(method_name)
       result = @client.send(method_name, *args, &block)
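A short sketch of the reworked constructor in use; the model name is a placeholder and the example assumes the matching provider credentials are configured:

```ruby
# Instance config starts as a dup of the class-level config;
# the block overrides apply to this instance only.
client = AiClient.new('claude-3-haiku') do |config|
  config.return_raw = true
end

client.provider    #=> :anthropic  (matched via config.provider_patterns, /^claude/i)
client.model_type  #=> :text_to_text
client.raw?        #=> true
```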
metadata CHANGED
@@ -1,15 +1,29 @@
 --- !ruby/object:Gem::Specification
 name: ai_client
 version: !ruby/object:Gem::Version
-  version: 0.1.0
+  version: 0.2.0
 platform: ruby
 authors:
 - Dewayne VanHoozer
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2024-10-03 00:00:00.000000000 Z
+date: 2024-10-05 00:00:00.000000000 Z
 dependencies:
+- !ruby/object:Gem::Dependency
+  name: hashie
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 - !ruby/object:Gem::Dependency
   name: omniai
   requirement: !ruby/object:Gem::Requirement
@@ -141,6 +155,7 @@ extensions: []
 extra_rdoc_files: []
 files:
 - ".envrc"
+- ".irbrc"
 - CHANGELOG.md
 - LICENSE
 - README.md
@@ -154,6 +169,7 @@ files:
 - lib/ai_client.rb
 - lib/ai_client/configuration.rb
 - lib/ai_client/logger_middleware.rb
+- lib/ai_client/middleware.rb
 - lib/ai_client/retry_middleware.rb
 - lib/ai_client/version.rb
 - lib/extensions/omniai-localai.rb