ai_client 0.2.3 → 0.2.5
- checksums.yaml +4 -4
- data/CHANGELOG.md +11 -0
- data/README.md +1 -2
- data/examples/README.md +1 -0
- data/examples/tools.rb +90 -0
- data/lib/ai_client/config.yml +1 -1
- data/lib/ai_client/configuration.rb +31 -2
- data/lib/ai_client/llm.rb +25 -13
- data/lib/ai_client/logger_middleware.rb +13 -0
- data/lib/ai_client/middleware.rb +25 -2
- data/lib/ai_client/models.yml +4839 -0
- data/lib/ai_client/open_router_extensions.rb +98 -10
- data/lib/ai_client/retry_middleware.rb +15 -0
- data/lib/ai_client/tool.rb +18 -0
- data/lib/ai_client/version.rb +1 -1
- data/lib/ai_client.rb +68 -3
- metadata +7 -4
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f2b330f59ff7f380306ec354ce5f7197adce8dd101a4b506ab137568f098242a
+  data.tar.gz: f169296b9f20479b1c608f2202d9ab8a8eb416aa80b07f4308ec4648b9c86fcc
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: ce0e2cddd8ec6935a5bea24125358fbcc8fda902340d7767972142505af41eaeef7d7f6f5791bd7d26469fd45e59020237fd0061e75d6a987d8a4181597cdbe9
+  data.tar.gz: 4b2513fba82802eda13cf26c1af0e73f0903a2140765401ed686540d80480d887f2152e342a5fe3546c89482320f61cc6bd04b4e41b8851668bbda31b2496fc9
data/CHANGELOG.md
CHANGED
@@ -1,6 +1,17 @@
 ## [Unreleased]
 
 ## Released
+
+### [0.2.5] - 2024-10-11
+- Added examples/tool.rb to demonstrate use of function callbacks to provide information to the LLM when it needs it.
+
+### [0.2.4] - 2024-10-10
+- constrained gem omniai-openai to version 1.8.3+ for access to open_router.ai
+- caching models database from open_router.ai
+- added class methods reset_default_config and reset_llm_data
+- support for open_router.ai should be fully integrated now that omniai-openai is at version 1.8.3
+
+
 ### [0.2.3] - 2024-10-08
 - refactored the OmniAI extensions for Ollama, LocalAI and OpenRouter
 - added a file for OpenRouter extensions
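For orientation, here is a minimal sketch of how the 0.2.4 additions above might be exercised. It is not part of the diff; it assumes the gem is installed with an OpenRouter access token configured, and uses the class methods and cached models database introduced in the file diffs below.

require 'ai_client'

# Rewrite the default configuration file from the built-in defaults.
AiClient.reset_default_config

# Refresh the cached models database (models.yml) from open_router.ai.
AiClient.reset_llm_data

# Query the cached models without another network call.
AiClient::LLM.where(id: /mistral/i).each do |llm|
  puts "#{llm.provider} / #{llm.model}"
end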
data/README.md
CHANGED
@@ -284,8 +284,7 @@ One of the latest innovations in LLMs is the ability to use functions (aka tools
 
 See [blog post](https://ksylvest.com/posts/2024-08-16/using-omniai-to-leverage-tools-with-llms) by Kevin Sylvestre, author of the OmniAI gem.
 
-
-TODO: Need to create an example RAG that does not need another access token to a service
+Take a look at the [examples/tools.rb](examples/tools.rb) file to see different ways in which these callable processes can be defined.
 
 
 ## Best ?? Practices
data/examples/README.md
CHANGED
@@ -7,6 +7,7 @@
 | embed.rb | Demonstrates using Ollama locally to vectorize text for embeddings|
 | speak.rb | Demonstrates using OpenAI's text to speech models |
 | text.rb | Demonstrates text-to-text transformers "chat" |
+| tools.rb | Demonstrates usage of functional callbacks (i.e. tools) |
 | transcribe.rb | Uses OpenAI's audio-to-text model |
 
 Many of these example programs show both the raw response object as well as just the
data/examples/tools.rb
ADDED
@@ -0,0 +1,90 @@
+#!/usr/bin/env ruby
+# examples/tools.rb
+# See: https://ksylvest.com/posts/2024-08-16/using-omniai-to-leverage-tools-with-llms
+
+require_relative 'common'
+
+AI = AiClient.new('gpt-4o')
+
+box "omniai-openai's random temp example"
+
+my_weather_function = Proc.new do |location:, unit: 'Celsius'|
+  "#{rand(20..50)}° #{unit} in #{location}"
+end
+
+weather = AiClient::Tool.new(
+  my_weather_function,
+  name: 'weather',
+  description: 'Lookup the weather in a location',
+  parameters: AiClient::Tool::Parameters.new(
+    properties: {
+      location: AiClient::Tool::Property.string(description: 'e.g. Toronto'),
+      unit: AiClient::Tool::Property.string(enum: %w[Celsius Fahrenheit]),
+    },
+    required: %i[location]
+  )
+)
+
+simple_prompt = <<~TEXT
+  What is the weather in "London" in Celsius and "Paris" in Fahrenheit?
+  Also what are some ideas for activities in both cities given the weather?
+TEXT
+
+response = AI.chat(simple_prompt, tools: [weather])
+puts response
+
+##########################################
+box "Accessing a database to get information"
+
+llm_db_function = Proc.new do |params|
+  records = AiClient::LLM.where(id: /#{params[:model_name]}/i)
+  records.inspect
+end
+
+
+llm_db = AiClient::Tool.new(
+  llm_db_function,
+  name: 'llm_db',
+  description: 'lookup details about an LLM model name',
+  parameters: AiClient::Tool::Parameters.new(
+    properties: {
+      model_name: AiClient::Tool::Property.string
+    },
+    required: %i[model_name]
+  )
+)
+
+response = AI.chat("Get details on an LLM model named bison. Which one is the cheapest per prompt token.", tools: [llm_db])
+puts response
+
+##########################################
+
+# TODO: Look at creating a better function
+#       process such that the tools parameter
+#       is an Array of Symbols which is
+#       maintained as a class variable.
+#       The symboles are looked up and the
+#       proper instance is inserted in its
+#       place.
+
+box "Using a function class and multiple tools"
+
+class FunctionClass
+  def self.call
+    "April 25th its not to hot nor too cold."
+  end
+
+  def function(my_name)
+    AiClient::Tool.new(
+      self.class, # with a self.call method
+      name: my_name,
+      description: 'what is the perfect date'
+    )
+  end
+end
+
+perfect_date = FunctionClass.new.function('perfect_date')
+
+response = AI.chat("what is the perfect date for paris weather?", tools: [weather, perfect_date])
+puts response
+puts
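The TODO comment in the file above floats the idea of letting the tools: parameter take an array of symbols that get resolved from a class-level registry. A rough, purely hypothetical sketch of that idea (none of these registry methods exist in the gem) might look like:

class AiClient
  class Tool
    class << self
      # Hypothetical class-level registry of named tools.
      def registry
        @registry ||= {}
      end

      # Register a tool instance under a symbolic name.
      def register(name, tool)
        registry[name.to_sym] = tool
      end

      # Resolve an array that may mix Symbols and Tool instances.
      def resolve(tools)
        Array(tools).map { |t| t.is_a?(Symbol) ? registry.fetch(t) : t }
      end
    end
  end
end

# Usage idea: AiClient::Tool.register(:weather, weather)
#             AI.chat(prompt, tools: AiClient::Tool.resolve(%i[weather llm_db]))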
data/lib/ai_client/configuration.rb
CHANGED
@@ -92,7 +92,11 @@ class AiClient
     include Hashie::Extensions::Mash::SymbolizeKeys
     include Hashie::Extensions::Mash::DefineAccessors
 
-
+    # Saves the current configuration to the specified file.
+    #
+    # @param filepath [String] The path to the file where the configuration will be saved.
+    #   Defaults to '~/aiclient_config.yml' if not provided.
+    #
     def save(filepath=ENV['HOME']+'/aiclient_config.yml')
       filepath = Pathname.new(filepath) unless filepath.is_a? Pathname
 

@@ -101,6 +105,13 @@ class AiClient
 
 
     class << self
+      # Loads configuration from the specified YAML file.
+      #
+      # @param filepath [String] The path to the configuration file.
+      #   Defaults to 'config.yml' if not provided.
+      # @return [AiClient::Config] The loaded configuration.
+      # @raise [ArgumentError] If the specified file does not exist.
+      #
       def load(filepath=DEFAULT_CONFIG_FILEPATH)
         filepath = Pathname.new(filepath) unless Pathname == filepath.class
         if filepath.exist?

@@ -115,12 +126,30 @@ class AiClient
   class << self
     attr_accessor :class_config, :default_config
 
+    # Configures the AiClient with a given block.
+    #
+    # @yieldparam config [AiClient::Config] The configuration instance.
+    # @return [void]
+    #
     def configure(&block)
       yield(class_config)
     end
 
-
+    # Resets the default configuration to the value defined in the class.
+    #
+    # @return [void]
+    #
+    def reset_default_config
+      initialize_defaults
+        .save(Config::DEFAULT_CONFIG_FILEPATH)
+    end
 
+    private
+
+    # Initializes the default configuration.
+    #
+    # @return [void]
+    #
     def initialize_defaults
       @default_config = Config.new(
         logger: Logger.new(STDOUT),
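A minimal usage sketch (not part of the diff) for the methods documented above. It assumes class_config holds an AiClient::Config instance; the logger assignment mirrors the default set in initialize_defaults, and the explicit file path is only an illustration.

require 'ai_client'
require 'logger'

# Adjust the class-level configuration via the block form.
AiClient.configure do |config|
  config.logger = Logger.new(STDOUT)
end

# Persist the current configuration (defaults to ~/aiclient_config.yml).
AiClient.class_config.save

# Later, reload a configuration from a YAML file.
config = AiClient::Config.load(ENV['HOME'] + '/aiclient_config.yml')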
data/lib/ai_client/llm.rb
CHANGED
@@ -1,26 +1,38 @@
 # lib/ai_client/llm.rb
 
 require 'active_hash'
+require 'yaml'
 
-class AiClient
-
-  # TODO: Think about this for the OpenRouter modesl DB
-  # Might cahnge this to ActiveYaml
 
+class AiClient
   class LLM < ActiveHash::Base
-
+    DATA_PATH = Pathname.new( __dir__ + '/models.yml')
+    self.data = YAML.parse(DATA_PATH.read).to_ruby
 
+    # Extracts the model name from the LLM ID.
+    #
+    # @return [String] the model name.
+    #
     def model = id.split('/')[1]
-    def provider = id.split('/')[0]
 
-
-
-
-
+    # Extracts the provider name from the LLM ID.
+    #
+    # @return [String] the provider name.
+    #
+    def provider = id.split('/')[0]
+  end
 
-
-
-
+  class << self
+
+    # Resets the LLM data by fetching models from the Orc client
+    # and writing it to the models.yml file.
+    #
+    # @return [void]
+    #
+    def reset_llm_data
+      orc_models = AiClient.orc_client.models
+      AiClient::LLM.data = orc_models
+      AiClient::LLM::DATA_PATH.write(orc_models.to_yaml)
     end
   end
 end
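A short sketch (not part of the diff) of querying the cached models, using the same regexp where pattern as examples/tools.rb above together with the new model/provider helpers:

require 'ai_client'

# Find cached model records whose id matches a pattern,
# then use the model/provider helpers on each record.
AiClient::LLM.where(id: /gpt-4o/i).each do |llm|
  puts "provider: #{llm.provider}  model: #{llm.model}"
end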
data/lib/ai_client/logger_middleware.rb
CHANGED
@@ -16,10 +16,23 @@ class AiClient
 # )
 
   class LoggingMiddleware
+
+    # Initializes the LoggingMiddleware with a logger.
+    #
+    # @param logger [Logger] The logger used for logging middleware actions.
+    #
     def initialize(logger)
       @logger = logger
     end
 
+    # Calls the next middleware in the stack while logging the start and finish times.
+    #
+    # @param client [Object] The client instance.
+    # @param next_middleware [Proc] The next middleware to call.
+    # @param args [Array] The arguments passed to the middleware call, with the first being the method name.
+    #
+    # @return [Object] The result of the next middleware call.
+    #
     def call(client, next_middleware, *args)
       method_name = args.first.is_a?(Symbol) ? args.first : 'unknown method'
       @logger.info("Starting #{method_name} call")
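A small sketch (not part of the diff) of wiring this middleware in, assuming the AiClient.use class method from middleware.rb below and that client calls are routed through call_with_middlewares:

require 'ai_client'
require 'logger'

# Register the logging middleware for all AiClient instances.
AiClient.use(AiClient::LoggingMiddleware.new(Logger.new(STDOUT)))

client = AiClient.new('gpt-4o')
puts client.chat('Say hello')  # start/finish of the call should be logged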
data/lib/ai_client/middleware.rb
CHANGED
@@ -8,9 +8,19 @@
 # Change this so that middleware can be added
 # and removed from an existing client.
 
-
+# AiClient class that handles middleware functionality
+# for API calls.
 class AiClient
 
+  # Calls the specified method with middlewares applied.
+  #
+  # @param method [Symbol] the name of the method to be called
+  # @param args [Array] additional arguments for the method
+  # @param kwargs [Hash] named parameters for the method
+  # @param block [Proc] optional block to be passed to the method
+  #
+  # @return [Object] result of the method call after applying middlewares
+  #
   def call_with_middlewares(method, *args, **kwargs, &block)
     stack = self.class.middlewares.reverse.reduce(-> { send(method, *args, **kwargs, &block) }) do |next_middleware, middleware|
       -> { middleware.call(self, next_middleware, *args, **kwargs) }

@@ -21,17 +31,30 @@ class AiClient
 
   class << self
 
+    # Returns the list of middlewares applied to the client.
+    #
+    # @return [Array] list of middlewares
+    #
     def middlewares
       @middlewares ||= []
     end
 
+    # Adds a middleware to the stack.
+    #
+    # @param middleware [Proc] the middleware to be added
+    #
+    # @return [void]
+    #
     def use(middleware)
       middlewares << middleware
     end
 
+    # Clears all middlewares from the client.
+    #
+    # @return [void]
+    #
     def clear_middlewares
       @middlewares = []
    end
  end
-
 end
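To round out the middleware changes, a rough sketch (not from the gem) of a custom middleware that follows the call(client, next_middleware, *args) contract used by call_with_middlewares above:

# Hypothetical timing middleware; anything responding to
# call(client, next_middleware, *args) can be registered with AiClient.use.
class TimingMiddleware
  def call(_client, next_middleware, *args)
    started = Time.now
    result  = next_middleware.call
    warn "#{args.first || 'call'} took #{(Time.now - started).round(3)}s"
    result
  end
end

AiClient.use(TimingMiddleware.new)

# Remove all registered middlewares when they are no longer wanted.
AiClient.clear_middlewares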