omniai 1.2.3 → 1.3.0
This diff covers publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registry.
- checksums.yaml +4 -4
- data/Gemfile +1 -1
- data/README.md +76 -1
- data/lib/omniai/client.rb +26 -6
- data/lib/omniai/config.rb +49 -2
- data/lib/omniai/version.rb +1 -1
- metadata +1 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 341ce12cf0950b167a43c298f3ccb1df821bbdc469bc6824b14219bef4e38ab5
+  data.tar.gz: 9badfe7e48d20fe84054d9206e8572a333f382407c3236a3715e1d85cf6b6a84
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f81164e402f5e1e4b2ccc2e627675d42631cfda985bf91469eb32735a789eecbe69c7f10bd9d2d5b4959f2d0f2484ae1bf7ced58f607a7cae822846c831ce96c
+  data.tar.gz: '0182828dad6fb6f6d98550608c3bd57ee980ff4acbcc6dfb3ee7d61a16200566f1c8965424537583d0b73225298ca5eba6ddafd3c51791ca847f53264a7654fd'
data/Gemfile
CHANGED
data/README.md
CHANGED
@@ -2,13 +2,15 @@
 
 [](https://circleci.com/gh/ksylvest/omniai)
 
-OmniAI is a flexible AI library that standardizes the APIs
+OmniAI is a flexible AI library that standardizes the APIs of many different AIs:
 
 - [OmniAI::Anthropic](https://github.com/ksylvest/omniai-anthropic)
 - [OmniAI::Google](https://github.com/ksylvest/omniai-google)
 - [OmniAI::Mistral](https://github.com/ksylvest/omniai-mistral)
 - [OmniAI::OpenAI](https://github.com/ksylvest/omniai-openai)
 
+All libraries are community maintained.
+
 ## Installation
 
 ```sh
@@ -72,6 +74,79 @@ Ollama support is offered through [OmniAI::OpenAI](https://github.com/ksylvest/o
 
 [Usage with Ollama](https://github.com/ksylvest/omniai-openai#usage-with-ollama)
 
+#### Logging
+
+Logging the **request** / **response** is configurable by passing a logger into any client:
+
+```ruby
+require 'omniai/openai'
+require 'logger'
+
+logger = Logger.new(STDOUT)
+client = OmniAI::Example::Client.new(logger:)
+```
+
+```
+I, [...] INFO -- : > POST https://...
+D, [...] DEBUG -- : Authorization: Bearer ...
+...
+{"messages":[{"role":"user","content":"Tell me a joke!"}],"model":"..."}
+I, [...] INFO -- : < 200 OK
+D, [...] DEBUG -- : Date: ...
+...
+{
+  "id": "...",
+  "object": "...",
+  ...
+}
+```
+
+The level of the logger can be configured to either `INFO` or `DEBUG`:
+
+**INFO**:
+
+```ruby
+logger.level = Logger::INFO
+```
+
+- Request: verb / URI
+- Response: status
+
+**DEBUG**:
+
+```ruby
+logger.level = Logger::DEBUG
+```
+
+- Request: verb / URI / headers / body
+- Response: status / headers / body
+
+#### Timeouts
+
+Timeouts are configurable by passing `timeout` an integer duration (in seconds) for the request / response of any API using:
+
+```ruby
+require 'omniai/openai'
+require 'logger'
+
+logger = Logger.new(STDOUT)
+client = OmniAI::OpenAI::Client.new(timeout: 8) # i.e. 8 seconds
+```
+
+Timeouts are also configurable by passing a `timeout` hash with `read` / `write` / `connect` keys using:
+
+```ruby
+require 'omniai/openai'
+require 'logger'
+
+logger = Logger.new(STDOUT)
+client = OmniAI::OpenAI::Client.new(timeout: {
+  read: 2, # i.e. 2 seconds
+  write: 3, # i.e. 3 seconds
+  connect: 4, # i.e. 4 seconds
+})
+```
+
 ### Chat
 
 Clients that support chat (e.g. Anthropic w/ "Claude", Google w/ "Gemini", Mistral w/ "LeChat", OpenAI w/ "ChatGPT", etc) generate completions using the following calls:
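The logging and timeout options introduced above compose on a single client. A minimal sketch combining them, assuming the `OmniAI::OpenAI::Client` from the linked omniai-openai adapter (the adapter itself is not part of this diff, and the credential is read from the environment by that adapter):

```ruby
# Sketch: combine the README's logging and timeout options on one client.
# OmniAI::OpenAI::Client comes from the separate omniai-openai adapter gem.
require 'omniai/openai'
require 'logger'

logger = Logger.new($stdout)
logger.level = Logger::DEBUG # INFO: verb/URI + status; DEBUG: adds headers and bodies

client = OmniAI::OpenAI::Client.new(
  logger:,
  timeout: { read: 2, write: 3, connect: 4 } # per-phase timeouts in seconds
)
```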
data/lib/omniai/client.rb
CHANGED
@@ -9,19 +9,36 @@ module OmniAI
   #   def initialize(api_key: ENV.fetch('OPENAI_API_KEY'), logger: nil)
   #     super
   #   end
+  #
+  #   # @return [HTTP::Client]
+  #   def connection
+  #     @connection ||= super.auth("Bearer: #{@api_key}")
+  #   end
   # end
   class Client
     class Error < StandardError; end
 
-
+    # @return [String, nil]
+    attr_accessor :api_key
+
+    # @return [Logger, nil]
+    attr_accessor :logger
+
+    # @return [String, nil]
+    attr_accessor :host
+
+    # @return [Integer, nil]
+    attr_accessor :timeout
 
-    # @param api_key [String] optional
-    # @param host [String] optional - supports for customzing the host of the client (e.g. 'http://localhost:8080')
-    # @param logger [Logger] optional
-
+    # @param api_key [String, nil] optional
+    # @param host [String, nil] optional - supports for customzing the host of the client (e.g. 'http://localhost:8080')
+    # @param logger [Logger, nil] optional
+    # @param timeout [Integer, nil] optional
+    def initialize(api_key: nil, logger: nil, host: nil, timeout: nil)
       @api_key = api_key
       @host = host
       @logger = logger
+      @timeout = timeout
     end
 
     # @return [String]
@@ -39,7 +56,10 @@ module OmniAI
 
     # @return [HTTP::Client]
     def connection
-
+      http = HTTP.persistent(@host)
+      http = http.use(logging: { logger: @logger }) if @logger
+      http = http.timeout(@timeout) if @timeout
+      http
     end
 
     # @raise [OmniAI::Error]
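Per the updated class comment, concrete adapters subclass `OmniAI::Client`, forward their settings through `super`, and decorate the `HTTP::Client` that `connection` now builds (`HTTP.persistent(@host)` plus the optional logging feature and timeout). A sketch of such a subclass; the `OmniAI::Example` namespace, host, and `EXAMPLE_API_KEY` variable are illustrative placeholders, not part of this diff:

```ruby
# Sketch of a hypothetical adapter built on the new OmniAI::Client:
# it forwards its settings via `super` and layers auth onto the HTTP::Client
# that OmniAI::Client#connection builds (persistent host + logging + timeout).
require 'omniai'

module OmniAI
  module Example
    class Client < OmniAI::Client
      def initialize(api_key: ENV.fetch('EXAMPLE_API_KEY'), host: 'https://api.example.com', logger: nil, timeout: nil)
        super
      end

      # @return [HTTP::Client]
      def connection
        @connection ||= super.auth("Bearer #{@api_key}")
      end
    end
  end
end
```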
data/lib/omniai/config.rb
CHANGED
@@ -1,9 +1,56 @@
 # frozen_string_literal: true
 
 module OmniAI
-  # A configuration for each agent w/ `api_key` / `host` / `logger`.
+  # A configuration for each agent w/ `api_key` / `host` / `logger`. Usage:
+  #
+  #   OmniAI::OpenAI.config do |config|
+  #     config.api_key = '...'
+  #     config.host = 'http://localhost:8080'
+  #     config.logger = Logger.new(STDOUT)
+  #     config.timeout = 15
+  #     config.chat_options = { ... }
+  #     config.transcribe_options = { ... }
+  #     config.speak_options = { ... }
+  #   end
   class Config
-
+    # @return [String, nil]
+    attr_accessor :api_key
+
+    # @return [String, nil]
+    attr_accessor :host
+
+    # @return [Logger, nil]
+    attr_accessor :logger
+
+    # @return [Integer, Hash{Symbol => Integer}, nil]
+    # @option timeout [Integer] :read
+    # @option timeout [Integer] :write
+    # @option timeout [Integer] :connect
+    attr_accessor :timeout
+
+    # @return [Hash]
+    attr_accessor :chat_options
+
+    # @return [Hash]
+    attr_accessor :transcribe_options
+
+    # @return [Hash]
+    attr_accessor :speak_options
+
+    # @param api_key [String] optional
+    # @param host [String] optional
+    # @param logger [Logger] optional
+    # @param timeout [Integer] optional
+    def initialize(api_key: nil, host: nil, logger: nil, timeout: nil)
+      @api_key = api_key
+      @host = host
+      @logger = logger
+      @timeout = timeout
+
+      @chat_options = {}
+      @transcribe_options = {}
+      @speak_options = {}
+    end
 
     # @return [String]
     def inspect
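The expanded `Config` carries the same `api_key` / `host` / `logger` / `timeout` settings as the client, plus per-call option hashes. A sketch of the block-style configuration shown in the class comment above, assuming the OpenAI adapter's `.config` block; the `chat_options` value is illustrative, and this diff does not show how the option hashes are consumed:

```ruby
# Sketch: global defaults via the block-style configuration from the class comment.
# Assumes the omniai-openai adapter exposes OmniAI::OpenAI.config.
require 'omniai/openai'
require 'logger'

OmniAI::OpenAI.config do |config|
  config.api_key = ENV.fetch('OPENAI_API_KEY')
  config.host = 'http://localhost:8080'        # e.g. a proxy or local server
  config.logger = Logger.new($stdout)
  config.timeout = 15                          # or { read: 2, write: 3, connect: 4 }
  config.chat_options = { temperature: 0.7 }   # illustrative default chat options
end
```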
data/lib/omniai/version.rb
CHANGED