omniai 1.2.3 → 1.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/Gemfile +1 -1
- data/README.md +47 -1
- data/lib/omniai/chat.rb +1 -0
- data/lib/omniai/client.rb +26 -6
- data/lib/omniai/config.rb +49 -2
- data/lib/omniai/instrumentation.rb +37 -0
- data/lib/omniai/speak.rb +1 -0
- data/lib/omniai/transcribe.rb +1 -0
- data/lib/omniai/version.rb +1 -1
- metadata +2 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 28e6e2a736d78cbc0135308eb28320a85a319b78a888cd96fa3320456c3c0222
+  data.tar.gz: '044786d21ba8f2316599836af153c6dec80d055a0cbcf1e843c000296d1df350'
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: c260f1b52fe5ffec70dc5c6755426c9798062bb234da303d76ec21319d450433e95f03ca787fcfa8cd966899235416d5a83580ffd11c24615d8a58df8d64efbc
+  data.tar.gz: 0acc9d0556d0bf8de430ca3a52977c88c6a43edfe017226d1831b4ccc034d2ba3bcde38fad112d61d8910bfc9a9ee440999354919a8d4c3cb614a0d0fc3e2314
data/Gemfile
CHANGED
data/README.md
CHANGED
@@ -2,13 +2,15 @@
 
 [](https://circleci.com/gh/ksylvest/omniai)
 
-OmniAI is a flexible AI library that standardizes the APIs
+OmniAI is a flexible AI library that standardizes the APIs of many different AIs:
 
 - [OmniAI::Anthropic](https://github.com/ksylvest/omniai-anthropic)
 - [OmniAI::Google](https://github.com/ksylvest/omniai-google)
 - [OmniAI::Mistral](https://github.com/ksylvest/omniai-mistral)
 - [OmniAI::OpenAI](https://github.com/ksylvest/omniai-openai)
 
+All libraries are community maintained.
+
 ## Installation
 
 ```sh
@@ -72,6 +74,50 @@ Ollama support is offered through [OmniAI::OpenAI](https://github.com/ksylvest/o
 
 [Usage with Ollama](https://github.com/ksylvest/omniai-openai#usage-with-ollama)
 
+#### Logging
+
+Logging the **request** / **response** is configurable by passing a logger into any client:
+
+```ruby
+require 'omniai/openai'
+require 'logger'
+
+logger = Logger.new(STDOUT)
+client = OmniAI::Example::Client.new(logger:)
+```
+
+```
+[INFO]: POST https://...
+[INFO]: 200 OK
+...
+```
+
+#### Timeouts
+
+Timeouts are configurable by passing a `timeout` an integer duration for the request / response of any APIs using:
+
+```ruby
+require 'omniai/openai'
+require 'logger'
+
+logger = Logger.new(STDOUT)
+client = OmniAI::OpenAI::Client.new(timeout: 8) # i.e. 8 seconds
+```
+
+Timeouts are also be configurable by passing a `timeout` hash with `timeout` / `read` / `write` / `keys using:
+
+```ruby
+require 'omniai/openai'
+require 'logger'
+
+logger = Logger.new(STDOUT)
+client = OmniAI::OpenAI::Client.new(timeout: {
+  read: 2, # i.e. 2 seconds
+  write: 3, # i.e. 3 seconds
+  connect: 4, # i.e. 4 seconds
+})
+```
+
 ### Chat
 
 Clients that support chat (e.g. Anthropic w/ "Claude", Google w/ "Gemini", Mistral w/ "LeChat", OpenAI w/ "ChatGPT", etc) generate completions using the following calls:
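For reference, the two new options documented above also compose on a single client. A minimal sketch, assuming the separate omniai-openai provider gem is installed (the 8-second value is illustrative):

```ruby
require 'omniai/openai'
require 'logger'

# Request/response logging and an overall timeout on the same client.
client = OmniAI::OpenAI::Client.new(
  logger: Logger.new($stdout),
  timeout: 8 # i.e. 8 seconds
)
```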
data/lib/omniai/chat.rb
CHANGED
data/lib/omniai/client.rb
CHANGED
@@ -9,19 +9,36 @@ module OmniAI
   #     def initialize(api_key: ENV.fetch('OPENAI_API_KEY'), logger: nil)
   #       super
   #     end
+  #
+  #     # @return [HTTP::Client]
+  #     def connection
+  #       @connection ||= super.auth("Bearer: #{@api_key}")
+  #     end
   #   end
   class Client
     class Error < StandardError; end
 
-
+    # @return [String, nil]
+    attr_accessor :api_key
+
+    # @return [Logger, nil]
+    attr_accessor :logger
+
+    # @return [String, nil]
+    attr_accessor :host
+
+    # @return [Integer, nil]
+    attr_accessor :timeout
 
-    # @param api_key [String] optional
-    # @param host [String] optional - supports for customzing the host of the client (e.g. 'http://localhost:8080')
-    # @param logger [Logger] optional
-
+    # @param api_key [String, nil] optional
+    # @param host [String, nil] optional - supports for customzing the host of the client (e.g. 'http://localhost:8080')
+    # @param logger [Logger, nil] optional
+    # @param timeout [Integer, nil] optional
+    def initialize(api_key: nil, logger: nil, host: nil, timeout: nil)
       @api_key = api_key
       @host = host
       @logger = logger
+      @timeout = timeout
     end
 
     # @return [String]
@@ -39,7 +56,10 @@ module OmniAI
 
     # @return [HTTP::Client]
     def connection
-
+      http = HTTP.persistent(@host)
+      http = http.use(instrumentation: { instrumenter: Instrumentation.new(logger: @logger) }) if @logger
+      http = http.timeout(@timeout) if @timeout
+      http
     end
 
     # @raise [OmniAI::Error]
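Taken together, the new attributes mean `#connection` now assembles the HTTP client from `host`, an optional `logger` (wired through the new `Instrumentation` class), and an optional `timeout`. A minimal sketch of that composition; the base `OmniAI::Client` is normally subclassed by provider gems, and the host, environment variable, and timeout values below are illustrative assumptions:

```ruby
require 'omniai'
require 'logger'

client = OmniAI::Client.new(
  api_key: ENV.fetch('EXAMPLE_API_KEY', nil),  # hypothetical variable name
  host: 'http://localhost:8080',
  logger: Logger.new($stdout),
  timeout: { connect: 4, read: 2, write: 3 }   # seconds; a bare Integer also works
)

# Builds (but does not yet use) an HTTP::Client pinned to the host, with
# request/response logging via OmniAI::Instrumentation and the timeouts applied.
http = client.connection
```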
data/lib/omniai/config.rb
CHANGED
@@ -1,9 +1,56 @@
 # frozen_string_literal: true
 
 module OmniAI
-  # A configuration for each agent w/ `api_key` / `host` / `logger`.
+  # A configuration for each agent w/ `api_key` / `host` / `logger`. Usage:
+  #
+  #   OmniAI::OpenAI.config do |config|
+  #     config.api_key = '...'
+  #     config.host = 'http://localhost:8080'
+  #     config.logger = Logger.new(STDOUT)
+  #     config.timeout = 15
+  #     config.chat_options = { ... }
+  #     config.transcribe_options = { ... }
+  #     config.speak_options = { ... }
+  #   end
   class Config
-
+    # @return [String, nil]
+    attr_accessor :api_key
+
+    # @return [String, nil]
+    attr_accessor :host
+
+    # @return [Logger, nil]
+    attr_accessor :logger
+
+    # @return [Integer, Hash{Symbol => Integer}, nil]
+    # @option timeout [Integer] :read
+    # @option timeout [Integer] :write
+    # @option timeout [Integer] :connect
+    attr_accessor :timeout
+
+    # @return [Hash]
+    attr_accessor :chat_options
+
+    # @return [Hash]
+    attr_accessor :transcribe_options
+
+    # @return [Hash]
+    attr_accessor :speak_options
+
+    # @param api_key [String] optional
+    # @param host [String] optional
+    # @param logger [Logger] optional
+    # @param timeout [Integer] optional
+    def initialize(api_key: nil, host: nil, logger: nil, timeout: nil)
+      @api_key = api_key
+      @host = host
+      @logger = logger
+      @timeout = timeout
+
+      @chat_options = {}
+      @transcribe_options = {}
+      @speak_options = {}
+    end
 
     # @return [String]
     def inspect
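The comment block added above doubles as the intended usage. A minimal sketch of a global configuration, assuming the omniai-openai provider gem supplies `OmniAI::OpenAI.config` as shown in that comment; the option values, and treating `chat_options` as default parameters for chat requests, are illustrative assumptions:

```ruby
require 'omniai/openai'
require 'logger'

OmniAI::OpenAI.config do |config|
  config.api_key = ENV.fetch('OPENAI_API_KEY', nil)
  config.host = 'http://localhost:8080'
  config.logger = Logger.new($stdout)
  config.timeout = { connect: 4, read: 2, write: 3 } # or a single Integer, e.g. 15
  config.chat_options = { temperature: 0.7 }         # assumed defaults for chat calls
end
```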
data/lib/omniai/instrumentation.rb
ADDED
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module OmniAI
+  # Used for logging.
+  class Instrumentation
+    # @param logger [Logger]
+    def initialize(logger:)
+      @logger = logger
+    end
+
+    # @param name [String]
+    # @param payload [Hash]
+    # @option payload [Exception] :error
+    def instrument(name, payload = {})
+      error = payload[:error]
+      return unless error
+
+      @logger.error("#{name}: #{error.message}")
+    end
+
+    # @param name [String]
+    # @param payload [Hash]
+    # @option payload [HTTP::Request] :request
+    def start(_, payload)
+      request = payload[:request]
+      @logger.info("#{request.verb.upcase} #{request.uri}")
+    end
+
+    # @param name [String]
+    # @param payload [Hash]
+    # @option payload [HTTP::Response] :response
+    def finish(_, payload)
+      response = payload[:response]
+      @logger.info("#{response.status.code} #{response.status.reason}")
+    end
+  end
+end
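This class is the instrumenter that client.rb plugs into http.rb's `instrumentation` feature: `start` logs the outgoing verb and URI, `finish` logs the response status, and `instrument` logs any error in the payload. A minimal sketch that exercises the hooks directly with stand-in objects (the struct names, event name, and URL are hypothetical; in real use http.rb supplies `HTTP::Request` / `HTTP::Response`):

```ruby
require 'logger'
require 'omniai' # requires omniai >= 1.3, which ships OmniAI::Instrumentation

# Hypothetical stand-ins for HTTP::Request / HTTP::Response payload objects.
StubStatus   = Struct.new(:code, :reason)
StubRequest  = Struct.new(:verb, :uri)
StubResponse = Struct.new(:status)

instrumenter = OmniAI::Instrumentation.new(logger: Logger.new($stdout))

instrumenter.start('request.http', request: StubRequest.new(:post, 'https://example.test/v1/chat'))
instrumenter.finish('request.http', response: StubResponse.new(StubStatus.new(200, 'OK')))
instrumenter.instrument('request.http', error: RuntimeError.new('boom'))
# Logs at INFO/ERROR level: "POST https://example.test/v1/chat", "200 OK",
# and "request.http: boom".
```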
data/lib/omniai/speak.rb
CHANGED
data/lib/omniai/transcribe.rb
CHANGED
@@ -117,6 +117,7 @@ module OmniAI
     # @return [OmniAI::Transcribe::Transcription]
     def process!
       response = request!
+
       raise HTTPError, response.flush unless response.status.ok?
 
       text = @format.nil? || @format.eql?(Format::JSON) ? response.parse['text'] : String(response.body)
data/lib/omniai/version.rb
CHANGED
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: omniai
 version: !ruby/object:Gem::Version
-  version: 1.2.3
+  version: 1.3.1
 platform: ruby
 authors:
 - Kevin Sylvestre
@@ -80,6 +80,7 @@ files:
 - lib/omniai/chat/usage.rb
 - lib/omniai/client.rb
 - lib/omniai/config.rb
+- lib/omniai/instrumentation.rb
 - lib/omniai/speak.rb
 - lib/omniai/transcribe.rb
 - lib/omniai/transcribe/transcription.rb