omniai 1.3.0 → 1.4.0
- checksums.yaml +4 -4
- data/README.md +37 -31
- data/exe/omniai +7 -0
- data/lib/omniai/chat.rb +10 -2
- data/lib/omniai/cli/base_handler.rb +38 -0
- data/lib/omniai/cli/chat_handler.rb +59 -0
- data/lib/omniai/cli.rb +64 -0
- data/lib/omniai/client.rb +63 -3
- data/lib/omniai/instrumentation.rb +37 -0
- data/lib/omniai/speak.rb +1 -0
- data/lib/omniai/transcribe.rb +1 -0
- data/lib/omniai/version.rb +1 -1
- data/lib/omniai.rb +2 -0
- metadata +9 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 8b3d66712c9a2fd460abf00c29b056b87ff8cdd08c8b38d69ece0755d8c76578
+  data.tar.gz: f0fdf7b20e3f3cc3c4e2bee6d21b68bd917d9267c9f1297dec91f2f8bccc71e9
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 98c6c3380181aaf7ad858968b998adab49f8cd200d94755ff6cf79423378dd401c4c538147aeb416b023c47fe7f1806a8742a9486429e90bcc487eb8944cba52
+  data.tar.gz: f251bb7436e9536275a285508f63964693a067378f55a72b3cf01fb9cb801f55ef96fd8a64c9b4e5790c6c400ee6adcee64f6dc5dc413e92b42d5f869afce45f
data/README.md
CHANGED
@@ -87,40 +87,11 @@ client = OmniAI::Example::Client.new(logger:)
 ```
 
 ```
-
-
+[INFO]: POST https://...
+[INFO]: 200 OK
 ...
-{"messages":[{"role":"user","content":"Tell me a joke!"}],"model":"..."}
-I, [...] INFO -- : < 200 OK
-D, [...] DEBUG -- : Date: ...
-...
-{
-  "id": "...",
-  "object": "...",
-  ...
-}
 ```
 
-The level of the logger can be configured to either `INFO` and `DEBUG`:
-
-**INFO**:
-
-```ruby
-logger.level = Logger::INFO
-```
-
-- Request: verb / URI
-- Response: status
-
-**DEBUG**:
-
-```ruby
-logger.level = Logger::DEBUG
-```
-
-- Request: verb / URI / headers / body
-- Response: status / headers / body
-
 #### Timeouts
 
 Timeouts are configurable by passing a `timeout` an integer duration for the request / response of any APIs using:
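The condensed `[INFO]` output above comes from attaching a standard-library `Logger` to a client, which now drives the HTTP instrumentation added in this release. A minimal sketch, assuming the `omniai-openai` provider gem is installed and configured (the exact log prefix depends on the Logger formatter):

```ruby
require 'logger'
require 'omniai/openai'

# With a logger attached, each request logs its verb and URI and each
# response logs its status, both at INFO level.
logger = Logger.new($stdout)
client = OmniAI::OpenAI::Client.new(logger:)

client.chat('Tell me a joke!')
# INFO -- : POST https://...
# INFO -- : 200 OK
```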
@@ -222,3 +193,38 @@ tempfile = client.speak('The quick brown fox jumps over a lazy dog.', voice: 'HA
 tempfile.close
 tempfile.unlink
 ```
+
+## CLI
+
+OmniAI packages a basic command line interface (CLI) to allow for exploration of various APIs. Detailed CLI documentation can be found via help:
+
+```bash
+omniai --help
+```
+
+### Chat
+
+#### w/ a Prompt
+
+```bash
+omniai chat "What is the coldest place on earth?"
+```
+
+```
+The coldest place on earth is Antarctica.
+```
+
+#### w/o a Prompt
+
+```bash
+omniai chat --provider="openai" --model="gpt-4" --temperature="0.5"
+```
+
+```
+Type 'exit' or 'quit' to abort.
+# What is the warmest place on earth?
+```
+
+```
+The warmest place on earth is Africa.
+```
data/exe/omniai
ADDED
data/lib/omniai/chat.rb
CHANGED
@@ -45,7 +45,7 @@ module OmniAI
     # @param client [OmniAI::Client] the client
     # @param model [String] required
     # @param temperature [Float, nil] optional
-    # @param stream [Proc, nil] optional
+    # @param stream [Proc, IO, nil] optional
     # @param format [Symbol, nil] optional - :json
     def initialize(messages, client:, model:, temperature: nil, stream: nil, format: nil)
       @messages = messages
@@ -59,6 +59,7 @@ module OmniAI
     # @raise [HTTPError]
     def process!
       response = request!
+
       raise HTTPError, response.flush unless response.status.ok?
 
       parse!(response:)
@@ -96,7 +97,14 @@ module OmniAI
     def stream!(response:)
       raise Error, "#{self.class.name}#stream! unstreamable" unless @stream
 
-      Stream.new(response:).stream!
+      Stream.new(response:).stream! do |chunk|
+        case @stream
+        when IO then @stream << chunk
+        else @stream.call(chunk)
+        end
+      end
+
+      @stream.flush if @stream.is_a?(IO)
     end
 
     # @return [Array<Hash>]
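With this change `stream:` accepts either an IO or a Proc. A minimal sketch of both forms, assuming the `omniai-openai` provider gem is installed (the chunk yielded to a Proc is whatever the provider's `Stream` produces):

```ruby
require 'omniai/openai'

client = OmniAI::OpenAI::Client.new

# IO form: each chunk is appended with `<<` and the IO is flushed at the end.
# This is what the new CLI uses to stream replies to $stdout.
client.chat('Tell me a joke!', stream: $stdout)

# Proc form: handle each chunk yourself.
client.chat('Tell me a joke!', stream: ->(chunk) { $stdout << chunk })
```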
data/lib/omniai/cli/base_handler.rb
ADDED
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+module OmniAI
+  class CLI
+    # A generic handler for CLI commands (e.g. 'omniai chat').
+    class BaseHandler
+      # @param stdin [IO] an optional stream for stdin
+      # @param stdout [IO] an optional stream for stdout
+      # @param provider [String] an optional provider (defaults to 'openai')
+      # @param argv [Array<String>]
+      def self.handle!(argv:, stdin: $stdin, stdout: $stdout, provider: 'openai')
+        new(stdin:, stdout:, provider:).handle!(argv:)
+      end
+
+      # @param stdin [IO] an optional stream for stdin
+      # @param stdout [IO] an optional stream for stdout
+      # @param provider [String] an optional provider (defaults to 'openai')
+      def initialize(stdin: $stdin, stdout: $stdout, provider: 'openai')
+        @stdin = stdin
+        @stdout = stdout
+        @provider = provider
+        @args = {}
+      end
+
+      # @param argv [Array<String>]
+      def handle!(argv:)
+        raise NotImplementedError, "#{self.class}#handle! undefined"
+      end
+
+      private
+
+      # @return [OmniAI::Client]
+      def client
+        @client ||= OmniAI::Client.find(provider: @provider)
+      end
+    end
+  end
+end
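`BaseHandler` leaves `#handle!` abstract and memoizes a client built via `OmniAI::Client.find`. A hypothetical subclass sketch (not part of this release) illustrating the contract:

```ruby
module OmniAI
  class CLI
    # Hypothetical example only: echoes its arguments back to stdout to show
    # the BaseHandler contract (override #handle!; @stdout and #client are provided).
    class EchoHandler < BaseHandler
      # @param argv [Array<String>]
      def handle!(argv:)
        @stdout.puts(argv.join(' '))
      end
    end
  end
end

# OmniAI::CLI::EchoHandler.handle!(argv: %w[hello world]) # => prints "hello world"
```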
data/lib/omniai/cli/chat_handler.rb
ADDED
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+module OmniAI
+  class CLI
+    # Used for CLI usage of 'omniai chat'.
+    class ChatHandler < BaseHandler
+      def handle!(argv:)
+        parser.parse!(argv)
+
+        if argv.empty?
+          listen!
+        else
+          chat(prompt: argv.join(' '))
+        end
+      end
+
+      private
+
+      def listen!
+        @stdout.puts('Type "exit" or "quit" to leave.')
+
+        loop do
+          @stdout.print('# ')
+          prompt = @stdin.gets&.chomp
+
+          break if prompt.nil? || prompt.match?(/\A(exit|quit)\z/i)
+
+          chat(prompt:)
+        rescue Interrupt
+          break
+        end
+      end
+
+      # @param prompt [String]
+      def chat(prompt:)
+        client.chat(prompt, **@args, stream: @stdout)
+      end
+
+      # @return [OptionParser]
+      def parser
+        OptionParser.new do |options|
+          options.banner = 'usage: omniai chat [options] "<prompt>"'
+
+          options.on('-h', '--help', 'help') do
+            @stdout.puts(options)
+            exit
+          end
+
+          options.on('-p', '--provider=PROVIDER', 'provider') { |provider| @provider = provider }
+          options.on('-m', '--model=MODEL', 'model') { |model| @args[:model] = model }
+          options.on('-t', '--temperature=TEMPERATURE', Float, 'temperature') do |temperature|
+            @args[:temperature] = temperature
+          end
+          options.on('-f', '--format=FORMAT', 'format') { |format| @args[:format] = format.intern }
+        end
+      end
+    end
+  end
+end
data/lib/omniai/cli.rb
ADDED
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'optparse'
+
+module OmniAI
+  # Used when interacting with the suite from the command line interface (CLI).
+  #
+  # Usage:
+  #
+  #   cli = OmniAI::CLI.new
+  #   cli.parse
+  class CLI
+    ChatArgs = Struct.new(:provider, :model, :temperature)
+
+    # @param stdin [IO] a stream
+    # @param stdout [IO] a stream
+    # @param provider [String] a provider
+    def initialize(stdin: $stdin, stdout: $stdout, provider: 'openai')
+      @stdin = stdin
+      @stdout = stdout
+      @provider = provider
+      @args = {}
+    end
+
+    def parse(argv = ARGV)
+      parser.order!(argv)
+      command = argv.shift
+      return if command.nil?
+
+      case command
+      when 'chat' then ChatHandler.handle!(stdin: @stdin, stdout: @stdout, provider: @provider, argv:)
+      else raise Error, "unsupported command=#{command.inspect}"
+      end
+    end
+
+    private
+
+    # @return [OptionParser]
+    def parser
+      OptionParser.new do |options|
+        options.banner = 'usage: omniai [options] <command> [<args>]'
+
+        options.on('-h', '--help', 'help') do
+          @stdout.puts(options)
+          exit
+        end
+
+        options.on('-v', '--version', 'version') do
+          @stdout.puts(VERSION)
+          exit
+        end
+
+        options.on('-p', '--provider=PROVIDER', 'provider (default="openai")') do |provider|
+          @provider = provider
+        end
+
+        options.separator <<~COMMANDS
+          commands:
+            chat
+        COMMANDS
+      end
+    end
+  end
+end
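The new `exe/omniai` binstub (contents not shown in this diff) presumably just drives this class with `ARGV`. A minimal sketch of using it directly, matching the usage note in the class comment and assuming a provider gem such as `omniai-openai` is installed and configured:

```ruby
require 'omniai'

# Equivalent to `omniai chat "Tell me a joke!"` on the command line; the
# 'chat' command is dispatched to OmniAI::CLI::ChatHandler.
cli = OmniAI::CLI.new(provider: 'openai')
cli.parse(['chat', 'Tell me a joke!'])
```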
data/lib/omniai/client.rb
CHANGED
@@ -16,8 +16,6 @@ module OmniAI
   #   end
   # end
   class Client
-    class Error < StandardError; end
-
     # @return [String, nil]
     attr_accessor :api_key
 
@@ -30,6 +28,68 @@ module OmniAI
     # @return [Integer, nil]
     attr_accessor :timeout
 
+    # Initialize a client for Anthropic. This method requires the provider if it is undefined.
+    #
+    # @raise [OmniAI::Error] if the provider is not defined and the gem is not installed
+    # @return [Class<OmniAI::Client>]
+    def self.anthropic
+      require 'omniai/anthropic' unless defined?(OmniAI::Anthropic::Client)
+      OmniAI::Anthropic::Client
+    rescue LoadError
+      raise Error, "requires 'omniai-anthropic': `gem install omniai-anthropic`"
+    end
+
+    # Lookup the `OmniAI::Google::Client`. This method requires the provider if it is undefined.
+    #
+    # @raise [OmniAI::Error] if the provider is not defined and the gem is not installed
+    # @return [Class<OmniAI::Client>]
+    def self.google
+      require 'omniai/google' unless defined?(OmniAI::Google::Client)
+      OmniAI::Google::Client
+    rescue LoadError
+      raise Error, "requires 'omniai-google': `gem install omniai-google`"
+    end
+
+    # Initialize a client for Mistral. This method requires the provider if it is undefined.
+    #
+    # @raise [OmniAI::Error] if the provider is not defined and the gem is not installed
+    # @return [Class<OmniAI::Client>]
+    def self.mistral
+      require 'omniai/mistral' unless defined?(OmniAI::Mistral::Client)
+      OmniAI::Mistral::Client
+    rescue LoadError
+      raise Error, "requires 'omniai-mistral': `gem install omniai-mistral`"
+    end
+
+    # Initialize a client for OpenAI. This method requires the provider if it is undefined.
+    #
+    # @raise [OmniAI::Error] if the provider is not defined and the gem is not installed
+    # @return [Class<OmniAI::Client>]
+    def self.openai
+      require 'omniai/openai' unless defined?(OmniAI::OpenAI::Client)
+      OmniAI::OpenAI::Client
+    rescue LoadError
+      raise Error, "requires 'omniai-openai': `gem install omniai-openai`"
+    end
+
+    # Initialize a client by provider (e.g. 'openai'). This method attempts to require the provider.
+    #
+    # @raise [OmniAI::Error] if the provider is not defined and the gem is not installed
+    # @param provider [String] required (e.g. 'anthropic', 'google', 'mistral', 'openai', etc)
+    # @return [OmniAI::Client]
+    def self.find(provider:, **)
+      klass =
+        case provider
+        when 'anthropic' then anthropic
+        when 'google' then google
+        when 'mistral' then mistral
+        when 'openai' then openai
+        else raise Error, "unknown provider=#{provider.inspect}"
+        end
+
+      klass.new(**)
+    end
+
     # @param api_key [String, nil] optional
     # @param host [String, nil] optional - supports customizing the host of the client (e.g. 'http://localhost:8080')
     # @param logger [Logger, nil] optional
@@ -57,7 +117,7 @@ module OmniAI
     # @return [HTTP::Client]
     def connection
       http = HTTP.persistent(@host)
-      http = http.use(
+      http = http.use(instrumentation: { instrumenter: Instrumentation.new(logger: @logger) }) if @logger
       http = http.timeout(@timeout) if @timeout
       http
     end
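`Client.find` requires the matching provider gem on demand and forwards keyword arguments to its constructor. A minimal sketch, assuming the `omniai-mistral` gem is installed and configured:

```ruby
require 'omniai'

# Resolves 'mistral' to OmniAI::Mistral::Client, requiring 'omniai/mistral'
# if it has not been loaded yet; extra keywords go to the client constructor.
client = OmniAI::Client.find(provider: 'mistral')
client.chat('Tell me a joke!')

# An unrecognized provider raises OmniAI::Error:
# OmniAI::Client.find(provider: 'other') # => Error: unknown provider="other"
```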
data/lib/omniai/instrumentation.rb
ADDED
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module OmniAI
+  # Used for logging.
+  class Instrumentation
+    # @param logger [Logger]
+    def initialize(logger:)
+      @logger = logger
+    end
+
+    # @param name [String]
+    # @param payload [Hash]
+    # @option payload [Exception] :error
+    def instrument(name, payload = {})
+      error = payload[:error]
+      return unless error
+
+      @logger.error("#{name}: #{error.message}")
+    end
+
+    # @param name [String]
+    # @param payload [Hash]
+    # @option payload [HTTP::Request] :request
+    def start(_, payload)
+      request = payload[:request]
+      @logger.info("#{request.verb.upcase} #{request.uri}")
+    end
+
+    # @param name [String]
+    # @param payload [Hash]
+    # @option payload [HTTP::Response] :response
+    def finish(_, payload)
+      response = payload[:response]
+      @logger.info("#{response.status.code} #{response.status.reason}")
+    end
+  end
+end
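`Client#connection` (above) plugs this class into http.rb's instrumentation feature, which calls `#start` before each request and `#finish` after each response. A minimal standalone sketch of that wiring (callback details may vary by `http` gem version):

```ruby
require 'http'
require 'logger'
require 'omniai'

logger = Logger.new($stdout)
instrumenter = OmniAI::Instrumentation.new(logger: logger)

# The same wiring Client#connection performs when a logger is present.
http = HTTP.use(instrumentation: { instrumenter: instrumenter })
http.get('https://example.com')
# INFO -- : GET https://example.com
# INFO -- : 200 OK
```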
data/lib/omniai/speak.rb
CHANGED
data/lib/omniai/transcribe.rb
CHANGED
@@ -117,6 +117,7 @@ module OmniAI
     # @return [OmniAI::Transcribe::Transcription]
     def process!
       response = request!
+
       raise HTTPError, response.flush unless response.status.ok?
 
       text = @format.nil? || @format.eql?(Format::JSON) ? response.parse['text'] : String(response.body)
data/lib/omniai/version.rb
CHANGED
data/lib/omniai.rb
CHANGED
@@ -1,5 +1,6 @@
 # frozen_string_literal: true
 
+require 'logger'
 require 'event_stream_parser'
 require 'http'
 require 'uri'
@@ -8,6 +9,7 @@ require 'zeitwerk'
 loader = Zeitwerk::Loader.for_gem
 loader.inflector.inflect 'omniai' => 'OmniAI'
 loader.inflector.inflect 'url' => 'URL'
+loader.inflector.inflect 'cli' => 'CLI'
 loader.setup
 
 module OmniAI
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: omniai
 version: !ruby/object:Gem::Version
-  version: 1.
+  version: 1.4.0
 platform: ruby
 authors:
 - Kevin Sylvestre
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2024-
+date: 2024-07-04 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: event_stream_parser
@@ -56,7 +56,8 @@ description: An interface for OpenAI's ChatGPT, Google's Gemini, Anthropic's Cla
   Mistral's LeChat, etc.
 email:
 - kevin@ksylvest.com
-executables:
+executables:
+- omniai
 extensions: []
 extra_rdoc_files: []
 files:
@@ -64,6 +65,7 @@ files:
 - README.md
 - bin/console
 - bin/setup
+- exe/omniai
 - lib/omniai.rb
 - lib/omniai/chat.rb
 - lib/omniai/chat/chunk.rb
@@ -78,8 +80,12 @@ files:
 - lib/omniai/chat/message_choice.rb
 - lib/omniai/chat/stream.rb
 - lib/omniai/chat/usage.rb
+- lib/omniai/cli.rb
+- lib/omniai/cli/base_handler.rb
+- lib/omniai/cli/chat_handler.rb
 - lib/omniai/client.rb
 - lib/omniai/config.rb
+- lib/omniai/instrumentation.rb
 - lib/omniai/speak.rb
 - lib/omniai/transcribe.rb
 - lib/omniai/transcribe/transcription.rb
|