omniai 1.3.1 → 1.4.1
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/README.md +35 -0
- data/exe/omniai +7 -0
- data/lib/omniai/chat.rb +11 -2
- data/lib/omniai/cli/base_handler.rb +38 -0
- data/lib/omniai/cli/chat_handler.rb +60 -0
- data/lib/omniai/cli.rb +64 -0
- data/lib/omniai/client.rb +62 -2
- data/lib/omniai/version.rb +1 -1
- data/lib/omniai.rb +2 -0
- metadata +8 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: 2e94a411496cb1e8f10e775a2a6487de99def946de6a4cc1e6c1c873ce160760
|
4
|
+
data.tar.gz: 5ec7d8ba1b1bbaf67fd3fe633af99b2e4c3d95edc0114313446b8a666bb1f0d6
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 4265b9413acdf6bfe5dde9c4081c9e83b212c7ac51b5a77f3bc4c1dd0d2d6c030f48f3830e431ee2556a1bc58cbef980c98757f714c97192fb88a948743ce1e5
|
7
|
+
data.tar.gz: 2d8ba0d77348b88ccdc064014528ae56c9be5fc14e716ed961c74f8d462b4dac822eacb1f493b28aa65a4178391a19c3f98e1974ca93393153cf9a8e87c09703
|
data/README.md
CHANGED
@@ -193,3 +193,38 @@ tempfile = client.speak('The quick brown fox jumps over a lazy dog.', voice: 'HA
|
|
193
193
|
tempfile.close
|
194
194
|
tempfile.unlink
|
195
195
|
```
|
196
|
+
|
197
|
+
## CLI
|
198
|
+
|
199
|
+
OmniAI packages a basic command line interface (CLI) to allow for exploration of various APIs. A detailed CLI documentation can be found via help:
|
200
|
+
|
201
|
+
```bash
|
202
|
+
omniai --help
|
203
|
+
```
|
204
|
+
|
205
|
+
### Chat
|
206
|
+
|
207
|
+
#### w/ a Prompt
|
208
|
+
|
209
|
+
```bash
|
210
|
+
omniai chat "What is the coldest place on earth?"
|
211
|
+
```
|
212
|
+
|
213
|
+
```
|
214
|
+
The coldest place on earth is Antarctica.
|
215
|
+
```
|
216
|
+
|
217
|
+
#### w/o a Prompt
|
218
|
+
|
219
|
+
```bash
|
220
|
+
omniai chat --provider="openai" --model="gpt-4" --temperature="0.5"
|
221
|
+
```
|
222
|
+
|
223
|
+
```
|
224
|
+
Type 'exit' or 'quit' to abort.
|
225
|
+
# What is the warmest place on earth?
|
226
|
+
```
|
227
|
+
|
228
|
+
```
|
229
|
+
The warmest place on earth is Africa.
|
230
|
+
```
|
data/exe/omniai
ADDED
data/lib/omniai/chat.rb
CHANGED
@@ -45,7 +45,7 @@ module OmniAI
|
|
45
45
|
# @param client [OmniAI::Client] the client
|
46
46
|
# @param model [String] required
|
47
47
|
# @param temperature [Float, nil] optional
|
48
|
-
# @param stream [Proc, nil] optional
|
48
|
+
# @param stream [Proc, IO, nil] optional
|
49
49
|
# @param format [Symbol, nil] optional - :json
|
50
50
|
def initialize(messages, client:, model:, temperature: nil, stream: nil, format: nil)
|
51
51
|
@messages = messages
|
@@ -97,7 +97,16 @@ module OmniAI
|
|
97
97
|
def stream!(response:)
|
98
98
|
raise Error, "#{self.class.name}#stream! unstreamable" unless @stream
|
99
99
|
|
100
|
-
Stream.new(response:).stream!
|
100
|
+
Stream.new(response:).stream! do |chunk|
|
101
|
+
case @stream
|
102
|
+
when IO
|
103
|
+
@stream << chunk.choice.delta.content
|
104
|
+
@stream.flush
|
105
|
+
else @stream.call(chunk)
|
106
|
+
end
|
107
|
+
end
|
108
|
+
|
109
|
+
@stream.puts if @stream.is_a?(IO)
|
101
110
|
end
|
102
111
|
|
103
112
|
# @return [Array<Hash>]
|
@@ -0,0 +1,38 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
module OmniAI
|
4
|
+
class CLI
|
5
|
+
# A generic handler for CLI commands (e.g. 'omniai chat').
|
6
|
+
class BaseHandler
|
7
|
+
# @param stdin [IO] an optional stream for stdin
|
8
|
+
# @param stdout [IO] an optional stream for stdout
|
9
|
+
# @param provider [String] an optional provider (defaults to 'openai')
|
10
|
+
# @param argv [Array<String>]
|
11
|
+
def self.handle!(argv:, stdin: $stdin, stdout: $stdout, provider: 'openai')
|
12
|
+
new(stdin:, stdout:, provider:).handle!(argv:)
|
13
|
+
end
|
14
|
+
|
15
|
+
# @param stdin [IO] an optional stream for stdin
|
16
|
+
# @param stdout [IO] an optional stream for stdout
|
17
|
+
# @param provider [String] an optional provider (defaults to 'openai')
|
18
|
+
def initialize(stdin: $stdin, stdout: $stdout, provider: 'openai')
|
19
|
+
@stdin = stdin
|
20
|
+
@stdout = stdout
|
21
|
+
@provider = provider
|
22
|
+
@args = {}
|
23
|
+
end
|
24
|
+
|
25
|
+
# @param argv [Array<String>]
|
26
|
+
def handle!(argv:)
|
27
|
+
raise NotImplementedError, "#{self.class}#handle! undefined"
|
28
|
+
end
|
29
|
+
|
30
|
+
private
|
31
|
+
|
32
|
+
# @return [OmniAI::Client]
|
33
|
+
def client
|
34
|
+
@client ||= OmniAI::Client.find(provider: @provider)
|
35
|
+
end
|
36
|
+
end
|
37
|
+
end
|
38
|
+
end
|
@@ -0,0 +1,60 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
module OmniAI
|
4
|
+
class CLI
|
5
|
+
# Used for CLI usage of 'omniai chat'.
|
6
|
+
class ChatHandler < BaseHandler
|
7
|
+
def handle!(argv:)
|
8
|
+
parser.parse!(argv)
|
9
|
+
|
10
|
+
if argv.empty?
|
11
|
+
listen!
|
12
|
+
else
|
13
|
+
chat(prompt: argv.join(' '))
|
14
|
+
end
|
15
|
+
end
|
16
|
+
|
17
|
+
private
|
18
|
+
|
19
|
+
def listen!
|
20
|
+
@stdout.puts('Type "exit" or "quit" to leave.')
|
21
|
+
|
22
|
+
loop do
|
23
|
+
@stdout.print('# ')
|
24
|
+
@stdout.flush
|
25
|
+
prompt = @stdin.gets&.chomp
|
26
|
+
|
27
|
+
break if prompt.nil? || prompt.match?(/\A(exit|quit)\z/i)
|
28
|
+
|
29
|
+
chat(prompt:)
|
30
|
+
rescue Interrupt
|
31
|
+
break
|
32
|
+
end
|
33
|
+
end
|
34
|
+
|
35
|
+
# @param prompt [String]
|
36
|
+
def chat(prompt:)
|
37
|
+
client.chat(prompt, **@args, stream: @stdout)
|
38
|
+
end
|
39
|
+
|
40
|
+
# @return [OptionParser]
|
41
|
+
def parser
|
42
|
+
OptionParser.new do |options|
|
43
|
+
options.banner = 'usage: omniai chat [options] "<prompt>"'
|
44
|
+
|
45
|
+
options.on('-h', '--help', 'help') do
|
46
|
+
@stdout.puts(options)
|
47
|
+
exit
|
48
|
+
end
|
49
|
+
|
50
|
+
options.on('-p', '--provider=PROVIDER', 'provider') { |provider| @provider = provider }
|
51
|
+
options.on('-m', '--model=MODEL', 'model') { |model| @args[:model] = model }
|
52
|
+
options.on('-t', '--temperature=TEMPERATURE', Float, 'temperature') do |temperature|
|
53
|
+
@args[:temperature] = temperature
|
54
|
+
end
|
55
|
+
options.on('-f', '--format=FORMAT', 'format') { |format| @args[:format] = format.intern }
|
56
|
+
end
|
57
|
+
end
|
58
|
+
end
|
59
|
+
end
|
60
|
+
end
|
data/lib/omniai/cli.rb
ADDED
@@ -0,0 +1,64 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
require 'optparse'
|
4
|
+
|
5
|
+
module OmniAI
|
6
|
+
# Used when interacting with the suite from the command line interface (CLI).
|
7
|
+
#
|
8
|
+
# Usage:
|
9
|
+
#
|
10
|
+
# cli = OmniAI::CLI.new
|
11
|
+
# cli.parse
|
12
|
+
class CLI
|
13
|
+
ChatArgs = Struct.new(:provider, :model, :temperature)
|
14
|
+
|
15
|
+
# @param stdin [IO] a stream
|
16
|
+
# @param stdout [IO] a stream
|
17
|
+
# @param provider [String] a provider
|
18
|
+
def initialize(stdin: $stdin, stdout: $stdout, provider: 'openai')
|
19
|
+
@stdin = stdin
|
20
|
+
@stdout = stdout
|
21
|
+
@provider = provider
|
22
|
+
@args = {}
|
23
|
+
end
|
24
|
+
|
25
|
+
def parse(argv = ARGV)
|
26
|
+
parser.order!(argv)
|
27
|
+
command = argv.shift
|
28
|
+
return if command.nil?
|
29
|
+
|
30
|
+
case command
|
31
|
+
when 'chat' then ChatHandler.handle!(stdin: @stdin, stdout: @stdout, provider: @provider, argv:)
|
32
|
+
else raise Error, "unsupported command=#{command.inspect}"
|
33
|
+
end
|
34
|
+
end
|
35
|
+
|
36
|
+
private
|
37
|
+
|
38
|
+
# @return [OptionParser]
|
39
|
+
def parser
|
40
|
+
OptionParser.new do |options|
|
41
|
+
options.banner = 'usage: omniai [options] <command> [<args>]'
|
42
|
+
|
43
|
+
options.on('-h', '--help', 'help') do
|
44
|
+
@stdout.puts(options)
|
45
|
+
exit
|
46
|
+
end
|
47
|
+
|
48
|
+
options.on('-v', '--version', 'version') do
|
49
|
+
@stdout.puts(VERSION)
|
50
|
+
exit
|
51
|
+
end
|
52
|
+
|
53
|
+
options.on('-p', '--provider=PROVIDER', 'provider (default="openai")') do |provider|
|
54
|
+
@provider = provider
|
55
|
+
end
|
56
|
+
|
57
|
+
options.separator <<~COMMANDS
|
58
|
+
commands:
|
59
|
+
chat
|
60
|
+
COMMANDS
|
61
|
+
end
|
62
|
+
end
|
63
|
+
end
|
64
|
+
end
|
data/lib/omniai/client.rb
CHANGED
@@ -16,8 +16,6 @@ module OmniAI
|
|
16
16
|
# end
|
17
17
|
# end
|
18
18
|
class Client
|
19
|
-
class Error < StandardError; end
|
20
|
-
|
21
19
|
# @return [String, nil]
|
22
20
|
attr_accessor :api_key
|
23
21
|
|
@@ -30,6 +28,68 @@ module OmniAI
|
|
30
28
|
# @return [Integer, nil]
|
31
29
|
attr_accessor :timeout
|
32
30
|
|
31
|
+
# Initialize a client for Anthropic. This method requires the provider if it is undefined.
|
32
|
+
#
|
33
|
+
# @raise [OmniAI::Error] if the provider is not defined and the gem is not installed
|
34
|
+
# @return [Class<OmniAI::Client>]
|
35
|
+
def self.anthropic
|
36
|
+
require 'omniai/anthropic' unless defined?(OmniAI::Anthropic::Client)
|
37
|
+
OmniAI::Anthropic::Client
|
38
|
+
rescue LoadError
|
39
|
+
raise Error, "requires 'omniai-anthropic': `gem install omniai-anthropic`"
|
40
|
+
end
|
41
|
+
|
42
|
+
# Lookup the `OmniAI::Google::Client`. This method requires the provider if it is undefined.
|
43
|
+
#
|
44
|
+
# @raise [OmniAI::Error] if the provider is not defined and the gem is not installed
|
45
|
+
# @return [Class<OmniAI::Client>]
|
46
|
+
def self.google
|
47
|
+
require 'omniai/google' unless defined?(OmniAI::Google::Client)
|
48
|
+
OmniAI::Google::Client
|
49
|
+
rescue LoadError
|
50
|
+
raise Error, "requires 'omniai-google': `gem install omniai-google`"
|
51
|
+
end
|
52
|
+
|
53
|
+
# Initialize a client for Mistral. This method requires the provider if it is undefined.
|
54
|
+
#
|
55
|
+
# @raise [OmniAI::Error] if the provider is not defined and the gem is not installed
|
56
|
+
# @return [Class<OmniAI::Client>]
|
57
|
+
def self.mistral
|
58
|
+
require 'omniai/mistral' unless defined?(OmniAI::Mistral::Client)
|
59
|
+
OmniAI::Mistral::Client
|
60
|
+
rescue LoadError
|
61
|
+
raise Error, "requires 'omniai-mistral': `gem install omniai-mistral`"
|
62
|
+
end
|
63
|
+
|
64
|
+
# Initialize a client for OpenAI. This method requires the provider if it is undefined.
|
65
|
+
#
|
66
|
+
# @raise [OmniAI::Error] if the provider is not defined and the gem is not installed
|
67
|
+
# @return [Class<OmniAI::Client>]
|
68
|
+
def self.openai
|
69
|
+
require 'omniai/openai' unless defined?(OmniAI::OpenAI::Client)
|
70
|
+
OmniAI::OpenAI::Client
|
71
|
+
rescue LoadError
|
72
|
+
raise Error, "requires 'omniai-openai': `gem install omniai-openai`"
|
73
|
+
end
|
74
|
+
|
75
|
+
# Initialize a client by provider (e.g. 'openai'). This method attempts to require the provider.
|
76
|
+
#
|
77
|
+
# @raise [OmniAI::Error] if the provider is not defined and the gem is not installed
|
78
|
+
# @param provider [String] required (e.g. 'anthropic', 'google', 'mistral', 'openai', etc)
|
79
|
+
# @return [OmniAI::Client]
|
80
|
+
def self.find(provider:, **)
|
81
|
+
klass =
|
82
|
+
case provider
|
83
|
+
when 'anthropic' then anthropic
|
84
|
+
when 'google' then google
|
85
|
+
when 'mistral' then mistral
|
86
|
+
when 'openai' then openai
|
87
|
+
else raise Error, "unknown provider=#{provider.inspect}"
|
88
|
+
end
|
89
|
+
|
90
|
+
klass.new(**)
|
91
|
+
end
|
92
|
+
|
33
93
|
# @param api_key [String, nil] optional
|
34
94
|
# @param host [String, nil] optional - supports customizing the host of the client (e.g. 'http://localhost:8080')
|
35
95
|
# @param logger [Logger, nil] optional
|
data/lib/omniai/version.rb
CHANGED
data/lib/omniai.rb
CHANGED
@@ -1,5 +1,6 @@
|
|
1
1
|
# frozen_string_literal: true
|
2
2
|
|
3
|
+
require 'logger'
|
3
4
|
require 'event_stream_parser'
|
4
5
|
require 'http'
|
5
6
|
require 'uri'
|
@@ -8,6 +9,7 @@ require 'zeitwerk'
|
|
8
9
|
loader = Zeitwerk::Loader.for_gem
|
9
10
|
loader.inflector.inflect 'omniai' => 'OmniAI'
|
10
11
|
loader.inflector.inflect 'url' => 'URL'
|
12
|
+
loader.inflector.inflect 'cli' => 'CLI'
|
11
13
|
loader.setup
|
12
14
|
|
13
15
|
module OmniAI
|
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: omniai
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 1.
|
4
|
+
version: 1.4.1
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Kevin Sylvestre
|
8
8
|
autorequire:
|
9
9
|
bindir: exe
|
10
10
|
cert_chain: []
|
11
|
-
date: 2024-
|
11
|
+
date: 2024-07-04 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: event_stream_parser
|
@@ -56,7 +56,8 @@ description: An interface for OpenAI's ChatGPT, Google's Gemini, Anthropic's Cla
|
|
56
56
|
Mistral's LeChat, etc.
|
57
57
|
email:
|
58
58
|
- kevin@ksylvest.com
|
59
|
-
executables:
|
59
|
+
executables:
|
60
|
+
- omniai
|
60
61
|
extensions: []
|
61
62
|
extra_rdoc_files: []
|
62
63
|
files:
|
@@ -64,6 +65,7 @@ files:
|
|
64
65
|
- README.md
|
65
66
|
- bin/console
|
66
67
|
- bin/setup
|
68
|
+
- exe/omniai
|
67
69
|
- lib/omniai.rb
|
68
70
|
- lib/omniai/chat.rb
|
69
71
|
- lib/omniai/chat/chunk.rb
|
@@ -78,6 +80,9 @@ files:
|
|
78
80
|
- lib/omniai/chat/message_choice.rb
|
79
81
|
- lib/omniai/chat/stream.rb
|
80
82
|
- lib/omniai/chat/usage.rb
|
83
|
+
- lib/omniai/cli.rb
|
84
|
+
- lib/omniai/cli/base_handler.rb
|
85
|
+
- lib/omniai/cli/chat_handler.rb
|
81
86
|
- lib/omniai/client.rb
|
82
87
|
- lib/omniai/config.rb
|
83
88
|
- lib/omniai/instrumentation.rb
|