omniai 1.8.1 → 1.8.2
- checksums.yaml +4 -4
- data/README.md +131 -3
- data/lib/omniai/chat/choice.rb +3 -3
- data/lib/omniai/chat/tool_call_result.rb +1 -1
- data/lib/omniai/client.rb +5 -5
- data/lib/omniai/version.rb +1 -1
- metadata +3 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: fca0a46de30a2f54bfab6cc05b65c5fb78b2292c147928c5fe2781e3e5bd3c68
+  data.tar.gz: dd6fcf4d76afeca2ff6a289ddb4a4c1d4bed13e976dd03e34da369ab2b3308b0
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: bead375cec92e44876ca250ec8d9ba7e9e98e9b847dd6d41b5633fa1a9de32f134474bfaea6b85cd5f4c522e4389e6e2aadd91287e35b175e66d441affdd697f
+  data.tar.gz: f6a7202e05bbdf3bbf994d721960e267e72ecf8846aececda62ae8ecceb34448470e3282818e747a4408177ec37165f0e937f945dc76943af07cf4caa930ad33
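The digests above can be reproduced from the packaged gem. A minimal sketch, assuming the gem has been fetched locally first (e.g. `gem fetch omniai --version 1.8.2`; the filename below is illustrative):

```ruby
require 'digest'
require 'rubygems/package'

# A .gem file is a tar archive whose entries include metadata.gz and data.tar.gz.
# Hashing those entries should match the SHA256 values listed in checksums.yaml.
File.open('omniai-1.8.2.gem', 'rb') do |io|
  Gem::Package::TarReader.new(io).each do |entry|
    next unless %w[metadata.gz data.tar.gz].include?(entry.full_name)

    puts "#{entry.full_name}: #{Digest::SHA256.hexdigest(entry.read)}"
  end
end
```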
data/README.md
CHANGED
@@ -2,14 +2,142 @@
 
 [![CircleCI](https://circleci.com/gh/ksylvest/omniai.svg?style=svg)](https://circleci.com/gh/ksylvest/omniai)
 
-OmniAI
+OmniAI standardizes the APIs of various AI / LLM companies such as Anthropic, Google, Mistral and OpenAI for the generation of text, the conversion of text-to-speech, the conversion of speech-to-text, the generation of embeddings, and more. It offers a unified API regardless of the provider and task.
 
 - [OmniAI::Anthropic](https://github.com/ksylvest/omniai-anthropic)
 - [OmniAI::Google](https://github.com/ksylvest/omniai-google)
 - [OmniAI::Mistral](https://github.com/ksylvest/omniai-mistral)
 - [OmniAI::OpenAI](https://github.com/ksylvest/omniai-openai)
 
-
+## Examples
+
+### Example #1: [Chat](https://github.com/ksylvest/omniai/blob/main/examples/chat)
+
+```ruby
+require 'omniai/anthropic'
+
+CLIENT = OmniAI::Anthropic::Client.new
+
+CAT_URL = 'https://images.unsplash.com/photo-1472491235688-bdc81a63246e?q=80&w=1024&h=1024&fit=crop&fm=jpg'
+DOG_URL = 'https://images.unsplash.com/photo-1517849845537-4d257902454a?q=80&w=1024&h=1024&fit=crop&fm=jpg'
+
+CLIENT.chat(stream: $stdout) do |prompt|
+  prompt.system('You are a helpful biologist with an expertise in animals that responds with the latin names.')
+  prompt.user do |message|
+    message.text('What animals are in the attached photos?')
+    message.url(CAT_URL, 'image/jpeg')
+    message.url(DOG_URL, 'image/jpeg')
+  end
+end
+```
+
+```
+The animals in the photos are:
+
+1. A cat (*Felis catus*).
+2. A dog (*Canis familiaris*).
+```
+
+### Example #2: [Text-to-Speech](https://github.com/ksylvest/omniai/blob/main/examples/text_to_speech)
+
+```ruby
+require 'omniai/openai'
+
+CLIENT = OmniAI::OpenAI::Client.new
+
+File.open(File.join(__dir__, 'audio.wav'), 'wb') do |file|
+  CLIENT.speak('Sally sells seashells by the seashore.', format: OmniAI::Speak::Format::WAV) do |chunk|
+    file << chunk
+  end
+end
+```
+
+### Example #3: [Speech-to-Text](https://github.com/ksylvest/omniai/blob/main/examples/speech_to_text)
+
+```ruby
+require 'omniai/openai'
+
+CLIENT = OmniAI::OpenAI::Client.new
+
+File.open(File.join(__dir__, 'audio.wav'), 'rb') do |file|
+  transcription = CLIENT.transcribe(file)
+  puts(transcription.text)
+end
+```
+
+### Example #4: [Tools](https://github.com/ksylvest/omniai/blob/main/examples/tools)
+
+```ruby
+require 'omniai/google'
+
+CLIENT = OmniAI::Google::Client.new
+
+TOOL = OmniAI::Tool.new(
+  proc { |location:, unit: 'celsius'| "#{rand(20..50)}° #{unit} in #{location}" },
+  name: 'Weather',
+  description: 'Lookup the weather in a location',
+  parameters: OmniAI::Tool::Parameters.new(
+    properties: {
+      location: OmniAI::Tool::Property.string(description: 'e.g. Toronto'),
+      unit: OmniAI::Tool::Property.string(enum: %w[celcius fahrenheit]),
+    },
+    required: %i[location]
+  )
+)
+
+completion = CLIENT.chat(tools: [TOOL]) do |prompt|
+  prompt.user do |message|
+    message.text('What is the weather in "London" in celcius and "Seattle" in fahrenheit?')
+  end
+end
+
+puts(completion.text)
+```
+
+```
+The weather is 24° celcius in London and 42° fahrenheit in Seattle.
+```
+
+### Example #5: [Embeddings](https://github.com/ksylvest/omniai/blob/main/examples/embeddings)
+
+```ruby
+require 'omniai/mistral'
+
+CLIENT = OmniAI::Mistral::Client.new
+
+Entry = Data.define(:text, :embedding) do
+  def initialize(text:)
+    super(text:, embedding: CLIENT.embed(text).embedding)
+  end
+end
+
+ENTRIES = [
+  Entry.new(text: 'John is a musician.'),
+  Entry.new(text: 'Paul is a plumber.'),
+  Entry.new(text: 'George is a teacher.'),
+  Entry.new(text: 'Ringo is a doctor.'),
+].freeze
+
+def search(query)
+  embedding = CLIENT.embed(query).embedding
+
+  results = ENTRIES.sort_by do |data|
+    Math.sqrt(data.embedding.zip(embedding).map { |a, b| (a - b)**2 }.reduce(:+))
+  end
+
+  puts "'#{query}': '#{results.first.text}'"
+end
+
+search('What does George do?')
+search('Who is a doctor?')
+search('Who do you call to fix a toilet?')
+```
+
+```
+'What does George do?': 'George is a teacher.'
+'Who is a doctor?': 'Ringo is a doctor.'
+'Who do you call to fix a toilet?': 'Paul is a plumber.'
+```
 
 ## Installation
 
@@ -83,7 +211,7 @@ require 'omniai/openai'
 require 'logger'
 
 logger = Logger.new(STDOUT)
-client = OmniAI::
+client = OmniAI::OpenAI::Client.new(logger:)
 ```
 
 ```
data/lib/omniai/chat/choice.rb
CHANGED
@@ -2,7 +2,8 @@
 
 module OmniAI
   class Chat
-    #
+    # A choice wraps a message and index returned by an LLM. The default is to generate a single choice. Some LLMs
+    # support generating multiple choices at once (e.g. giving you multiple options to choose from).
     class Choice
       # @return [Integer]
       attr_accessor :index
@@ -12,7 +13,6 @@ module OmniAI
 
       # @param message [Message]
       # @param index [Integer]
-      # @param tool_call_list [Array<ToolCall>]
      def initialize(message:, index: 0)
        @message = message
        @index = index
@@ -59,7 +59,7 @@ module OmniAI
        message.content
      end
 
-      # @return [Array<ToolCall
+      # @return [Array<ToolCall>, nil]
      def tool_call_list
        message.tool_call_list
      end
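The expanded comment documents how a choice is meant to be read. A minimal sketch of iterating over choices, assuming the chat response exposes a `#choices` array (the Anthropic client and prompt are illustrative; `#index`, `#content`, and `#tool_call_list` are the accessors defined in this file):

```ruby
require 'omniai/anthropic'

client = OmniAI::Anthropic::Client.new
completion = client.chat('Suggest a name for a pet cat.')

completion.choices.each do |choice|
  # Each choice pairs an index with a message; #content delegates to the message.
  puts "choice ##{choice.index}: #{choice.content}"

  # #tool_call_list is nil unless the LLM asked for tool calls.
  puts "tool calls: #{choice.tool_call_list.size}" if choice.tool_call_list
end
```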
data/lib/omniai/client.rb
CHANGED
@@ -75,15 +75,15 @@ module OmniAI
    # Initialize a client by provider (e.g. 'openai'). This method attempts to require the provider.
    #
    # @raise [OmniAI::Error] if the provider is not defined and the gem is not installed
-    # @param provider [String] required (e.g. 'anthropic', 'google', 'mistral', 'openai', etc)
+    # @param provider [String, Symbol] required (e.g. 'anthropic', 'google', 'mistral', 'openai', etc)
    # @return [OmniAI::Client]
    def self.find(provider:, **)
      klass =
        case provider
-        when 'anthropic' then anthropic
-        when 'google' then google
-        when 'mistral' then mistral
-        when 'openai' then openai
+        when :anthropic, 'anthropic' then anthropic
+        when :google, 'google' then google
+        when :mistral, 'mistral' then mistral
+        when :openai, 'openai' then openai
        else raise Error, "unknown provider=#{provider.inspect}"
        end
 
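With this change `OmniAI::Client.find` accepts the provider as either a symbol or a string. A minimal usage sketch, assuming the matching provider gem (here `omniai-openai`) is installed so the lazy require succeeds:

```ruby
require 'omniai'

# Both spellings now resolve to the same client class.
client = OmniAI::Client.find(provider: :openai)
client = OmniAI::Client.find(provider: 'openai')

# An unrecognized provider still raises OmniAI::Error.
begin
  OmniAI::Client.find(provider: :acme)
rescue OmniAI::Error => error
  puts error.message # => unknown provider=:acme
end
```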
data/lib/omniai/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: omniai
 version: !ruby/object:Gem::Version
-  version: 1.8.1
+  version: 1.8.2
 platform: ruby
 authors:
 - Kevin Sylvestre
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2024-
+date: 2024-10-19 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: event_stream_parser
@@ -125,7 +125,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubygems_version: 3.5.
+rubygems_version: 3.5.18
 signing_key:
 specification_version: 4
 summary: A generalized framework for interacting with many AI services