omniai-openai 0.1.0 → 0.2.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/Gemfile +15 -0
- data/README.md +90 -16
- data/lib/omniai/openai/chat/request.rb +46 -0
- data/lib/omniai/openai/chat.rb +43 -0
- data/lib/omniai/openai/client.rb +61 -0
- data/lib/omniai/openai/config.rb +18 -0
- data/lib/omniai/openai/version.rb +1 -1
- data/lib/omniai/openai.rb +18 -2
- metadata +17 -13
- data/.rspec +0 -3
- data/.rubocop.yml +0 -8
- data/Rakefile +0 -12
- data/sig/omniai/openai.rbs +0 -6
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: 046a2d4be43b4a9537a18b90fc69a9a7bfb191d76471d5a9374cc731c5e8b8a3
|
4
|
+
data.tar.gz: e1f2914a05ae5a527c09126d015df285e6efaa86d37fca2ac16ec694799f186f
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: cace4852fc0a5ce7b59cb0a74728324396384a3c7793611bc48c61081bb910ec2cfc3dd09e0b3d4bb14fe16767913663cc2621b964ac0e87f93dcb1fab10873c
|
7
|
+
data.tar.gz: 16fbad4c8b63260e069f07314811ad8cbbbb4bdbbcd2b412b63902372abf7271d9ccc87bbdc06cf529530f633408d80ab3d9f05d0032d1fc7a85d82ecd7f8d2c
|
data/Gemfile
ADDED
data/README.md
CHANGED
@@ -1,31 +1,105 @@
|
|
1
|
-
#
|
1
|
+
# OmniAI::OpenAI
|
2
2
|
|
3
|
-
|
4
|
-
|
5
|
-
Welcome to your new gem! In this directory, you'll find the files you need to be able to package up your Ruby library into a gem. Put your Ruby code in the file `lib/omniai/openai`. To experiment with that code, run `bin/console` for an interactive prompt.
|
3
|
+
An OpenAI implementation of the [OmniAI](https://github.com/ksylvest/omniai) APIs.
|
6
4
|
|
7
5
|
## Installation
|
8
6
|
|
9
|
-
|
7
|
+
```sh
|
8
|
+
gem install omniai-openai
|
9
|
+
```
|
10
10
|
|
11
|
-
|
11
|
+
## Usage
|
12
12
|
|
13
|
-
|
13
|
+
### Client
|
14
14
|
|
15
|
-
|
15
|
+
A client is set up as follows if `ENV['OPENAI_API_KEY']` exists:
|
16
16
|
|
17
|
-
|
17
|
+
```ruby
|
18
|
+
client = OmniAI::OpenAI::Client.new
|
19
|
+
```
|
18
20
|
|
19
|
-
|
21
|
+
A client may also be passed the following options:
|
22
|
+
|
23
|
+
- `api_key` (required - default is `ENV['OPENAI_API_KEY']`)
|
24
|
+
- `organization` (optional)
|
25
|
+
- `project` (optional)
|
26
|
+
- `host` (optional)
|
27
|
+
|
28
|
+
### Configuration
|
29
|
+
|
30
|
+
Global configuration is supported for the following options:
|
31
|
+
|
32
|
+
```ruby
|
33
|
+
OmniAI::OpenAI.configure do |config|
|
34
|
+
config.api_key = 'sk-...' # default: ENV['OPENAI_API_KEY']
|
35
|
+
config.organization = '...' # default: ENV['OPENAI_ORGANIZATION']
|
36
|
+
config.project = '...' # default: ENV['OPENAI_PROJECT']
|
37
|
+
config.host = '...' # default: 'https://api.openai.com'
|
38
|
+
end
|
39
|
+
```
|
40
|
+
|
41
|
+
### Chat
|
42
|
+
|
43
|
+
A chat completion is generated by passing in prompts using any of a variety of formats:
|
44
|
+
|
45
|
+
```ruby
|
46
|
+
completion = client.chat.completion('Tell me a joke!')
|
47
|
+
completion.choice.message.content # 'Why did the chicken cross the road? To get to the other side.'
|
48
|
+
```
|
49
|
+
|
50
|
+
```ruby
|
51
|
+
completion = client.chat.completion({
|
52
|
+
role: OmniAI::OpenAI::Chat::Role::USER,
|
53
|
+
content: 'Is it wise to jump off a bridge?'
|
54
|
+
})
|
55
|
+
completion.choice.message.content # 'No.'
|
56
|
+
```
|
57
|
+
|
58
|
+
```ruby
|
59
|
+
completion = client.chat.completion([
|
60
|
+
{
|
61
|
+
role: OmniAI::OpenAI::Chat::Role::SYSTEM,
|
62
|
+
content: 'You are a helpful assistant.'
|
63
|
+
},
|
64
|
+
'What is the capital of Canada?',
|
65
|
+
])
|
66
|
+
completion.choice.message.content # 'The capital of Canada is Ottawa.'
|
67
|
+
```
|
68
|
+
|
69
|
+
#### Model
|
70
|
+
|
71
|
+
`model` takes an optional string (default is `gpt-4o`):
|
72
|
+
|
73
|
+
```ruby
|
74
|
+
completion = client.chat.completion('How fast is a cheetah?', model: OmniAI::OpenAI::Chat::Model::GPT_3_5_TURBO)
|
75
|
+
completion.choice.message.content # 'A cheetah can reach speeds over 100 km/h.'
|
76
|
+
```
|
77
|
+
|
78
|
+
#### Temperature
|
79
|
+
|
80
|
+
`temperature` takes an optional float between `0.0` and `2.0` (default is `0.7`):
|
81
|
+
|
82
|
+
```ruby
|
83
|
+
completion = client.chat.completion('Pick a number between 1 and 5', temperature: 2.0)
|
84
|
+
completion.choice.message.content # '3'
|
85
|
+
```
|
20
86
|
|
21
|
-
|
87
|
+
#### Stream
|
22
88
|
|
23
|
-
|
89
|
+
`stream` takes an optional proc to stream responses in real-time chunks instead of waiting for a complete response:
|
24
90
|
|
25
|
-
|
91
|
+
```ruby
|
92
|
+
stream = proc do |chunk|
|
93
|
+
print(chunk.choice.delta.content) # 'Better', 'three', 'hours', ...
|
94
|
+
end
|
95
|
+
client.chat.completion('Be poetic.', stream:)
|
96
|
+
```
|
26
97
|
|
27
|
-
|
98
|
+
#### Format
|
28
99
|
|
29
|
-
|
100
|
+
`format` takes an optional symbol (i.e. `:json`) and switches the client to parsing message content as `json`:
|
30
101
|
|
31
|
-
|
102
|
+
```ruby
|
103
|
+
completion = client.chat.completion('Please provide a color name / hex / hsl as JSON.', format: :json)
|
104
|
+
completion.choice.message.content # { "name": "Black", "hex": "#000", "hsl": { "h": 0, "s": 0, "l": 0 } }
|
105
|
+
```
|
@@ -0,0 +1,46 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
module OmniAI
|
4
|
+
module OpenAI
|
5
|
+
class Chat
|
6
|
+
# An implementation of OmniAI::Chat::Request for OpenAI.
|
7
|
+
class Request < OmniAI::Chat::Request
|
8
|
+
protected
|
9
|
+
|
10
|
+
# @return [Hash]
|
11
|
+
def payload
|
12
|
+
{ messages: }.tap do |payload|
|
13
|
+
payload[:model] = @model
|
14
|
+
payload[:stream] = !@stream.nil? unless @stream.nil?
|
15
|
+
payload[:temperature] = @temperature if @temperature
|
16
|
+
payload[:response_format] = { type: 'json_object' } if @format.eql?(:json)
|
17
|
+
end
|
18
|
+
end
|
19
|
+
|
20
|
+
# @return [Array<Hash>]
|
21
|
+
def messages
|
22
|
+
arrayify(@messages).map do |content|
|
23
|
+
case content
|
24
|
+
when String then { role: OmniAI::OpenAI::Chat::Role::USER, content: }
|
25
|
+
when Hash then content
|
26
|
+
else raise Error, "Unsupported content=#{content.inspect}"
|
27
|
+
end
|
28
|
+
end
|
29
|
+
end
|
30
|
+
|
31
|
+
# @return [String]
|
32
|
+
def path
|
33
|
+
"/#{OmniAI::OpenAI::Client::VERSION}/chat/completions"
|
34
|
+
end
|
35
|
+
|
36
|
+
private
|
37
|
+
|
38
|
+
# @param value [Object, Array<Object>]
|
39
|
+
# @return [Array<Object>]
|
40
|
+
def arrayify(value)
|
41
|
+
value.is_a?(Array) ? value : [value]
|
42
|
+
end
|
43
|
+
end
|
44
|
+
end
|
45
|
+
end
|
46
|
+
end
|
@@ -0,0 +1,43 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
module OmniAI
|
4
|
+
module OpenAI
|
5
|
+
# An OpenAI chat implementation.
|
6
|
+
#
|
7
|
+
# Usage:
|
8
|
+
#
|
9
|
+
# chat = OmniAI::OpenAI::Chat.new(client: client)
|
10
|
+
# chat.completion('Tell me a joke.')
|
11
|
+
# chat.completion(['Tell me a joke.'])
|
12
|
+
# chat.completion({ role: 'user', content: 'Tell me a joke.' })
|
13
|
+
# chat.completion([{ role: 'system', content: 'Tell me a joke.' }])
|
14
|
+
class Chat < OmniAI::Chat
|
15
|
+
module Model
|
16
|
+
GPT_4O = 'gpt-4o'
|
17
|
+
GPT_4 = 'gpt-4'
|
18
|
+
GPT_4_TURBO = 'gpt-4-turbo'
|
19
|
+
GPT_3_5_TURBO = 'gpt-3.5-turbo'
|
20
|
+
end
|
21
|
+
|
22
|
+
module Role
|
23
|
+
ASSISTANT = 'assistant'
|
24
|
+
USER = 'user'
|
25
|
+
SYSTEM = 'system'
|
26
|
+
end
|
27
|
+
|
28
|
+
# @raise [OmniAI::Error]
|
29
|
+
#
|
30
|
+
# @param prompt [String]
|
31
|
+
# @param model [String] optional
|
32
|
+
# @param format [Symbol] optional :text or :json
|
33
|
+
# @param temperature [Float, nil] optional
|
34
|
+
# @param stream [Proc, nil] optional
|
35
|
+
#
|
36
|
+
# @return [OmniAI::OpenAI::Chat::Response]
|
37
|
+
def completion(messages, model: Model::GPT_4O, temperature: nil, format: nil, stream: nil)
|
38
|
+
request = Request.new(client: @client, messages:, model:, temperature:, format:, stream:)
|
39
|
+
request.process!
|
40
|
+
end
|
41
|
+
end
|
42
|
+
end
|
43
|
+
end
|
@@ -0,0 +1,61 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
module OmniAI
|
4
|
+
module OpenAI
|
5
|
+
# An OpenAI client implementation. Usage:
|
6
|
+
#
|
7
|
+
# w/ `api_key`:
|
8
|
+
# client = OmniAI::OpenAI::Client.new(api_key: '...')
|
9
|
+
#
|
10
|
+
# w/ ENV['OPENAI_API_KEY']:
|
11
|
+
#
|
12
|
+
# ENV['OPENAI_API_KEY'] = '...'
|
13
|
+
# client = OmniAI::OpenAI::Client.new
|
14
|
+
#
|
15
|
+
# w/ config:
|
16
|
+
#
|
17
|
+
# OmniAI::OpenAI.configure do |config|
|
18
|
+
# config.api_key = '...'
|
19
|
+
# end
|
20
|
+
#
|
21
|
+
# client = OmniAI::OpenAI::Client.new
|
22
|
+
class Client < OmniAI::Client
|
23
|
+
VERSION = 'v1'
|
24
|
+
|
25
|
+
# @param api_key [String] optional - defaults to `OmniAI::OpenAI.config.api_key`
|
26
|
+
# @param project_id [String] optional - defaults to `OmniAI::OpenAI.config.project`
|
27
|
+
# @param organization_id [String] optional - defaults to `OmniAI::OpenAI.config.organization`
|
28
|
+
# @param logger [Logger] optional - defaults to `OmniAI::OpenAI.config.logger`
|
29
|
+
def initialize(
|
30
|
+
api_key: OmniAI::OpenAI.config.api_key,
|
31
|
+
organization: OmniAI::OpenAI.config.organization,
|
32
|
+
project: OmniAI::OpenAI.config.project,
|
33
|
+
logger: OmniAI::OpenAI.config.logger,
|
34
|
+
host: OmniAI::OpenAI.config.host
|
35
|
+
)
|
36
|
+
raise(ArgumentError, %(ENV['OPENAI_API_KEY'] must be defined or `api_key` must be passed)) if api_key.nil?
|
37
|
+
|
38
|
+
super(api_key:, logger:)
|
39
|
+
|
40
|
+
@organization = organization
|
41
|
+
@project = project
|
42
|
+
@host = host
|
43
|
+
end
|
44
|
+
|
45
|
+
# @return [HTTP::Client]
|
46
|
+
def connection
|
47
|
+
@connection ||= begin
|
48
|
+
http = HTTP.auth("Bearer #{api_key}").persistent(@host)
|
49
|
+
http = http.headers('OpenAI-Organization': @organization) if @organization
|
50
|
+
http = http.headers('OpenAI-Project': @project) if @project
|
51
|
+
http
|
52
|
+
end
|
53
|
+
end
|
54
|
+
|
55
|
+
# @return [OmniAI::OpenAI::Chat]
|
56
|
+
def chat
|
57
|
+
Chat.new(client: self)
|
58
|
+
end
|
59
|
+
end
|
60
|
+
end
|
61
|
+
end
|
@@ -0,0 +1,18 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
module OmniAI
|
4
|
+
module OpenAI
|
5
|
+
# Configuration for managing the OpenAI `api_key` / `organization` / `project` / `logger`.
|
6
|
+
class Config < OmniAI::Config
|
7
|
+
attr_accessor :organization, :project
|
8
|
+
|
9
|
+
def initialize
|
10
|
+
super
|
11
|
+
@api_key = ENV.fetch('OPENAI_API_KEY', nil)
|
12
|
+
@organization = ENV.fetch('OPENAI_ORGANIZATION', nil)
|
13
|
+
@project = ENV.fetch('OPENAI_PROJECT', nil)
|
14
|
+
@host = ENV.fetch('OPENAI_HOST', 'https://api.openai.com')
|
15
|
+
end
|
16
|
+
end
|
17
|
+
end
|
18
|
+
end
|
data/lib/omniai/openai.rb
CHANGED
@@ -1,9 +1,25 @@
|
|
1
1
|
# frozen_string_literal: true
|
2
2
|
|
3
|
-
|
3
|
+
require 'event_stream_parser'
|
4
|
+
require 'omniai'
|
5
|
+
require 'zeitwerk'
|
6
|
+
|
7
|
+
loader = Zeitwerk::Loader.for_gem
|
8
|
+
loader.push_dir(__dir__, namespace: OmniAI)
|
9
|
+
loader.inflector.inflect 'openai' => 'OpenAI'
|
10
|
+
loader.setup
|
4
11
|
|
5
12
|
module OmniAI
|
13
|
+
# A namespace for everything OpenAI.
|
6
14
|
module OpenAI
|
7
|
-
|
15
|
+
# @return [OmniAI::OpenAI::Config]
|
16
|
+
def self.config
|
17
|
+
@config ||= Config.new
|
18
|
+
end
|
19
|
+
|
20
|
+
# @yield [OmniAI::OpenAI::Config]
|
21
|
+
def self.configure
|
22
|
+
yield config
|
23
|
+
end
|
8
24
|
end
|
9
25
|
end
|
metadata
CHANGED
@@ -1,17 +1,17 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: omniai-openai
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.
|
4
|
+
version: 0.2.0
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Kevin Sylvestre
|
8
8
|
autorequire:
|
9
9
|
bindir: exe
|
10
10
|
cert_chain: []
|
11
|
-
date: 2024-06-
|
11
|
+
date: 2024-06-14 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
|
-
name:
|
14
|
+
name: event_stream_parser
|
15
15
|
requirement: !ruby/object:Gem::Requirement
|
16
16
|
requirements:
|
17
17
|
- - ">="
|
@@ -25,13 +25,13 @@ dependencies:
|
|
25
25
|
- !ruby/object:Gem::Version
|
26
26
|
version: '0'
|
27
27
|
- !ruby/object:Gem::Dependency
|
28
|
-
name:
|
28
|
+
name: omniai
|
29
29
|
requirement: !ruby/object:Gem::Requirement
|
30
30
|
requirements:
|
31
31
|
- - ">="
|
32
32
|
- !ruby/object:Gem::Version
|
33
33
|
version: '0'
|
34
|
-
type: :
|
34
|
+
type: :runtime
|
35
35
|
prerelease: false
|
36
36
|
version_requirements: !ruby/object:Gem::Requirement
|
37
37
|
requirements:
|
@@ -39,13 +39,13 @@ dependencies:
|
|
39
39
|
- !ruby/object:Gem::Version
|
40
40
|
version: '0'
|
41
41
|
- !ruby/object:Gem::Dependency
|
42
|
-
name:
|
42
|
+
name: zeitwerk
|
43
43
|
requirement: !ruby/object:Gem::Requirement
|
44
44
|
requirements:
|
45
45
|
- - ">="
|
46
46
|
- !ruby/object:Gem::Version
|
47
47
|
version: '0'
|
48
|
-
type: :
|
48
|
+
type: :runtime
|
49
49
|
prerelease: false
|
50
50
|
version_requirements: !ruby/object:Gem::Requirement
|
51
51
|
requirements:
|
@@ -59,16 +59,20 @@ executables: []
|
|
59
59
|
extensions: []
|
60
60
|
extra_rdoc_files: []
|
61
61
|
files:
|
62
|
-
-
|
63
|
-
- ".rubocop.yml"
|
62
|
+
- Gemfile
|
64
63
|
- README.md
|
65
|
-
- Rakefile
|
66
64
|
- lib/omniai/openai.rb
|
65
|
+
- lib/omniai/openai/chat.rb
|
66
|
+
- lib/omniai/openai/chat/request.rb
|
67
|
+
- lib/omniai/openai/client.rb
|
68
|
+
- lib/omniai/openai/config.rb
|
67
69
|
- lib/omniai/openai/version.rb
|
68
|
-
- sig/omniai/openai.rbs
|
69
70
|
homepage: https://github.com/ksylvest/omniai-openai
|
70
71
|
licenses: []
|
71
|
-
metadata:
|
72
|
+
metadata:
|
73
|
+
homepage_uri: https://github.com/ksylvest/omniai-openai
|
74
|
+
changelog_uri: https://github.com/ksylvest/omniai-openai/releases
|
75
|
+
rubygems_mfa_required: 'true'
|
72
76
|
post_install_message:
|
73
77
|
rdoc_options: []
|
74
78
|
require_paths:
|
@@ -77,7 +81,7 @@ required_ruby_version: !ruby/object:Gem::Requirement
|
|
77
81
|
requirements:
|
78
82
|
- - ">="
|
79
83
|
- !ruby/object:Gem::Version
|
80
|
-
version: 3.
|
84
|
+
version: 3.3.0
|
81
85
|
required_rubygems_version: !ruby/object:Gem::Requirement
|
82
86
|
requirements:
|
83
87
|
- - ">="
|
data/.rspec
DELETED
data/.rubocop.yml
DELETED
data/Rakefile
DELETED