omniai-openai 0.1.0 → 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/Gemfile +14 -0
- data/README.md +103 -16
- data/lib/omniai/openai/chat.rb +35 -0
- data/lib/omniai/openai/client.rb +69 -0
- data/lib/omniai/openai/config.rb +19 -0
- data/lib/omniai/openai/version.rb +1 -1
- data/lib/omniai/openai.rb +18 -2
- metadata +16 -13
- data/.rspec +0 -3
- data/.rubocop.yml +0 -8
- data/Rakefile +0 -12
- data/sig/omniai/openai.rbs +0 -6
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: 17b76524bf985fda37fec8e7cea18aece3380a1831be7de6abe2652767aafc92
|
4
|
+
data.tar.gz: e32a791f07f2697f56f2dfa15f11e8b05abf4416610925880d12cc7b5d0dda90
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 9aa657f83ece505e056597a36c73a77f24abc45af4094c5e982b949bc9631ba6a401ff410220855c43073232578bda7fe37ed7003e006259caaf8d1333825dab
|
7
|
+
data.tar.gz: a96fe7cb53c737220b21c43013af12fab828476336827c446bfb12e64b02340ef55cac0a762facd0d5103db8afad078bf72b94fe760bbda2d36437eeee68468b
|
data/Gemfile
ADDED
data/README.md
CHANGED
@@ -1,31 +1,118 @@
|
|
1
|
-
#
|
1
|
+
# OmniAI::OpenAI
|
2
2
|
|
3
|
-
|
4
|
-
|
5
|
-
Welcome to your new gem! In this directory, you'll find the files you need to be able to package up your Ruby library into a gem. Put your Ruby code in the file `lib/omniai/openai`. To experiment with that code, run `bin/console` for an interactive prompt.
|
3
|
+
An OpenAI implementation of the [OmniAI](https://github.com/ksylvest/omniai) APIs.
|
6
4
|
|
7
5
|
## Installation
|
8
6
|
|
9
|
-
|
7
|
+
```sh
|
8
|
+
gem install omniai-openai
|
9
|
+
```
|
10
10
|
|
11
|
-
|
11
|
+
## Usage
|
12
12
|
|
13
|
-
|
13
|
+
### Client
|
14
14
|
|
15
|
-
|
15
|
+
A client is setup as follows if `ENV['OPENAI_API_KEY']` exists:
|
16
16
|
|
17
|
-
|
17
|
+
```ruby
|
18
|
+
client = OmniAI::OpenAI::Client.new
|
19
|
+
```
|
18
20
|
|
19
|
-
|
21
|
+
A client may also be passed the following options:
|
22
|
+
|
23
|
+
- `api_key` (required - default is `ENV['OPENAI_API_KEY']`)
|
24
|
+
- `organization` (optional)
|
25
|
+
- `project` (optional)
|
26
|
+
- `host` (optional)
|
27
|
+
|
28
|
+
### Configuration
|
29
|
+
|
30
|
+
Global configuration is supported for the following options:
|
31
|
+
|
32
|
+
```ruby
|
33
|
+
OmniAI::OpenAI.configure do |config|
|
34
|
+
config.api_key = 'sk-...' # default: ENV['OPENAI_API_KEY']
|
35
|
+
config.organization = '...' # default: ENV['OPENAI_ORGANIZATION']
|
36
|
+
config.project = '...' # default: ENV['OPENAI_PROJECT']
|
37
|
+
config.host = '...' # default: 'https://api.openai.com'
|
38
|
+
end
|
39
|
+
```
|
40
|
+
|
41
|
+
### Chat
|
42
|
+
|
43
|
+
A chat completion is generated by passing in prompts using any of a variety of formats:
|
44
|
+
|
45
|
+
```ruby
|
46
|
+
completion = client.chat.completion('Tell me a joke!')
|
47
|
+
completion.choice.message.content # 'Why did the chicken cross the road? To get to the other side.'
|
48
|
+
```
|
49
|
+
|
50
|
+
```ruby
|
51
|
+
completion = client.chat.completion({
|
52
|
+
role: OmniAI::OpenAI::Chat::Role::USER,
|
53
|
+
content: 'Is it wise to jump off a bridge?'
|
54
|
+
})
|
55
|
+
completion.choice.message.content # 'No.'
|
56
|
+
```
|
57
|
+
|
58
|
+
```ruby
|
59
|
+
completion = client.chat.completion([
|
60
|
+
{
|
61
|
+
role: OmniAI::OpenAI::Chat::Role::SYSTEM,
|
62
|
+
content: 'You are a helpful assistant.'
|
63
|
+
},
|
64
|
+
'What is the capital of Canada?',
|
65
|
+
])
|
66
|
+
completion.choice.message.content # 'The capital of Canada is Ottawa.'
|
67
|
+
```
|
68
|
+
|
69
|
+
#### Model
|
70
|
+
|
71
|
+
`model` takes an optional string (default is `gpt-4o`):
|
72
|
+
|
73
|
+
```ruby
|
74
|
+
completion = client.chat.completion('How fast is a cheetah?', model: OmniAI::OpenAI::Chat::Model::GPT_3_5_TURBO)
|
75
|
+
completion.choice.message.content # 'A cheetah can reach speeds over 100 km/h.'
|
76
|
+
```
|
77
|
+
|
78
|
+
[OpenAI API Reference `model`](https://platform.openai.com/docs/api-reference/chat/create#chat-create-model)
|
79
|
+
|
80
|
+
#### Temperature
|
81
|
+
|
82
|
+
`temperature` takes an optional float between `0.0` and `2.0` (default is `0.7`):
|
83
|
+
|
84
|
+
```ruby
|
85
|
+
completion = client.chat.completion('Pick a number between 1 and 5', temperature: 2.0)
|
86
|
+
completion.choice.message.content # '3'
|
87
|
+
```
|
88
|
+
|
89
|
+
[OpenAI API Reference `temperature`](https://platform.openai.com/docs/api-reference/chat/create#chat-create-temperature)
|
90
|
+
|
91
|
+
#### Stream
|
92
|
+
|
93
|
+
`stream` takes an optional proc to stream responses in real-time chunks instead of waiting for a complete response:
|
94
|
+
|
95
|
+
```ruby
|
96
|
+
stream = proc do |chunk|
|
97
|
+
print(chunk.choice.delta.content) # 'Better', 'three', 'hours', ...
|
98
|
+
end
|
99
|
+
client.chat.completion('Be poetic.', stream:)
|
100
|
+
```
|
20
101
|
|
21
|
-
|
102
|
+
[OpenAI API Reference `stream`](https://platform.openai.com/docs/api-reference/chat/create#chat-create-stream)
|
22
103
|
|
23
|
-
|
104
|
+
#### Format
|
24
105
|
|
25
|
-
|
106
|
+
`format` takes an optional symbol (`:json`) that sets the `response_format` to `json_object`:
|
26
107
|
|
27
|
-
|
108
|
+
```ruby
|
109
|
+
completion = client.chat.completion([
|
110
|
+
{ role: OmniAI::Chat::Role::SYSTEM, content: OmniAI::Chat::JSON_PROMPT },
|
111
|
+
{ role: OmniAI::Chat::Role::USER, content: 'What is the name of the drummer for the Beatles?' }
|
112
|
+
], format: :json)
|
113
|
+
JSON.parse(completion.choice.message.content) # { "name": "Ringo" }
|
114
|
+
```
|
28
115
|
|
29
|
-
|
116
|
+
[OpenAI API Reference `response_format`](https://platform.openai.com/docs/api-reference/chat/create#chat-create-response_format)
|
30
117
|
|
31
|
-
|
118
|
+
> When using JSON mode, you must also instruct the model to produce JSON yourself via a system or user message.
|
@@ -0,0 +1,35 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
module OmniAI
|
4
|
+
module OpenAI
|
5
|
+
# An OpenAI chat implementation.
|
6
|
+
class Chat < OmniAI::Chat
|
7
|
+
JSON_RESPONSE_FORMAT = { type: 'json_object' }.freeze
|
8
|
+
|
9
|
+
module Model
|
10
|
+
GPT_4O = 'gpt-4o'
|
11
|
+
GPT_4 = 'gpt-4'
|
12
|
+
GPT_4_TURBO = 'gpt-4-turbo'
|
13
|
+
GPT_3_5_TURBO = 'gpt-3.5-turbo'
|
14
|
+
end
|
15
|
+
|
16
|
+
protected
|
17
|
+
|
18
|
+
# @return [Hash]
|
19
|
+
def payload
|
20
|
+
OmniAI::OpenAI.config.chat_options.merge({
|
21
|
+
messages:,
|
22
|
+
model: @model,
|
23
|
+
stream: @stream.nil? ? nil : !@stream.nil?,
|
24
|
+
temperature: @temperature,
|
25
|
+
response_format: (JSON_RESPONSE_FORMAT if @format.eql?(:json)),
|
26
|
+
}).compact
|
27
|
+
end
|
28
|
+
|
29
|
+
# @return [String]
|
30
|
+
def path
|
31
|
+
"/#{OmniAI::OpenAI::Client::VERSION}/chat/completions"
|
32
|
+
end
|
33
|
+
end
|
34
|
+
end
|
35
|
+
end
|
@@ -0,0 +1,69 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
module OmniAI
|
4
|
+
module OpenAI
|
5
|
+
# An OpenAI client implementation. Usage:
|
6
|
+
#
|
7
|
+
# w/ `api_key`:
|
8
|
+
# client = OmniAI::OpenAI::Client.new(api_key: '...')
|
9
|
+
#
|
10
|
+
# w/ ENV['OPENAI_API_KEY']:
|
11
|
+
#
|
12
|
+
# ENV['OPENAI_API_KEY'] = '...'
|
13
|
+
# client = OmniAI::OpenAI::Client.new
|
14
|
+
#
|
15
|
+
# w/ config:
|
16
|
+
#
|
17
|
+
# OmniAI::OpenAI.configure do |config|
|
18
|
+
# config.api_key = '...'
|
19
|
+
# end
|
20
|
+
#
|
21
|
+
# client = OmniAI::OpenAI::Client.new
|
22
|
+
class Client < OmniAI::Client
|
23
|
+
VERSION = 'v1'
|
24
|
+
|
25
|
+
# @param api_key [String] optional - defaults to `OmniAI::OpenAI.config.api_key`
|
26
|
+
# @param project_id [String] optional - defaults to `OmniAI::OpenAI.config.project`
|
27
|
+
# @param organization_id [String] optional - defaults to `OmniAI::OpenAI.config.organization`
|
28
|
+
# @param logger [Logger] optional - defaults to `OmniAI::OpenAI.config.logger`
|
29
|
+
def initialize(
|
30
|
+
api_key: OmniAI::OpenAI.config.api_key,
|
31
|
+
organization: OmniAI::OpenAI.config.organization,
|
32
|
+
project: OmniAI::OpenAI.config.project,
|
33
|
+
logger: OmniAI::OpenAI.config.logger,
|
34
|
+
host: OmniAI::OpenAI.config.host
|
35
|
+
)
|
36
|
+
raise(ArgumentError, %(ENV['OPENAI_API_KEY'] must be defined or `api_key` must be passed)) if api_key.nil?
|
37
|
+
|
38
|
+
super(api_key:, logger:)
|
39
|
+
|
40
|
+
@organization = organization
|
41
|
+
@project = project
|
42
|
+
@host = host
|
43
|
+
end
|
44
|
+
|
45
|
+
# @return [HTTP::Client]
|
46
|
+
def connection
|
47
|
+
@connection ||= begin
|
48
|
+
http = HTTP.auth("Bearer #{api_key}").persistent(@host)
|
49
|
+
http = http.headers('OpenAI-Organization': @organization) if @organization
|
50
|
+
http = http.headers('OpenAI-Project': @project) if @project
|
51
|
+
http
|
52
|
+
end
|
53
|
+
end
|
54
|
+
|
55
|
+
# @raise [OmniAI::Error]
|
56
|
+
#
|
57
|
+
# @param messages [String, Array, Hash]
|
58
|
+
# @param model [String] optional
|
59
|
+
# @param format [Symbol] optional :text or :json
|
60
|
+
# @param temperature [Float, nil] optional
|
61
|
+
# @param stream [Proc, nil] optional
|
62
|
+
#
|
63
|
+
# @return [OmniAI::Chat::Completion]
|
64
|
+
def chat(messages, model: Chat::Model::GPT_4O, temperature: nil, format: nil, stream: nil)
|
65
|
+
Chat.process!(messages, model:, temperature:, format:, stream:, client: self)
|
66
|
+
end
|
67
|
+
end
|
68
|
+
end
|
69
|
+
end
|
@@ -0,0 +1,19 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
module OmniAI
|
4
|
+
module OpenAI
|
5
|
+
# Configuration for managing the OpenAI `api_key` / `organization` / `project` / `logger`.
|
6
|
+
class Config < OmniAI::Config
|
7
|
+
attr_accessor :organization, :project, :chat_options
|
8
|
+
|
9
|
+
def initialize
|
10
|
+
super
|
11
|
+
@api_key = ENV.fetch('OPENAI_API_KEY', nil)
|
12
|
+
@organization = ENV.fetch('OPENAI_ORGANIZATION', nil)
|
13
|
+
@project = ENV.fetch('OPENAI_PROJECT', nil)
|
14
|
+
@host = ENV.fetch('OPENAI_HOST', 'https://api.openai.com')
|
15
|
+
@chat_options = {}
|
16
|
+
end
|
17
|
+
end
|
18
|
+
end
|
19
|
+
end
|
data/lib/omniai/openai.rb
CHANGED
@@ -1,9 +1,25 @@
|
|
1
1
|
# frozen_string_literal: true
|
2
2
|
|
3
|
-
|
3
|
+
require 'event_stream_parser'
|
4
|
+
require 'omniai'
|
5
|
+
require 'zeitwerk'
|
6
|
+
|
7
|
+
loader = Zeitwerk::Loader.for_gem
|
8
|
+
loader.push_dir(__dir__, namespace: OmniAI)
|
9
|
+
loader.inflector.inflect 'openai' => 'OpenAI'
|
10
|
+
loader.setup
|
4
11
|
|
5
12
|
module OmniAI
|
13
|
+
# A namespace for everything OpenAI.
|
6
14
|
module OpenAI
|
7
|
-
|
15
|
+
# @return [OmniAI::OpenAI::Config]
|
16
|
+
def self.config
|
17
|
+
@config ||= Config.new
|
18
|
+
end
|
19
|
+
|
20
|
+
# @yield [OmniAI::OpenAI::Config]
|
21
|
+
def self.configure
|
22
|
+
yield config
|
23
|
+
end
|
8
24
|
end
|
9
25
|
end
|
metadata
CHANGED
@@ -1,17 +1,17 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: omniai-openai
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.1
|
4
|
+
version: 1.0.1
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Kevin Sylvestre
|
8
8
|
autorequire:
|
9
9
|
bindir: exe
|
10
10
|
cert_chain: []
|
11
|
-
date: 2024-06-
|
11
|
+
date: 2024-06-14 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
|
-
name:
|
14
|
+
name: event_stream_parser
|
15
15
|
requirement: !ruby/object:Gem::Requirement
|
16
16
|
requirements:
|
17
17
|
- - ">="
|
@@ -25,13 +25,13 @@ dependencies:
|
|
25
25
|
- !ruby/object:Gem::Version
|
26
26
|
version: '0'
|
27
27
|
- !ruby/object:Gem::Dependency
|
28
|
-
name:
|
28
|
+
name: omniai
|
29
29
|
requirement: !ruby/object:Gem::Requirement
|
30
30
|
requirements:
|
31
31
|
- - ">="
|
32
32
|
- !ruby/object:Gem::Version
|
33
33
|
version: '0'
|
34
|
-
type: :
|
34
|
+
type: :runtime
|
35
35
|
prerelease: false
|
36
36
|
version_requirements: !ruby/object:Gem::Requirement
|
37
37
|
requirements:
|
@@ -39,13 +39,13 @@ dependencies:
|
|
39
39
|
- !ruby/object:Gem::Version
|
40
40
|
version: '0'
|
41
41
|
- !ruby/object:Gem::Dependency
|
42
|
-
name:
|
42
|
+
name: zeitwerk
|
43
43
|
requirement: !ruby/object:Gem::Requirement
|
44
44
|
requirements:
|
45
45
|
- - ">="
|
46
46
|
- !ruby/object:Gem::Version
|
47
47
|
version: '0'
|
48
|
-
type: :
|
48
|
+
type: :runtime
|
49
49
|
prerelease: false
|
50
50
|
version_requirements: !ruby/object:Gem::Requirement
|
51
51
|
requirements:
|
@@ -59,16 +59,19 @@ executables: []
|
|
59
59
|
extensions: []
|
60
60
|
extra_rdoc_files: []
|
61
61
|
files:
|
62
|
-
-
|
63
|
-
- ".rubocop.yml"
|
62
|
+
- Gemfile
|
64
63
|
- README.md
|
65
|
-
- Rakefile
|
66
64
|
- lib/omniai/openai.rb
|
65
|
+
- lib/omniai/openai/chat.rb
|
66
|
+
- lib/omniai/openai/client.rb
|
67
|
+
- lib/omniai/openai/config.rb
|
67
68
|
- lib/omniai/openai/version.rb
|
68
|
-
- sig/omniai/openai.rbs
|
69
69
|
homepage: https://github.com/ksylvest/omniai-openai
|
70
70
|
licenses: []
|
71
|
-
metadata:
|
71
|
+
metadata:
|
72
|
+
homepage_uri: https://github.com/ksylvest/omniai-openai
|
73
|
+
changelog_uri: https://github.com/ksylvest/omniai-openai/releases
|
74
|
+
rubygems_mfa_required: 'true'
|
72
75
|
post_install_message:
|
73
76
|
rdoc_options: []
|
74
77
|
require_paths:
|
@@ -77,7 +80,7 @@ required_ruby_version: !ruby/object:Gem::Requirement
|
|
77
80
|
requirements:
|
78
81
|
- - ">="
|
79
82
|
- !ruby/object:Gem::Version
|
80
|
-
version: 3.
|
83
|
+
version: 3.3.0
|
81
84
|
required_rubygems_version: !ruby/object:Gem::Requirement
|
82
85
|
requirements:
|
83
86
|
- - ">="
|
data/.rspec
DELETED
data/.rubocop.yml
DELETED
data/Rakefile
DELETED