ruby-openai 3.6.0 → 3.7.0
- checksums.yaml +4 -4
- data/CHANGELOG.md +6 -0
- data/Gemfile.lock +1 -1
- data/README.md +23 -14
- data/lib/openai/client.rb +3 -4
- data/lib/openai/version.rb +1 -1
- data/lib/openai.rb +3 -1
- data/ruby-openai.gemspec +1 -1
- metadata +3 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 3a26687eb00056e37ab5f2eed5b7bf6e73b4a9479016a101a828ae014ed299aa
+  data.tar.gz: c729b1a8b2df16db50af716664108b1fb2418ef22f4159b7993148474cd93ad2
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: ddd2bf5992ade00580d0c2cd2b668e70eb999c39a487da4020e46ad8debe96a08b5e0e5560baa612b4e89d0f91765532394a1d69f3cd7214c6bb938edb10118f
+  data.tar.gz: aba44b222706820b3233fdcb623da8ba766c431485a723c870092c09608b4e1a7ab3466834d8683690b2873318bd807bcc3c57fe848831ed3c5fb8fc473d1a1a
data/CHANGELOG.md
CHANGED
@@ -5,6 +5,12 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [3.7.0] - 2023-03-25
+
+### Added
+
+- Allow the usage of proxy base URIs like https://www.helicone.ai/. Thanks to [@mmmaia](https://github.com/mmmaia) for the PR!
+
 ## [3.6.0] - 2023-03-22
 
 ### Added
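The new proxy support is exercised by passing `uri_base` when building a client. A minimal sketch, mirroring the README example added in this release (the Helicone host and the access token string are placeholder values, not real credentials):

```ruby
require "openai"

# Route every request through a proxy base URI instead of https://api.openai.com/.
# Host and token below are placeholders.
client = OpenAI::Client.new(
  access_token: "access_token_goes_here",
  uri_base: "https://oai.hconeai.com",
  request_timeout: 240
)
```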
data/Gemfile.lock
CHANGED
data/README.md
CHANGED
@@ -7,7 +7,7 @@
 
 Use the [OpenAI API](https://openai.com/blog/openai-api/) with Ruby! 🤖❤️
 
-Generate text with ChatGPT, transcribe
+Generate text with ChatGPT, transcribe and translate audio with Whisper, or create images with DALL·E...
 
 Check out [Ruby AI Builders](https://discord.gg/k4Uc224xVD) on Discord!
 
@@ -69,21 +69,26 @@ Then you can create a client like this:
 client = OpenAI::Client.new
 ```
 
-#### 
+#### Custom timeout or base URI
 
-The default timeout for any OpenAI request is 120 seconds. You can change that passing the `request_timeout` when initializing the client:
+The default timeout for any OpenAI request is 120 seconds. You can change that passing the `request_timeout` when initializing the client. You can also change the base URI used for all requests, eg. to use observability tools like [Helicone](https://docs.helicone.ai/quickstart/integrate-in-one-line-of-code):
 
 ```ruby
-client = OpenAI::Client.new(
+client = OpenAI::Client.new(
+    access_token: "access_token_goes_here",
+    uri_base: "https://oai.hconeai.com",
+    request_timeout: 240
+)
 ```
 
 or when configuring the gem:
 
 ```ruby
 OpenAI.configure do |config|
-    config.access_token = ENV.fetch(
-    config.organization_id = ENV.fetch(
-    config.
+    config.access_token = ENV.fetch("OPENAI_ACCESS_TOKEN")
+    config.organization_id = ENV.fetch("OPENAI_ORGANIZATION_ID") # Optional
+    config.uri_base = "https://oai.hconeai.com" # Optional
+    config.request_timeout = 240 # Optional
 end
 ```
 
@@ -98,14 +103,18 @@ client.models.retrieve(id: "text-ada-001")
 
 #### Examples
 
-- [GPT-
+- [GPT-4 (limited beta)](https://platform.openai.com/docs/models/gpt-4)
+  - gpt-4
+  - gpt-4-0314
+  - gpt-4-32k
+- [GPT-3.5](https://platform.openai.com/docs/models/gpt-3-5)
+  - gpt-3.5-turbo
+  - gpt-3.5-turbo-0301
+  - text-davinci-003
+- [GPT-3](https://platform.openai.com/docs/models/gpt-3)
   - text-ada-001
   - text-babbage-001
   - text-curie-001
-  - text-davinci-001
-- [Codex (private beta)](https://beta.openai.com/docs/models/codex-series-private-beta)
-  - code-davinci-002
-  - code-cushman-001
 
 ### ChatGPT
 
@@ -297,7 +306,7 @@ The translations API takes as input the audio file in any of the supported langu
 response = client.translate(
     parameters: {
         model: "whisper-1",
-        file: File.open('path_to_file'),
+        file: File.open('path_to_file', 'rb'),
     })
 puts response.parsed_response['text']
 # => "Translation of the text"
@@ -311,7 +320,7 @@ The transcriptions API takes as input the audio file you want to transcribe and
 response = client.transcribe(
     parameters: {
         model: "whisper-1",
-        file: File.open('path_to_file'),
+        file: File.open('path_to_file', 'rb'),
     })
 puts response.parsed_response['text']
 # => "Transcription of the text"
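Both audio examples now open the file with `'rb'`: binary read mode keeps the bytes untouched, whereas the default text mode can apply newline translation or encoding conversion and corrupt an audio upload, notably on Windows. A self-contained sketch of the transcription call under that change (the file path and access token are placeholders):

```ruby
require "openai"

client = OpenAI::Client.new(access_token: "access_token_goes_here")

# "rb" reads the audio file as raw bytes before handing it to the API.
response = client.transcribe(
  parameters: {
    model: "whisper-1",
    file: File.open("path/to/audio.mp3", "rb")
  }
)
puts response.parsed_response["text"]
```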
data/lib/openai/client.rb
CHANGED
@@ -1,10 +1,9 @@
 module OpenAI
   class Client
-
-
-    def initialize(access_token: nil, organization_id: nil, request_timeout: nil)
+    def initialize(access_token: nil, organization_id: nil, uri_base: nil, request_timeout: nil)
       OpenAI.configuration.access_token = access_token if access_token
       OpenAI.configuration.organization_id = organization_id if organization_id
+      OpenAI.configuration.uri_base = uri_base if uri_base
       OpenAI.configuration.request_timeout = request_timeout if request_timeout
     end
 
@@ -87,7 +86,7 @@ module OpenAI
     end
 
     private_class_method def self.uri(path:)
-
+      OpenAI.configuration.uri_base + OpenAI.configuration.api_version + path
     end
 
     private_class_method def self.headers
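The rewritten `Client.uri` simply concatenates the configured base URI, API version, and request path. A rough sketch of what that produces under the defaults added in this release (the `/models` path is just an illustrative example):

```ruby
require "openai"

# uri_base defaults to "https://api.openai.com/" and api_version to "v1",
# so a path of "/models" concatenates to "https://api.openai.com/v1/models".
url = OpenAI.configuration.uri_base + OpenAI.configuration.api_version + "/models"
puts url # => https://api.openai.com/v1/models
```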
data/lib/openai/version.rb
CHANGED
data/lib/openai.rb
CHANGED
@@ -13,15 +13,17 @@ module OpenAI
 
   class Configuration
     attr_writer :access_token
-    attr_accessor :api_version, :organization_id, :request_timeout
+    attr_accessor :api_version, :organization_id, :uri_base, :request_timeout
 
     DEFAULT_API_VERSION = "v1".freeze
+    DEFAULT_URI_BASE = "https://api.openai.com/".freeze
     DEFAULT_REQUEST_TIMEOUT = 120
 
     def initialize
       @access_token = nil
       @api_version = DEFAULT_API_VERSION
       @organization_id = nil
+      @uri_base = DEFAULT_URI_BASE
       @request_timeout = DEFAULT_REQUEST_TIMEOUT
     end
 
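With the added default, a freshly built configuration already points at the official API host. A small sketch of the defaults as defined above (the expected values are shown as comments):

```ruby
require "openai"

config = OpenAI::Configuration.new
config.api_version     # => "v1"
config.uri_base        # => "https://api.openai.com/"
config.request_timeout # => 120
```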
data/ruby-openai.gemspec
CHANGED
@@ -6,7 +6,7 @@ Gem::Specification.new do |spec|
   spec.authors = ["Alex"]
   spec.email = ["alexrudall@users.noreply.github.com"]
 
-  spec.summary = "
+  spec.summary = "OpenAI API + Ruby! 🤖❤️"
   spec.homepage = "https://github.com/alexrudall/ruby-openai"
   spec.license = "MIT"
   spec.required_ruby_version = Gem::Requirement.new(">= 2.6.0")
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: ruby-openai
 version: !ruby/object:Gem::Version
-  version: 3.6.0
+  version: 3.7.0
 platform: ruby
 authors:
 - Alex
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2023-03-
+date: 2023-03-25 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: httparty
@@ -88,5 +88,5 @@ requirements: []
 rubygems_version: 3.1.4
 signing_key:
 specification_version: 4
-summary:
+summary: "OpenAI API + Ruby! \U0001F916❤️"
 test_files: []