ruby-openai 6.2.0 → 6.3.0
- checksums.yaml +4 -4
- data/CHANGELOG.md +8 -0
- data/Gemfile.lock +3 -3
- data/README.md +62 -1
- data/lib/openai/client.rb +3 -2
- data/lib/openai/compatibility.rb +1 -0
- data/lib/openai/http.rb +6 -1
- data/lib/openai/version.rb +1 -1
- data/lib/openai.rb +16 -0
- data/ruby-openai.gemspec +1 -1
- metadata +4 -4
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 2e2bdf19ad32b8eb492faca3e937614c30ab57817c374797362ca27ffff1cf7e
+  data.tar.gz: b3d31aaa13bec5bdeb08718c04afad99b528e9e5c1bfbd279a8111e4fa12739c
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: bf6f46dbb52890ff1468d727711681ad95bb82e26b77f092cf6a81be25dbdb7ef3b3a58626090160bfc927ec3585723c17a9c47005ff64b035a3af85ba887e51
+  data.tar.gz: cbe3a5d6c57757beee533c3b9c05aa43fb343f7da542af4fc58bf70223f84cc674900d032e824f1c34e19ef17a2ba0d366fe9353dae67db162ce435ef2f1a496
data/CHANGELOG.md
CHANGED
@@ -5,6 +5,14 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [6.3.0] - 2023-11-26
+
+### Added
+
+- Add ability to pass [Faraday middleware](https://lostisland.github.io/faraday/#/middleware/index) to the client in a block, eg. to enable verbose logging - shout out to [@obie](https://github.com/obie) for pushing for this.
+- Add better error logging to the client by default.
+- Bump Event Source to v1, thank you [@atesgoral](https://github.com/atesgoral) @ Shopify!
+
 ## [6.2.0] - 2023-11-15
 
 ### Added
data/Gemfile.lock
CHANGED
@@ -1,8 +1,8 @@
 PATH
   remote: .
   specs:
-    ruby-openai (6.2.0)
-      event_stream_parser (>= 0.3.0, <
+    ruby-openai (6.3.0)
+      event_stream_parser (>= 0.3.0, < 2.0.0)
       faraday (>= 1)
       faraday-multipart (>= 1)
 
@@ -18,7 +18,7 @@ GEM
       rexml
     diff-lcs (1.5.0)
     dotenv (2.8.1)
-    event_stream_parser (0.
+    event_stream_parser (1.0.0)
     faraday (2.7.11)
       base64
       faraday-net_http (>= 2.0, < 3.1)
data/README.md
CHANGED
@@ -1,6 +1,6 @@
 # Ruby OpenAI
 
-[![Gem Version](https://
+[![Gem Version](https://img.shields.io/gem/v/ruby-openai.svg)](https://rubygems.org/gems/ruby-openai)
 [![GitHub license](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/alexrudall/ruby-openai/blob/main/LICENSE.txt)
 [![CircleCI Build Status](https://circleci.com/gh/alexrudall/ruby-openai.svg?style=shield)](https://circleci.com/gh/alexrudall/ruby-openai)
 
@@ -108,6 +108,16 @@ OpenAI.configure do |config|
 end
 ```
 
+#### Verbose Logging
+
+You can pass [Faraday middleware](https://lostisland.github.io/faraday/#/middleware/index) to the client in a block, eg. to enable verbose logging with Ruby's [Logger](https://ruby-doc.org/3.2.2/stdlibs/logger/Logger.html):
+
+```ruby
+client = OpenAI::Client.new do |f|
+  f.response :logger, Logger.new($stdout), bodies: true
+end
+```
+
 #### Azure
 
 To use the [Azure OpenAI Service](https://learn.microsoft.com/en-us/azure/cognitive-services/openai/) API, you can configure the gem like this:
@@ -217,6 +227,57 @@ puts response.dig("choices", 0, "message", "content")
 # => "The image depicts a serene natural landscape featuring a long wooden boardwalk extending straight ahead"
 ```
 
+#### JSON Mode
+
+You can set the response_format to ask for responses in JSON (at least for `gpt-3.5-turbo-1106`):
+
+```ruby
+response = client.chat(
+  parameters: {
+    model: "gpt-3.5-turbo-1106",
+    response_format: { type: "json_object" },
+    messages: [{ role: "user", content: "Hello! Give me some JSON please."}],
+    temperature: 0.7,
+  })
+puts response.dig("choices", 0, "message", "content")
+{
+  "name": "John",
+  "age": 30,
+  "city": "New York",
+  "hobbies": ["reading", "traveling", "hiking"],
+  "isStudent": false
+}
+```
+
+You can stream it as well!
+
+```ruby
+response = client.chat(
+  parameters: {
+    model: "gpt-3.5-turbo-1106",
+    messages: [{ role: "user", content: "Can I have some JSON please?"}],
+    response_format: { type: "json_object" },
+    stream: proc do |chunk, _bytesize|
+      print chunk.dig("choices", 0, "delta", "content")
+    end
+  })
+{
+  "message": "Sure, please let me know what specific JSON data you are looking for.",
+  "JSON_data": {
+    "example_1": {
+      "key_1": "value_1",
+      "key_2": "value_2",
+      "key_3": "value_3"
+    },
+    "example_2": {
+      "key_4": "value_4",
+      "key_5": "value_5",
+      "key_6": "value_6"
+    }
+  }
+}
+```
+
 ### Functions
 
 You can describe and pass in functions and the model will intelligently choose to output a JSON object containing arguments to call them. For example, if you want the model to use your method `get_current_weather` to get the current weather in a given location:
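One thing the JSON Mode examples leave implicit: the model's reply arrives as a JSON string inside the message content, so the caller still parses it. A minimal sketch, assuming the `response` from the non-streaming example above and Ruby's standard `json` library:

```ruby
require "json"

raw = response.dig("choices", 0, "message", "content")
data = JSON.parse(raw) # => a Hash such as {"name"=>"John", "age"=>30, ...}
puts data["hobbies"].inspect
```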
data/lib/openai/client.rb
CHANGED
@@ -11,14 +11,15 @@ module OpenAI
       request_timeout
       extra_headers
     ].freeze
-    attr_reader *CONFIG_KEYS
+    attr_reader *CONFIG_KEYS, :faraday_middleware
 
-    def initialize(config = {})
+    def initialize(config = {}, &faraday_middleware)
       CONFIG_KEYS.each do |key|
         # Set instance variables like api_type & access_token. Fall back to global config
         # if not present.
         instance_variable_set("@#{key}", config[key] || OpenAI.configuration.send(key))
       end
+      @faraday_middleware = faraday_middleware
     end
 
     def chat(parameters: {})
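With this change the constructor takes the usual config hash and an optional middleware block together. A minimal sketch, assuming an `OPENAI_ACCESS_TOKEN` environment variable and Faraday's built-in `:logger` response middleware:

```ruby
require "openai"
require "logger"

# Per-client config plus a Faraday middleware block in one call.
client = OpenAI::Client.new(access_token: ENV.fetch("OPENAI_ACCESS_TOKEN")) do |faraday|
  faraday.response :logger, Logger.new($stdout), bodies: true
end

client.faraday_middleware # => the Proc captured above, exposed by the new attr_reader
```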
data/lib/openai/compatibility.rb
CHANGED
data/lib/openai/http.rb
CHANGED
@@ -71,12 +71,17 @@ module OpenAI
     end
 
     def conn(multipart: false)
-      Faraday.new do |f|
+      connection = Faraday.new do |f|
         f.options[:timeout] = @request_timeout
         f.request(:multipart) if multipart
+        f.use MiddlewareErrors
         f.response :raise_error
         f.response :json
       end
+
+      @faraday_middleware&.call(connection)
+
+      connection
     end
 
     def uri(path:)
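Note that the user's block is not run inside `Faraday.new`; it is handed the built `Faraday::Connection`, which still responds to `use`/`request`/`response`, so it can register additional middleware on top of the defaults. A standalone sketch of the same pattern, with an arbitrary timeout standing in for `@request_timeout`:

```ruby
require "faraday"
require "logger"

# Stand-in for the block a caller passes to OpenAI::Client.new.
verbose_logging = proc do |connection|
  connection.response :logger, Logger.new($stdout), bodies: true
end

connection = Faraday.new do |f|
  f.options[:timeout] = 120 # plays the role of @request_timeout
  f.response :raise_error
  f.response :json
end

# The same move as `@faraday_middleware&.call(connection)` above.
verbose_logging&.call(connection)
```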
data/lib/openai/version.rb
CHANGED
data/lib/openai.rb
CHANGED
@@ -19,6 +19,22 @@ module OpenAI
   class Error < StandardError; end
   class ConfigurationError < Error; end
 
+  class MiddlewareErrors < Faraday::Middleware
+    def call(env)
+      @app.call(env)
+    rescue Faraday::Error => e
+      raise e unless e.response.is_a?(Hash)
+
+      logger = Logger.new($stdout)
+      logger.formatter = proc do |_severity, _datetime, _progname, msg|
+        "\033[31mOpenAI HTTP Error (spotted in ruby-openai #{VERSION}): #{msg}\n\033[0m"
+      end
+      logger.error(e.response[:body])
+
+      raise e
+    end
+  end
+
   class Configuration
     attr_writer :access_token
     attr_accessor :api_type, :api_version, :organization_id, :uri_base, :request_timeout,
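`MiddlewareErrors` only logs the error body and then re-raises, so existing error handling is unchanged: callers still rescue `Faraday::Error`. A minimal sketch, assuming a `client` built as in the README examples (the model name and empty message list are placeholders chosen to provoke an API error):

```ruby
begin
  client.chat(parameters: { model: "gpt-3.5-turbo", messages: [] })
rescue Faraday::Error => e
  # When Faraday attached a response hash, status and body are available here,
  # on top of the red error line the middleware already wrote to $stdout.
  warn "OpenAI request failed: #{e.response&.dig(:status)}"
end
```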
data/ruby-openai.gemspec
CHANGED
@@ -25,7 +25,7 @@ Gem::Specification.new do |spec|
   spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
   spec.require_paths = ["lib"]
 
-  spec.add_dependency "event_stream_parser", ">= 0.3.0", "<
+  spec.add_dependency "event_stream_parser", ">= 0.3.0", "< 2.0.0"
   spec.add_dependency "faraday", ">= 1"
   spec.add_dependency "faraday-multipart", ">= 1"
 end
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: ruby-openai
 version: !ruby/object:Gem::Version
-  version: 6.2.0
+  version: 6.3.0
 platform: ruby
 authors:
 - Alex
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2023-11-
+date: 2023-11-26 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: event_stream_parser
@@ -19,7 +19,7 @@ dependencies:
         version: 0.3.0
     - - "<"
       - !ruby/object:Gem::Version
-        version:
+        version: 2.0.0
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
@@ -29,7 +29,7 @@ dependencies:
         version: 0.3.0
     - - "<"
      - !ruby/object:Gem::Version
-        version:
+        version: 2.0.0
 - !ruby/object:Gem::Dependency
   name: faraday
   requirement: !ruby/object:Gem::Requirement