ruby-openai 6.0.1 → 6.1.0

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 492acab028ee10ea62f7f95814d674299f0fb83de535322bfc935c48b41b74f3
4
- data.tar.gz: 290cd1cf80ac93bf880434e8c5969b22a077582bf07fc93f6718ad186ebd0d66
3
+ metadata.gz: e1c1c4aa39c55d6c907ed312d12319d15dd2c4f5e68dc4d43d2a505c07869fb5
4
+ data.tar.gz: ba6755693f91ce4f0ed1d03b2516e602f9eac382dde40f8241665618c9287767
5
5
  SHA512:
6
- metadata.gz: 6641fa5f1ecfccc6945ef479d4d59ae400f9f9032cee74a1b8580988c4c7c11aa363789fd1fec981bc32c3119748e813319f018c0d7bb0c7744c6fb8c12d7544
7
- data.tar.gz: bfc6ddbde1d9fc342551c49c3f6ab26fd23e165fb98e037cb9731c2cd300bfe11dc89fd2c20e5e1c3ad8564958fcaffabedc38611ca7eec3c9ea0b1ba9730fd0
6
+ metadata.gz: beaba2b0b90941e28b325950c9018a5a0a38f108989d7acce96ebdf84f761a01903e925d2f84812e6820bfd4917ace016590e18d4aee3960a7f6a936918ee445
7
+ data.tar.gz: 6d423030d756769ba75f4837797384c7d4e9c99367bed3fe8141fc04f4f4b070f84643afc107ebf4d7a63666fd83c39fb154a2eb1463851a19a65f86672bae54
data/CHANGELOG.md CHANGED
@@ -5,6 +5,12 @@ All notable changes to this project will be documented in this file.
5
5
  The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
6
6
  and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
7
7
 
8
+ ## [6.1.0] - 2023-11-14
9
+
10
+ ### Added
11
+
12
+ - Add support for Assistants, Threads, Messages and Runs. Thank you [@Haegin](https://github.com/Haegin) for the excellent work on this PR, and many reviewers for their contributions!
13
+
8
14
  ## [6.0.1] - 2023-11-07
9
15
 
10
16
  ### Fix
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
1
1
  PATH
2
2
  remote: .
3
3
  specs:
4
- ruby-openai (6.0.1)
4
+ ruby-openai (6.1.0)
5
5
  event_stream_parser (>= 0.3.0, < 1.0.0)
6
6
  faraday (>= 1)
7
7
  faraday-multipart (>= 1)
data/README.md CHANGED
@@ -174,7 +174,7 @@ puts response.dig("choices", 0, "message", "content")
174
174
  # => "Hello! How may I assist you today?"
175
175
  ```
176
176
 
177
- ### Streaming Chat
177
+ #### Streaming Chat
178
178
 
179
179
  [Quick guide to streaming Chat with Rails 7 and Hotwire](https://gist.github.com/alexrudall/cb5ee1e109353ef358adb4e66631799d)
180
180
 
@@ -195,6 +195,28 @@ client.chat(
195
195
 
196
196
  Note: The OpenAI API currently does not report token usage for streaming responses. To count tokens while streaming, try `OpenAI.rough_token_count` or [tiktoken_ruby](https://github.com/IAPark/tiktoken_ruby). We think that each call to the stream proc corresponds to a single token, so you can also try counting the number of calls to the proc to get the completion token count.
197
197
 
198
+ #### Vision
199
+
200
+ You can use the GPT-4 Vision model to generate a description of an image:
201
+
202
+ ```ruby
203
+ messages = [
204
+ { "type": "text", "text": "What’s in this image?"},
205
+ { "type": "image_url",
206
+ "image_url": {
207
+ "url": "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg",
208
+ },
209
+ }
210
+ ]
211
+ response = client.chat(
212
+ parameters: {
213
+ model: "gpt-4-vision-preview", # Required.
214
+ messages: [{ role: "user", content: messages}], # Required.
215
+ })
216
+ puts response.dig("choices", 0, "message", "content")
217
+ # => "The image depicts a serene natural landscape featuring a long wooden boardwalk extending straight ahead"
218
+ ```
219
+
198
220
  ### Functions
199
221
 
200
222
  You can describe and pass in functions and the model will intelligently choose to output a JSON object containing arguments to call them. For example, if you want the model to use your method `get_current_weather` to get the current weather in a given location:
@@ -438,7 +460,7 @@ puts response["text"]
438
460
  # => "Transcription of the text"
439
461
  ```
440
462
 
441
- #### Errors
463
+ ### Errors
442
464
 
443
465
  HTTP errors can be caught like this:
444
466
 
@@ -0,0 +1,27 @@
1
module OpenAI
  # Thin wrapper around the beta Assistants REST endpoints.
  class Assistants
    def initialize(client:)
      # Assistants endpoints require the "OpenAI-Beta: assistants=v1" header,
      # so hold a beta-scoped copy of the client.
      @client = client.beta(assistants: "v1")
    end

    # GET /assistants — returns the list of assistants.
    def list
      client.get(path: "/assistants")
    end

    # GET /assistants/:id — fetches a single assistant.
    def retrieve(id:)
      client.get(path: "/assistants/#{id}")
    end

    # POST /assistants — creates an assistant from the given parameters.
    def create(parameters: {})
      client.json_post(path: "/assistants", parameters: parameters)
    end

    # POST /assistants/:id — updates an existing assistant.
    def modify(id:, parameters: {})
      client.json_post(path: "/assistants/#{id}", parameters: parameters)
    end

    # DELETE /assistants/:id — removes an assistant.
    def delete(id:)
      client.delete(path: "/assistants/#{id}")
    end

    private

    attr_reader :client
  end
end
data/lib/openai/client.rb CHANGED
@@ -53,6 +53,26 @@ module OpenAI
53
53
  @models ||= OpenAI::Models.new(client: self)
54
54
  end
55
55
 
56
# Memoized accessor for the beta Assistants API wrapper.
def assistants
  @assistants ||= OpenAI::Assistants.new(client: self)
end
59
+
60
# Memoized accessor for the beta Threads API wrapper.
def threads
  @threads ||= OpenAI::Threads.new(client: self)
end
63
+
64
# Memoized accessor for the beta Messages API wrapper.
def messages
  @messages ||= OpenAI::Messages.new(client: self)
end
67
+
68
# Memoized accessor for the beta Runs API wrapper.
def runs
  @runs ||= OpenAI::Runs.new(client: self)
end
71
+
72
# Memoized accessor for the beta RunSteps API wrapper.
def run_steps
  @run_steps ||= OpenAI::RunSteps.new(client: self)
end
75
+
56
76
  def moderations(parameters: {})
57
77
  json_post(path: "/moderations", parameters: parameters)
58
78
  end
@@ -60,5 +80,11 @@ module OpenAI
60
80
  def azure?
61
81
  @api_type&.to_sym == :azure
62
82
  end
83
+
84
# Returns a copy of this client with the "OpenAI-Beta" header set, opting
# the copy into the given beta APIs, e.g. beta(assistants: "v1") produces
# "OpenAI-Beta: assistants=v1". The receiver itself is left unmodified.
def beta(apis)
  flags = apis.map { |name, version| "#{name}=#{version}" }.join(";")
  copy = dup
  copy.add_headers("OpenAI-Beta": flags)
  copy
end
63
89
  end
64
90
  end
data/lib/openai/http.rb CHANGED
@@ -1,13 +1,23 @@
1
1
  require "event_stream_parser"
2
2
 
3
+ require_relative "http_headers"
4
+
3
5
  module OpenAI
4
6
  module HTTP
7
+ include HTTPHeaders
8
+
5
9
  def get(path:)
6
10
  parse_jsonl(conn.get(uri(path: path)) do |req|
7
11
  req.headers = headers
8
12
  end&.body)
9
13
  end
10
14
 
15
# Body-less POST (e.g. run cancellation). Sends the standard headers and
# parses the (possibly JSONL) response body.
def post(path:)
  response = conn.post(uri(path: path)) do |req|
    req.headers = headers
  end
  parse_jsonl(response&.body)
end
20
+
11
21
  def json_post(path:, parameters:)
12
22
  conn.post(uri(path: path)) do |req|
13
23
  configure_json_post_request(req, parameters)
@@ -78,29 +88,6 @@ module OpenAI
78
88
  end
79
89
  end
80
90
 
81
- def headers
82
- if azure?
83
- azure_headers
84
- else
85
- openai_headers
86
- end.merge(@extra_headers || {})
87
- end
88
-
89
- def openai_headers
90
- {
91
- "Content-Type" => "application/json",
92
- "Authorization" => "Bearer #{@access_token}",
93
- "OpenAI-Organization" => @organization_id
94
- }
95
- end
96
-
97
- def azure_headers
98
- {
99
- "Content-Type" => "application/json",
100
- "api-key" => @access_token
101
- }
102
- end
103
-
104
91
  def multipart_parameters(parameters)
105
92
  parameters&.transform_values do |value|
106
93
  next value unless value.respond_to?(:close) # File or IO object.
@@ -0,0 +1,36 @@
1
module OpenAI
  # Shared header-building logic for OpenAI and Azure OpenAI requests.
  # Including classes must define #azure? and set @access_token (and, for
  # the OpenAI variant, @organization_id).
  module HTTPHeaders
    # Merges additional headers into the client. Keys are normalised to
    # strings so symbol and string forms of the same header cannot coexist.
    def add_headers(headers)
      normalised = headers.transform_keys(&:to_s)
      @extra_headers = extra_headers.merge(normalised)
    end

    private

    # Full header set for a request: provider base headers plus any extras
    # registered via #add_headers (extras win on key collisions).
    def headers
      base = azure? ? azure_headers : openai_headers
      base.merge(extra_headers)
    end

    def openai_headers
      {
        "Content-Type" => "application/json",
        "Authorization" => "Bearer #{@access_token}",
        "OpenAI-Organization" => @organization_id
      }
    end

    def azure_headers
      {
        "Content-Type" => "application/json",
        "api-key" => @access_token
      }
    end

    # Lazily-initialised store of user-supplied headers.
    def extra_headers
      @extra_headers ||= {}
    end
  end
end
@@ -0,0 +1,23 @@
1
module OpenAI
  # Thin wrapper around the beta Messages endpoints; messages are nested
  # under threads.
  class Messages
    def initialize(client:)
      # Messages endpoints require the "OpenAI-Beta: assistants=v1" header.
      @client = client.beta(assistants: "v1")
    end

    # GET /threads/:thread_id/messages — lists messages in a thread.
    def list(thread_id:)
      client.get(path: "/threads/#{thread_id}/messages")
    end

    # GET /threads/:thread_id/messages/:id — fetches one message.
    def retrieve(thread_id:, id:)
      client.get(path: "/threads/#{thread_id}/messages/#{id}")
    end

    # POST /threads/:thread_id/messages — appends a message to a thread.
    def create(thread_id:, parameters: {})
      client.json_post(path: "/threads/#{thread_id}/messages", parameters: parameters)
    end

    # POST /threads/:thread_id/messages/:id — updates message metadata.
    def modify(id:, thread_id:, parameters: {})
      client.json_post(path: "/threads/#{thread_id}/messages/#{id}", parameters: parameters)
    end

    private

    attr_reader :client
  end
end
@@ -0,0 +1,15 @@
1
module OpenAI
  # Read-only wrapper for the beta Run Steps endpoints, nested under a
  # thread's runs.
  class RunSteps
    def initialize(client:)
      # Run Steps endpoints require the "OpenAI-Beta: assistants=v1" header.
      @client = client.beta(assistants: "v1")
    end

    # GET /threads/:thread_id/runs/:run_id/steps — lists a run's steps.
    def list(thread_id:, run_id:)
      client.get(path: "/threads/#{thread_id}/runs/#{run_id}/steps")
    end

    # GET /threads/:thread_id/runs/:run_id/steps/:id — fetches one step.
    def retrieve(thread_id:, run_id:, id:)
      client.get(path: "/threads/#{thread_id}/runs/#{run_id}/steps/#{id}")
    end

    private

    attr_reader :client
  end
end
@@ -0,0 +1,32 @@
1
module OpenAI
  # Wrapper for the beta Runs endpoints; runs are nested under threads.
  class Runs
    def initialize(client:)
      # Runs endpoints require the "OpenAI-Beta: assistants=v1" header.
      @client = client.beta(assistants: "v1")
    end

    # GET /threads/:thread_id/runs — lists runs belonging to a thread.
    def list(thread_id:)
      client.get(path: runs_path(thread_id))
    end

    # GET /threads/:thread_id/runs/:id — fetches one run.
    def retrieve(thread_id:, id:)
      client.get(path: "#{runs_path(thread_id)}/#{id}")
    end

    # POST /threads/:thread_id/runs — starts a run on a thread.
    def create(thread_id:, parameters: {})
      client.json_post(path: runs_path(thread_id), parameters: parameters)
    end

    # POST /threads/:thread_id/runs/:id — updates run metadata.
    def modify(id:, thread_id:, parameters: {})
      client.json_post(path: "#{runs_path(thread_id)}/#{id}", parameters: parameters)
    end

    # POST /threads/:thread_id/runs/:id/cancel — cancels an in-progress run.
    def cancel(id:, thread_id:)
      client.post(path: "#{runs_path(thread_id)}/#{id}/cancel")
    end

    # POST .../runs/:run_id/submit_tool_outputs — returns tool results to a
    # run that is waiting on required_action.
    def submit_tool_outputs(thread_id:, run_id:, parameters: {})
      client.json_post(path: "#{runs_path(thread_id)}/#{run_id}/submit_tool_outputs",
                       parameters: parameters)
    end

    private

    attr_reader :client

    # Base collection path for a thread's runs.
    def runs_path(thread_id)
      "/threads/#{thread_id}/runs"
    end
  end
end
@@ -0,0 +1,27 @@
1
module OpenAI
  # Thin wrapper around the beta Threads REST endpoints.
  class Threads
    def initialize(client:)
      # Threads endpoints require the "OpenAI-Beta: assistants=v1" header.
      @client = client.beta(assistants: "v1")
    end

    # GET /threads — returns the list of threads.
    def list
      client.get(path: "/threads")
    end

    # GET /threads/:id — fetches a single thread.
    def retrieve(id:)
      client.get(path: "/threads/#{id}")
    end

    # POST /threads — creates a thread from the given parameters.
    def create(parameters: {})
      client.json_post(path: "/threads", parameters: parameters)
    end

    # POST /threads/:id — updates thread metadata.
    def modify(id:, parameters: {})
      client.json_post(path: "/threads/#{id}", parameters: parameters)
    end

    # DELETE /threads/:id — removes a thread.
    def delete(id:)
      client.delete(path: "/threads/#{id}")
    end

    private

    attr_reader :client
  end
end
@@ -1,3 +1,3 @@
1
1
  module OpenAI
2
- VERSION = "6.0.1".freeze
2
+ VERSION = "6.1.0".freeze
3
3
  end
data/lib/openai.rb CHANGED
@@ -7,6 +7,11 @@ require_relative "openai/files"
7
7
  require_relative "openai/finetunes"
8
8
  require_relative "openai/images"
9
9
  require_relative "openai/models"
10
+ require_relative "openai/assistants"
11
+ require_relative "openai/threads"
12
+ require_relative "openai/messages"
13
+ require_relative "openai/runs"
14
+ require_relative "openai/run_steps"
10
15
  require_relative "openai/audio"
11
16
  require_relative "openai/version"
12
17
 
@@ -30,7 +35,7 @@ module OpenAI
30
35
  @organization_id = nil
31
36
  @uri_base = DEFAULT_URI_BASE
32
37
  @request_timeout = DEFAULT_REQUEST_TIMEOUT
33
- @extra_headers = nil
38
+ @extra_headers = {}
34
39
  end
35
40
 
36
41
  def access_token
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: ruby-openai
3
3
  version: !ruby/object:Gem::Version
4
- version: 6.0.1
4
+ version: 6.1.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Alex
8
8
  autorequire:
9
9
  bindir: exe
10
10
  cert_chain: []
11
- date: 2023-11-07 00:00:00.000000000 Z
11
+ date: 2023-11-14 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: event_stream_parser
@@ -87,14 +87,20 @@ files:
87
87
  - bin/console
88
88
  - bin/setup
89
89
  - lib/openai.rb
90
+ - lib/openai/assistants.rb
90
91
  - lib/openai/audio.rb
91
92
  - lib/openai/client.rb
92
93
  - lib/openai/compatibility.rb
93
94
  - lib/openai/files.rb
94
95
  - lib/openai/finetunes.rb
95
96
  - lib/openai/http.rb
97
+ - lib/openai/http_headers.rb
96
98
  - lib/openai/images.rb
99
+ - lib/openai/messages.rb
97
100
  - lib/openai/models.rb
101
+ - lib/openai/run_steps.rb
102
+ - lib/openai/runs.rb
103
+ - lib/openai/threads.rb
98
104
  - lib/openai/version.rb
99
105
  - lib/ruby/openai.rb
100
106
  - pull_request_template.md