ruby-openai 1.5.0 → 2.0.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: eae2966d67581585125aac11431d33db3af2f472d0970dad242316783ded514e
- data.tar.gz: 53f357b92d77a79c48539b69216c4b06c2f0fd0584be17dfbf54576ea02ffed4
+ metadata.gz: 4fafce333da7c64cacff4af92a1aa6f9a6b852a6c84bcf139668ad6763e93be0
+ data.tar.gz: bd5ae87214057466912c2892123e5d1ab6efc2dc570cbbe00c13679417506706
  SHA512:
- metadata.gz: 67c875aa8dde9a199aead346a6b45af8aed00ddf81b2db667c055c1ef9e86dea2d43e5b3d23e1e659dbfb0468a40ab9c419b52115a61491217c8c1b0d65aff3c
- data.tar.gz: 4d55769b42b9b295ff4c065c9f4abdbaaf5e96b998eb707e3a64f4161355d20c5e8ee0b417c80029b376e70fe9c69e0521c397ac82523aa05874e8ecbeaac7ce
+ metadata.gz: 1eac6b65e9ed0b8f932f0f9b65670df4a96e988a56c8ea302abbcf675b670917d4f1dd9289cfc0fa0be5868811888e0e67851338258ccfa8c5954fee82b1df6a
+ data.tar.gz: e1b856074548b44c228170c8f211737df30d1106edf5097d115df5b286447959bbbd30f6d7d3d8b33a88ebcd64303596240804192c710329b421749da8eac709
data/.circleci/config.yml CHANGED
@@ -36,10 +36,9 @@ workflows:
  - rubocop
  - test:
  matrix:
- parameters:
- ruby-image:
- - cimg/ruby:2.5-node
- - cimg/ruby:2.6-node
- - cimg/ruby:2.7-node
- - cimg/ruby:3.0-node
- - cimg/ruby:3.1-node
+ parameters:
+ ruby-image:
+ - cimg/ruby:2.6-node
+ - cimg/ruby:2.7-node
+ - cimg/ruby:3.0-node
+ - cimg/ruby:3.1-node
data/.rubocop.yml CHANGED
@@ -1,5 +1,5 @@
  AllCops:
- TargetRubyVersion: 2.5
+ TargetRubyVersion: 2.6
  NewCops: enable
  SuggestExtensions: false

data/CHANGELOG.md CHANGED
@@ -5,6 +5,26 @@ All notable changes to this project will be documented in this file.
  The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
  and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

+ ## [2.0.1] - 2022-10-22
+
+ ### Removed
+
+ - Deprecate Client#answers endpoint.
+ - Deprecate Client#classifications endpoint.
+
+ ## [2.0.0] - 2022-09-19
+
+ ### Removed
+
+ - [BREAKING] Remove support for Ruby 2.5.
+ - [BREAKING] Remove support for passing `query`, `documents` or `file` as top-level parameters to `Client#search`.
+ - Deprecate Client#search endpoint.
+ - Deprecate Client#engines endpoints.
+
+ ### Added
+
+ - Add Client#models endpoints to list and query available models.
+
  ## [1.5.0] - 2022-09-18

  ### Added
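The engines-to-models rename is the most visible change in these entries. As a rough sketch mirroring the README examples further down in this diff (assuming `client` is an initialized `OpenAI::Client`), the old calls and their 2.x equivalents compare as follows:

```ruby
# 1.x style – deprecated in 2.0.0, slated for removal in ruby-openai 3.0:
client.engines.list
client.engines.retrieve(id: "text-ada-001")

# 2.x style – the new models endpoints:
client.models.list
client.models.retrieve(id: "text-ada-001")
```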
data/Gemfile CHANGED
@@ -6,6 +6,6 @@ gemspec
  gem "byebug", "~> 11.1.3"
  gem "rake", "~> 13.0"
  gem "rspec", "~> 3.11"
- gem "rubocop", "~> 1.28.2"
- gem "vcr", "~> 6.0.0"
+ gem "rubocop", "~> 1.37.0"
+ gem "vcr", "~> 6.1.0"
  gem "webmock", "~> 3.18.1"
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
  PATH
  remote: .
  specs:
- ruby-openai (1.5.0)
+ ruby-openai (2.0.1)
  dotenv (>= 2.7.6, < 2.9.0)
  httparty (>= 0.18.1, < 0.21.0)

@@ -20,17 +20,18 @@ GEM
  httparty (0.20.0)
  mime-types (~> 3.0)
  multi_xml (>= 0.5.2)
+ json (2.6.2)
  mime-types (3.4.1)
  mime-types-data (~> 3.2015)
  mime-types-data (3.2022.0105)
  multi_xml (0.6.0)
  parallel (1.22.1)
- parser (3.1.2.0)
+ parser (3.1.2.1)
  ast (~> 2.4.1)
  public_suffix (4.0.7)
  rainbow (3.1.1)
  rake (13.0.6)
- regexp_parser (2.3.1)
+ regexp_parser (2.6.0)
  rexml (3.2.5)
  rspec (3.11.0)
  rspec-core (~> 3.11.0)
@@ -45,20 +46,21 @@ GEM
  diff-lcs (>= 1.2.0, < 2.0)
  rspec-support (~> 3.11.0)
  rspec-support (3.11.0)
- rubocop (1.28.2)
+ rubocop (1.37.0)
+ json (~> 2.3)
  parallel (~> 1.10)
- parser (>= 3.1.0.0)
+ parser (>= 3.1.2.1)
  rainbow (>= 2.2.2, < 4.0)
  regexp_parser (>= 1.8, < 3.0)
- rexml
- rubocop-ast (>= 1.17.0, < 2.0)
+ rexml (>= 3.2.5, < 4.0)
+ rubocop-ast (>= 1.22.0, < 2.0)
  ruby-progressbar (~> 1.7)
  unicode-display_width (>= 1.4.0, < 3.0)
- rubocop-ast (1.17.0)
+ rubocop-ast (1.22.0)
  parser (>= 3.1.1.0)
  ruby-progressbar (1.11.0)
- unicode-display_width (2.1.0)
- vcr (6.0.0)
+ unicode-display_width (2.3.0)
+ vcr (6.1.0)
  webmock (3.18.1)
  addressable (>= 2.8.0)
  crack (>= 0.3.2)
@@ -71,9 +73,9 @@ DEPENDENCIES
  byebug (~> 11.1.3)
  rake (~> 13.0)
  rspec (~> 3.11)
- rubocop (~> 1.28.2)
+ rubocop (~> 1.37.0)
  ruby-openai!
- vcr (~> 6.0.0)
+ vcr (~> 6.1.0)
  webmock (~> 3.18.1)

  BUNDLED WITH
data/README.md CHANGED
@@ -14,7 +14,7 @@ Use the [OpenAI GPT-3 API](https://openai.com/blog/openai-api/) with Ruby! 🤖
  Add this line to your application's Gemfile:

  ```ruby
- gem 'ruby-openai'
+ gem "ruby-openai"
  ```

  And then execute:
@@ -59,26 +59,38 @@ Alternatively you can pass your key directly to a new client:
  client = OpenAI::Client.new(access_token: "access_token_goes_here")
  ```

+ ### Models
+
+ There are different models that can be used to generate text. For a full list and to retrieve information about a single model:
+
+ ```ruby
+ client.models.list
+ client.models.retrieve(id: "text-ada-001")
+ ```
+
  #### Examples

- - [GPT-3](https://beta.openai.com/docs/engines/gpt-3)
+ - [GPT-3](https://beta.openai.com/docs/models/gpt-3)
  - text-ada-001
  - text-babbage-001
  - text-curie-001
  - text-davinci-001
- - [Codex (private beta)](https://beta.openai.com/docs/engines/codex-series-private-beta)
+ - [Codex (private beta)](https://beta.openai.com/docs/models/codex-series-private-beta)
  - code-davinci-002
  - code-cushman-001
- - [Content Filter](https://beta.openai.com/docs/engines/content-filter)
- - content-filter-alpha

  ### Completions

  Hit the OpenAI API for a completion:

  ```ruby
- response = client.completions(engine: "text-davinci-001", parameters: { prompt: "Once upon a time", max_tokens: 5 })
- puts response.parsed_response['choices'].map{ |c| c["text"] }
+ response = client.completions(
+ parameters: {
+ model: "text-davinci-001",
+ prompt: "Once upon a time",
+ max_tokens: 5
+ })
+ puts response["choices"].map { |c| c["text"] }
  => [", there lived a great"]
  ```

@@ -104,9 +116,9 @@ You can use the embeddings endpoint to get a vector of numbers representing an i

  ```ruby
  client.embeddings(
- engine: "babbage-similarity",
  parameters: {
- input: "The food was delicious and the waiter..."
+ model: "babbage-similarity",
+ input: "The food was delicious and the waiter..."
  }
  )
  ```
@@ -123,7 +135,7 @@ Put your data in a `.jsonl` file like this:
  and pass the path to `client.files.upload` to upload it to OpenAI, and then interact with it:

  ```ruby
- client.files.upload(parameters: { file: 'path/to/puppy.jsonl', purpose: 'search' })
+ client.files.upload(parameters: { file: "path/to/puppy.jsonl", purpose: "search" })
  client.files.list
  client.files.retrieve(id: 123)
  client.files.delete(id: 123)
@@ -141,7 +153,7 @@ Put your fine-tuning data in a `.jsonl` file like this:
  and pass the path to `client.files.upload` to upload it to OpenAI and get its ID:

  ```ruby
- response = client.files.upload(parameters: { file: 'path/to/sentiment.jsonl', purpose: 'fine-tune' })
+ response = client.files.upload(parameters: { file: "path/to/sentiment.jsonl", purpose: "fine-tune" })
  file_id = JSON.parse(response.body)["id"]
  ```

@@ -182,8 +194,6 @@ This fine-tuned model name can then be used in classifications:
  JSON.parse(response.body)["choices"].map { |c| c["text"] }
  ```

- Do not pass the engine parameter when using a fine-tuned model.
-
  ### Moderations

  Pass a string to check if it violates OpenAI's Content Policy:
@@ -194,22 +204,6 @@ Pass a string to check if it violates OpenAI's Content Policy:
  => 5.505014632944949e-05
  ```

- ### Searches
-
- Pass documents and a query string to get semantic search scores against each document:
-
- ```ruby
- response = client.search(engine: "text-ada-001", parameters: { documents: %w[washington hospital school], query: "president" })
- puts response["data"].map { |d| d["score"] }
- => [202.0, 48.052, 19.247]
- ```
-
- You can alternatively search using the ID of a file you've uploaded:
-
- ```ruby
- client.search(engine: "text-ada-001", parameters: { file: "abc123", query: "happy" })
- ```
-
  ### Classifications

  Pass examples and a query to predict the most likely labels:
@@ -262,15 +256,6 @@ Or use the ID of a file you've uploaded:
  })
  ```

- ### Engines
-
- There are different engines that can be used to generate text. For a full list and to retrieve information about a single engine:
-
- ```ruby
- client.engines.list
- client.engines.retrieve(id: 'text-ada-001')
- ```
-
  ## Development

  After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake spec` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.
data/lib/ruby/openai/client.rb CHANGED
@@ -8,30 +8,41 @@ module OpenAI
  end

  def answers(version: default_version, parameters: {})
+ warn "[DEPRECATION WARNING] [ruby-openai] `Client#answers` is deprecated and will
+ be removed from the OpenAI API on 3 December 2022 and from ruby-openai v3.0.
+ More information: https://help.openai.com/en/articles/6233728-answers-transition-guide"
+
  post(url: "/#{version}/answers", parameters: parameters)
  end

  def classifications(version: default_version, parameters: {})
+ warn "[DEPRECATION WARNING] [ruby-openai] `Client#classifications` is deprecated and will
+ be removed from the OpenAI API on 3 December 2022 and from ruby-openai v3.0.
+ More information: https://help.openai.com/en/articles/6272941-classifications-transition-guide"
+
  post(url: "/#{version}/classifications", parameters: parameters)
  end

  def completions(engine: nil, version: default_version, parameters: {})
- if engine
- post(url: "/#{version}/engines/#{engine}/completions", parameters: parameters)
- else
- post(url: "/#{version}/completions", parameters: parameters)
- end
+ parameters = deprecate_engine(engine: engine, method: "completions", parameters: parameters)
+
+ post(url: "/#{version}/completions", parameters: parameters)
  end

  def edits(version: default_version, parameters: {})
  post(url: "/#{version}/edits", parameters: parameters)
  end

- def embeddings(engine:, version: default_version, parameters: {})
- post(url: "/#{version}/engines/#{engine}/embeddings", parameters: parameters)
+ def embeddings(engine: nil, version: default_version, parameters: {})
+ parameters = deprecate_engine(engine: engine, method: "embeddings", parameters: parameters)
+
+ post(url: "/#{version}/embeddings", parameters: parameters)
  end

  def engines
+ warn "[DEPRECATION WARNING] [ruby-openai] `Client#engines` is deprecated and will
+ be removed from ruby-openai v3.0. Use `Client#models` instead."
+
  @engines ||= OpenAI::Engines.new(access_token: @access_token)
  end

@@ -43,35 +54,35 @@ module OpenAI
  @finetunes ||= OpenAI::Finetunes.new(access_token: @access_token)
  end

+ def models
+ @models ||= OpenAI::Models.new(access_token: @access_token)
+ end
+
  def moderations(version: default_version, parameters: {})
  post(url: "/#{version}/moderations", parameters: parameters)
  end

- # rubocop:disable Layout/LineLength
- # rubocop:disable Metrics/ParameterLists
- def search(engine:, query: nil, documents: nil, file: nil, version: default_version, parameters: {})
- return legacy_search(engine: engine, query: query, documents: documents, file: file, version: version) if query || documents || file
+ def search(engine:, version: default_version, parameters: {})
+ warn "[DEPRECATION WARNING] [ruby-openai] `Client#search` is deprecated and will
+ be removed from the OpenAI API on 3 December 2022 and from ruby-openai v3.0.
+ More information: https://help.openai.com/en/articles/6272952-search-transition-guide"

  post(url: "/#{version}/engines/#{engine}/search", parameters: parameters)
  end
- # rubocop:enable Layout/LineLength
- # rubocop:enable Metrics/ParameterLists

  private

- # rubocop:disable Layout/LineLength
- def legacy_search(engine:, query:, documents: nil, file: nil, version: default_version)
- warn "[DEPRECATION] Passing `query`, `documents` or `file` directly to `Client#search` is deprecated and will be removed in a future version of ruby-openai.
- Please nest these terms within `parameters` instead, like this:
- client.search(engine: 'davinci', parameters: { query: 'president', documents: %w[washington hospital school] })
- "
+ def deprecate_engine(engine:, method:, parameters:)
+ return parameters unless engine

- post(
- url: "/#{version}/engines/#{engine}/search",
- parameters: { query: query }.merge(documents_or_file(documents: documents, file: file))
- )
+ parameters = { model: engine }.merge(parameters)
+
+ warn "[DEPRECATION WARNING] [ruby-openai] Passing `engine` directly to `Client##{method}` is
+ deprecated and will be removed in ruby-openai 3.0. Pass `model` within `parameters` instead:
+ client.completions(parameters: { #{parameters.map { |k, v| "#{k}: \"#{v}\"" }.join(', ')} })"
+
+ parameters
  end
- # rubocop:enable Layout/LineLength

  def default_version
  "v1".freeze
data/lib/ruby/openai/models.rb ADDED
@@ -0,0 +1,36 @@
+ module OpenAI
+ class Models
+ include HTTParty
+ base_uri "https://api.openai.com"
+
+ def initialize(access_token: nil)
+ @access_token = access_token || ENV.fetch("OPENAI_ACCESS_TOKEN")
+ end
+
+ def list(version: default_version)
+ self.class.get(
+ "/#{version}/models",
+ headers: {
+ "Content-Type" => "application/json",
+ "Authorization" => "Bearer #{@access_token}"
+ }
+ )
+ end
+
+ def retrieve(id:, version: default_version)
+ self.class.get(
+ "/#{version}/models/#{id}",
+ headers: {
+ "Content-Type" => "application/json",
+ "Authorization" => "Bearer #{@access_token}"
+ }
+ )
+ end
+
+ private
+
+ def default_version
+ "v1".freeze
+ end
+ end
+ end
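Since `OpenAI::Models` falls back to the `OPENAI_ACCESS_TOKEN` environment variable when no token is passed to `initialize`, the new endpoints can be reached either through the client or standalone; a small usage sketch under that assumption:

```ruby
# Via the client, as shown in the README:
client = OpenAI::Client.new(access_token: "access_token_goes_here")
client.models.list
client.models.retrieve(id: "text-ada-001")

# Or directly, relying on the OPENAI_ACCESS_TOKEN environment variable being set:
models = OpenAI::Models.new
models.list
```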
data/lib/ruby/openai/version.rb CHANGED
@@ -1,5 +1,5 @@
  module Ruby
  module OpenAI
- VERSION = "1.5.0".freeze
+ VERSION = "2.0.1".freeze
  end
  end
data/lib/ruby/openai.rb CHANGED
@@ -2,6 +2,7 @@ require "httparty"
  require "ruby/openai/engines"
  require "ruby/openai/files"
  require "ruby/openai/finetunes"
+ require "ruby/openai/models"
  require "ruby/openai/client"
  require "ruby/openai/version"
  require "dotenv/load"
data/ruby-openai.gemspec CHANGED
@@ -9,7 +9,7 @@ Gem::Specification.new do |spec|
  spec.summary = "A Ruby gem for the OpenAI GPT-3 API"
  spec.homepage = "https://github.com/alexrudall/ruby-openai"
  spec.license = "MIT"
- spec.required_ruby_version = Gem::Requirement.new(">= 2.5.0")
+ spec.required_ruby_version = Gem::Requirement.new(">= 2.6.0")

  spec.metadata["homepage_uri"] = spec.homepage
  spec.metadata["source_code_uri"] = "https://github.com/alexrudall/ruby-openai"
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: ruby-openai
  version: !ruby/object:Gem::Version
- version: 1.5.0
+ version: 2.0.1
  platform: ruby
  authors:
  - Alex
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2022-09-18 00:00:00.000000000 Z
+ date: 2022-10-22 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: dotenv
@@ -80,6 +80,7 @@ files:
  - lib/ruby/openai/engines.rb
  - lib/ruby/openai/files.rb
  - lib/ruby/openai/finetunes.rb
+ - lib/ruby/openai/models.rb
  - lib/ruby/openai/version.rb
  - pull_request_template.md
  - ruby-openai.gemspec
@@ -99,7 +100,7 @@ required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 2.5.0
+ version: 2.6.0
  required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="