ruby-openai 1.4.0 → 2.0.0

This diff shows the contents of publicly available package versions as released to their public registries. It is provided for informational purposes only and reflects the changes between the two versions as published.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 54bd37da173d97fdb58db219e057eec6997a3694c463d79cdc6e04fce86713fd
-  data.tar.gz: 051e5bd9b7dfe58040e0c1107c96da85f9a8c8d655987a7214dc3ce9f4663829
+  metadata.gz: f5927989b832799b3620dd5e1d93796d44fd3fe353d2edc2d64a4be2162bc6ff
+  data.tar.gz: 3313d8fa4ce08438fda80850736c96d73397f8f9d7baf72a365dea839c0cd876
 SHA512:
-  metadata.gz: bcf13d22abf4a3f46a82622d6f34be59cab1fd068d1fe29709da89fa434611b5a0ada0397cc3dc0e091406174b5ad8472c6647c553d2bdf6b8fcfe5d1d547c81
-  data.tar.gz: 6ed690d12edb3d1aac43894f6704365ed1535b124286fd6b048dbe8606499d28e474f89096bba8780b8eadbebc201918a6e6a754cc03d942221bb31dad4de8c0
+  metadata.gz: 4604610b91ea93c5a8b08886700c656ca3651129542a6c257d5d000fc414f2a47e9ca8e8cadba622fb426a4e969fd0d058a83d857455893d172f3125852efd19
+  data.tar.gz: d8b959447a050615b4cdffd15cb7ac782b43b4d73953e6b0aa687bacf1f5872d4e9546547d4961fd0e1f5b07f40f460ae2cb136f717034e0a9199f4e5822946c
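These digests cover the two archives packed inside the released `.gem` file (`metadata.gz` and `data.tar.gz`). As a rough sketch of how you might check them locally, assuming the gem has been fetched as `ruby-openai-2.0.0.gem` (the filename is an assumption; a `.gem` is a tar archive, so RubyGems' own tar reader can walk it):

```ruby
require "digest"
require "rubygems/package"

# A .gem file is a tar archive containing metadata.gz, data.tar.gz and
# checksums.yaml.gz; hash the first two and compare with the values above.
File.open("ruby-openai-2.0.0.gem", "rb") do |io|
  Gem::Package::TarReader.new(io) do |tar|
    tar.each do |entry|
      next unless ["metadata.gz", "data.tar.gz"].include?(entry.full_name)

      puts "#{entry.full_name}: #{Digest::SHA256.hexdigest(entry.read)}"
    end
  end
end
```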
data/.circleci/config.yml CHANGED
@@ -8,7 +8,7 @@ jobs:
   rubocop:
     parallelism: 1
     docker:
-      - image: cimg/ruby:3.0-node
+      - image: cimg/ruby:3.1-node
     steps:
       - checkout
       - ruby/install-deps
@@ -36,5 +36,9 @@ workflows:
       - rubocop
       - test:
           matrix:
-            parameters:
-              ruby-image: ['cimg/ruby:2.5-node', 'cimg/ruby:2.6-node', 'cimg/ruby:2.7-node', 'cimg/ruby:3.0-node']
+            parameters:
+              ruby-image:
+                - cimg/ruby:2.6-node
+                - cimg/ruby:2.7-node
+                - cimg/ruby:3.0-node
+                - cimg/ruby:3.1-node
data/.rubocop.yml CHANGED
@@ -1,5 +1,5 @@
 AllCops:
-  TargetRubyVersion: 2.5
+  TargetRubyVersion: 2.6
   NewCops: enable
   SuggestExtensions: false
 
data/CHANGELOG.md CHANGED
@@ -5,6 +5,26 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [2.0.0] - 2022-09-19
+
+### Removed
+
+- [BREAKING] Remove support for Ruby 2.5.
+- [BREAKING] Remove support for passing `query`, `documents` or `file` as top-level parameters to `Client#search`.
+- Deprecate Client#search endpoint.
+- Deprecate Client#engines endpoints.
+
+### Added
+
+- Add Client#models endpoints to list and query available models.
+
+## [1.5.0] - 2022-09-18
+
+### Added
+
+- Add Client#moderations endpoint to check OpenAI's Content Policy.
+- Add Client#edits endpoints to transform inputs according to instructions.
+
 ## [1.4.0] - 2021-12-11
 
 ### Added
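Taken together, the 1.5.0 and 2.0.0 entries describe the upgrade path for callers. A minimal before/after sketch, assuming a client configured via the environment (model names and example inputs are taken from the README changes further down this diff):

```ruby
client = OpenAI::Client.new

# ruby-openai 1.x: engine-scoped endpoints and Client#engines.
# client.engines.list
# client.completions(engine: "davinci", parameters: { prompt: "Hi", max_tokens: 5 })

# ruby-openai 2.0: Client#models replaces Client#engines, and the model
# is passed inside parameters rather than as a top-level engine argument.
client.models.list
client.completions(parameters: { model: "text-davinci-001", prompt: "Hi", max_tokens: 5 })

# Added in 1.5.0 and documented in this release:
client.moderations(parameters: { input: "I'm worried about that." })
client.edits(
  parameters: {
    model: "text-davinci-edit-001",
    input: "What day of the wek is it?",
    instruction: "Fix the spelling mistakes"
  }
)
```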
data/Gemfile CHANGED
@@ -5,7 +5,7 @@ gemspec
 
 gem "byebug", "~> 11.1.3"
 gem "rake", "~> 13.0"
-gem "rspec", "~> 3.10"
-gem "rubocop", "~> 1.23.0"
-gem "vcr", "~> 6.0.0"
-gem "webmock", "~> 3.14.0"
+gem "rspec", "~> 3.11"
+gem "rubocop", "~> 1.36.0"
+gem "vcr", "~> 6.1.0"
+gem "webmock", "~> 3.18.1"
data/Gemfile.lock CHANGED
@@ -1,8 +1,8 @@
 PATH
   remote: .
   specs:
-    ruby-openai (1.4.0)
-      dotenv (~> 2.7.6)
+    ruby-openai (2.0.0)
+      dotenv (>= 2.7.6, < 2.9.0)
       httparty (>= 0.18.1, < 0.21.0)
 
 GEM
@@ -14,52 +14,54 @@ GEM
     byebug (11.1.3)
     crack (0.4.5)
       rexml
-    diff-lcs (1.4.4)
-    dotenv (2.7.6)
+    diff-lcs (1.5.0)
+    dotenv (2.8.1)
     hashdiff (1.0.1)
     httparty (0.20.0)
       mime-types (~> 3.0)
       multi_xml (>= 0.5.2)
-    mime-types (3.3.1)
+    json (2.6.2)
+    mime-types (3.4.1)
       mime-types-data (~> 3.2015)
-    mime-types-data (3.2021.0901)
+    mime-types-data (3.2022.0105)
     multi_xml (0.6.0)
-    parallel (1.21.0)
-    parser (3.0.2.0)
+    parallel (1.22.1)
+    parser (3.1.2.1)
       ast (~> 2.4.1)
-    public_suffix (4.0.6)
-    rainbow (3.0.0)
+    public_suffix (4.0.7)
+    rainbow (3.1.1)
     rake (13.0.6)
-    regexp_parser (2.1.1)
+    regexp_parser (2.5.0)
     rexml (3.2.5)
-    rspec (3.10.0)
-      rspec-core (~> 3.10.0)
-      rspec-expectations (~> 3.10.0)
-      rspec-mocks (~> 3.10.0)
-    rspec-core (3.10.1)
-      rspec-support (~> 3.10.0)
-    rspec-expectations (3.10.1)
+    rspec (3.11.0)
+      rspec-core (~> 3.11.0)
+      rspec-expectations (~> 3.11.0)
+      rspec-mocks (~> 3.11.0)
+    rspec-core (3.11.0)
+      rspec-support (~> 3.11.0)
+    rspec-expectations (3.11.0)
       diff-lcs (>= 1.2.0, < 2.0)
-      rspec-support (~> 3.10.0)
-    rspec-mocks (3.10.1)
+      rspec-support (~> 3.11.0)
+    rspec-mocks (3.11.0)
       diff-lcs (>= 1.2.0, < 2.0)
-      rspec-support (~> 3.10.0)
-    rspec-support (3.10.1)
-    rubocop (1.23.0)
+      rspec-support (~> 3.11.0)
+    rspec-support (3.11.0)
+    rubocop (1.36.0)
+      json (~> 2.3)
       parallel (~> 1.10)
-      parser (>= 3.0.0.0)
+      parser (>= 3.1.2.1)
       rainbow (>= 2.2.2, < 4.0)
       regexp_parser (>= 1.8, < 3.0)
-      rexml
-      rubocop-ast (>= 1.12.0, < 2.0)
+      rexml (>= 3.2.5, < 4.0)
+      rubocop-ast (>= 1.20.1, < 2.0)
       ruby-progressbar (~> 1.7)
       unicode-display_width (>= 1.4.0, < 3.0)
-    rubocop-ast (1.13.0)
-      parser (>= 3.0.1.1)
+    rubocop-ast (1.21.0)
+      parser (>= 3.1.1.0)
     ruby-progressbar (1.11.0)
-    unicode-display_width (2.1.0)
-    vcr (6.0.0)
-    webmock (3.14.0)
+    unicode-display_width (2.3.0)
+    vcr (6.1.0)
+    webmock (3.18.1)
       addressable (>= 2.8.0)
       crack (>= 0.3.2)
       hashdiff (>= 0.4.0, < 2.0.0)
@@ -70,11 +72,11 @@ PLATFORMS
 DEPENDENCIES
   byebug (~> 11.1.3)
   rake (~> 13.0)
-  rspec (~> 3.10)
-  rubocop (~> 1.23.0)
+  rspec (~> 3.11)
+  rubocop (~> 1.36.0)
   ruby-openai!
-  vcr (~> 6.0.0)
-  webmock (~> 3.14.0)
+  vcr (~> 6.1.0)
+  webmock (~> 3.18.1)
 
 BUNDLED WITH
   2.2.20
data/README.md CHANGED
@@ -5,7 +5,7 @@
 [![CircleCI Build Status](https://circleci.com/gh/alexrudall/ruby-openai.svg?style=shield)](https://circleci.com/gh/alexrudall/ruby-openai)
 [![Maintainability](https://api.codeclimate.com/v1/badges/a99a88d28ad37a79dbf6/maintainability)](https://codeclimate.com/github/codeclimate/codeclimate/maintainability)
 
-A simple Ruby wrapper for the [OpenAI GPT-3 API](https://openai.com/blog/openai-api/).
+Use the [OpenAI GPT-3 API](https://openai.com/blog/openai-api/) with Ruby! 🤖❤️
 
 ## Installation
 
@@ -14,7 +14,7 @@ A simple Ruby wrapper for the [OpenAI GPT-3 API](https://openai.com/blog/openai-
 Add this line to your application's Gemfile:
 
 ```ruby
-gem 'ruby-openai'
+gem "ruby-openai"
 ```
 
 And then execute:
@@ -35,7 +35,7 @@ and require with:
 
 ## Usage
 
-Get your API key from [https://beta.openai.com/docs/developer-quickstart/your-api-keys](https://beta.openai.com/docs/developer-quickstart/your-api-keys)
+Get your API key from [https://beta.openai.com/account/api-keys](https://beta.openai.com/account/api-keys)
 
 ### With dotenv
 
@@ -59,127 +59,86 @@ Alternatively you can pass your key directly to a new client:
 client = OpenAI::Client.new(access_token: "access_token_goes_here")
 ```
 
-### Engines
-
-There are different engines that can be used to generate text. For a full list and to retrieve information about a single engine:
-
-```ruby
-client.engines.list
-client.engines.retrieve(id: 'ada')
-```
-
 #### Examples
 
-- [Base](https://beta.openai.com/docs/engines/base-series)
-  - ada
-  - babbage
-  - curie
-  - davinci
-- [Instruct](https://beta.openai.com/docs/engines/instruct-series-beta)
-  - ada-instruct-beta
-  - babbage-instruct-beta
-  - curie-instruct-beta-v2
-  - davinci-instruct-beta-v3
-- [Codex (private beta)](https://beta.openai.com/docs/engines/codex-series-private-beta)
-  - davinci-codex
-  - cushman-codex
-- [Content Filter](https://beta.openai.com/docs/engines/content-filter)
-  - content-filter-alpha
+- [GPT-3](https://beta.openai.com/docs/models/gpt-3)
+  - text-ada-001
+  - text-babbage-001
+  - text-curie-001
+  - text-davinci-001
+- [Codex (private beta)](https://beta.openai.com/docs/models/codex-series-private-beta)
+  - code-davinci-002
+  - code-cushman-001
 
-### Completions
+### Models
 
-Hit the OpenAI API for a completion:
+There are different models that can be used to generate text. For a full list and to retrieve information about a single model:
 
 ```ruby
-response = client.completions(engine: "davinci", parameters: { prompt: "Once upon a time", max_tokens: 5 })
-puts response.parsed_response['choices'].map{ |c| c["text"] }
-=> [", there lived a great"]
+client.models.list
+client.models.retrieve(id: "text-ada-001")
 ```
 
-### Files
-
-Put your data in a `.jsonl` file like this:
-
-```json
-{"text": "puppy A is happy", "metadata": "emotional state of puppy A"}
-{"text": "puppy B is sad", "metadata": "emotional state of puppy B"}
-```
+### Completions
 
-and pass the path to `client.files.upload` to upload it to OpenAI, and then interact with it:
+Hit the OpenAI API for a completion:
 
 ```ruby
-client.files.upload(parameters: { file: 'path/to/puppy.jsonl', purpose: 'search' })
-client.files.list
-client.files.retrieve(id: 123)
-client.files.delete(id: 123)
+response = client.completions(
+  parameters: {
+    model: "text-davinci-001",
+    prompt: "Once upon a time",
+    max_tokens: 5
+  })
+puts response["choices"].map { |c| c["text"] }
+=> [", there lived a great"]
 ```
 
-### Search
+### Edits
 
-Pass documents and a query string to get semantic search scores against each document:
+Send a string and some instructions for what to do to the string:
 
 ```ruby
-response = client.search(engine: "ada", parameters: { documents: %w[washington hospital school], query: "president" })
-puts response["data"].map { |d| d["score"] }
-=> [202.0, 48.052, 19.247]
-```
-
-You can alternatively search using the ID of a file you've uploaded:
-
-```ruby
-client.search(engine: "ada", parameters: { file: "abc123", query: "happy" })
+response = client.edits(
+  parameters: {
+    model: "text-davinci-edit-001",
+    input: "What day of the wek is it?",
+    instruction: "Fix the spelling mistakes"
+  }
+)
+puts response.dig("choices", 0, "text")
+=> What day of the week is it?
 ```
 
-### Answers
-
-Pass documents, a question string, and an example question/response to get an answer to a question:
-
-```ruby
-response = client.answers(parameters: {
-  documents: ["Puppy A is happy.", "Puppy B is sad."],
-  question: "which puppy is happy?",
-  model: "curie",
-  examples_context: "In 2017, U.S. life expectancy was 78.6 years.",
-  examples: [["What is human life expectancy in the United States?","78 years."]],
-})
-```
+### Embeddings
 
-Or use the ID of a file you've uploaded:
+You can use the embeddings endpoint to get a vector of numbers representing an input. You can then compare these vectors for different inputs to efficiently check how similar the inputs are.
 
 ```ruby
-response = client.answers(parameters: {
-  file: "123abc",
-  question: "which puppy is happy?",
-  model: "curie",
-  examples_context: "In 2017, U.S. life expectancy was 78.6 years.",
-  examples: [["What is human life expectancy in the United States?","78 years."]],
-})
+client.embeddings(
+  parameters: {
+    model: "babbage-similarity",
+    input: "The food was delicious and the waiter..."
+  }
+)
 ```
 
-### Classifications
+### Files
 
-Pass examples and a query to predict the most likely labels:
+Put your data in a `.jsonl` file like this:
 
-```ruby
-response = client.classifications(parameters: {
-  examples: [
-    ["A happy moment", "Positive"],
-    ["I am sad.", "Negative"],
-    ["I am feeling awesome", "Positive"]
-  ],
-  query: "It is a raining day :(",
-  model: "ada"
-})
+```json
+{"text": "puppy A is happy", "metadata": "emotional state of puppy A"}
+{"text": "puppy B is sad", "metadata": "emotional state of puppy B"}
 ```
 
-Or use the ID of a file you've uploaded:
+and pass the path to `client.files.upload` to upload it to OpenAI, and then interact with it:
 
 ```ruby
-response = client.classifications(parameters: {
-  file: "123abc,
-  query: "It is a raining day :(",
-  model: "ada"
-})
+client.files.upload(parameters: { file: "path/to/puppy.jsonl", purpose: "search" })
+client.files.list
+client.files.retrieve(id: 123)
+client.files.delete(id: 123)
 ```
 
 ### Fine-tunes
@@ -194,7 +153,7 @@ Put your fine-tuning data in a `.jsonl` file like this:
 and pass the path to `client.files.upload` to upload it to OpenAI and get its ID:
 
 ```ruby
-response = client.files.upload(parameters: { file: 'path/to/sentiment.jsonl', purpose: 'fine-tune' })
+response = client.files.upload(parameters: { file: "path/to/sentiment.jsonl", purpose: "fine-tune" })
 file_id = JSON.parse(response.body)["id"]
 ```
 
@@ -204,7 +163,7 @@ You can then use this file ID to create a fine-tune model:
 response = client.finetunes.create(
   parameters: {
     training_file: file_id,
-    model: "ada"
+    model: "text-ada-001"
   })
 fine_tune_id = JSON.parse(response.body)["id"]
 ```
@@ -235,19 +194,66 @@ This fine-tuned model name can then be used in classifications:
 JSON.parse(response.body)["choices"].map { |c| c["text"] }
 ```
 
-Do not pass the engine parameter when using a fine-tuned model.
+### Moderations
 
-### Embeddings
+Pass a string to check if it violates OpenAI's Content Policy:
 
-You can use the embeddings endpoint to get a vector of numbers representing an input. You can then compare these vectors for different inputs to efficiently check how similar the inputs are.
+```ruby
+response = client.moderations(parameters: { input: "I'm worried about that." })
+puts response.dig("results", 0, "category_scores", "hate")
+=> 5.505014632944949e-05
+```
+
+### Classifications
+
+Pass examples and a query to predict the most likely labels:
 
 ```ruby
-client.embeddings(
-  engine: "babbage-similarity",
-  parameters: {
-    input: "The food was delicious and the waiter..."
-  }
-)
+response = client.classifications(parameters: {
+  examples: [
+    ["A happy moment", "Positive"],
+    ["I am sad.", "Negative"],
+    ["I am feeling awesome", "Positive"]
+  ],
+  query: "It is a raining day :(",
+  model: "text-ada-001"
+})
+```
+
+Or use the ID of a file you've uploaded:
+
+```ruby
+response = client.classifications(parameters: {
+  file: "123abc",
+  query: "It is a raining day :(",
+  model: "text-ada-001"
+})
+```
+
+### Answers
+
+Pass documents, a question string, and an example question/response to get an answer to a question:
+
+```ruby
+response = client.answers(parameters: {
+  documents: ["Puppy A is happy.", "Puppy B is sad."],
+  question: "which puppy is happy?",
+  model: "text-curie-001",
+  examples_context: "In 2017, U.S. life expectancy was 78.6 years.",
+  examples: [["What is human life expectancy in the United States?","78 years."]],
+})
+```
+
+Or use the ID of a file you've uploaded:
+
+```ruby
+response = client.answers(parameters: {
+  file: "123abc",
+  question: "which puppy is happy?",
+  model: "text-curie-001",
+  examples_context: "In 2017, U.S. life expectancy was 78.6 years.",
+  examples: [["What is human life expectancy in the United States?","78 years."]],
+})
 ```
 
 ## Development
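One quiet change in the updated README examples: they index the response directly (`response["choices"]`, `response.dig(...)`) instead of going through `parsed_response`. Since the client is built on HTTParty, whose response object delegates hash-style access to the parsed JSON body, the two styles should be equivalent. A small sketch, assuming a configured client (the response shape is illustrative):

```ruby
response = client.completions(
  parameters: { model: "text-davinci-001", prompt: "Once upon a time", max_tokens: 5 }
)

# HTTParty::Response delegates unknown methods to the parsed body,
# so all three of these read the same JSON field:
response.parsed_response["choices"]
response["choices"]
response.dig("choices", 0, "text")
```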
data/lib/ruby/openai/client.rb CHANGED
@@ -4,7 +4,7 @@ module OpenAI
     base_uri "https://api.openai.com"
 
     def initialize(access_token: nil)
-      @access_token = access_token || ENV["OPENAI_ACCESS_TOKEN"]
+      @access_token = access_token || ENV.fetch("OPENAI_ACCESS_TOKEN")
     end
 
     def answers(version: default_version, parameters: {})
@@ -16,18 +16,25 @@ module OpenAI
     end
 
     def completions(engine: nil, version: default_version, parameters: {})
-      if engine
-        post(url: "/#{version}/engines/#{engine}/completions", parameters: parameters)
-      else
-        post(url: "/#{version}/completions", parameters: parameters)
-      end
+      parameters = deprecate_engine(engine: engine, method: "completions", parameters: parameters)
+
+      post(url: "/#{version}/completions", parameters: parameters)
+    end
+
+    def edits(version: default_version, parameters: {})
+      post(url: "/#{version}/edits", parameters: parameters)
     end
 
-    def embeddings(engine:, version: default_version, parameters: {})
-      post(url: "/#{version}/engines/#{engine}/embeddings", parameters: parameters)
+    def embeddings(engine: nil, version: default_version, parameters: {})
+      parameters = deprecate_engine(engine: engine, method: "embeddings", parameters: parameters)
+
+      post(url: "/#{version}/embeddings", parameters: parameters)
     end
 
     def engines
+      warn "[DEPRECATION WARNING] [ruby-openai] `Client#engines` is deprecated and will
+      be removed from ruby-openai v3.0. Use `Client#models` instead."
+
       @engines ||= OpenAI::Engines.new(access_token: @access_token)
     end
 
@@ -39,31 +46,35 @@ module OpenAI
       @finetunes ||= OpenAI::Finetunes.new(access_token: @access_token)
     end
 
-    # rubocop:disable Layout/LineLength
-    # rubocop:disable Metrics/ParameterLists
-    def search(engine:, query: nil, documents: nil, file: nil, version: default_version, parameters: {})
-      return legacy_search(engine: engine, query: query, documents: documents, file: file, version: version) if query || documents || file
+    def models
+      @models ||= OpenAI::Models.new(access_token: @access_token)
+    end
+
+    def moderations(version: default_version, parameters: {})
+      post(url: "/#{version}/moderations", parameters: parameters)
+    end
+
+    def search(engine:, version: default_version, parameters: {})
+      warn "[DEPRECATION WARNING] [ruby-openai] `Client#search` is deprecated and will
+      be removed from the OpenAI API on 3 December 2022 and from ruby-openai v3.0.
+      More information: https://help.openai.com/en/articles/6272952-search-transition-guide"
 
       post(url: "/#{version}/engines/#{engine}/search", parameters: parameters)
     end
-    # rubocop:enable Layout/LineLength
-    # rubocop:enable Metrics/ParameterLists
 
     private
 
-    # rubocop:disable Layout/LineLength
-    def legacy_search(engine:, query:, documents: nil, file: nil, version: default_version)
-      warn "[DEPRECATION] Passing `query`, `documents` or `file` directly to `Client#search` is deprecated and will be removed in a future version of ruby-openai.
-      Please nest these terms within `parameters` instead, like this:
-      client.search(engine: 'davinci', parameters: { query: 'president', documents: %w[washington hospital school] })
-      "
+    def deprecate_engine(engine:, method:, parameters:)
+      return parameters unless engine
 
-      post(
-        url: "/#{version}/engines/#{engine}/search",
-        parameters: { query: query }.merge(documents_or_file(documents: documents, file: file))
-      )
+      parameters = { model: engine }.merge(parameters)
+
+      warn "[DEPRECATION WARNING] [ruby-openai] Passing `engine` directly to `Client##{method}` is
+      deprecated and will be removed in ruby-openai 3.0. Pass `model` within `parameters` instead:
+      client.completions(parameters: { #{parameters.map { |k, v| "#{k}: \"#{v}\"" }.join(', ')} })"
+
+      parameters
     end
-    # rubocop:enable Layout/LineLength
 
     def default_version
       "v1".freeze
data/lib/ruby/openai/engines.rb CHANGED
@@ -4,7 +4,7 @@ module OpenAI
     base_uri "https://api.openai.com"
 
     def initialize(access_token: nil)
-      @access_token = access_token || ENV["OPENAI_ACCESS_TOKEN"]
+      @access_token = access_token || ENV.fetch("OPENAI_ACCESS_TOKEN")
     end
 
     def list(version: default_version)
data/lib/ruby/openai/files.rb CHANGED
@@ -4,7 +4,7 @@ module OpenAI
     base_uri "https://api.openai.com"
 
     def initialize(access_token: nil)
-      @access_token = access_token || ENV["OPENAI_ACCESS_TOKEN"]
+      @access_token = access_token || ENV.fetch("OPENAI_ACCESS_TOKEN")
     end
 
     def list(version: default_version)
data/lib/ruby/openai/finetunes.rb CHANGED
@@ -4,7 +4,7 @@ module OpenAI
     base_uri "https://api.openai.com"
 
     def initialize(access_token: nil)
-      @access_token = access_token || ENV["OPENAI_ACCESS_TOKEN"]
+      @access_token = access_token || ENV.fetch("OPENAI_ACCESS_TOKEN")
     end
 
     def list(version: default_version)
data/lib/ruby/openai/models.rb ADDED
@@ -0,0 +1,36 @@
+module OpenAI
+  class Models
+    include HTTParty
+    base_uri "https://api.openai.com"
+
+    def initialize(access_token: nil)
+      @access_token = access_token || ENV.fetch("OPENAI_ACCESS_TOKEN")
+    end
+
+    def list(version: default_version)
+      self.class.get(
+        "/#{version}/models",
+        headers: {
+          "Content-Type" => "application/json",
+          "Authorization" => "Bearer #{@access_token}"
+        }
+      )
+    end
+
+    def retrieve(id:, version: default_version)
+      self.class.get(
+        "/#{version}/models/#{id}",
+        headers: {
+          "Content-Type" => "application/json",
+          "Authorization" => "Bearer #{@access_token}"
+        }
+      )
+    end
+
+    private
+
+    def default_version
+      "v1".freeze
+    end
+  end
+end
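The new class is reached through the memoized `Client#models` accessor added above. A short usage sketch (the model ID is illustrative; the `"data"` and `"id"` fields follow the OpenAI models endpoint, and the returned objects are HTTParty responses that can be indexed like hashes):

```ruby
client = OpenAI::Client.new(access_token: "access_token_goes_here")

# List all models visible to this API key, then fetch one by ID.
list = client.models.list
puts list["data"].map { |m| m["id"] }

model = client.models.retrieve(id: "text-ada-001")
puts model["id"]
```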
data/lib/ruby/openai/version.rb CHANGED
@@ -1,5 +1,5 @@
 module Ruby
   module OpenAI
-    VERSION = "1.4.0".freeze
+    VERSION = "2.0.0".freeze
   end
 end
data/lib/ruby/openai.rb CHANGED
@@ -2,6 +2,7 @@ require "httparty"
 require "ruby/openai/engines"
 require "ruby/openai/files"
 require "ruby/openai/finetunes"
+require "ruby/openai/models"
 require "ruby/openai/client"
 require "ruby/openai/version"
 require "dotenv/load"
data/ruby-openai.gemspec CHANGED
@@ -9,7 +9,7 @@ Gem::Specification.new do |spec|
   spec.summary = "A Ruby gem for the OpenAI GPT-3 API"
   spec.homepage = "https://github.com/alexrudall/ruby-openai"
   spec.license = "MIT"
-  spec.required_ruby_version = Gem::Requirement.new(">= 2.5.0")
+  spec.required_ruby_version = Gem::Requirement.new(">= 2.6.0")
 
   spec.metadata["homepage_uri"] = spec.homepage
   spec.metadata["source_code_uri"] = "https://github.com/alexrudall/ruby-openai"
@@ -25,6 +25,6 @@ Gem::Specification.new do |spec|
   spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
   spec.require_paths = ["lib"]
 
-  spec.add_dependency "dotenv", "~> 2.7.6"
+  spec.add_dependency "dotenv", ">= 2.7.6", "< 2.9.0"
   spec.add_dependency "httparty", ">= 0.18.1", "< 0.21.0"
 end
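The dotenv change is a widening, not just a bump: the pessimistic constraint `~> 2.7.6` means `>= 2.7.6, < 2.8.0`, while the new pair of requirements also admits the 2.8.x series (the Gemfile.lock above resolves to dotenv 2.8.1). A quick check using RubyGems' own requirement class:

```ruby
require "rubygems"

old_req = Gem::Requirement.new("~> 2.7.6")             # equivalent to >= 2.7.6, < 2.8.0
new_req = Gem::Requirement.new(">= 2.7.6", "< 2.9.0")

version = Gem::Version.new("2.8.1") # the dotenv version locked in this release
puts old_req.satisfied_by?(version) # => false
puts new_req.satisfied_by?(version) # => true
```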
metadata CHANGED
@@ -1,29 +1,35 @@
 --- !ruby/object:Gem::Specification
 name: ruby-openai
 version: !ruby/object:Gem::Version
-  version: 1.4.0
+  version: 2.0.0
 platform: ruby
 authors:
 - Alex
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2021-12-11 00:00:00.000000000 Z
+date: 2022-09-19 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: dotenv
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">="
       - !ruby/object:Gem::Version
         version: 2.7.6
+    - - "<"
+      - !ruby/object:Gem::Version
+        version: 2.9.0
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">="
       - !ruby/object:Gem::Version
         version: 2.7.6
+    - - "<"
+      - !ruby/object:Gem::Version
+        version: 2.9.0
 - !ruby/object:Gem::Dependency
   name: httparty
   requirement: !ruby/object:Gem::Requirement
@@ -74,6 +80,7 @@ files:
 - lib/ruby/openai/engines.rb
 - lib/ruby/openai/files.rb
 - lib/ruby/openai/finetunes.rb
+- lib/ruby/openai/models.rb
 - lib/ruby/openai/version.rb
 - pull_request_template.md
 - ruby-openai.gemspec
@@ -93,14 +100,14 @@ required_ruby_version: !ruby/object:Gem::Requirement
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
-      version: 2.5.0
+      version: 2.6.0
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
       version: '0'
 requirements: []
-rubygems_version: 3.1.6
+rubygems_version: 3.2.33
 signing_key:
 specification_version: 4
 summary: A Ruby gem for the OpenAI GPT-3 API