ruby-openai 1.4.0 → 1.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 54bd37da173d97fdb58db219e057eec6997a3694c463d79cdc6e04fce86713fd
4
- data.tar.gz: 051e5bd9b7dfe58040e0c1107c96da85f9a8c8d655987a7214dc3ce9f4663829
3
+ metadata.gz: eae2966d67581585125aac11431d33db3af2f472d0970dad242316783ded514e
4
+ data.tar.gz: 53f357b92d77a79c48539b69216c4b06c2f0fd0584be17dfbf54576ea02ffed4
5
5
  SHA512:
6
- metadata.gz: bcf13d22abf4a3f46a82622d6f34be59cab1fd068d1fe29709da89fa434611b5a0ada0397cc3dc0e091406174b5ad8472c6647c553d2bdf6b8fcfe5d1d547c81
7
- data.tar.gz: 6ed690d12edb3d1aac43894f6704365ed1535b124286fd6b048dbe8606499d28e474f89096bba8780b8eadbebc201918a6e6a754cc03d942221bb31dad4de8c0
6
+ metadata.gz: 67c875aa8dde9a199aead346a6b45af8aed00ddf81b2db667c055c1ef9e86dea2d43e5b3d23e1e659dbfb0468a40ab9c419b52115a61491217c8c1b0d65aff3c
7
+ data.tar.gz: 4d55769b42b9b295ff4c065c9f4abdbaaf5e96b998eb707e3a64f4161355d20c5e8ee0b417c80029b376e70fe9c69e0521c397ac82523aa05874e8ecbeaac7ce
data/.circleci/config.yml CHANGED
@@ -8,7 +8,7 @@ jobs:
8
8
  rubocop:
9
9
  parallelism: 1
10
10
  docker:
11
- - image: cimg/ruby:3.0-node
11
+ - image: cimg/ruby:3.1-node
12
12
  steps:
13
13
  - checkout
14
14
  - ruby/install-deps
@@ -37,4 +37,9 @@ workflows:
37
37
  - test:
38
38
  matrix:
39
39
  parameters:
40
- ruby-image: ['cimg/ruby:2.5-node', 'cimg/ruby:2.6-node', 'cimg/ruby:2.7-node', 'cimg/ruby:3.0-node']
40
+ ruby-image:
41
+ - cimg/ruby:2.5-node
42
+ - cimg/ruby:2.6-node
43
+ - cimg/ruby:2.7-node
44
+ - cimg/ruby:3.0-node
45
+ - cimg/ruby:3.1-node
data/CHANGELOG.md CHANGED
@@ -5,6 +5,13 @@ All notable changes to this project will be documented in this file.
5
5
  The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
6
6
  and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
7
7
 
8
+ ## [1.5.0] - 2022-09-18
9
+
10
+ ### Added
11
+
12
+ - Add Client#moderations endpoint to check OpenAI's Content Policy.
13
+ - Add Client#edits endpoint to transform inputs according to instructions.
14
+
8
15
  ## [1.4.0] - 2021-12-11
9
16
 
10
17
  ### Added
data/Gemfile CHANGED
@@ -5,7 +5,7 @@ gemspec
5
5
 
6
6
  gem "byebug", "~> 11.1.3"
7
7
  gem "rake", "~> 13.0"
8
- gem "rspec", "~> 3.10"
9
- gem "rubocop", "~> 1.23.0"
8
+ gem "rspec", "~> 3.11"
9
+ gem "rubocop", "~> 1.28.2"
10
10
  gem "vcr", "~> 6.0.0"
11
- gem "webmock", "~> 3.14.0"
11
+ gem "webmock", "~> 3.18.1"
data/Gemfile.lock CHANGED
@@ -1,8 +1,8 @@
1
1
  PATH
2
2
  remote: .
3
3
  specs:
4
- ruby-openai (1.4.0)
5
- dotenv (~> 2.7.6)
4
+ ruby-openai (1.5.0)
5
+ dotenv (>= 2.7.6, < 2.9.0)
6
6
  httparty (>= 0.18.1, < 0.21.0)
7
7
 
8
8
  GEM
@@ -14,52 +14,52 @@ GEM
14
14
  byebug (11.1.3)
15
15
  crack (0.4.5)
16
16
  rexml
17
- diff-lcs (1.4.4)
18
- dotenv (2.7.6)
17
+ diff-lcs (1.5.0)
18
+ dotenv (2.8.1)
19
19
  hashdiff (1.0.1)
20
20
  httparty (0.20.0)
21
21
  mime-types (~> 3.0)
22
22
  multi_xml (>= 0.5.2)
23
- mime-types (3.3.1)
23
+ mime-types (3.4.1)
24
24
  mime-types-data (~> 3.2015)
25
- mime-types-data (3.2021.0901)
25
+ mime-types-data (3.2022.0105)
26
26
  multi_xml (0.6.0)
27
- parallel (1.21.0)
28
- parser (3.0.2.0)
27
+ parallel (1.22.1)
28
+ parser (3.1.2.0)
29
29
  ast (~> 2.4.1)
30
- public_suffix (4.0.6)
31
- rainbow (3.0.0)
30
+ public_suffix (4.0.7)
31
+ rainbow (3.1.1)
32
32
  rake (13.0.6)
33
- regexp_parser (2.1.1)
33
+ regexp_parser (2.3.1)
34
34
  rexml (3.2.5)
35
- rspec (3.10.0)
36
- rspec-core (~> 3.10.0)
37
- rspec-expectations (~> 3.10.0)
38
- rspec-mocks (~> 3.10.0)
39
- rspec-core (3.10.1)
40
- rspec-support (~> 3.10.0)
41
- rspec-expectations (3.10.1)
35
+ rspec (3.11.0)
36
+ rspec-core (~> 3.11.0)
37
+ rspec-expectations (~> 3.11.0)
38
+ rspec-mocks (~> 3.11.0)
39
+ rspec-core (3.11.0)
40
+ rspec-support (~> 3.11.0)
41
+ rspec-expectations (3.11.0)
42
42
  diff-lcs (>= 1.2.0, < 2.0)
43
- rspec-support (~> 3.10.0)
44
- rspec-mocks (3.10.1)
43
+ rspec-support (~> 3.11.0)
44
+ rspec-mocks (3.11.0)
45
45
  diff-lcs (>= 1.2.0, < 2.0)
46
- rspec-support (~> 3.10.0)
47
- rspec-support (3.10.1)
48
- rubocop (1.23.0)
46
+ rspec-support (~> 3.11.0)
47
+ rspec-support (3.11.0)
48
+ rubocop (1.28.2)
49
49
  parallel (~> 1.10)
50
- parser (>= 3.0.0.0)
50
+ parser (>= 3.1.0.0)
51
51
  rainbow (>= 2.2.2, < 4.0)
52
52
  regexp_parser (>= 1.8, < 3.0)
53
53
  rexml
54
- rubocop-ast (>= 1.12.0, < 2.0)
54
+ rubocop-ast (>= 1.17.0, < 2.0)
55
55
  ruby-progressbar (~> 1.7)
56
56
  unicode-display_width (>= 1.4.0, < 3.0)
57
- rubocop-ast (1.13.0)
58
- parser (>= 3.0.1.1)
57
+ rubocop-ast (1.17.0)
58
+ parser (>= 3.1.1.0)
59
59
  ruby-progressbar (1.11.0)
60
60
  unicode-display_width (2.1.0)
61
61
  vcr (6.0.0)
62
- webmock (3.14.0)
62
+ webmock (3.18.1)
63
63
  addressable (>= 2.8.0)
64
64
  crack (>= 0.3.2)
65
65
  hashdiff (>= 0.4.0, < 2.0.0)
@@ -70,11 +70,11 @@ PLATFORMS
70
70
  DEPENDENCIES
71
71
  byebug (~> 11.1.3)
72
72
  rake (~> 13.0)
73
- rspec (~> 3.10)
74
- rubocop (~> 1.23.0)
73
+ rspec (~> 3.11)
74
+ rubocop (~> 1.28.2)
75
75
  ruby-openai!
76
76
  vcr (~> 6.0.0)
77
- webmock (~> 3.14.0)
77
+ webmock (~> 3.18.1)
78
78
 
79
79
  BUNDLED WITH
80
80
  2.2.20
data/README.md CHANGED
@@ -5,7 +5,7 @@
5
5
  [![CircleCI Build Status](https://circleci.com/gh/alexrudall/ruby-openai.svg?style=shield)](https://circleci.com/gh/alexrudall/ruby-openai)
6
6
  [![Maintainability](https://api.codeclimate.com/v1/badges/a99a88d28ad37a79dbf6/maintainability)](https://codeclimate.com/github/codeclimate/codeclimate/maintainability)
7
7
 
8
- A simple Ruby wrapper for the [OpenAI GPT-3 API](https://openai.com/blog/openai-api/).
8
+ Use the [OpenAI GPT-3 API](https://openai.com/blog/openai-api/) with Ruby! 🤖❤️
9
9
 
10
10
  ## Installation
11
11
 
@@ -35,7 +35,7 @@ and require with:
35
35
 
36
36
  ## Usage
37
37
 
38
- Get your API key from [https://beta.openai.com/docs/developer-quickstart/your-api-keys](https://beta.openai.com/docs/developer-quickstart/your-api-keys)
38
+ Get your API key from [https://beta.openai.com/account/api-keys](https://beta.openai.com/account/api-keys)
39
39
 
40
40
  ### With dotenv
41
41
 
@@ -59,30 +59,16 @@ Alternatively you can pass your key directly to a new client:
59
59
  client = OpenAI::Client.new(access_token: "access_token_goes_here")
60
60
  ```
61
61
 
62
- ### Engines
63
-
64
- There are different engines that can be used to generate text. For a full list and to retrieve information about a single engine:
65
-
66
- ```ruby
67
- client.engines.list
68
- client.engines.retrieve(id: 'ada')
69
- ```
70
-
71
62
  #### Examples
72
63
 
73
- - [Base](https://beta.openai.com/docs/engines/base-series)
74
- - ada
75
- - babbage
76
- - curie
77
- - davinci
78
- - [Instruct](https://beta.openai.com/docs/engines/instruct-series-beta)
79
- - ada-instruct-beta
80
- - babbage-instruct-beta
81
- - curie-instruct-beta-v2
82
- - davinci-instruct-beta-v3
64
+ - [GPT-3](https://beta.openai.com/docs/engines/gpt-3)
65
+ - text-ada-001
66
+ - text-babbage-001
67
+ - text-curie-001
68
+ - text-davinci-001
83
69
  - [Codex (private beta)](https://beta.openai.com/docs/engines/codex-series-private-beta)
84
- - davinci-codex
85
- - cushman-codex
70
+ - code-davinci-002
71
+ - code-cushman-001
86
72
  - [Content Filter](https://beta.openai.com/docs/engines/content-filter)
87
73
  - content-filter-alpha
88
74
 
@@ -91,95 +77,56 @@ There are different engines that can be used to generate text. For a full list a
91
77
  Hit the OpenAI API for a completion:
92
78
 
93
79
  ```ruby
94
- response = client.completions(engine: "davinci", parameters: { prompt: "Once upon a time", max_tokens: 5 })
80
+ response = client.completions(engine: "text-davinci-001", parameters: { prompt: "Once upon a time", max_tokens: 5 })
95
81
  puts response.parsed_response['choices'].map{ |c| c["text"] }
96
82
  => [", there lived a great"]
97
83
  ```
98
84
 
99
- ### Files
100
-
101
- Put your data in a `.jsonl` file like this:
102
-
103
- ```json
104
- {"text": "puppy A is happy", "metadata": "emotional state of puppy A"}
105
- {"text": "puppy B is sad", "metadata": "emotional state of puppy B"}
106
- ```
107
-
108
- and pass the path to `client.files.upload` to upload it to OpenAI, and then interact with it:
109
-
110
- ```ruby
111
- client.files.upload(parameters: { file: 'path/to/puppy.jsonl', purpose: 'search' })
112
- client.files.list
113
- client.files.retrieve(id: 123)
114
- client.files.delete(id: 123)
115
- ```
116
-
117
- ### Search
85
+ ### Edits
118
86
 
119
- Pass documents and a query string to get semantic search scores against each document:
87
+ Send a string and some instructions for what to do to the string:
120
88
 
121
89
  ```ruby
122
- response = client.search(engine: "ada", parameters: { documents: %w[washington hospital school], query: "president" })
123
- puts response["data"].map { |d| d["score"] }
124
- => [202.0, 48.052, 19.247]
125
- ```
126
-
127
- You can alternatively search using the ID of a file you've uploaded:
128
-
129
- ```ruby
130
- client.search(engine: "ada", parameters: { file: "abc123", query: "happy" })
90
+ response = client.edits(
91
+ parameters: {
92
+ model: "text-davinci-edit-001",
93
+ input: "What day of the wek is it?",
94
+ instruction: "Fix the spelling mistakes"
95
+ }
96
+ )
97
+ puts response.dig("choices", 0, "text")
98
+ => What day of the week is it?
131
99
  ```
132
100
 
133
- ### Answers
134
-
135
- Pass documents, a question string, and an example question/response to get an answer to a question:
136
-
137
- ```ruby
138
- response = client.answers(parameters: {
139
- documents: ["Puppy A is happy.", "Puppy B is sad."],
140
- question: "which puppy is happy?",
141
- model: "curie",
142
- examples_context: "In 2017, U.S. life expectancy was 78.6 years.",
143
- examples: [["What is human life expectancy in the United States?","78 years."]],
144
- })
145
- ```
101
+ ### Embeddings
146
102
 
147
- Or use the ID of a file you've uploaded:
103
+ You can use the embeddings endpoint to get a vector of numbers representing an input. You can then compare these vectors for different inputs to efficiently check how similar the inputs are.
148
104
 
149
105
  ```ruby
150
- response = client.answers(parameters: {
151
- file: "123abc",
152
- question: "which puppy is happy?",
153
- model: "curie",
154
- examples_context: "In 2017, U.S. life expectancy was 78.6 years.",
155
- examples: [["What is human life expectancy in the United States?","78 years."]],
156
- })
106
+ client.embeddings(
107
+ engine: "babbage-similarity",
108
+ parameters: {
109
+ input: "The food was delicious and the waiter..."
110
+ }
111
+ )
157
112
  ```
158
113
 
159
- ### Classifications
114
+ ### Files
160
115
 
161
- Pass examples and a query to predict the most likely labels:
116
+ Put your data in a `.jsonl` file like this:
162
117
 
163
- ```ruby
164
- response = client.classifications(parameters: {
165
- examples: [
166
- ["A happy moment", "Positive"],
167
- ["I am sad.", "Negative"],
168
- ["I am feeling awesome", "Positive"]
169
- ],
170
- query: "It is a raining day :(",
171
- model: "ada"
172
- })
118
+ ```json
119
+ {"text": "puppy A is happy", "metadata": "emotional state of puppy A"}
120
+ {"text": "puppy B is sad", "metadata": "emotional state of puppy B"}
173
121
  ```
174
122
 
175
- Or use the ID of a file you've uploaded:
123
+ and pass the path to `client.files.upload` to upload it to OpenAI, and then interact with it:
176
124
 
177
125
  ```ruby
178
- response = client.classifications(parameters: {
179
- file: "123abc,
180
- query: "It is a raining day :(",
181
- model: "ada"
182
- })
126
+ client.files.upload(parameters: { file: 'path/to/puppy.jsonl', purpose: 'search' })
127
+ client.files.list
128
+ client.files.retrieve(id: 123)
129
+ client.files.delete(id: 123)
183
130
  ```
184
131
 
185
132
  ### Fine-tunes
@@ -204,7 +151,7 @@ You can then use this file ID to create a fine-tune model:
204
151
  response = client.finetunes.create(
205
152
  parameters: {
206
153
  training_file: file_id,
207
- model: "ada"
154
+ model: "text-ada-001"
208
155
  })
209
156
  fine_tune_id = JSON.parse(response.body)["id"]
210
157
  ```
@@ -237,17 +184,91 @@ This fine-tuned model name can then be used in classifications:
237
184
 
238
185
  Do not pass the engine parameter when using a fine-tuned model.
239
186
 
240
- ### Embeddings
187
+ ### Moderations
241
188
 
242
- You can use the embeddings endpoint to get a vector of numbers representing an input. You can then compare these vectors for different inputs to efficiently check how similar the inputs are.
189
+ Pass a string to check if it violates OpenAI's Content Policy:
243
190
 
244
191
  ```ruby
245
- client.embeddings(
246
- engine: "babbage-similarity",
247
- parameters: {
248
- input: "The food was delicious and the waiter..."
249
- }
250
- )
192
+ response = client.moderations(parameters: { input: "I'm worried about that." })
193
+ puts response.dig("results", 0, "category_scores", "hate")
194
+ => 5.505014632944949e-05
195
+ ```
196
+
197
+ ### Searches
198
+
199
+ Pass documents and a query string to get semantic search scores against each document:
200
+
201
+ ```ruby
202
+ response = client.search(engine: "text-ada-001", parameters: { documents: %w[washington hospital school], query: "president" })
203
+ puts response["data"].map { |d| d["score"] }
204
+ => [202.0, 48.052, 19.247]
205
+ ```
206
+
207
+ You can alternatively search using the ID of a file you've uploaded:
208
+
209
+ ```ruby
210
+ client.search(engine: "text-ada-001", parameters: { file: "abc123", query: "happy" })
211
+ ```
212
+
213
+ ### Classifications
214
+
215
+ Pass examples and a query to predict the most likely labels:
216
+
217
+ ```ruby
218
+ response = client.classifications(parameters: {
219
+ examples: [
220
+ ["A happy moment", "Positive"],
221
+ ["I am sad.", "Negative"],
222
+ ["I am feeling awesome", "Positive"]
223
+ ],
224
+ query: "It is a raining day :(",
225
+ model: "text-ada-001"
226
+ })
227
+ ```
228
+
229
+ Or use the ID of a file you've uploaded:
230
+
231
+ ```ruby
232
+ response = client.classifications(parameters: {
233
+ file: "123abc",
234
+ query: "It is a raining day :(",
235
+ model: "text-ada-001"
236
+ })
237
+ ```
238
+
239
+ ### Answers
240
+
241
+ Pass documents, a question string, and an example question/response to get an answer to a question:
242
+
243
+ ```ruby
244
+ response = client.answers(parameters: {
245
+ documents: ["Puppy A is happy.", "Puppy B is sad."],
246
+ question: "which puppy is happy?",
247
+ model: "text-curie-001",
248
+ examples_context: "In 2017, U.S. life expectancy was 78.6 years.",
249
+ examples: [["What is human life expectancy in the United States?","78 years."]],
250
+ })
251
+ ```
252
+
253
+ Or use the ID of a file you've uploaded:
254
+
255
+ ```ruby
256
+ response = client.answers(parameters: {
257
+ file: "123abc",
258
+ question: "which puppy is happy?",
259
+ model: "text-curie-001",
260
+ examples_context: "In 2017, U.S. life expectancy was 78.6 years.",
261
+ examples: [["What is human life expectancy in the United States?","78 years."]],
262
+ })
263
+ ```
264
+
265
+ ### Engines
266
+
267
+ There are different engines that can be used to generate text. For a full list and to retrieve information about a single engine:
268
+
269
+ ```ruby
270
+ client.engines.list
271
+ client.engines.retrieve(id: 'text-ada-001')
251
272
  ```
252
273
 
253
274
  ## Development
@@ -4,7 +4,7 @@ module OpenAI
4
4
  base_uri "https://api.openai.com"
5
5
 
6
6
  def initialize(access_token: nil)
7
- @access_token = access_token || ENV["OPENAI_ACCESS_TOKEN"]
7
+ @access_token = access_token || ENV.fetch("OPENAI_ACCESS_TOKEN")
8
8
  end
9
9
 
10
10
  def answers(version: default_version, parameters: {})
@@ -23,6 +23,10 @@ module OpenAI
23
23
  end
24
24
  end
25
25
 
26
+ def edits(version: default_version, parameters: {})
27
+ post(url: "/#{version}/edits", parameters: parameters)
28
+ end
29
+
26
30
  def embeddings(engine:, version: default_version, parameters: {})
27
31
  post(url: "/#{version}/engines/#{engine}/embeddings", parameters: parameters)
28
32
  end
@@ -39,6 +43,10 @@ module OpenAI
39
43
  @finetunes ||= OpenAI::Finetunes.new(access_token: @access_token)
40
44
  end
41
45
 
46
+ def moderations(version: default_version, parameters: {})
47
+ post(url: "/#{version}/moderations", parameters: parameters)
48
+ end
49
+
42
50
  # rubocop:disable Layout/LineLength
43
51
  # rubocop:disable Metrics/ParameterLists
44
52
  def search(engine:, query: nil, documents: nil, file: nil, version: default_version, parameters: {})
@@ -4,7 +4,7 @@ module OpenAI
4
4
  base_uri "https://api.openai.com"
5
5
 
6
6
  def initialize(access_token: nil)
7
- @access_token = access_token || ENV["OPENAI_ACCESS_TOKEN"]
7
+ @access_token = access_token || ENV.fetch("OPENAI_ACCESS_TOKEN")
8
8
  end
9
9
 
10
10
  def list(version: default_version)
@@ -4,7 +4,7 @@ module OpenAI
4
4
  base_uri "https://api.openai.com"
5
5
 
6
6
  def initialize(access_token: nil)
7
- @access_token = access_token || ENV["OPENAI_ACCESS_TOKEN"]
7
+ @access_token = access_token || ENV.fetch("OPENAI_ACCESS_TOKEN")
8
8
  end
9
9
 
10
10
  def list(version: default_version)
@@ -4,7 +4,7 @@ module OpenAI
4
4
  base_uri "https://api.openai.com"
5
5
 
6
6
  def initialize(access_token: nil)
7
- @access_token = access_token || ENV["OPENAI_ACCESS_TOKEN"]
7
+ @access_token = access_token || ENV.fetch("OPENAI_ACCESS_TOKEN")
8
8
  end
9
9
 
10
10
  def list(version: default_version)
@@ -1,5 +1,5 @@
1
1
  module Ruby
2
2
  module OpenAI
3
- VERSION = "1.4.0".freeze
3
+ VERSION = "1.5.0".freeze
4
4
  end
5
5
  end
data/ruby-openai.gemspec CHANGED
@@ -25,6 +25,6 @@ Gem::Specification.new do |spec|
25
25
  spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
26
26
  spec.require_paths = ["lib"]
27
27
 
28
- spec.add_dependency "dotenv", "~> 2.7.6"
28
+ spec.add_dependency "dotenv", ">= 2.7.6", "< 2.9.0"
29
29
  spec.add_dependency "httparty", ">= 0.18.1", "< 0.21.0"
30
30
  end
metadata CHANGED
@@ -1,29 +1,35 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: ruby-openai
3
3
  version: !ruby/object:Gem::Version
4
- version: 1.4.0
4
+ version: 1.5.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Alex
8
8
  autorequire:
9
9
  bindir: exe
10
10
  cert_chain: []
11
- date: 2021-12-11 00:00:00.000000000 Z
11
+ date: 2022-09-18 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: dotenv
15
15
  requirement: !ruby/object:Gem::Requirement
16
16
  requirements:
17
- - - "~>"
17
+ - - ">="
18
18
  - !ruby/object:Gem::Version
19
19
  version: 2.7.6
20
+ - - "<"
21
+ - !ruby/object:Gem::Version
22
+ version: 2.9.0
20
23
  type: :runtime
21
24
  prerelease: false
22
25
  version_requirements: !ruby/object:Gem::Requirement
23
26
  requirements:
24
- - - "~>"
27
+ - - ">="
25
28
  - !ruby/object:Gem::Version
26
29
  version: 2.7.6
30
+ - - "<"
31
+ - !ruby/object:Gem::Version
32
+ version: 2.9.0
27
33
  - !ruby/object:Gem::Dependency
28
34
  name: httparty
29
35
  requirement: !ruby/object:Gem::Requirement
@@ -100,7 +106,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
100
106
  - !ruby/object:Gem::Version
101
107
  version: '0'
102
108
  requirements: []
103
- rubygems_version: 3.1.6
109
+ rubygems_version: 3.2.33
104
110
  signing_key:
105
111
  specification_version: 4
106
112
  summary: A Ruby gem for the OpenAI GPT-3 API