ruby-openai 1.2.1 → 1.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: eb5a5c5d8b8e04ba7f6c22de243bfd1e901b69e28cdc2bb234b8bdc46c58a110
4
- data.tar.gz: 138fa30d605a4ca116b98a3acb5b74617f27420a8110ef38605ec00f5896fbc3
3
+ metadata.gz: 54bd37da173d97fdb58db219e057eec6997a3694c463d79cdc6e04fce86713fd
4
+ data.tar.gz: 051e5bd9b7dfe58040e0c1107c96da85f9a8c8d655987a7214dc3ce9f4663829
5
5
  SHA512:
6
- metadata.gz: 2cf09eca86b84db3f6dce82589fd5c2ccd36259308e941427a5b5b9498ebccf3797a57cd6802a91879d0b6cdc87e2383e4ae4451b0941ef74beea64e721a120a
7
- data.tar.gz: f3f9dff8efd9ba3c4a32880940078b460c8d853e9a88b0e12fb360de1e3bcb8c3b88dda695b49de9a1c08f9947d69a421be2021a466b0693b74b252cb84348f9
6
+ metadata.gz: bcf13d22abf4a3f46a82622d6f34be59cab1fd068d1fe29709da89fa434611b5a0ada0397cc3dc0e091406174b5ad8472c6647c553d2bdf6b8fcfe5d1d547c81
7
+ data.tar.gz: 6ed690d12edb3d1aac43894f6704365ed1535b124286fd6b048dbe8606499d28e474f89096bba8780b8eadbebc201918a6e6a754cc03d942221bb31dad4de8c0
data/.circleci/config.yml CHANGED
@@ -8,7 +8,7 @@ jobs:
8
8
  rubocop:
9
9
  parallelism: 1
10
10
  docker:
11
- - image: cimg/ruby:2.7-node
11
+ - image: cimg/ruby:3.0-node
12
12
  steps:
13
13
  - checkout
14
14
  - ruby/install-deps
@@ -16,9 +16,12 @@ jobs:
16
16
  name: Run Rubocop
17
17
  command: bundle exec rubocop
18
18
  test:
19
+ parameters:
20
+ ruby-image:
21
+ type: string
19
22
  parallelism: 1
20
23
  docker:
21
- - image: cimg/ruby:2.7-node
24
+ - image: << parameters.ruby-image >>
22
25
  steps:
23
26
  - checkout
24
27
  - ruby/install-deps
@@ -31,4 +34,7 @@ workflows:
31
34
  checks:
32
35
  jobs:
33
36
  - rubocop
34
- - test
37
+ - test:
38
+ matrix:
39
+ parameters:
40
+ ruby-image: ['cimg/ruby:2.5-node', 'cimg/ruby:2.6-node', 'cimg/ruby:2.7-node', 'cimg/ruby:3.0-node']
@@ -0,0 +1,15 @@
1
+ version: 2
2
+ updates:
3
+ - package-ecosystem: bundler
4
+ directory: "/"
5
+ schedule:
6
+ interval: daily
7
+ open-pull-requests-limit: 10
8
+ ignore:
9
+ - dependency-name: webmock
10
+ versions:
11
+ - 3.11.1
12
+ - 3.11.3
13
+ - dependency-name: rspec
14
+ versions:
15
+ - 3.10.0
data/.rubocop.yml CHANGED
@@ -1,5 +1,5 @@
1
1
  AllCops:
2
- TargetRubyVersion: 2.7
2
+ TargetRubyVersion: 2.5
3
3
  NewCops: enable
4
4
  SuggestExtensions: false
5
5
 
data/CHANGELOG.md CHANGED
@@ -5,6 +5,39 @@ All notable changes to this project will be documented in this file.
5
5
  The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
6
6
  and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
7
7
 
8
+ ## [1.4.0] - 2021-12-11
9
+
10
+ ### Added
11
+
12
+ - Add Client#engines endpoints to list and query available engines.
13
+ - Add Client#finetunes endpoints to create and use fine-tuned models.
14
+ - Add Client#embeddings endpoint to get vector representations of inputs.
15
+ - Add tests and examples for more engines.
16
+
17
+ ## [1.3.1] - 2021-07-14
18
+
19
+ ### Changed
20
+
21
+ - Add backwards compatibility from Ruby 2.5+.
22
+
23
+ ## [1.3.0] - 2021-04-18
24
+
25
+ ### Added
26
+
27
+ - Add Client#classifications to predict the most likely labels based on examples or a file.
28
+
29
+ ### Fixed
30
+
31
+ - Fixed Files#upload which was previously broken by the validation code!
32
+
33
+ ## [1.2.2] - 2021-04-18
34
+
35
+ ### Changed
36
+
37
+ - Add Client#search(parameters:) to allow passing `max_rerank` or `return_metadata`.
38
+ - Deprecate Client#search with query, file or document parameters at the top level.
39
+ - Thanks [@stevegeek](https://github.com/stevegeek) for pointing this issue out!
40
+
8
41
  ## [1.2.1] - 2021-04-11
9
42
 
10
43
  ### Added
data/Gemfile CHANGED
@@ -6,6 +6,6 @@ gemspec
6
6
  gem "byebug", "~> 11.1.3"
7
7
  gem "rake", "~> 13.0"
8
8
  gem "rspec", "~> 3.10"
9
- gem "rubocop", "~> 1.12.1"
9
+ gem "rubocop", "~> 1.23.0"
10
10
  gem "vcr", "~> 6.0.0"
11
- gem "webmock", "~> 3.12.2"
11
+ gem "webmock", "~> 3.14.0"
data/Gemfile.lock CHANGED
@@ -1,14 +1,14 @@
1
1
  PATH
2
2
  remote: .
3
3
  specs:
4
- ruby-openai (1.2.1)
4
+ ruby-openai (1.4.0)
5
5
  dotenv (~> 2.7.6)
6
- httparty (~> 0.18.1)
6
+ httparty (>= 0.18.1, < 0.21.0)
7
7
 
8
8
  GEM
9
9
  remote: https://rubygems.org/
10
10
  specs:
11
- addressable (2.7.0)
11
+ addressable (2.8.0)
12
12
  public_suffix (>= 2.0.2, < 5.0)
13
13
  ast (2.4.2)
14
14
  byebug (11.1.3)
@@ -17,21 +17,21 @@ GEM
17
17
  diff-lcs (1.4.4)
18
18
  dotenv (2.7.6)
19
19
  hashdiff (1.0.1)
20
- httparty (0.18.1)
20
+ httparty (0.20.0)
21
21
  mime-types (~> 3.0)
22
22
  multi_xml (>= 0.5.2)
23
23
  mime-types (3.3.1)
24
24
  mime-types-data (~> 3.2015)
25
- mime-types-data (3.2021.0225)
25
+ mime-types-data (3.2021.0901)
26
26
  multi_xml (0.6.0)
27
- parallel (1.20.1)
28
- parser (3.0.0.0)
27
+ parallel (1.21.0)
28
+ parser (3.0.2.0)
29
29
  ast (~> 2.4.1)
30
30
  public_suffix (4.0.6)
31
31
  rainbow (3.0.0)
32
- rake (13.0.3)
32
+ rake (13.0.6)
33
33
  regexp_parser (2.1.1)
34
- rexml (3.2.4)
34
+ rexml (3.2.5)
35
35
  rspec (3.10.0)
36
36
  rspec-core (~> 3.10.0)
37
37
  rspec-expectations (~> 3.10.0)
@@ -45,22 +45,22 @@ GEM
45
45
  diff-lcs (>= 1.2.0, < 2.0)
46
46
  rspec-support (~> 3.10.0)
47
47
  rspec-support (3.10.1)
48
- rubocop (1.12.1)
48
+ rubocop (1.23.0)
49
49
  parallel (~> 1.10)
50
50
  parser (>= 3.0.0.0)
51
51
  rainbow (>= 2.2.2, < 4.0)
52
52
  regexp_parser (>= 1.8, < 3.0)
53
53
  rexml
54
- rubocop-ast (>= 1.2.0, < 2.0)
54
+ rubocop-ast (>= 1.12.0, < 2.0)
55
55
  ruby-progressbar (~> 1.7)
56
56
  unicode-display_width (>= 1.4.0, < 3.0)
57
- rubocop-ast (1.4.1)
58
- parser (>= 2.7.1.5)
57
+ rubocop-ast (1.13.0)
58
+ parser (>= 3.0.1.1)
59
59
  ruby-progressbar (1.11.0)
60
- unicode-display_width (2.0.0)
60
+ unicode-display_width (2.1.0)
61
61
  vcr (6.0.0)
62
- webmock (3.12.2)
63
- addressable (>= 2.3.6)
62
+ webmock (3.14.0)
63
+ addressable (>= 2.8.0)
64
64
  crack (>= 0.3.2)
65
65
  hashdiff (>= 0.4.0, < 2.0.0)
66
66
 
@@ -71,10 +71,10 @@ DEPENDENCIES
71
71
  byebug (~> 11.1.3)
72
72
  rake (~> 13.0)
73
73
  rspec (~> 3.10)
74
- rubocop (~> 1.12.1)
74
+ rubocop (~> 1.23.0)
75
75
  ruby-openai!
76
76
  vcr (~> 6.0.0)
77
- webmock (~> 3.12.2)
77
+ webmock (~> 3.14.0)
78
78
 
79
79
  BUNDLED WITH
80
- 2.2.3
80
+ 2.2.20
data/README.md CHANGED
@@ -3,24 +3,35 @@
3
3
  [![Gem Version](https://badge.fury.io/rb/ruby-openai.svg)](https://badge.fury.io/rb/ruby-openai)
4
4
  [![GitHub license](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/alexrudall/ruby-openai/blob/main/LICENSE.txt)
5
5
  [![CircleCI Build Status](https://circleci.com/gh/alexrudall/ruby-openai.svg?style=shield)](https://circleci.com/gh/alexrudall/ruby-openai)
6
+ [![Maintainability](https://api.codeclimate.com/v1/badges/a99a88d28ad37a79dbf6/maintainability)](https://codeclimate.com/github/codeclimate/codeclimate/maintainability)
6
7
 
7
8
  A simple Ruby wrapper for the [OpenAI GPT-3 API](https://openai.com/blog/openai-api/).
8
9
 
9
10
  ## Installation
10
11
 
12
+ ### Bundler
13
+
11
14
  Add this line to your application's Gemfile:
12
15
 
13
16
  ```ruby
14
- gem 'ruby-openai'
17
+ gem 'ruby-openai'
15
18
  ```
16
19
 
17
20
  And then execute:
18
21
 
19
- $ bundle install
22
+ $ bundle install
23
+
24
+ ### Gem install
25
+
26
+ Or install with:
20
27
 
21
- Or install it yourself as:
28
+ $ gem install ruby-openai
22
29
 
23
- $ gem install ruby-openai
30
+ and require with:
31
+
32
+ ```ruby
33
+ require "ruby/openai"
34
+ ```
24
35
 
25
36
  ## Usage
26
37
 
@@ -36,7 +47,7 @@ If you're using [dotenv](https://github.com/motdotla/dotenv), you can add your s
36
47
 
37
48
  And create a client:
38
49
 
39
- ```
50
+ ```ruby
40
51
  client = OpenAI::Client.new
41
52
  ```
42
53
 
@@ -44,15 +55,42 @@ And create a client:
44
55
 
45
56
  Alternatively you can pass your key directly to a new client:
46
57
 
47
- ```
58
+ ```ruby
48
59
  client = OpenAI::Client.new(access_token: "access_token_goes_here")
49
60
  ```
50
61
 
51
- ### Completions
62
+ ### Engines
52
63
 
53
- The engine options are currently "ada", "babbage", "curie" and "davinci". Hit the OpenAI API for a completion:
64
+ There are different engines that can be used to generate text. For a full list and to retrieve information about a single engine:
54
65
 
66
+ ```ruby
67
+ client.engines.list
68
+ client.engines.retrieve(id: 'ada')
55
69
  ```
70
+
71
+ #### Examples
72
+
73
+ - [Base](https://beta.openai.com/docs/engines/base-series)
74
+ - ada
75
+ - babbage
76
+ - curie
77
+ - davinci
78
+ - [Instruct](https://beta.openai.com/docs/engines/instruct-series-beta)
79
+ - ada-instruct-beta
80
+ - babbage-instruct-beta
81
+ - curie-instruct-beta-v2
82
+ - davinci-instruct-beta-v3
83
+ - [Codex (private beta)](https://beta.openai.com/docs/engines/codex-series-private-beta)
84
+ - davinci-codex
85
+ - cushman-codex
86
+ - [Content Filter](https://beta.openai.com/docs/engines/content-filter)
87
+ - content-filter-alpha
88
+
89
+ ### Completions
90
+
91
+ Hit the OpenAI API for a completion:
92
+
93
+ ```ruby
56
94
  response = client.completions(engine: "davinci", parameters: { prompt: "Once upon a time", max_tokens: 5 })
57
95
  puts response.parsed_response['choices'].map{ |c| c["text"] }
58
96
  => [", there lived a great"]
@@ -62,14 +100,14 @@ The engine options are currently "ada", "babbage", "curie" and "davinci". Hit th
62
100
 
63
101
  Put your data in a `.jsonl` file like this:
64
102
 
65
- ```
103
+ ```json
66
104
  {"text": "puppy A is happy", "metadata": "emotional state of puppy A"}
67
105
  {"text": "puppy B is sad", "metadata": "emotional state of puppy B"}
68
106
  ```
69
107
 
70
108
  and pass the path to `client.files.upload` to upload it to OpenAI, and then interact with it:
71
109
 
72
- ```
110
+ ```ruby
73
111
  client.files.upload(parameters: { file: 'path/to/puppy.jsonl', purpose: 'search' })
74
112
  client.files.list
75
113
  client.files.retrieve(id: 123)
@@ -80,23 +118,23 @@ and pass the path to `client.files.upload` to upload it to OpenAI, and then inte
80
118
 
81
119
  Pass documents and a query string to get semantic search scores against each document:
82
120
 
83
- ```
84
- response = client.search(engine: "ada", documents: %w[washington hospital school], query: "president")
121
+ ```ruby
122
+ response = client.search(engine: "ada", parameters: { documents: %w[washington hospital school], query: "president" })
85
123
  puts response["data"].map { |d| d["score"] }
86
124
  => [202.0, 48.052, 19.247]
87
125
  ```
88
126
 
89
127
  You can alternatively search using the ID of a file you've uploaded:
90
128
 
91
- ```
92
- client.search(engine: "ada", file: "abc123", query: "happy")
129
+ ```ruby
130
+ client.search(engine: "ada", parameters: { file: "abc123", query: "happy" })
93
131
  ```
94
132
 
95
133
  ### Answers
96
134
 
97
135
  Pass documents, a question string, and an example question/response to get an answer to a question:
98
136
 
99
- ```
137
+ ```ruby
100
138
  response = client.answers(parameters: {
101
139
  documents: ["Puppy A is happy.", "Puppy B is sad."],
102
140
  question: "which puppy is happy?",
@@ -106,9 +144,9 @@ Pass documents, a question string, and an example question/response to get an an
106
144
  })
107
145
  ```
108
146
 
109
- You can alternatively search using the ID of a file you've uploaded:
147
+ Or use the ID of a file you've uploaded:
110
148
 
111
- ```
149
+ ```ruby
112
150
  response = client.answers(parameters: {
113
151
  file: "123abc",
114
152
  question: "which puppy is happy?",
@@ -118,11 +156,105 @@ You can alternatively search using the ID of a file you've uploaded:
118
156
  })
119
157
  ```
120
158
 
159
+ ### Classifications
160
+
161
+ Pass examples and a query to predict the most likely labels:
162
+
163
+ ```ruby
164
+ response = client.classifications(parameters: {
165
+ examples: [
166
+ ["A happy moment", "Positive"],
167
+ ["I am sad.", "Negative"],
168
+ ["I am feeling awesome", "Positive"]
169
+ ],
170
+ query: "It is a raining day :(",
171
+ model: "ada"
172
+ })
173
+ ```
174
+
175
+ Or use the ID of a file you've uploaded:
176
+
177
+ ```ruby
178
+ response = client.classifications(parameters: {
179
+ file: "123abc",
180
+ query: "It is a raining day :(",
181
+ model: "ada"
182
+ })
183
+ ```
184
+
185
+ ### Fine-tunes
186
+
187
+ Put your fine-tuning data in a `.jsonl` file like this:
188
+
189
+ ```json
190
+ {"prompt":"Overjoyed with my new phone! ->", "completion":" positive"}
191
+ {"prompt":"@lakers disappoint for a third straight night ->", "completion":" negative"}
192
+ ```
193
+
194
+ and pass the path to `client.files.upload` to upload it to OpenAI and get its ID:
195
+
196
+ ```ruby
197
+ response = client.files.upload(parameters: { file: 'path/to/sentiment.jsonl', purpose: 'fine-tune' })
198
+ file_id = JSON.parse(response.body)["id"]
199
+ ```
200
+
201
+ You can then use this file ID to create a fine-tune model:
202
+
203
+ ```ruby
204
+ response = client.finetunes.create(
205
+ parameters: {
206
+ training_file: file_id,
207
+ model: "ada"
208
+ })
209
+ fine_tune_id = JSON.parse(response.body)["id"]
210
+ ```
211
+
212
+ That will give you the fine-tune ID. If you made a mistake you can cancel the fine-tune model before it is processed:
213
+
214
+ ```ruby
215
+ client.finetunes.cancel(id: fine_tune_id)
216
+ ```
217
+
218
+ You may need to wait a short time for processing to complete. Once processed, you can use list or retrieve to get the name of the fine-tuned model:
219
+
220
+ ```ruby
221
+ client.finetunes.list
222
+ response = client.finetunes.retrieve(id: fine_tune_id)
223
+ fine_tuned_model = JSON.parse(response.body)["fine_tuned_model"]
224
+ ```
225
+
226
+ This fine-tuned model name can then be used in completions:
227
+
228
+ ```ruby
229
+ response = client.completions(
230
+ parameters: {
231
+ model: fine_tuned_model,
232
+ prompt: "I love Mondays!"
233
+ }
234
+ )
235
+ JSON.parse(response.body)["choices"].map { |c| c["text"] }
236
+ ```
237
+
238
+ Do not pass the engine parameter when using a fine-tuned model.
239
+
240
+ ### Embeddings
241
+
242
+ You can use the embeddings endpoint to get a vector of numbers representing an input. You can then compare these vectors for different inputs to efficiently check how similar the inputs are.
243
+
244
+ ```ruby
245
+ client.embeddings(
246
+ engine: "babbage-similarity",
247
+ parameters: {
248
+ input: "The food was delicious and the waiter..."
249
+ }
250
+ )
251
+ ```
252
+
121
253
  ## Development
122
254
 
123
255
  After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake spec` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.
124
256
 
125
- To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and tags, and push the `.gem` file to [rubygems.org](https://rubygems.org).
257
+ To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, update `CHANGELOG.md`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and tags, and push the `.gem` file to [rubygems.org](https://rubygems.org).
126
258
 
127
259
  ## Contributing
128
260
 
@@ -8,44 +8,63 @@ module OpenAI
8
8
  end
9
9
 
10
10
  def answers(version: default_version, parameters: {})
11
- self.class.post(
12
- "/#{version}/answers",
13
- headers: {
14
- "Content-Type" => "application/json",
15
- "Authorization" => "Bearer #{@access_token}"
16
- },
17
- body: parameters.to_json
18
- )
11
+ post(url: "/#{version}/answers", parameters: parameters)
19
12
  end
20
13
 
21
- def completions(engine:, version: default_version, parameters: {})
22
- self.class.post(
23
- "/#{version}/engines/#{engine}/completions",
24
- headers: {
25
- "Content-Type" => "application/json",
26
- "Authorization" => "Bearer #{@access_token}"
27
- },
28
- body: parameters.to_json
29
- )
14
+ def classifications(version: default_version, parameters: {})
15
+ post(url: "/#{version}/classifications", parameters: parameters)
16
+ end
17
+
18
+ def completions(engine: nil, version: default_version, parameters: {})
19
+ if engine
20
+ post(url: "/#{version}/engines/#{engine}/completions", parameters: parameters)
21
+ else
22
+ post(url: "/#{version}/completions", parameters: parameters)
23
+ end
24
+ end
25
+
26
+ def embeddings(engine:, version: default_version, parameters: {})
27
+ post(url: "/#{version}/engines/#{engine}/embeddings", parameters: parameters)
28
+ end
29
+
30
+ def engines
31
+ @engines ||= OpenAI::Engines.new(access_token: @access_token)
30
32
  end
31
33
 
32
34
  def files
33
35
  @files ||= OpenAI::Files.new(access_token: @access_token)
34
36
  end
35
37
 
36
- def search(engine:, query:, documents: nil, file: nil, version: default_version)
37
- self.class.post(
38
- "/#{version}/engines/#{engine}/search",
39
- headers: {
40
- "Content-Type" => "application/json",
41
- "Authorization" => "Bearer #{@access_token}"
42
- },
43
- body: { query: query }.merge(documents_or_file(documents: documents, file: file)).to_json
44
- )
38
+ def finetunes
39
+ @finetunes ||= OpenAI::Finetunes.new(access_token: @access_token)
45
40
  end
46
41
 
42
+ # rubocop:disable Layout/LineLength
43
+ # rubocop:disable Metrics/ParameterLists
44
+ def search(engine:, query: nil, documents: nil, file: nil, version: default_version, parameters: {})
45
+ return legacy_search(engine: engine, query: query, documents: documents, file: file, version: version) if query || documents || file
46
+
47
+ post(url: "/#{version}/engines/#{engine}/search", parameters: parameters)
48
+ end
49
+ # rubocop:enable Layout/LineLength
50
+ # rubocop:enable Metrics/ParameterLists
51
+
47
52
  private
48
53
 
54
+ # rubocop:disable Layout/LineLength
55
+ def legacy_search(engine:, query:, documents: nil, file: nil, version: default_version)
56
+ warn "[DEPRECATION] Passing `query`, `documents` or `file` directly to `Client#search` is deprecated and will be removed in a future version of ruby-openai.
57
+ Please nest these terms within `parameters` instead, like this:
58
+ client.search(engine: 'davinci', parameters: { query: 'president', documents: %w[washington hospital school] })
59
+ "
60
+
61
+ post(
62
+ url: "/#{version}/engines/#{engine}/search",
63
+ parameters: { query: query }.merge(documents_or_file(documents: documents, file: file))
64
+ )
65
+ end
66
+ # rubocop:enable Layout/LineLength
67
+
49
68
  def default_version
50
69
  "v1".freeze
51
70
  end
@@ -53,5 +72,16 @@ module OpenAI
53
72
  def documents_or_file(documents: nil, file: nil)
54
73
  documents ? { documents: documents } : { file: file }
55
74
  end
75
+
76
+ def post(url:, parameters:)
77
+ self.class.post(
78
+ url,
79
+ headers: {
80
+ "Content-Type" => "application/json",
81
+ "Authorization" => "Bearer #{@access_token}"
82
+ },
83
+ body: parameters.to_json
84
+ )
85
+ end
56
86
  end
57
87
  end
@@ -0,0 +1,36 @@
1
+ module OpenAI
2
+ class Engines
3
+ include HTTParty
4
+ base_uri "https://api.openai.com"
5
+
6
+ def initialize(access_token: nil)
7
+ @access_token = access_token || ENV["OPENAI_ACCESS_TOKEN"]
8
+ end
9
+
10
+ def list(version: default_version)
11
+ self.class.get(
12
+ "/#{version}/engines",
13
+ headers: {
14
+ "Content-Type" => "application/json",
15
+ "Authorization" => "Bearer #{@access_token}"
16
+ }
17
+ )
18
+ end
19
+
20
+ def retrieve(id:, version: default_version)
21
+ self.class.get(
22
+ "/#{version}/engines/#{id}",
23
+ headers: {
24
+ "Content-Type" => "application/json",
25
+ "Authorization" => "Bearer #{@access_token}"
26
+ }
27
+ )
28
+ end
29
+
30
+ private
31
+
32
+ def default_version
33
+ "v1".freeze
34
+ end
35
+ end
36
+ end
@@ -18,7 +18,7 @@ module OpenAI
18
18
  end
19
19
 
20
20
  def upload(version: default_version, parameters: {})
21
- file = validate(file: parameters[:file])
21
+ validate(file: parameters[:file])
22
22
 
23
23
  self.class.post(
24
24
  "/#{version}/files",
@@ -26,7 +26,7 @@ module OpenAI
26
26
  "Content-Type" => "application/json",
27
27
  "Authorization" => "Bearer #{@access_token}"
28
28
  },
29
- body: parameters.merge(file: file)
29
+ body: parameters.merge(file: File.open(parameters[:file]))
30
30
  )
31
31
  end
32
32
 
@@ -0,0 +1,67 @@
1
+ module OpenAI
2
+ class Finetunes
3
+ include HTTParty
4
+ base_uri "https://api.openai.com"
5
+
6
+ def initialize(access_token: nil)
7
+ @access_token = access_token || ENV["OPENAI_ACCESS_TOKEN"]
8
+ end
9
+
10
+ def list(version: default_version)
11
+ self.class.get(
12
+ "/#{version}/fine-tunes",
13
+ headers: {
14
+ "Content-Type" => "application/json",
15
+ "Authorization" => "Bearer #{@access_token}"
16
+ }
17
+ )
18
+ end
19
+
20
+ def create(version: default_version, parameters: {})
21
+ self.class.post(
22
+ "/#{version}/fine-tunes",
23
+ headers: {
24
+ "Content-Type" => "application/json",
25
+ "Authorization" => "Bearer #{@access_token}"
26
+ },
27
+ body: parameters.to_json
28
+ )
29
+ end
30
+
31
+ def retrieve(id:, version: default_version)
32
+ self.class.get(
33
+ "/#{version}/fine-tunes/#{id}",
34
+ headers: {
35
+ "Content-Type" => "application/json",
36
+ "Authorization" => "Bearer #{@access_token}"
37
+ }
38
+ )
39
+ end
40
+
41
+ def cancel(id:, version: default_version)
42
+ self.class.post(
43
+ "/#{version}/fine-tunes/#{id}/cancel",
44
+ headers: {
45
+ "Content-Type" => "application/json",
46
+ "Authorization" => "Bearer #{@access_token}"
47
+ }
48
+ )
49
+ end
50
+
51
+ def events(id:, version: default_version)
52
+ self.class.get(
53
+ "/#{version}/fine-tunes/#{id}/events",
54
+ headers: {
55
+ "Content-Type" => "application/json",
56
+ "Authorization" => "Bearer #{@access_token}"
57
+ }
58
+ )
59
+ end
60
+
61
+ private
62
+
63
+ def default_version
64
+ "v1".freeze
65
+ end
66
+ end
67
+ end
@@ -1,5 +1,5 @@
1
1
  module Ruby
2
2
  module OpenAI
3
- VERSION = "1.2.1".freeze
3
+ VERSION = "1.4.0".freeze
4
4
  end
5
5
  end
data/lib/ruby/openai.rb CHANGED
@@ -1,5 +1,7 @@
1
1
  require "httparty"
2
+ require "ruby/openai/engines"
2
3
  require "ruby/openai/files"
4
+ require "ruby/openai/finetunes"
3
5
  require "ruby/openai/client"
4
6
  require "ruby/openai/version"
5
7
  require "dotenv/load"
data/ruby-openai.gemspec CHANGED
@@ -9,11 +9,12 @@ Gem::Specification.new do |spec|
9
9
  spec.summary = "A Ruby gem for the OpenAI GPT-3 API"
10
10
  spec.homepage = "https://github.com/alexrudall/ruby-openai"
11
11
  spec.license = "MIT"
12
- spec.required_ruby_version = Gem::Requirement.new(">= 2.7.0")
12
+ spec.required_ruby_version = Gem::Requirement.new(">= 2.5.0")
13
13
 
14
14
  spec.metadata["homepage_uri"] = spec.homepage
15
15
  spec.metadata["source_code_uri"] = "https://github.com/alexrudall/ruby-openai"
16
16
  spec.metadata["changelog_uri"] = "https://github.com/alexrudall/ruby-openai/blob/main/CHANGELOG.md"
17
+ spec.metadata["rubygems_mfa_required"] = "true"
17
18
 
18
19
  # Specify which files should be added to the gem when it is released.
19
20
  # The `git ls-files -z` loads the files in the RubyGem that have been added into git.
@@ -25,5 +26,5 @@ Gem::Specification.new do |spec|
25
26
  spec.require_paths = ["lib"]
26
27
 
27
28
  spec.add_dependency "dotenv", "~> 2.7.6"
28
- spec.add_dependency "httparty", "~> 0.18.1"
29
+ spec.add_dependency "httparty", ">= 0.18.1", "< 0.21.0"
29
30
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: ruby-openai
3
3
  version: !ruby/object:Gem::Version
4
- version: 1.2.1
4
+ version: 1.4.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Alex
8
8
  autorequire:
9
9
  bindir: exe
10
10
  cert_chain: []
11
- date: 2021-04-11 00:00:00.000000000 Z
11
+ date: 2021-12-11 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: dotenv
@@ -28,16 +28,22 @@ dependencies:
28
28
  name: httparty
29
29
  requirement: !ruby/object:Gem::Requirement
30
30
  requirements:
31
- - - "~>"
31
+ - - ">="
32
32
  - !ruby/object:Gem::Version
33
33
  version: 0.18.1
34
+ - - "<"
35
+ - !ruby/object:Gem::Version
36
+ version: 0.21.0
34
37
  type: :runtime
35
38
  prerelease: false
36
39
  version_requirements: !ruby/object:Gem::Requirement
37
40
  requirements:
38
- - - "~>"
41
+ - - ">="
39
42
  - !ruby/object:Gem::Version
40
43
  version: 0.18.1
44
+ - - "<"
45
+ - !ruby/object:Gem::Version
46
+ version: 0.21.0
41
47
  description:
42
48
  email:
43
49
  - alexrudall@users.noreply.github.com
@@ -48,6 +54,7 @@ files:
48
54
  - ".circleci/config.yml"
49
55
  - ".github/ISSUE_TEMPLATE/bug_report.md"
50
56
  - ".github/ISSUE_TEMPLATE/feature_request.md"
57
+ - ".github/dependabot.yml"
51
58
  - ".gitignore"
52
59
  - ".rspec"
53
60
  - ".rubocop.yml"
@@ -64,7 +71,9 @@ files:
64
71
  - bin/setup
65
72
  - lib/ruby/openai.rb
66
73
  - lib/ruby/openai/client.rb
74
+ - lib/ruby/openai/engines.rb
67
75
  - lib/ruby/openai/files.rb
76
+ - lib/ruby/openai/finetunes.rb
68
77
  - lib/ruby/openai/version.rb
69
78
  - pull_request_template.md
70
79
  - ruby-openai.gemspec
@@ -75,6 +84,7 @@ metadata:
75
84
  homepage_uri: https://github.com/alexrudall/ruby-openai
76
85
  source_code_uri: https://github.com/alexrudall/ruby-openai
77
86
  changelog_uri: https://github.com/alexrudall/ruby-openai/blob/main/CHANGELOG.md
87
+ rubygems_mfa_required: 'true'
78
88
  post_install_message:
79
89
  rdoc_options: []
80
90
  require_paths:
@@ -83,14 +93,14 @@ required_ruby_version: !ruby/object:Gem::Requirement
83
93
  requirements:
84
94
  - - ">="
85
95
  - !ruby/object:Gem::Version
86
- version: 2.7.0
96
+ version: 2.5.0
87
97
  required_rubygems_version: !ruby/object:Gem::Requirement
88
98
  requirements:
89
99
  - - ">="
90
100
  - !ruby/object:Gem::Version
91
101
  version: '0'
92
102
  requirements: []
93
- rubygems_version: 3.2.3
103
+ rubygems_version: 3.1.6
94
104
  signing_key:
95
105
  specification_version: 4
96
106
  summary: A Ruby gem for the OpenAI GPT-3 API