openai.rb 0.0.0 → 0.0.1

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: e53fab536aad4674f7e0f4fee4dd5943ca9c89842a7c497e4385721d2f8b906c
4
- data.tar.gz: 876e5736c390a376f886a4dfc214645c94ccf7b54f067f0f969c6fd6890a28d6
3
+ metadata.gz: e4cc9dca4191bdbae2964d7389c67e7d43ce756481c66c5059721f0ff2404c16
4
+ data.tar.gz: 727d12a574d750d050073547699aa0b837e96982e3c660ded4be9434d0bacc80
5
5
  SHA512:
6
- metadata.gz: 162a9491e48177404cfcd7f0f962765c2adc5ae83dd913a5aa99b08c4cdcfa9a4ceb6975ff7dc52241b7c2874d67888acc4bd7750ca50e10325f981be94e7787
7
- data.tar.gz: 0ea4ed08887bf15d62db2a8393e8268cd5002011957c5268b93be44fac1e45c8fbd067ab0ef3493c5e9e22e3fac9e36b00b53980dd84975fe0dee6065556ca25
6
+ metadata.gz: 2d5415f3d67872d7b57bed8e1f8101873aee5b80559c8079c6ee7b450517a913b1d6f637bc5bae70ecb5ad9207d065f22459db699d65bf3d9217e124079dc32d
7
+ data.tar.gz: d4d6364fbcaf8534e2743f3f6056285731642474d8006a436c43cac0bb011c3c5fe25d75410649d8fe18ed85a634dca5fcf41bfba8dfefe9da595d1d44dd1e79
data/.ruby-version CHANGED
@@ -1 +1 @@
1
- 2.6.9
1
+ 2.7.7
data/Gemfile CHANGED
@@ -11,13 +11,13 @@ group :test do
11
11
  end
12
12
 
13
13
  group :lint do
14
- gem 'rubocop'
15
- gem 'rubocop-rspec'
14
+ gem 'rubocop', '~> 1.31.1'
15
+ gem 'rubocop-rspec', '~> 2.11.1'
16
16
  end
17
17
 
18
18
  gem 'pry', '~> 0.13.1'
19
19
  gem 'pry-byebug', '~> 3.9'
20
20
 
21
- gem "dotenv", "~> 2.8"
21
+ gem 'dotenv', '~> 2.8'
22
22
 
23
- gem "slop", "~> 4.10"
23
+ gem 'slop', '~> 4.10'
data/Gemfile.lock CHANGED
@@ -1,10 +1,14 @@
1
1
  PATH
2
2
  remote: .
3
3
  specs:
4
- openai.rb (0.0.0)
4
+ openai.rb (0.0.1)
5
+ abstract_type (~> 0.0.7)
5
6
  anima (~> 0.3)
6
7
  concord (~> 0.1)
7
8
  http (~> 5.1)
9
+ ice_nine (~> 0.11.x)
10
+ memoizable (~> 0.4.2)
11
+ tiktoken_ruby (~> 0.0.3)
8
12
 
9
13
  GEM
10
14
  remote: https://rubygems.org/
@@ -62,7 +66,7 @@ GEM
62
66
  public_suffix (5.0.1)
63
67
  rainbow (3.1.1)
64
68
  rake (13.0.6)
65
- regexp_parser (2.7.0)
69
+ regexp_parser (2.6.2)
66
70
  rexml (3.2.5)
67
71
  rspec (3.12.0)
68
72
  rspec-core (~> 3.12.0)
@@ -77,26 +81,24 @@ GEM
77
81
  diff-lcs (>= 1.2.0, < 2.0)
78
82
  rspec-support (~> 3.12.0)
79
83
  rspec-support (3.12.0)
80
- rubocop (1.48.1)
84
+ rubocop (1.31.2)
81
85
  json (~> 2.3)
82
86
  parallel (~> 1.10)
83
- parser (>= 3.2.0.0)
87
+ parser (>= 3.1.0.0)
84
88
  rainbow (>= 2.2.2, < 4.0)
85
89
  regexp_parser (>= 1.8, < 3.0)
86
90
  rexml (>= 3.2.5, < 4.0)
87
- rubocop-ast (>= 1.26.0, < 2.0)
91
+ rubocop-ast (>= 1.18.0, < 2.0)
88
92
  ruby-progressbar (~> 1.7)
89
- unicode-display_width (>= 2.4.0, < 3.0)
93
+ unicode-display_width (>= 1.4.0, < 3.0)
90
94
  rubocop-ast (1.28.0)
91
95
  parser (>= 3.2.1.0)
92
- rubocop-capybara (2.17.1)
93
- rubocop (~> 1.41)
94
- rubocop-rspec (2.19.0)
95
- rubocop (~> 1.33)
96
- rubocop-capybara (~> 2.17)
96
+ rubocop-rspec (2.11.1)
97
+ rubocop (~> 1.19)
97
98
  ruby-progressbar (1.13.0)
98
99
  slop (4.10.1)
99
100
  thread_safe (0.3.6)
101
+ tiktoken_ruby (0.0.3-arm64-darwin)
100
102
  unf (0.1.4)
101
103
  unf_ext
102
104
  unf_ext (0.0.8.2)
@@ -104,6 +106,7 @@ GEM
104
106
 
105
107
  PLATFORMS
106
108
  arm64-darwin-21
109
+ ruby
107
110
 
108
111
  DEPENDENCIES
109
112
  dotenv (~> 2.8)
@@ -111,12 +114,12 @@ DEPENDENCIES
111
114
  pry (~> 0.13.1)
112
115
  pry-byebug (~> 3.9)
113
116
  rspec (~> 3.12)
114
- rubocop
115
- rubocop-rspec
117
+ rubocop (~> 1.31.1)
118
+ rubocop-rspec (~> 2.11.1)
116
119
  slop (~> 4.10)
117
120
 
118
121
  RUBY VERSION
119
- ruby 2.6.9p207
122
+ ruby 2.7.7p221
120
123
 
121
124
  BUNDLED WITH
122
125
  2.3.9
data/README.md ADDED
@@ -0,0 +1,401 @@
1
+ # OpenAI.rb
2
+
3
+ A comprehensive (as of March 25th, 2023) OpenAI API wrapper with built-in support for:
4
+
5
+ * caching
6
+ * tokenization
7
+ * response streaming
8
+ * a simple chainable abstraction for chats
9
+
10
+ ## Install and Setup
11
+
12
+ To install, you should be able to do:
13
+
14
+ ```sh
15
+ $ gem install openai.rb
16
+ ```
17
+
18
+ Usage:
19
+
20
+ ```ruby
21
+ require 'openai'
22
+
23
+ openai = OpenAI.create(ENV.fetch('OPENAI_API_KEY'))
24
+ openai.api.chat_completions.create(...)
25
+ openai.api.embeddings.create(...)
26
+ openai.api.models.list
27
+ # etc
28
+ ```
29
+
30
+ ### Caching
31
+
32
+ Caching for requests is built-in. The supported caching strategy writes response files in a directory you
33
+ specify (I chose to write as separate files since I often want to dig around and see the raw data I'm getting
34
+ back).
35
+
36
+ To enable caching:
37
+
38
+ ```ruby
39
+ require 'openai'
40
+
41
+ # This directory should already exist
42
+ cache_dir = Pathname.new('~/.cache/openai')
43
+
44
+ openai = OpenAI.create(ENV.fetch('OPENAI_API_KEY'), cache: cache_dir)
45
+
46
+ # Will hit the API:
47
+ openai.api.completions.create(model: 'text-davinci-002', prompt: 'Say hi')
48
+ # Will reuse the cached response:
49
+ openai.api.completions.create(model: 'text-davinci-002', prompt: 'Say hi')
50
+ ```
51
+
52
+ NOTE: Delete requests are not cached
53
+
54
+ ### Tokens
55
+
56
+ ```ruby
57
+ # Get encoder for a model and encode
58
+ openai.tokens.for_model('gpt-4').encode('Hello world')
59
+
60
+ # Get encoder by name
61
+ openai.tokens.get('cl100k_base').encode('Hello world')
62
+
63
+ # Get number of tokens
64
+ openai.tokens.for_model('gpt-4').num_tokens('Hello, world!') # => 4
65
+ ```
66
+
67
+ ### Chat Abstraction
68
+
69
+ You can use `openai.chat` in order to create a simple chainable chat interface with a model:
70
+
71
+ ```ruby
72
+ openai = OpenAI.create(ENV.fetch('OPENAI_API_KEY'))
73
+
74
+ chat = openai.chat(model: 'gpt-3.5-turbo')
75
+
76
+ chat =
77
+ chat
78
+ .system('You are a chatbot that talks and acts like scooby doo.')
79
+ .user('Hi how are you doing today?')
80
+ .submit # API call
81
+ .user('Nice. What kind of snacks do you like?')
82
+ .submit # API call
83
+
84
+ puts chat.to_log_format
85
+ ```
86
+
87
+ Which results in this output:
88
+
89
+ > SYSTEM: You are a chatbot that talks and acts like scooby doo.
90
+ >
91
+ > USER: Hi how are you doing today?
92
+ >
93
+ > ASSISTANT: Ruh-roh! Hello there, buddy! Scooby-Dooby-Doo is doing great! How about you, pal?
94
+ >
95
+ > USER: Nice. What kind of snacks do you like?
96
+ >
97
+ > ASSISTANT: Ruh-roh! Scooby-Dooby-Doo loves all kinds of snacks, especially Scooby Snacks! They are my favorite! But I also love bones, pizza, hamburgers, and any kind of food that's tasty and yummy. How about you, buddy? Do you have a favorite snack?
98
+
99
+ ## API
100
+
101
+ ### Audio
102
+
103
+ Transcribing audio:
104
+
105
+ ```ruby
106
+ transcription = openai.api.audio.transcribe(
107
+ file: '/path/to/sample.mp3',
108
+ model: 'model-id'
109
+ )
110
+
111
+ transcription.text # => "Imagine the wildest idea that you've ever had..."
112
+ ```
113
+
114
+ Translating audio:
115
+
116
+ ```ruby
117
+ translation = openai.api.audio.translate(
118
+ file: '/path/to/german/sample.mp3',
119
+ model: 'model-id',
120
+ )
121
+
122
+ translation.text # => "Hello, my name is Wolfgang and I come from Germany. Where are you heading today?"
123
+ ```
124
+
125
+ ### Chat completions
126
+
127
+ Generating a chat completion:
128
+
129
+ ```ruby
130
+ completion = openai.api.chat_completions.create(
131
+ model: 'gpt-3.5-turbo',
132
+ messages: [
133
+ { role: "user", content: "Hello" }
134
+ ]
135
+ )
136
+
137
+ completion.choices.first.message.content # => "\n\nHello there, how may I assist you today?"
138
+ ```
139
+
140
+ Streaming chat completion responses:
141
+
142
+ ```ruby
143
+ completion = openai.api.chat_completions.create(
144
+ model: 'gpt-3.5-turbo',
145
+ messages: [{ role: "user", content: "Hello" }],
146
+ stream: true
147
+ ) do |completion|
148
+ print completion.choices.first.delta.content
149
+ end
150
+
151
+ # >> "\n\nHello there, how may I assist you today?"
152
+ ```
153
+
154
+ ### Completions
155
+
156
+ Generating a completion:
157
+
158
+ ```ruby
159
+ completion = openai.api.completions.create(
160
+ model: 'text-davinci-002',
161
+ prompt: 'Hello, world!'
162
+ )
163
+
164
+ completion.id # => "cmpl-uqkvlQyYK7bGYrRHQ0eXlWi7"
165
+ completion.model # => "text-davinci-003"
166
+ completion.choices.first.text # => "\n\nThis is indeed a test"
167
+ ```
168
+
169
+ Streaming responses:
170
+
171
+ ```ruby
172
+ completion = openai.api.completions.create(
173
+ model: 'text-davinci-002',
174
+ prompt: 'Say hello world',
175
+ stream: true
176
+ ) do |completion|
177
+ puts completion.choices.first.text
178
+ end
179
+
180
+ # >> "He"
181
+ # >> "llo,"
182
+ # >> " world"
183
+ # >> "!"
184
+ ```
185
+
186
+ ### Edits
187
+
188
+ Creating an edit:
189
+
190
+ ```ruby
191
+ edit = openai.api.edits.create(
192
+ model: 'text-davinci-002',
193
+ input: "What day of the wek is it?",
194
+ instruction: "Fix the spelling mistake"
195
+ )
196
+
197
+ edit.object # => "edit"
198
+ edit.choices.first.text # => "What day of the week is it?"
199
+ ```
200
+
201
+ ### Embeddings
202
+
203
+ Creating an embedding vector for a given input text:
204
+
205
+ ```ruby
206
+ embedding = openai.api.embeddings.create(
207
+ model: 'text-embedding-ada-002',
208
+ input: 'Hello, world!'
209
+ )
210
+
211
+ embedding.object # => 'list'
212
+ embedding.data.first.object # => 'embedding'
213
+ embedding.data.first.embedding.size # => 1536
214
+ ```
215
+
216
+ ### Files
217
+
218
+ Upload a file:
219
+
220
+ ```ruby
221
+
222
+ file = openai.api.files.create(
223
+ file: '/path/to/file.jsonl',
224
+ purpose: 'fine-tune'
225
+ )
226
+
227
+ file.id # => 'file-XjGxS3KTG0uNmNOK362iJua3'
228
+ file.filename # => 'sample.jsonl'
229
+ file.purpose # => 'fine-tune'
230
+ file.deleted? # => nil
231
+ ```
232
+
233
+ Get a list of files:
234
+
235
+ ```ruby
236
+
237
+ files = openai.api.files.list
238
+
239
+ files.data.size # => 2
240
+ files.data.first.filename # => 'train.jsonl'
241
+ ```
242
+
243
+ Fetch a specific file’s information:
244
+
245
+ ```ruby
246
+
247
+ file = openai.api.files.fetch('file-XjGxS3KTG0uNmNOK362iJua3')
248
+
249
+ file.filename # => 'mydata.jsonl'
250
+ file.bytes # => 140
251
+ file.created_at # => 1613779657
252
+ file.purpose # => 'fine-tune'
253
+ file.object # => 'file'
254
+ ```
255
+
256
+ Get the contents of a file:
257
+
258
+ ```ruby
259
+
260
+ response = openai.api.files.get_content('file-XjGxS3KTG0uNmNOK362iJua3')
261
+
262
+ puts response # => (whatever you uploaded)
263
+ ```
264
+
265
+ Delete a file:
266
+
267
+ ```ruby
268
+
269
+ file = openai.api.files.delete('file-XjGxS3KTG0uNmNOK362iJua3')
270
+
271
+ file.deleted? # => true
272
+ ```
273
+
274
+ ### Fine-tunes
275
+
276
+ Creating a fine tune:
277
+
278
+ ```ruby
279
+ fine_tune = openai.api.fine_tunes.create(training_file: 'file-XGinujblHPwGLSztz8cPS8XY')
280
+
281
+ fine_tune.id # => "ft-AF1WoRqd3aJAHsqc9NY7iL8F"
282
+ ```
283
+
284
+ Listing fine tunes:
285
+
286
+ ```ruby
287
+ fine_tunes = openai.api.fine_tunes.list
288
+
289
+ fine_tunes.data.first.id # => "ft-AF1WoRqd3aJAHsqc9NY7iL8F"
290
+ fine_tunes.data.first.status # => "pending"
291
+ ```
292
+
293
+
294
+ Fetching a fine tune:
295
+
296
+ ```ruby
297
+ fine_tune = openai.api.fine_tunes.fetch('ft-AF1WoRqd3aJAHsqc9NY7iL8F')
298
+ fine_tune.id # => "ft-AF1WoRqd3aJAHsqc9NY7iL8F"
299
+ ```
300
+
301
+ Canceling a fine tune:
302
+
303
+ ```ruby
304
+
305
+ # Canceling a fine tune
306
+ fine_tune = openai.api.fine_tunes.cancel('ft-xhrpBbvVUzYGo8oUO1FY4nI7')
307
+ fine_tune.id # => "ft-xhrpBbvVUzYGo8oUO1FY4nI7"
308
+ fine_tune.status # => "cancelled"
309
+ ```
310
+
311
+ Listing fine tune events:
312
+
313
+ ```ruby
314
+ events = openai.api.fine_tunes.list_events('fine-tune-id')
315
+ ```
316
+
317
+ ### Images
318
+
319
+ #### Generating Images
320
+
321
+ Create an image with the specified prompt and size:
322
+
323
+ ```ruby
324
+ images = openai.api.images.create(prompt: 'a bird in the forest', size: '512x512')
325
+
326
+ images.data.first.url # => "https://example.com/image1.png"
327
+ ```
328
+
329
+ #### Editing Images
330
+
331
+ Edit an image with the specified parameters:
332
+
333
+ ```ruby
334
+ response = openai.api.images.edit(
335
+ image: '/path/to/some_rgba.png',
336
+ mask: '/path/to/some_rgba_mask.png',
337
+ prompt: 'Draw a red hat on the person in the image',
338
+ n: 1,
339
+ size: '512x512',
340
+ response_format: 'url',
341
+ user: 'user-123'
342
+ )
343
+
344
+ response.created # => 1589478378
345
+ response.data.first.url # => "https://..."
346
+ ```
347
+
348
+ #### Creating Image Variations
349
+
350
+ Create image variations of the specified image with the specified parameters:
351
+
352
+ ```ruby
353
+ image_variations = openai.api.images.create_variation(
354
+ image: '/path/to/some_rgba.png',
355
+ n: 2,
356
+ size: '512x512',
357
+ response_format: 'url',
358
+ user: 'user123'
359
+ )
360
+
361
+ image_variations.created # => 1589478378
362
+ image_variations.data.map(&:url) # => ["https://...", "https://..."]
363
+ ```
364
+
365
+ ### Models
366
+
367
+ Listing all models:
368
+
369
+ ```ruby
370
+ models = openai.api.models.list
371
+
372
+ models.data.first.id # => "model-id-0"
373
+ models.data.size # => 3
374
+ ```
375
+
376
+ Retrieving a model:
377
+
378
+ ```ruby
379
+ model = openai.api.models.fetch('text-davinci-002')
380
+
381
+ model.id # => "text-davinci-002"
382
+ model.object # => "model"
383
+ model.owned_by # => "openai"
384
+ model.permission # => ["query", "completions", "models:read", "models:write", "engine:read", "engine:write"]
385
+ ```
386
+
387
+ ### Moderations
388
+
389
+ Moderate text:
390
+
391
+ ```ruby
392
+ moderation = openai.api.moderations.create(
393
+ input: 'This is a test',
394
+ model: 'text-moderation-001'
395
+ )
396
+
397
+ moderation.id # => "modr-5MWoLO"
398
+ moderation.model # => "text-moderation-001"
399
+ moderation.results.first.categories.hate # => false
400
+ moderation.results.first.categories.hate_threatening # => true
401
+ ```
data/bin/codegen CHANGED
@@ -1,4 +1,5 @@
1
1
  #!/usr/bin/env ruby
2
+ # frozen_string_literal: true
2
3
 
3
4
  require 'bundler/setup'
4
5
  require 'openai'
@@ -139,10 +140,12 @@ MSG
139
140
 
140
141
  assistant_response_json_example = <<~RUBY
141
142
  # api call
142
- def create_completion(model:, **kwargs)
143
- Response::Completion.from_json(
144
- post('/v1/completions', model: model, **kwargs)
145
- )
143
+ class Completion < self
144
+ def create(model:, **kwargs)
145
+ Response::Completion.from_json(
146
+ post('/v1/completions', model: model, **kwargs)
147
+ )
148
+ end
146
149
  end
147
150
 
148
151
  # wrapper
@@ -169,7 +172,11 @@ assistant_response_json_example = <<~RUBY
169
172
  end
170
173
 
171
174
  # test
172
- describe '#create_completion' do
175
+ RSpec.describe OpenAI::API, '#completions' do
176
+ include_context 'an API Resource'
177
+
178
+ let(:resource) { api.completions }
179
+
173
180
  let(:response_body) do
174
181
  {
175
182
  "id": 'cmpl-uqkvlQyYK7bGYrRHQ0eXlWi7',
@@ -192,16 +199,8 @@ assistant_response_json_example = <<~RUBY
192
199
  }
193
200
  end
194
201
 
195
- let(:response) do
196
- instance_double(
197
- HTTP::Response,
198
- status: HTTP::Response::Status.new(200),
199
- body: JSON.dump(response_body)
200
- )
201
- end
202
-
203
202
  it 'can create a completion' do
204
- completion = client.create_completion(model: 'text-davinci-002', prompt: 'Hello, world!')
203
+ completion = resource.create(model: 'text-davinci-002', prompt: 'Hello, world!')
205
204
 
206
205
  expect(http)
207
206
  .to have_received(:post)
@@ -222,12 +221,12 @@ RUBY
222
221
 
223
222
  assistant_response_form_example = <<~RUBY
224
223
  # api call
225
- def create_file(file:, purpose:)
226
- absolute_path = Pathname.new(file).expand_path.to_s
227
- form_file = HTTP::FormData::File.new(absolute_path)
228
- Response::File.from_json(
229
- post_form_multipart('/v1/files', file: form_file, purpose: purpose)
230
- )
224
+ class File < self
225
+ def create(file:, purpose:)
226
+ Response::File.from_json(
227
+ post_form_multipart('/v1/files', file: form_file(file), purpose: purpose)
228
+ )
229
+ end
231
230
  end
232
231
 
233
232
  # wrapper
@@ -241,48 +240,58 @@ assistant_response_form_example = <<~RUBY
241
240
  optional_field :deleted?, path: :deleted
242
241
  end
243
242
 
243
+ class FileList < JSONPayload
244
+ field :data, wrapper: File
245
+ field :object
246
+ end
247
+
244
248
  # test
245
- describe '#create_file' do
249
+ RSpec.describe OpenAI::API, '#files' do
250
+ include_context 'an API Resource'
251
+
252
+ let(:resource) { api.files }
246
253
  let(:sample_file) { OpenAISpec::SPEC_ROOT.join('data/sample.jsonl') }
247
254
 
248
- let(:response_body) do
249
- {
250
- "id": 'file-XjGxS3KTG0uNmNOK362iJua3',
251
- "object": 'file',
252
- "bytes": 140,
253
- "created_at": 1_613_779_121,
254
- "filename": 'sample.jsonl',
255
- "purpose": 'fine-tune'
256
- }
257
- end
255
+ context 'when creating a file' do
256
+ let(:response_body) do
257
+ {
258
+ "id": 'file-XjGxS3KTG0uNmNOK362iJua3',
259
+ "object": 'file',
260
+ "bytes": 140,
261
+ "created_at": 1_613_779_121,
262
+ "filename": 'sample.jsonl',
263
+ "purpose": 'fine-tune'
264
+ }
265
+ end
258
266
 
259
- it 'can create a file' do
260
- file = client.create_file(
261
- file: sample_file,
262
- purpose: 'fine-tune'
263
- )
267
+ it 'can create a file' do
268
+ file = resource.create(
269
+ file: sample_file,
270
+ purpose: 'fine-tune'
271
+ )
264
272
 
265
- expect(http)
266
- .to have_received(:post)
267
- .with(
268
- 'https://api.openai.com/v1/files',
269
- hash_including(
270
- form: hash_including(
271
- {
272
- file: instance_of(HTTP::FormData::File),
273
- purpose: 'fine-tune'
274
- }
273
+ expect(http)
274
+ .to have_received(:post)
275
+ .with(
276
+ 'https://api.openai.com/v1/files',
277
+ hash_including(
278
+ form: hash_including(
279
+ {
280
+ file: instance_of(HTTP::FormData::File),
281
+ purpose: 'fine-tune'
282
+ }
283
+ )
275
284
  )
276
285
  )
277
- )
278
286
 
279
- expect(file.id).to eql('file-XjGxS3KTG0uNmNOK362iJua3')
280
- expect(file.object).to eql('file')
281
- expect(file.bytes).to eql(140)
282
- expect(file.created_at).to eql(1_613_779_121)
283
- expect(file.filename).to eql('sample.jsonl')
284
- expect(file.purpose).to eql('fine-tune')
285
- expect(file.deleted?).to be(nil)
287
+ expect(file.id).to eql('file-XjGxS3KTG0uNmNOK362iJua3')
288
+ expect(file.object).to eql('file')
289
+ expect(file.bytes).to eql(140)
290
+ expect(file.created_at).to eql(1_613_779_121)
291
+ expect(file.filename).to eql('sample.jsonl')
292
+ expect(file.purpose).to eql('fine-tune')
293
+ expect(file.deleted?).to be(nil)
294
+ end
286
295
  end
287
296
  end
288
297
  RUBY
@@ -359,7 +368,7 @@ cache_file = cache_dir.join("#{codegen.verb}_#{codegen.route.gsub('/', '_')}.txt
359
368
  if cache_file.file?
360
369
  puts cache_file.read
361
370
  else
362
- completion = api.create_chat_completion(
371
+ completion = api.chat_completions.create(
363
372
  model: 'gpt-3.5-turbo',
364
373
  messages: history,
365
374
  max_tokens: 2000,