openai.rb 0.0.0

checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA256:
+   metadata.gz: e53fab536aad4674f7e0f4fee4dd5943ca9c89842a7c497e4385721d2f8b906c
+   data.tar.gz: 876e5736c390a376f886a4dfc214645c94ccf7b54f067f0f969c6fd6890a28d6
+ SHA512:
+   metadata.gz: 162a9491e48177404cfcd7f0f962765c2adc5ae83dd913a5aa99b08c4cdcfa9a4ceb6975ff7dc52241b7c2874d67888acc4bd7750ca50e10325f981be94e7787
+   data.tar.gz: 0ea4ed08887bf15d62db2a8393e8268cd5002011957c5268b93be44fac1e45c8fbd067ab0ef3493c5e9e22e3fac9e36b00b53980dd84975fe0dee6065556ca25
data/.gitignore ADDED
@@ -0,0 +1,3 @@
+ tmp
+ .bundle/
+ .env
data/.rspec ADDED
@@ -0,0 +1,3 @@
+ --order rand
+ --color
+ --require spec_helper
data/.ruby-version ADDED
@@ -0,0 +1 @@
+ 2.6.9
data/Gemfile ADDED
@@ -0,0 +1,23 @@
+ # frozen_string_literal: true
+
+ source 'https://rubygems.org'
+
+ ruby File.read('.ruby-version').chomp
+
+ gemspec
+
+ group :test do
+   gem 'rspec', '~> 3.12'
+ end
+
+ group :lint do
+   gem 'rubocop'
+   gem 'rubocop-rspec'
+ end
+
+ gem 'pry', '~> 0.13.1'
+ gem 'pry-byebug', '~> 3.9'
+
+ gem "dotenv", "~> 2.8"
+
+ gem "slop", "~> 4.10"
data/Gemfile.lock ADDED
@@ -0,0 +1,122 @@
+ PATH
+   remote: .
+   specs:
+     openai.rb (0.0.0)
+       anima (~> 0.3)
+       concord (~> 0.1)
+       http (~> 5.1)
+
+ GEM
+   remote: https://rubygems.org/
+   specs:
+     abstract_type (0.0.7)
+     adamantium (0.2.0)
+       ice_nine (~> 0.11.0)
+       memoizable (~> 0.4.0)
+     addressable (2.8.1)
+       public_suffix (>= 2.0.2, < 6.0)
+     anima (0.3.2)
+       abstract_type (~> 0.0.7)
+       adamantium (~> 0.2)
+       equalizer (~> 0.0.11)
+     ast (2.4.2)
+     byebug (11.1.3)
+     coderay (1.1.3)
+     concord (0.1.6)
+       adamantium (~> 0.2.0)
+       equalizer (~> 0.0.9)
+     diff-lcs (1.5.0)
+     domain_name (0.5.20190701)
+       unf (>= 0.0.5, < 1.0.0)
+     dotenv (2.8.1)
+     equalizer (0.0.11)
+     ffi (1.15.5)
+     ffi-compiler (1.0.1)
+       ffi (>= 1.0.0)
+       rake
+     http (5.1.1)
+       addressable (~> 2.8)
+       http-cookie (~> 1.0)
+       http-form_data (~> 2.2)
+       llhttp-ffi (~> 0.4.0)
+     http-cookie (1.0.5)
+       domain_name (~> 0.5)
+     http-form_data (2.3.0)
+     ice_nine (0.11.2)
+     json (2.6.3)
+     llhttp-ffi (0.4.0)
+       ffi-compiler (~> 1.0)
+       rake (~> 13.0)
+     memoizable (0.4.2)
+       thread_safe (~> 0.3, >= 0.3.1)
+     method_source (1.0.0)
+     parallel (1.22.1)
+     parser (3.2.1.1)
+       ast (~> 2.4.1)
+     pry (0.13.1)
+       coderay (~> 1.1)
+       method_source (~> 1.0)
+     pry-byebug (3.9.0)
+       byebug (~> 11.0)
+       pry (~> 0.13.0)
+     public_suffix (5.0.1)
+     rainbow (3.1.1)
+     rake (13.0.6)
+     regexp_parser (2.7.0)
+     rexml (3.2.5)
+     rspec (3.12.0)
+       rspec-core (~> 3.12.0)
+       rspec-expectations (~> 3.12.0)
+       rspec-mocks (~> 3.12.0)
+     rspec-core (3.12.1)
+       rspec-support (~> 3.12.0)
+     rspec-expectations (3.12.2)
+       diff-lcs (>= 1.2.0, < 2.0)
+       rspec-support (~> 3.12.0)
+     rspec-mocks (3.12.4)
+       diff-lcs (>= 1.2.0, < 2.0)
+       rspec-support (~> 3.12.0)
+     rspec-support (3.12.0)
+     rubocop (1.48.1)
+       json (~> 2.3)
+       parallel (~> 1.10)
+       parser (>= 3.2.0.0)
+       rainbow (>= 2.2.2, < 4.0)
+       regexp_parser (>= 1.8, < 3.0)
+       rexml (>= 3.2.5, < 4.0)
+       rubocop-ast (>= 1.26.0, < 2.0)
+       ruby-progressbar (~> 1.7)
+       unicode-display_width (>= 2.4.0, < 3.0)
+     rubocop-ast (1.28.0)
+       parser (>= 3.2.1.0)
+     rubocop-capybara (2.17.1)
+       rubocop (~> 1.41)
+     rubocop-rspec (2.19.0)
+       rubocop (~> 1.33)
+       rubocop-capybara (~> 2.17)
+     ruby-progressbar (1.13.0)
+     slop (4.10.1)
+     thread_safe (0.3.6)
+     unf (0.1.4)
+       unf_ext
+     unf_ext (0.0.8.2)
+     unicode-display_width (2.4.2)
+
+ PLATFORMS
+   arm64-darwin-21
+
+ DEPENDENCIES
+   dotenv (~> 2.8)
+   openai.rb!
+   pry (~> 0.13.1)
+   pry-byebug (~> 3.9)
+   rspec (~> 3.12)
+   rubocop
+   rubocop-rspec
+   slop (~> 4.10)
+
+ RUBY VERSION
+    ruby 2.6.9p207
+
+ BUNDLED WITH
+    2.3.9
data/bin/codegen ADDED
@@ -0,0 +1,371 @@
+ #!/usr/bin/env ruby
+
+ require 'bundler/setup'
+ require 'openai'
+ require 'dotenv'
+ require 'pry'
+ require 'pry-byebug'
+ require 'slop'
+ require 'yaml'
+
+ Dotenv.load
+ api = OpenAI.new(ENV.fetch('OPENAI_API_KEY'))
+
+ class Codegen
+   include Anima.new(:openapi_file, :route, :verb, :mime)
+   include Memoizable
+
+   def self.parse_cli(argv)
+     opts = Slop.parse(argv) do |o|
+       o.string '--openapi-file', 'Path to OpenAPI file', required: true
+       o.string '--route', 'API route', required: true
+       o.string '--verb', 'HTTP verb', required: true
+       o.string '--mime', 'Mime type', default: 'application/json'
+     end
+
+     openapi_file = Pathname.new(opts[:openapi_file])
+     raise ArgumentError, "OpenAPI file #{openapi_file} does not exist" unless openapi_file.exist?
+
+     route = opts[:route]
+     verb = opts[:verb]
+     mime = opts[:mime]
+
+     new(openapi_file: openapi_file, route: route, verb: verb, mime: mime)
+   end
+
+   def validate!
+     paths = doc.fetch('paths')
+     unless paths.key?(route)
+
+       raise <<~ERR
+         Invalid route!
+
+         Given: #{route}
+
+         Valid routes:
+
+         #{paths.keys.sort.join("\n")}
+       ERR
+
+     end
+
+     path_def = paths.fetch(route)
+
+     return if path_def.key?(verb)
+
+     raise <<~ERR
+       Invalid verb!
+
+       Given: #{verb}
+
+       Valid verbs: #{path_def.keys.sort.join(', ')}
+     ERR
+   end
+
+   def get?
+     verb == 'get'
+   end
+
+   def no_request_body?
+     !action.key?('requestBody')
+   end
+
+   def sample_response
+     action.fetch('x-oaiMeta').fetch('response')
+   end
+
+   def request_body_summary
+     fields = request_body.fetch('properties').keys
+     required = request_body.fetch('required')
+
+     { field: fields, required: required }
+   end
+
+   def request_body
+     ref =
+       if mime == 'application/json'
+         deep_fetch(action, ['requestBody', 'content', 'application/json', 'schema', '$ref'])
+       elsif mime == 'multipart/form-data'
+         deep_fetch(action, ['requestBody', 'content', 'multipart/form-data', 'schema', '$ref'])
+       else
+         raise "Unknown mime type #{mime}"
+       end
+     get_ref(ref)
+   end
+   memoize :request_body
+
+   def response_body
+     response = action.fetch('responses').first.last
+     ref = deep_fetch(response, %w[content application/json schema $ref])
+     get_ref(ref)
+   end
+
+   def get_ref(ref)
+     ref_path = ref.delete_prefix('#/').split('/')
+     deep_fetch(doc, ref_path)
+   end
+
+   def action
+     doc.fetch('paths').fetch(route).fetch(verb)
+   end
+
+   def doc
+     @doc ||= YAML.load_file(openapi_file)
+   end
+
+   def deep_fetch(obj, path)
+     path.reduce(obj) do |acc, key|
+       acc.fetch(key) do
+         raise "No key #{key} in #{acc.inspect}"
+       end
+     end
+   end
+ end
+
+ codegen = Codegen.parse_cli(ARGV)
+ codegen.validate!
+
+ user_message_template = <<~MSG
+   Please create an API call, a response wrapper, and a test for this request.
+   OpenAPI context in JSON:
+
+   ACTION: %<verb>s %<route>s
+   REQUEST MIME TYPE: %<mime>s
+
+   REQUEST SUMMARY: %<summary>s
+
+   SAMPLE RESPONSE: %<response>s
+ MSG
+
+ assistant_response_json_example = <<~RUBY
+   # api call
+   def create_completion(model:, **kwargs)
+     Response::Completion.from_json(
+       post('/v1/completions', model: model, **kwargs)
+     )
+   end
+
+   # wrapper
+   class Completion < JSONPayload
+     class Choice < JSONPayload
+       field :text
+       field :index
+       field :logprobs
+       field :finish_reason
+     end
+
+     class Usage < JSONPayload
+       field :prompt_tokens
+       field :completion_tokens
+       field :total_tokens
+     end
+
+     field :id
+     field :object
+     field :created
+     field :model
+     field :choices, wrapper: Choice
+     field :usage, wrapper: Usage
+   end
+
+   # test
+   describe '#create_completion' do
+     let(:response_body) do
+       {
+         "id": 'cmpl-uqkvlQyYK7bGYrRHQ0eXlWi7',
+         "object": 'text_completion',
+         "created": 1_589_478_378,
+         "model": 'text-davinci-003',
+         "choices": [
+           {
+             "text": "\n\nThis is indeed a test",
+             "index": 0,
+             "logprobs": nil,
+             "finish_reason": 'length'
+           }
+         ],
+         "usage": {
+           "prompt_tokens": 5,
+           "completion_tokens": 7,
+           "total_tokens": 12
+         }
+       }
+     end
+
+     let(:response) do
+       instance_double(
+         HTTP::Response,
+         status: HTTP::Response::Status.new(200),
+         body: JSON.dump(response_body)
+       )
+     end
+
+     it 'can create a completion' do
+       completion = client.create_completion(model: 'text-davinci-002', prompt: 'Hello, world!')
+
+       expect(http)
+         .to have_received(:post)
+         .with('https://api.openai.com/v1/completions', hash_including(:json))
+
+       expect(completion.id).to eql('cmpl-uqkvlQyYK7bGYrRHQ0eXlWi7')
+       expect(completion.model).to eql('text-davinci-003')
+       expect(completion.choices.first.text).to eql("\n\nThis is indeed a test")
+       expect(completion.choices.first.index).to eql(0)
+       expect(completion.choices.first.logprobs).to be_nil
+       expect(completion.choices.first.finish_reason).to eql('length')
+       expect(completion.usage.prompt_tokens).to eql(5)
+       expect(completion.usage.completion_tokens).to eql(7)
+       expect(completion.usage.total_tokens).to eql(12)
+     end
+   end
+ RUBY
+
+ assistant_response_form_example = <<~RUBY
+   # api call
+   def create_file(file:, purpose:)
+     absolute_path = Pathname.new(file).expand_path.to_s
+     form_file = HTTP::FormData::File.new(absolute_path)
+     Response::File.from_json(
+       post_form_multipart('/v1/files', file: form_file, purpose: purpose)
+     )
+   end
+
+   # wrapper
+   class File < JSONPayload
+     field :id
+     field :object
+     field :bytes
+     field :created_at
+     field :filename
+     field :purpose
+     optional_field :deleted?, path: :deleted
+   end
+
+   # test
+   describe '#create_file' do
+     let(:sample_file) { OpenAISpec::SPEC_ROOT.join('data/sample.jsonl') }
+
+     let(:response_body) do
+       {
+         "id": 'file-XjGxS3KTG0uNmNOK362iJua3',
+         "object": 'file',
+         "bytes": 140,
+         "created_at": 1_613_779_121,
+         "filename": 'sample.jsonl',
+         "purpose": 'fine-tune'
+       }
+     end
+
+     it 'can create a file' do
+       file = client.create_file(
+         file: sample_file,
+         purpose: 'fine-tune'
+       )
+
+       expect(http)
+         .to have_received(:post)
+         .with(
+           'https://api.openai.com/v1/files',
+           hash_including(
+             form: hash_including(
+               {
+                 file: instance_of(HTTP::FormData::File),
+                 purpose: 'fine-tune'
+               }
+             )
+           )
+         )
+
+       expect(file.id).to eql('file-XjGxS3KTG0uNmNOK362iJua3')
+       expect(file.object).to eql('file')
+       expect(file.bytes).to eql(140)
+       expect(file.created_at).to eql(1_613_779_121)
+       expect(file.filename).to eql('sample.jsonl')
+       expect(file.purpose).to eql('fine-tune')
+       expect(file.deleted?).to be(nil)
+     end
+   end
+ RUBY
+
+ create_completion_example =
+   codegen.with(
+     route: '/completions',
+     verb: 'post',
+     mime: 'application/json'
+   )
+
+ history_json_example = [
+   {
+     role: 'user',
+     content: format(
+       user_message_template,
+       mime: create_completion_example.mime,
+       verb: create_completion_example.verb,
+       route: create_completion_example.route,
+       summary: create_completion_example.request_body_summary,
+       response: create_completion_example.sample_response
+     )
+   },
+   {
+     role: 'assistant',
+     content: assistant_response_json_example
+   }
+ ]
+
+ create_file_example =
+   codegen.with(
+     route: '/files',
+     verb: 'post',
+     mime: 'multipart/form-data'
+   )
+
+ history_form_example = [
+   {
+     role: 'user',
+     content: format(
+       user_message_template,
+       mime: create_file_example.mime,
+       verb: create_file_example.verb,
+       route: create_file_example.route,
+       summary: create_file_example.request_body_summary,
+       response: create_file_example.sample_response
+     )
+   },
+   {
+     role: 'assistant',
+     content: assistant_response_form_example
+   }
+ ]
+
+ history = [
+   *(codegen.mime == 'application/json' ? history_json_example : history_form_example),
+   {
+     role: 'user',
+     content: format(
+       user_message_template,
+       mime: codegen.mime,
+       verb: codegen.verb,
+       route: codegen.route,
+       summary: codegen.no_request_body? ? '(none)' : codegen.request_body_summary,
+       response: codegen.sample_response
+     )
+   }
+ ]
+
+ cache_dir = Pathname.new(__dir__).parent.join('tmp/codegen')
+ cache_dir.mkpath unless cache_dir.directory?
+ cache_file = cache_dir.join("#{codegen.verb}_#{codegen.route.gsub('/', '_')}.txt")
+
+ if cache_file.file?
+   puts cache_file.read
+ else
+   completion = api.create_chat_completion(
+     model: 'gpt-3.5-turbo',
+     messages: history,
+     max_tokens: 2000,
+     temperature: 0
+   )
+   output = completion.choices[0].message.content
+   cache_file.write(output)
+   puts output
+ end
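
A minimal usage sketch for the codegen script above; the openapi.yaml path is a hypothetical example, and the flags mirror the Slop options declared in Codegen.parse_cli (the /completions route also appears in the script's built-in few-shot examples, and --mime defaults to application/json):

  # Hypothetical in-process invocation of the Codegen class (openapi.yaml path is assumed)
  codegen = Codegen.parse_cli(
    %w[--openapi-file openapi.yaml --route /completions --verb post]
  )
  codegen.validate!
  codegen.request_body_summary # => { field: [...], required: [...] }
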
data/bin/console ADDED
@@ -0,0 +1,16 @@
+ #!/usr/bin/env ruby
+
+ require 'bundler/setup'
+ require 'openai'
+ require 'dotenv'
+ require 'pry'
+ require 'pry-byebug'
+
+ Dotenv.load
+
+ def start_repl
+   api = OpenAI.new(ENV.fetch('OPENAI_API_KEY'))
+   binding.pry
+ end
+
+ start_repl
@@ -0,0 +1,5 @@
+ # frozen_string_literal: true
+
+ class OpenAI
+   VERSION = '0.0.0'
+ end