openai.rb 0.0.0 → 0.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. checksums.yaml +4 -4
  2. data/.github/workflows/main.yml +27 -0
  3. data/.rubocop.yml +18 -0
  4. data/.ruby-version +1 -1
  5. data/Gemfile +9 -5
  6. data/Gemfile.lock +29 -24
  7. data/README.md +401 -0
  8. data/bin/console +9 -4
  9. data/lib/openai/api/cache.rb +137 -0
  10. data/lib/openai/api/client.rb +86 -0
  11. data/lib/openai/api/resource.rb +232 -0
  12. data/lib/openai/api/response.rb +384 -0
  13. data/lib/openai/api.rb +75 -0
  14. data/lib/openai/chat.rb +125 -0
  15. data/lib/openai/tokenizer.rb +50 -0
  16. data/lib/openai/util.rb +47 -0
  17. data/lib/openai/version.rb +1 -1
  18. data/lib/openai.rb +38 -357
  19. data/openai.gemspec +9 -3
  20. data/spec/data/sample_french.mp3 +0 -0
  21. data/spec/data/sample_image.png +0 -0
  22. data/spec/data/sample_image_mask.png +0 -0
  23. data/spec/shared/api_resource_context.rb +22 -0
  24. data/spec/spec_helper.rb +4 -0
  25. data/spec/unit/openai/api/audio_spec.rb +78 -0
  26. data/spec/unit/openai/api/cache_spec.rb +115 -0
  27. data/spec/unit/openai/api/chat_completions_spec.rb +130 -0
  28. data/spec/unit/openai/api/completions_spec.rb +125 -0
  29. data/spec/unit/openai/api/edits_spec.rb +40 -0
  30. data/spec/unit/openai/api/embeddings_spec.rb +45 -0
  31. data/spec/unit/openai/api/files_spec.rb +163 -0
  32. data/spec/unit/openai/api/fine_tunes_spec.rb +322 -0
  33. data/spec/unit/openai/api/images_spec.rb +137 -0
  34. data/spec/unit/openai/api/models_spec.rb +98 -0
  35. data/spec/unit/openai/api/moderations_spec.rb +63 -0
  36. data/spec/unit/openai/api/response_spec.rb +203 -0
  37. data/spec/unit/openai/chat_spec.rb +32 -0
  38. data/spec/unit/openai/tokenizer_spec.rb +45 -0
  39. data/spec/unit/openai_spec.rb +47 -736
  40. metadata +97 -7
  41. data/bin/codegen +0 -371
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: openai.rb
 version: !ruby/object:Gem::Version
-  version: 0.0.0
+  version: 0.0.3
 platform: ruby
 authors:
 - John
@@ -9,8 +9,22 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2023-03-25 00:00:00.000000000 Z
+date: 2023-04-02 00:00:00.000000000 Z
 dependencies:
+- !ruby/object:Gem::Dependency
+  name: abstract_type
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.0.7
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.0.7
 - !ruby/object:Gem::Dependency
   name: anima
   requirement: !ruby/object:Gem::Requirement
@@ -41,18 +55,66 @@ dependencies:
         version: '0.1'
 - !ruby/object:Gem::Dependency
   name: http
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '4.4'
+    - - "<"
+      - !ruby/object:Gem::Version
+        version: '6.0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '4.4'
+    - - "<"
+      - !ruby/object:Gem::Version
+        version: '6.0'
+- !ruby/object:Gem::Dependency
+  name: ice_nine
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '5.1'
+        version: 0.11.x
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '5.1'
+        version: 0.11.x
+- !ruby/object:Gem::Dependency
+  name: memoizable
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.4.2
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.4.2
+- !ruby/object:Gem::Dependency
+  name: tiktoken_ruby
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.0.3
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.0.3
 description: OpenAI Ruby Wrapper
 email:
 - johncbackus@gmail.com
@@ -60,19 +122,47 @@ executables: []
 extensions: []
 extra_rdoc_files: []
 files:
+- ".github/workflows/main.yml"
 - ".gitignore"
 - ".rspec"
+- ".rubocop.yml"
 - ".ruby-version"
 - Gemfile
 - Gemfile.lock
-- bin/codegen
+- README.md
 - bin/console
 - lib/openai.rb
+- lib/openai/api.rb
+- lib/openai/api/cache.rb
+- lib/openai/api/client.rb
+- lib/openai/api/resource.rb
+- lib/openai/api/response.rb
+- lib/openai/chat.rb
+- lib/openai/tokenizer.rb
+- lib/openai/util.rb
 - lib/openai/version.rb
 - openai.gemspec
 - spec/data/sample.jsonl
 - spec/data/sample.mp3
+- spec/data/sample_french.mp3
+- spec/data/sample_image.png
+- spec/data/sample_image_mask.png
+- spec/shared/api_resource_context.rb
 - spec/spec_helper.rb
+- spec/unit/openai/api/audio_spec.rb
+- spec/unit/openai/api/cache_spec.rb
+- spec/unit/openai/api/chat_completions_spec.rb
+- spec/unit/openai/api/completions_spec.rb
+- spec/unit/openai/api/edits_spec.rb
+- spec/unit/openai/api/embeddings_spec.rb
+- spec/unit/openai/api/files_spec.rb
+- spec/unit/openai/api/fine_tunes_spec.rb
+- spec/unit/openai/api/images_spec.rb
+- spec/unit/openai/api/models_spec.rb
+- spec/unit/openai/api/moderations_spec.rb
+- spec/unit/openai/api/response_spec.rb
+- spec/unit/openai/chat_spec.rb
+- spec/unit/openai/tokenizer_spec.rb
 - spec/unit/openai_spec.rb
 homepage: https://github.com/backus/openai-ruby
 licenses: []
@@ -85,14 +175,14 @@ required_ruby_version: !ruby/object:Gem::Requirement
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
-      version: '0'
+      version: '2.7'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
       version: '0'
 requirements: []
-rubygems_version: 3.0.3.1
+rubygems_version: 3.1.6
 signing_key:
 specification_version: 4
 summary: OpenAI Ruby Wrapper
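
For orientation, the dependency changes recorded in the metadata above correspond to gemspec declarations roughly like the sketch below. The release's actual openai.gemspec is not shown in this diff, so treat the block as an illustration of the new constraints: the gem names, versions, and requirement strings are taken verbatim from the metadata, and everything else is assumed boilerplate.

# Sketch only: illustrates the runtime dependency constraints added in 0.0.3,
# as recorded in the gem metadata above. Not the contents of openai.gemspec.
Gem::Specification.new do |spec|
  spec.name    = 'openai.rb'
  spec.version = '0.0.3'
  spec.required_ruby_version = '>= 2.7'

  spec.add_runtime_dependency 'abstract_type', '~> 0.0.7'
  spec.add_runtime_dependency 'http', '>= 4.4', '< 6.0'  # was '~> 5.1' in 0.0.0
  spec.add_runtime_dependency 'ice_nine', '~> 0.11.x'    # requirement string copied verbatim from the metadata
  spec.add_runtime_dependency 'memoizable', '~> 0.4.2'
  spec.add_runtime_dependency 'tiktoken_ruby', '~> 0.0.3'
end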
data/bin/codegen DELETED
@@ -1,371 +0,0 @@
-#!/usr/bin/env ruby
-
-require 'bundler/setup'
-require 'openai'
-require 'dotenv'
-require 'pry'
-require 'pry-byebug'
-require 'slop'
-require 'yaml'
-
-Dotenv.load
-api = OpenAI.new(ENV.fetch('OPENAI_API_KEY'))
-
-class Codegen
-  include Anima.new(:openapi_file, :route, :verb, :mime)
-  include Memoizable
-
-  def self.parse_cli(argv)
-    opts = Slop.parse(argv) do |o|
-      o.string '--openapi-file', 'Path to OpenAPI file', required: true
-      o.string '--route', 'API route', required: true
-      o.string '--verb', 'HTTP verb', required: true
-      o.string '--mime', 'Mime type', default: 'application/json'
-    end
-
-    openapi_file = Pathname.new(opts[:openapi_file])
-    raise ArgumentError, "OpenAPI file #{openapi_file} does not exist" unless openapi_file.exist?
-
-    route = opts[:route]
-    verb = opts[:verb]
-    mime = opts[:mime]
-
-    new(openapi_file: openapi_file, route: route, verb: verb, mime: mime)
-  end
-
-  def validate!
-    paths = doc.fetch('paths')
-    unless paths.key?(route)
-
-      raise <<~ERR
-        Invalid route!
-
-        Given: #{route}
-
-        Valid routes:
-
-        #{paths.keys.sort.join("\n")}
-      ERR
-
-    end
-
-    path_def = paths.fetch(route)
-
-    return if path_def.key?(verb)
-
-    raise <<~ERR
-      Invalid verb!
-
-      Given: #{verb}
-
-      Valid verbs: #{path_def.keys.sort.join(', ')}
-    ERR
-  end
-
-  def get?
-    verb == 'get'
-  end
-
-  def no_request_body?
-    !action.key?('requestBody')
-  end
-
-  def sample_response
-    action.fetch('x-oaiMeta').fetch('response')
-  end
-
-  def request_body_summary
-    fields = request_body.fetch('properties').keys
-    required = request_body.fetch('required')
-
-    { field: fields, required: required }
-  end
-
-  def request_body
-    ref =
-      if mime == 'application/json'
-        deep_fetch(action, ['requestBody', 'content', 'application/json', 'schema', '$ref'])
-      elsif mime == 'multipart/form-data'
-        deep_fetch(action, ['requestBody', 'content', 'multipart/form-data', 'schema', '$ref'])
-      else
-        raise "Unknown mime type #{mime}"
-      end
-    get_ref(ref)
-  end
-  memoize :request_body
-
-  def response_body
-    response = action.fetch('responses').first.last
-    ref = deep_fetch(response, %w[content application/json schema $ref])
-    get_ref(ref)
-  end
-
-  def get_ref(ref)
-    ref_path = ref.delete_prefix('#/').split('/')
-    deep_fetch(doc, ref_path)
-  end
-
-  def action
-    doc.fetch('paths').fetch(route).fetch(verb)
-  end
-
-  def doc
-    @doc ||= YAML.load_file(openapi_file)
-  end
-
-  def deep_fetch(obj, path)
-    path.reduce(obj) do |acc, key|
-      acc.fetch(key) do
-        raise "No key #{key} in #{acc.inspect}"
-      end
-    end
-  end
-end
-
-codegen = Codegen.parse_cli(ARGV)
-codegen.validate!
-
-user_message_template = <<~MSG
-  Please create an API call, a response wrapper, and a test for this request.
-  OpenAPI context in JSON:
-
-  ACTION: %<verb>s %<route>s
-  REQUEST MIME TYPE: %<mime>s
-
-  REQUEST SUMMARY: %<summary>s
-
-  SAMPLE RESPONSE: %<response>s
-MSG
-
-assistant_response_json_example = <<~RUBY
-  # api call
-  def create_completion(model:, **kwargs)
-    Response::Completion.from_json(
-      post('/v1/completions', model: model, **kwargs)
-    )
-  end
-
-  # wrapper
-  class Completion < JSONPayload
-    class Choice < JSONPayload
-      field :text
-      field :index
-      field :logprobs
-      field :finish_reason
-    end
-
-    class Usage < JSONPayload
-      field :prompt_tokens
-      field :completion_tokens
-      field :total_tokens
-    end
-
-    field :id
-    field :object
-    field :created
-    field :model
-    field :choices, wrapper: Choice
-    field :usage, wrapper: Usage
-  end
-
-  # test
-  describe '#create_completion' do
-    let(:response_body) do
-      {
-        "id": 'cmpl-uqkvlQyYK7bGYrRHQ0eXlWi7',
-        "object": 'text_completion',
-        "created": 1_589_478_378,
-        "model": 'text-davinci-003',
-        "choices": [
-          {
-            "text": "\n\nThis is indeed a test",
-            "index": 0,
-            "logprobs": nil,
-            "finish_reason": 'length'
-          }
-        ],
-        "usage": {
-          "prompt_tokens": 5,
-          "completion_tokens": 7,
-          "total_tokens": 12
-        }
-      }
-    end
-
-    let(:response) do
-      instance_double(
-        HTTP::Response,
-        status: HTTP::Response::Status.new(200),
-        body: JSON.dump(response_body)
-      )
-    end
-
-    it 'can create a completion' do
-      completion = client.create_completion(model: 'text-davinci-002', prompt: 'Hello, world!')
-
-      expect(http)
-        .to have_received(:post)
-        .with('https://api.openai.com/v1/completions', hash_including(:json))
-
-      expect(completion.id).to eql('cmpl-uqkvlQyYK7bGYrRHQ0eXlWi7')
-      expect(completion.model).to eql('text-davinci-003')
-      expect(completion.choices.first.text).to eql("\n\nThis is indeed a test")
-      expect(completion.choices.first.index).to eql(0)
-      expect(completion.choices.first.logprobs).to be_nil
-      expect(completion.choices.first.finish_reason).to eql('length')
-      expect(completion.usage.prompt_tokens).to eql(5)
-      expect(completion.usage.completion_tokens).to eql(7)
-      expect(completion.usage.total_tokens).to eql(12)
-    end
-  end
-RUBY
-
-assistant_response_form_example = <<~RUBY
-  # api call
-  def create_file(file:, purpose:)
-    absolute_path = Pathname.new(file).expand_path.to_s
-    form_file = HTTP::FormData::File.new(absolute_path)
-    Response::File.from_json(
-      post_form_multipart('/v1/files', file: form_file, purpose: purpose)
-    )
-  end
-
-  # wrapper
-  class File < JSONPayload
-    field :id
-    field :object
-    field :bytes
-    field :created_at
-    field :filename
-    field :purpose
-    optional_field :deleted?, path: :deleted
-  end
-
-  # test
-  describe '#create_file' do
-    let(:sample_file) { OpenAISpec::SPEC_ROOT.join('data/sample.jsonl') }
-
-    let(:response_body) do
-      {
-        "id": 'file-XjGxS3KTG0uNmNOK362iJua3',
-        "object": 'file',
-        "bytes": 140,
-        "created_at": 1_613_779_121,
-        "filename": 'sample.jsonl',
-        "purpose": 'fine-tune'
-      }
-    end
-
-    it 'can create a file' do
-      file = client.create_file(
-        file: sample_file,
-        purpose: 'fine-tune'
-      )
-
-      expect(http)
-        .to have_received(:post)
-        .with(
-          'https://api.openai.com/v1/files',
-          hash_including(
-            form: hash_including(
-              {
-                file: instance_of(HTTP::FormData::File),
-                purpose: 'fine-tune'
-              }
-            )
-          )
-        )
-
-      expect(file.id).to eql('file-XjGxS3KTG0uNmNOK362iJua3')
-      expect(file.object).to eql('file')
-      expect(file.bytes).to eql(140)
-      expect(file.created_at).to eql(1_613_779_121)
-      expect(file.filename).to eql('sample.jsonl')
-      expect(file.purpose).to eql('fine-tune')
-      expect(file.deleted?).to be(nil)
-    end
-  end
-RUBY
-
-create_completion_example =
-  codegen.with(
-    route: '/completions',
-    verb: 'post',
-    mime: 'application/json'
-  )
-
-history_json_example = [
-  {
-    role: 'user',
-    content: format(
-      user_message_template,
-      mime: create_completion_example.mime,
-      verb: create_completion_example.verb,
-      route: create_completion_example.route,
-      summary: create_completion_example.request_body_summary,
-      response: create_completion_example.sample_response
-    )
-  },
-  {
-    role: 'assistant',
-    content: assistant_response_json_example
-  }
-]
-
-create_file_example =
-  codegen.with(
-    route: '/files',
-    verb: 'post',
-    mime: 'multipart/form-data'
-  )
-
-history_form_example = [
-  {
-    role: 'user',
-    content: format(
-      user_message_template,
-      mime: create_file_example.mime,
-      verb: create_file_example.verb,
-      route: create_file_example.route,
-      summary: create_file_example.request_body_summary,
-      response: create_file_example.sample_response
-    )
-  },
-  {
-    role: 'assistant',
-    content: assistant_response_form_example
-  }
-]
-
-history = [
-  *(codegen.mime == 'application/json' ? history_json_example : history_form_example),
-  {
-    role: 'user',
-    content: format(
-      user_message_template,
-      mime: codegen.mime,
-      verb: codegen.verb,
-      route: codegen.route,
-      summary: codegen.no_request_body? ? '(none)' : codegen.request_body_summary,
-      response: codegen.sample_response
-    )
-  }
-]
-
-cache_dir = Pathname.new(__dir__).parent.join('tmp/codegen')
-cache_dir.mkpath unless cache_dir.directory?
-cache_file = cache_dir.join("#{codegen.verb}_#{codegen.route.gsub('/', '_')}.txt")
-
-if cache_file.file?
-  puts cache_file.read
-else
-  completion = api.create_chat_completion(
-    model: 'gpt-3.5-turbo',
-    messages: history,
-    max_tokens: 2000,
-    temperature: 0
-  )
-  output = completion.choices[0].message.content
-  cache_file.write(output)
-  puts output
-end
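
The removed bin/codegen script above was a generator that prompted gpt-3.5-turbo with a single OpenAPI route and cached the generated API call, response wrapper, and spec. Based on the Slop options and methods it defined, it was driven roughly as sketched below; the openapi.yaml path is hypothetical, and the script only required that the file exist.

# Hypothetical invocation of the deleted script, inferred from its CLI options:
#
#   bin/codegen --openapi-file openapi.yaml --route /completions --verb post
#
# Equivalent direct use of the Codegen class it defined (assumes the script's
# requires and class definition are loaded):
codegen = Codegen.parse_cli(
  %w[--openapi-file openapi.yaml --route /completions --verb post]
)
codegen.validate!                 # raises with the list of valid routes/verbs on bad input
puts codegen.request_body_summary # => { field: [...], required: [...] }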