openai.rb 0.0.0 → 0.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. checksums.yaml +4 -4
  2. data/.github/workflows/main.yml +27 -0
  3. data/.rubocop.yml +18 -0
  4. data/.ruby-version +1 -1
  5. data/Gemfile +9 -5
  6. data/Gemfile.lock +29 -24
  7. data/README.md +401 -0
  8. data/bin/console +9 -4
  9. data/lib/openai/api/cache.rb +137 -0
  10. data/lib/openai/api/client.rb +86 -0
  11. data/lib/openai/api/resource.rb +232 -0
  12. data/lib/openai/api/response.rb +384 -0
  13. data/lib/openai/api.rb +75 -0
  14. data/lib/openai/chat.rb +125 -0
  15. data/lib/openai/tokenizer.rb +50 -0
  16. data/lib/openai/util.rb +47 -0
  17. data/lib/openai/version.rb +1 -1
  18. data/lib/openai.rb +38 -357
  19. data/openai.gemspec +9 -3
  20. data/spec/data/sample_french.mp3 +0 -0
  21. data/spec/data/sample_image.png +0 -0
  22. data/spec/data/sample_image_mask.png +0 -0
  23. data/spec/shared/api_resource_context.rb +22 -0
  24. data/spec/spec_helper.rb +4 -0
  25. data/spec/unit/openai/api/audio_spec.rb +78 -0
  26. data/spec/unit/openai/api/cache_spec.rb +115 -0
  27. data/spec/unit/openai/api/chat_completions_spec.rb +130 -0
  28. data/spec/unit/openai/api/completions_spec.rb +125 -0
  29. data/spec/unit/openai/api/edits_spec.rb +40 -0
  30. data/spec/unit/openai/api/embeddings_spec.rb +45 -0
  31. data/spec/unit/openai/api/files_spec.rb +163 -0
  32. data/spec/unit/openai/api/fine_tunes_spec.rb +322 -0
  33. data/spec/unit/openai/api/images_spec.rb +137 -0
  34. data/spec/unit/openai/api/models_spec.rb +98 -0
  35. data/spec/unit/openai/api/moderations_spec.rb +63 -0
  36. data/spec/unit/openai/api/response_spec.rb +203 -0
  37. data/spec/unit/openai/chat_spec.rb +32 -0
  38. data/spec/unit/openai/tokenizer_spec.rb +45 -0
  39. data/spec/unit/openai_spec.rb +47 -736
  40. metadata +97 -7
  41. data/bin/codegen +0 -371
data/spec/unit/openai/api/response_spec.rb
@@ -0,0 +1,203 @@
+ # frozen_string_literal: true
+
+ RSpec.describe OpenAI::API::Response do
+   before do
+     user_class = Class.new(described_class) do
+       field :username, path: %i[handle]
+     end
+
+     stub_const('OpenAISpec::SampleResponse::User', user_class)
+
+     comment_class = Class.new(described_class) do
+       field :body
+       field :user, wrapper: OpenAISpec::SampleResponse::User
+     end
+
+     stub_const('OpenAISpec::SampleResponse::Comment', comment_class)
+
+     post_class = Class.new(described_class) do
+       field :created_at, path: %i[meta birth created]
+       field :text
+       field :comments, wrapper: OpenAISpec::SampleResponse::Comment
+       field :author, wrapper: OpenAISpec::SampleResponse::User
+       optional_field :co_author, wrapper: OpenAISpec::SampleResponse::User
+       optional_field :subtitle
+
+       # For demonstrating that we can use the instance method without specifying
+       # a wrapper class
+       define_method(:other_author) do
+         optional_field([:co_author])
+       end
+     end
+
+     stub_const('OpenAISpec::SampleResponse::Post', post_class)
+   end
+
+   let(:sample_response) do
+     OpenAISpec::SampleResponse::Post.new(sample_response_payload)
+   end
+
+   let(:sample_response_payload) do
+     {
+       meta: {
+         birth: {
+           created: Time.utc(2023).to_i
+         }
+       },
+       text: 'This is a post',
+       comments: [
+         {
+           body: 'This is a comment',
+           user: {
+             handle: 'alice'
+           }
+         },
+         {
+           body: 'This is a spicy comment',
+           user: {
+             handle: 'bob'
+           }
+         }
+       ],
+       author: {
+         handle: 'carl'
+       }
+     }
+   end
+
+   context 'when inspecting the response' do
+     # Define a smaller response payload so this is less annoying to test
+     let(:sample_response_payload) do
+       {
+         meta: { birth: { created: 1234 } },
+         text: 'This is a post',
+         comments: [],
+         author: { handle: 'carl' }
+       }
+     end
+
+     before do
+       # Mark a field as private so we can prove that the #inspect method
+       # should use __send__ in case a response class chooses to make a
+       # field private.
+       OpenAISpec::SampleResponse::Post.class_eval do
+         private(:created_at)
+       end
+     end
+
+     it 'defines a nice clean inspect method' do
+       expect(sample_response.inspect).to eql(
+         '#<OpenAISpec::SampleResponse::Post ' \
+         'created_at=1234 ' \
+         'text="This is a post" ' \
+         'comments=[] ' \
+         'author=#<OpenAISpec::SampleResponse::User username="carl"> ' \
+         'co_author=nil ' \
+         'subtitle=nil>'
+       )
+     end
+
+     it 'tracks the fields on a class for the sake of the #inspect method' do
+       expect(OpenAISpec::SampleResponse::Comment.__send__(:field_registry))
+         .to eql(%i[body user])
+     end
+   end
+
+   it 'can parse a JSON response' do
+     expect(
+       OpenAISpec::SampleResponse::Post.from_json(
+         JSON.dump(sample_response_payload)
+       )
+     ).to eql(sample_response)
+   end
+
+   it 'exposes the original payload' do
+     expect(sample_response.original_payload).to eql(sample_response_payload)
+   end
+
+   it 'deep freezes the original payload' do
+     original = sample_response.original_payload
+     expect(original).to be_frozen
+     expect(original[:comments]).to be_frozen
+     expect(original[:comments].first).to be_frozen
+     expect(original[:comments].first[:user]).to be_frozen
+   end
+
+   describe '.field' do
+     it 'exposes the field' do
+       expect(sample_response.text).to eql('This is a post')
+       expect(sample_response.created_at).to eql(1_672_531_200)
+     end
+
+     it 'can expose fields under a different name than the key path' do
+       expect(sample_response.author.username).to eql('carl')
+     end
+
+     it 'wraps the field if a wrapper is provided' do
+       expect(sample_response.author).to eql(
+         OpenAISpec::SampleResponse::User.new(handle: 'carl')
+       )
+     end
+
+     it 'wraps each element in the wrapper if the value is an array' do
+       expect(sample_response.comments).to all(
+         be_an_instance_of(OpenAISpec::SampleResponse::Comment)
+       )
+       expect(sample_response.comments[0].user).to eql(
+         OpenAISpec::SampleResponse::User.new(handle: 'alice')
+       )
+       expect(sample_response.comments[1].user).to eql(
+         OpenAISpec::SampleResponse::User.new(handle: 'bob')
+       )
+     end
+
+     context 'when a required field is not present' do
+       let(:sample_response_payload) do
+         { meta: { error: 'you did something wrong bro' } }
+       end
+
+       it 'raises an error when the field is accessed' do
+         expect { sample_response.text }.to raise_error(
+           described_class::MissingFieldError, <<~ERROR
+             Missing field :text in response payload!
+             Was attempting to access value at path `[:text]`.
+             Payload: {
+               "meta": {
+                 "error": "you did something wrong bro"
+               }
+             }
+           ERROR
+         )
+
+         expect { sample_response.created_at }.to raise_error(
+           described_class::MissingFieldError, <<~ERROR
+             Missing field :birth in response payload!
+             Was attempting to access value at path `[:meta, :birth, :created]`.
+             Payload: {
+               "meta": {
+                 "error": "you did something wrong bro"
+               }
+             }
+           ERROR
+         )
+       end
+     end
+   end
+
+   describe '.optional_field' do
+     it 'does not raise an error when a field is not present' do
+       expect(sample_response.co_author).to be_nil
+       expect(sample_response.other_author).to be_nil
+     end
+
+     context 'when the optional field is present' do
+       let(:sample_response_payload) do
+         super().merge(co_author: { handle: 'dave' })
+       end
+
+       it 'exposes the field' do
+         expect(sample_response.co_author.username).to eql('dave')
+       end
+     end
+   end
+ end
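Read as documentation, response_spec.rb exercises a small declarative DSL: `field` defines a required reader (with an optional `path:` into the payload and an optional `wrapper:` response class), `optional_field` defines a reader that returns nil when the key is absent, and missing required keys raise `MissingFieldError`. The following is a minimal sketch of how such a DSL could be built; it is inferred from the spec alone, not the gem's actual lib/openai/api/response.rb, and the class name `SketchResponse` and the exact error message are illustrative assumptions.

# Minimal sketch (assumption, not the gem's real implementation) of the
# field/optional_field DSL that the spec above exercises.
require 'json'

class SketchResponse
  MissingFieldError = Class.new(StandardError)

  # Field names are tracked per class so an #inspect method (not shown here)
  # could list them, as the spec's field_registry expectation suggests.
  def self.field_registry
    @field_registry ||= []
  end

  # Defines a reader that digs `path` out of the payload and optionally wraps
  # the value (or each element of an array value) in another response class.
  def self.field(name, path: [name], wrapper: nil)
    field_registry << name
    define_method(name) do
      value = @payload.dig(*path)
      raise MissingFieldError, "Missing field #{name.inspect} in response payload!" if value.nil?

      wrap(value, wrapper)
    end
  end

  # Same as .field, but returns nil instead of raising when the path is absent.
  def self.optional_field(name, path: [name], wrapper: nil)
    field_registry << name
    define_method(name) do
      value = @payload.dig(*path)
      value.nil? ? nil : wrap(value, wrapper)
    end
  end

  def self.from_json(json)
    new(JSON.parse(json, symbolize_names: true))
  end

  def initialize(payload)
    @payload = payload
  end

  private

  def wrap(value, wrapper)
    return value unless wrapper

    value.is_a?(Array) ? value.map { |element| wrapper.new(element) } : wrapper.new(value)
  end
end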
data/spec/unit/openai/chat_spec.rb
@@ -0,0 +1,32 @@
+ # frozen_string_literal: true
+
+ RSpec.describe OpenAI::Chat do
+   let(:messages) { [{ role: 'user', content: 'Hello' }] }
+   let(:settings) { { model: 'gpt-3' } }
+   let(:openai) { double('OpenAI') }
+
+   describe 'initialization and adding messages' do
+     it 'initializes with messages and adds user, system, and assistant messages' do
+       chat = OpenAI::Chat.new(messages: messages, settings: settings, openai: openai)
+
+       expect(chat.messages.count).to eq(1)
+       expect(chat.messages.first.role).to eq('user')
+       expect(chat.messages.first.content).to eq('Hello')
+
+       chat = chat.add_user_message('How are you?')
+       expect(chat.messages.count).to eq(2)
+       expect(chat.messages.last.role).to eq('user')
+       expect(chat.messages.last.content).to eq('How are you?')
+
+       chat = chat.add_system_message('System message')
+       expect(chat.messages.count).to eq(3)
+       expect(chat.messages.last.role).to eq('system')
+       expect(chat.messages.last.content).to eq('System message')
+
+       chat = chat.add_assistant_message('I am fine, thank you.')
+       expect(chat.messages.count).to eq(4)
+       expect(chat.messages.last.role).to eq('assistant')
+       expect(chat.messages.last.content).to eq('I am fine, thank you.')
+     end
+   end
+ end
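Read together, the assertions above imply a usage pattern for OpenAI::Chat in which each add_*_message call returns a new chat rather than mutating the receiver, so the result must be reassigned. A hedged usage sketch follows; the constructor keywords and method names come from the spec, while `client` is only a placeholder (the spec itself passes an RSpec double for `openai:`).

# Usage sketch inferred from chat_spec.rb above.
client = Object.new # placeholder for whatever API client OpenAI::Chat expects

chat = OpenAI::Chat.new(
  messages: [{ role: 'user', content: 'Hello' }],
  settings: { model: 'gpt-3' },
  openai: client
)

chat = chat.add_user_message('How are you?')
chat = chat.add_system_message('System message')
chat = chat.add_assistant_message('I am fine, thank you.')

chat.messages.map { |message| [message.role, message.content] }
# => [["user", "Hello"], ["user", "How are you?"],
#     ["system", "System message"], ["assistant", "I am fine, thank you."]]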
data/spec/unit/openai/tokenizer_spec.rb
@@ -0,0 +1,45 @@
+ # frozen_string_literal: true
+
+ RSpec.describe OpenAI::Tokenizer do
+   let(:tokenizer) { described_class.new }
+
+   it 'can get an encoder by model name' do
+     expect(tokenizer.for_model('gpt-4')).to eql(
+       OpenAI::Tokenizer::Encoding.new(:cl100k_base)
+     )
+   end
+
+   it 'can get an encoder by name' do
+     expect(tokenizer.get(:cl100k_base)).to eql(
+       OpenAI::Tokenizer::Encoding.new(:cl100k_base)
+     )
+   end
+
+   it 'raises an error if the model name is not valid' do
+     expect { tokenizer.for_model('gpt-42') }.to raise_error(
+       'Invalid model name or not recognized by Tiktoken: "gpt-42"'
+     )
+   end
+
+   it 'raises an error if the encoding name is not valid' do
+     expect { tokenizer.get('aaaaaaaaaaaa') }.to raise_error(
+       'Invalid encoding name or not recognized by Tiktoken: "aaaaaaaaaaaa"'
+     )
+   end
+
+   it 'can encode text' do
+     expect(tokenizer.for_model('gpt-4').encode('Hello, world!')).to eql(
+       [9906, 11, 1917, 0]
+     )
+   end
+
+   it 'can decode tokens' do
+     expect(tokenizer.for_model('gpt-4').decode([9906, 11, 1917, 0])).to eql(
+       'Hello, world!'
+     )
+   end
+
+   it 'can count the number of tokens in text' do
+     expect(tokenizer.for_model('gpt-4').num_tokens('Hello, world!')).to eql(4)
+   end
+ end
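As a quick orientation to the tokenizer API these examples pin down, here is a usage sketch; the method names, encoding name, and token IDs are lifted directly from the spec, and the gpt-4 to cl100k_base mapping is Tiktoken's standard association.

# Usage sketch mirroring tokenizer_spec.rb above.
tokenizer = OpenAI::Tokenizer.new
encoder   = tokenizer.for_model('gpt-4') # equivalent to tokenizer.get(:cl100k_base)

tokens = encoder.encode('Hello, world!') # => [9906, 11, 1917, 0]
encoder.decode(tokens)                   # => "Hello, world!"
encoder.num_tokens('Hello, world!')      # => 4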