openai.rb 0.0.0 → 0.0.3
This diff reflects the changes between publicly released versions of the package as they appear in the supported public registries, and is provided for informational purposes only.
 - checksums.yaml +4 -4
 - data/.github/workflows/main.yml +27 -0
 - data/.rubocop.yml +18 -0
 - data/.ruby-version +1 -1
 - data/Gemfile +9 -5
 - data/Gemfile.lock +29 -24
 - data/README.md +401 -0
 - data/bin/console +9 -4
 - data/lib/openai/api/cache.rb +137 -0
 - data/lib/openai/api/client.rb +86 -0
 - data/lib/openai/api/resource.rb +232 -0
 - data/lib/openai/api/response.rb +384 -0
 - data/lib/openai/api.rb +75 -0
 - data/lib/openai/chat.rb +125 -0
 - data/lib/openai/tokenizer.rb +50 -0
 - data/lib/openai/util.rb +47 -0
 - data/lib/openai/version.rb +1 -1
 - data/lib/openai.rb +38 -357
 - data/openai.gemspec +9 -3
 - data/spec/data/sample_french.mp3 +0 -0
 - data/spec/data/sample_image.png +0 -0
 - data/spec/data/sample_image_mask.png +0 -0
 - data/spec/shared/api_resource_context.rb +22 -0
 - data/spec/spec_helper.rb +4 -0
 - data/spec/unit/openai/api/audio_spec.rb +78 -0
 - data/spec/unit/openai/api/cache_spec.rb +115 -0
 - data/spec/unit/openai/api/chat_completions_spec.rb +130 -0
 - data/spec/unit/openai/api/completions_spec.rb +125 -0
 - data/spec/unit/openai/api/edits_spec.rb +40 -0
 - data/spec/unit/openai/api/embeddings_spec.rb +45 -0
 - data/spec/unit/openai/api/files_spec.rb +163 -0
 - data/spec/unit/openai/api/fine_tunes_spec.rb +322 -0
 - data/spec/unit/openai/api/images_spec.rb +137 -0
 - data/spec/unit/openai/api/models_spec.rb +98 -0
 - data/spec/unit/openai/api/moderations_spec.rb +63 -0
 - data/spec/unit/openai/api/response_spec.rb +203 -0
 - data/spec/unit/openai/chat_spec.rb +32 -0
 - data/spec/unit/openai/tokenizer_spec.rb +45 -0
 - data/spec/unit/openai_spec.rb +47 -736
 - metadata +97 -7
 - data/bin/codegen +0 -371
 
data/spec/unit/openai/api/chat_completions_spec.rb
@@ -0,0 +1,130 @@
+# frozen_string_literal: true
+
+RSpec.describe OpenAI::API, '#chat_completions' do
+  include_context 'an API Resource'
+
+  let(:resource) { api.chat_completions }
+  let(:response_body) do
+    {
+      "id": 'chatcmpl-123',
+      "object": 'chat.completion',
+      "created": 1_677_652_288,
+      "choices": [
+        {
+          "index": 0,
+          "message": {
+            "role": 'assistant',
+            "content": "\n\nHello there, how may I assist you today?"
+          },
+          "finish_reason": 'stop'
+        }
+      ],
+      "usage": {
+        "prompt_tokens": 9,
+        "completion_tokens": 12,
+        "total_tokens": 21
+      }
+    }
+  end
+
+  let(:completion) do
+    messages = [
+      { "text": 'Hello there!', "user": 'customer' },
+      { "text": 'Can you help me with my order?', "user": 'customer' },
+      { "text": 'Sure, what would you like to do?', "user": 'assistant' }
+    ]
+    resource.create(model: 'text-davinci-002', messages: messages)
+  end
+
+  it 'can create a chat completion' do
+    expect(completion.id).to eql('chatcmpl-123')
+    expect(completion.choices.first.index).to eql(0)
+    expect(completion.choices.first.message.role).to eql('assistant')
+    expect(completion.choices.first.message.content).to eql("\n\nHello there, how may I assist you today?")
+    expect(completion.choices.first.finish_reason).to eql('stop')
+    expect(completion.usage.prompt_tokens).to eql(9)
+    expect(completion.usage.completion_tokens).to eql(12)
+    expect(completion.usage.total_tokens).to eql(21)
+  end
+
+  it 'exposes a #response_text helper method' do
+    expect(completion.response_text).to eql("\n\nHello there, how may I assist you today?")
+  end
+
+  it 'exposes a #response helper method' do
+    expect(completion.response.content).to eql("\n\nHello there, how may I assist you today?")
+    expect(completion.response.role).to eql('assistant')
+  end
+
+  it 'raises when a block is given for a non-streaming request' do
+    expect { resource.create(model: 'text-davinci-002', messages: []) { print 'noop' } }
+      .to raise_error('Non-streaming responses do not support blocks')
+  end
+
+  context 'when streaming is enabled' do
+    let(:response_chunks) do
+      [
+        chunk(role: 'assistant'),
+        chunk(content: 'He'),
+        chunk(content: 'llo,'),
+        chunk(content: ' world'),
+        chunk({ content: '!' }, finish_reason: 'stop')
+      ]
+    end
+
+    let(:response) do
+      instance_double(
+        HTTP::Response,
+        status: HTTP::Response::Status.new(response_status_code),
+        body: response_body
+      )
+    end
+
+    let(:response_body) do
+      instance_double(HTTP::Response::Body).tap do |double|
+        allow(double).to receive(:each)
+          .and_yield(response_chunks.first)
+          .and_yield(response_chunks[1])
+          .and_yield(response_chunks[2])
+          .and_yield(response_chunks[3])
+          .and_yield(response_chunks[4])
+          .and_yield('data: [DONE]')
+      end
+    end
+
+    before do
+      allow(http).to receive(:persistent).and_yield(http)
+    end
+
+    def chunk(delta, finish_reason: nil)
+      data = {
+        id: 'chatcmpl-6y5rBH2fvMeGqAAH81Wkp8QdqESEx',
+        object: 'chat.completion.chunk',
+        created: 1_679_780_213,
+        model: 'gpt-3.5-turbo-0301',
+        choices: [delta: delta, index: 0, finish_reason: finish_reason]
+      }
+
+      "data: #{JSON.dump(data)}"
+    end
+
+    it 'yields chunks as they are served' do
+      chunks = []
+      resource.create(model: 'text-davinci-002', messages: [], stream: true) do |chunk|
+        chunks << chunk
+      end
+
+      expect(chunks).to all(be_an_instance_of(OpenAI::API::Response::ChatCompletionChunk))
+      texts = chunks.map { |chunk| chunk.choices.first.delta.content }
+      expect(texts.join('')).to eql('Hello, world!')
+
+      expect(chunks[0].response.role).to eql('assistant')
+      expect(chunks[1].response_text).to eql('He')
+    end
+
+    it 'raises when a block is not given' do
+      expect { resource.create(model: 'text-davinci-002', messages: [], stream: true) }
+        .to raise_error('Streaming responses require a block')
+    end
+  end
+end
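
Taken together, the expectations above describe the surface this spec exercises: a blocking `create` call on `api.chat_completions`, `#response_text` / `#response` helpers on the result, and a `stream: true` form that requires a block and yields `ChatCompletionChunk` objects. A minimal usage sketch inferred from those expectations; the `api` handle is assumed to be an already-configured `OpenAI::API` instance (its construction comes from the shared 'an API Resource' context, not this hunk), and the values in comments mirror the spec's stubbed response:

  # Sketch only: `api` is assumed to be a configured OpenAI::API client.
  completion = api.chat_completions.create(
    model: 'text-davinci-002',
    messages: [{ "text": 'Hello there!', "user": 'customer' }]
  )
  completion.response_text  # content of the first choice's message (per the stubbed body)
  completion.response.role  # => 'assistant'

  # Streaming requires a block; each yielded chunk is a Response::ChatCompletionChunk.
  api.chat_completions.create(model: 'text-davinci-002', messages: [], stream: true) do |chunk|
    print chunk.choices.first.delta.content
  end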

data/spec/unit/openai/api/completions_spec.rb
@@ -0,0 +1,125 @@
+# frozen_string_literal: true
+
+RSpec.describe OpenAI::API, '#completions' do
+  include_context 'an API Resource'
+
+  let(:resource) { api.completions }
+
+  let(:response_body) do
+    {
+      "id": 'cmpl-uqkvlQyYK7bGYrRHQ0eXlWi7',
+      "object": 'text_completion',
+      "created": 1_589_478_378,
+      "model": 'text-davinci-003',
+      "choices": [
+        {
+          "text": "\n\nThis is indeed a test",
+          "index": 0,
+          "logprobs": nil,
+          "finish_reason": 'length'
+        }
+      ],
+      "usage": {
+        "prompt_tokens": 5,
+        "completion_tokens": 7,
+        "total_tokens": 12
+      }
+    }
+  end
+
+  let(:completion) { resource.create(model: 'text-davinci-002', prompt: 'Hello, world!') }
+
+  it 'can create a completion' do
+    completion
+
+    expect(http)
+      .to have_received(:post)
+      .with('https://api.openai.com/v1/completions', hash_including(:json))
+
+    expect(completion.id).to eql('cmpl-uqkvlQyYK7bGYrRHQ0eXlWi7')
+    expect(completion.model).to eql('text-davinci-003')
+    expect(completion.choices.first.text).to eql("\n\nThis is indeed a test")
+    expect(completion.choices.first.index).to eql(0)
+    expect(completion.choices.first.logprobs).to be_nil
+    expect(completion.choices.first.finish_reason).to eql('length')
+    expect(completion.usage.prompt_tokens).to eql(5)
+    expect(completion.usage.completion_tokens).to eql(7)
+    expect(completion.usage.total_tokens).to eql(12)
+  end
+
+  it 'raises when a block is given for a non-streaming request' do
+    expect { resource.create(model: 'text-davinci-002', prompt: 'Hello, world!') { print 'noop' } }
+      .to raise_error('Non-streaming responses do not support blocks')
+  end
+
+  it 'exposes a #response_text helper method' do
+    expect(completion.response_text).to eql("\n\nThis is indeed a test")
+  end
+
+  context 'when streaming is enabled' do
+    let(:response_chunks) do
+      [
+        chunk('He'),
+        chunk('llo,'),
+        chunk(' world'),
+        chunk('!', finish_reason: 'stop')
+      ]
+    end
+
+    let(:response) do
+      instance_double(
+        HTTP::Response,
+        status: HTTP::Response::Status.new(response_status_code),
+        body: response_body
+      )
+    end
+
+    let(:response_body) do
+      instance_double(HTTP::Response::Body).tap do |double|
+        allow(double).to receive(:each)
+          .and_yield(response_chunks.first)
+          .and_yield(response_chunks[1])
+          .and_yield(response_chunks[2])
+          .and_yield(response_chunks[3])
+          .and_yield('data: [DONE]')
+      end
+    end
+
+    before do
+      allow(http).to receive(:persistent).and_yield(http)
+    end
+
+    def chunk(text, finish_reason: nil)
+      data = {
+        "id": 'cmpl-6y5B6Ak8wBk2nKsqVtSlFeJAG1dUM',
+        "object": 'text_completion',
+        "created": 1_679_777_604,
+        "choices": [{
+          "text": text,
+          "index": 0,
+          "logprobs": nil,
+          "finish_reason": finish_reason
+        }],
+        "model": 'text-davinci-002'
+      }
+
+      "data: #{JSON.dump(data)}"
+    end
+
+    it 'yields chunks as they are served' do
+      chunks = []
+      resource.create(model: 'text-davinci-002', prompt: 'Hello, world!', stream: true) do |chunk|
+        chunks << chunk
+      end
+
+      expect(chunks).to all(be_an_instance_of(OpenAI::API::Response::Completion))
+      texts = chunks.map { |chunk| chunk.choices.first.text }
+      expect(texts.join('')).to eql('Hello, world!')
+    end
+
+    it 'raises when a block is not given' do
+      expect { resource.create(model: 'text-davinci-002', prompt: 'Hello, world!', stream: true) }
+        .to raise_error('Streaming responses require a block')
+    end
+  end
+end
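
The same pattern as the chat spec, against `api.completions`: a blocking `create(model:, prompt:)`, a `#response_text` helper, and a block-only streaming mode that yields `Response::Completion` chunks. A small sketch inferred from the expectations above, again assuming an already-built `api` client; values in comments mirror the stubbed response body:

  # Sketch only.
  completion = api.completions.create(model: 'text-davinci-002', prompt: 'Hello, world!')
  completion.response_text                 # => "\n\nThis is indeed a test"
  completion.choices.first.finish_reason   # => 'length'

  # Streaming: each yielded chunk exposes choices.first.text.
  api.completions.create(model: 'text-davinci-002', prompt: 'Hello, world!', stream: true) do |chunk|
    print chunk.choices.first.text
  end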

data/spec/unit/openai/api/edits_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+RSpec.describe OpenAI::API, '#edits' do
+  include_context 'an API Resource'
+
+  let(:resource) { api.edits }
+  let(:response_body) do
+    {
+      "object": 'edit',
+      "created": 1_589_478_378,
+      "choices": [
+        {
+          "text": 'What day of the week is it?',
+          "index": 0
+        }
+      ],
+      "usage": {
+        "prompt_tokens": 25,
+        "completion_tokens": 32,
+        "total_tokens": 57
+      }
+    }
+  end
+
+  it 'can create an edit' do
+    edit = resource.create(model: 'text-davinci-002',
                           instruction: 'Change "world" to "solar system" in the following text: "Hello, world!"')
+
+    expect(http)
+      .to have_received(:post)
+      .with('https://api.openai.com/v1/edits', hash_including(:json))
+
+    expect(edit.object).to eql('edit')
+    expect(edit.choices.first.text).to eql('What day of the week is it?')
+    expect(edit.choices.first.index).to eql(0)
+    expect(edit.usage.prompt_tokens).to eql(25)
+    expect(edit.usage.completion_tokens).to eql(32)
+    expect(edit.usage.total_tokens).to eql(57)
+  end
+end
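
As the expectation on `http` shows, `api.edits.create` POSTs a JSON body to https://api.openai.com/v1/edits and wraps the response so that choices and usage are reachable through method calls. A sketch inferred from the spec, assuming the same `api` client as above:

  # Sketch only: argument values are the ones used in the spec.
  edit = api.edits.create(
    model: 'text-davinci-002',
    instruction: 'Change "world" to "solar system" in the following text: "Hello, world!"'
  )
  edit.choices.first.text    # edited text returned by the API
  edit.usage.total_tokens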

data/spec/unit/openai/api/embeddings_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+RSpec.describe OpenAI::API, '#embeddings' do
+  include_context 'an API Resource'
+
+  let(:resource) { api.embeddings }
+  let(:response_body) do
+    {
+      "object": 'list',
+      "data": [
+        {
+          "object": 'embedding',
+          "embedding": [
+            0.0023064255,
+            -0.009327292,
+            -0.0028842222
+          ],
+          "index": 0
+        }
+      ],
+      "model": 'text-embedding-ada-002',
+      "usage": {
+        "prompt_tokens": 8,
+        "total_tokens": 8
+      }
+    }
+  end
+
+  it 'can create an embedding' do
+    embedding = resource.create(model: 'text-embedding-ada-002', input: 'Hello, world!')
+
+    expect(http)
+      .to have_received(:post)
+      .with('https://api.openai.com/v1/embeddings', hash_including(:json))
+
+    expect(embedding.object).to eql('list')
+    expect(embedding.data.first.object).to eql('embedding')
+    expect(embedding.data.first.embedding.length).to eql(3)
+    expect(embedding.data.first.embedding.first).to eql(0.0023064255)
+    expect(embedding.data.first.index).to eql(0)
+    expect(embedding.model).to eql('text-embedding-ada-002')
+    expect(embedding.usage.prompt_tokens).to eql(8)
+    expect(embedding.usage.total_tokens).to eql(8)
+  end
+end
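
Likewise, `api.embeddings.create` POSTs to https://api.openai.com/v1/embeddings, and the wrapper exposes the embedding vector and usage counts as nested method calls. A sketch inferred from the expectations above (assumed `api` client):

  # Sketch only.
  embedding = api.embeddings.create(model: 'text-embedding-ada-002', input: 'Hello, world!')
  embedding.data.first.embedding   # array of floats
  embedding.model                  # => 'text-embedding-ada-002'
  embedding.usage.total_tokens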

data/spec/unit/openai/api/files_spec.rb
@@ -0,0 +1,163 @@
+# frozen_string_literal: true
+
+RSpec.describe OpenAI::API, '#files' do
+  include_context 'an API Resource'
+
+  let(:resource) { api.files }
+  let(:sample_file) { OpenAISpec::SPEC_ROOT.join('data/sample.jsonl') }
+
+  context 'when creating a file' do
+    let(:response_body) do
+      {
+        "id": 'file-XjGxS3KTG0uNmNOK362iJua3',
+        "object": 'file',
+        "bytes": 140,
+        "created_at": 1_613_779_121,
+        "filename": 'sample.jsonl',
+        "purpose": 'fine-tune'
+      }
+    end
+
+    it 'can create a file' do
+      file = resource.create(
+        file: sample_file,
+        purpose: 'fine-tune'
+      )
+
+      expect(http)
+        .to have_received(:post)
+        .with(
+          'https://api.openai.com/v1/files',
+          hash_including(
+            form: hash_including(
+              {
+                file: instance_of(HTTP::FormData::File),
+                purpose: 'fine-tune'
+              }
+            )
+          )
+        )
+
+      expect(file.id).to eql('file-XjGxS3KTG0uNmNOK362iJua3')
+      expect(file.object).to eql('file')
+      expect(file.bytes).to eql(140)
+      expect(file.created_at).to eql(1_613_779_121)
+      expect(file.filename).to eql('sample.jsonl')
+      expect(file.purpose).to eql('fine-tune')
+      expect(file.deleted?).to be(nil)
+    end
+  end
+
+  context 'when listing a file' do
+    let(:response_body) do
+      {
+        "data": [
+          {
+            "id": 'file-ccdDZrC3iZVNiQVeEA6Z66wf',
+            "object": 'file',
+            "bytes": 175,
+            "created_at": 1_613_677_385,
+            "filename": 'train.jsonl',
+            "purpose": 'search'
+          },
+          {
+            "id": 'file-XjGxS3KTG0uNmNOK362iJua3',
+            "object": 'file',
+            "bytes": 140,
+            "created_at": 1_613_779_121,
+            "filename": 'puppy.jsonl',
+            "purpose": 'search'
+          }
+        ],
+        "object": 'list'
+      }
+    end
+
+    it 'can get a list of files' do
+      files = resource.list
+
+      expect(http)
+        .to have_received(:get)
+        .with('https://api.openai.com/v1/files')
+
+      expect(files.data.size).to eql(2)
+      expect(files.data.first.id).to eql('file-ccdDZrC3iZVNiQVeEA6Z66wf')
+      expect(files.data.first.object).to eql('file')
+      expect(files.data.first.bytes).to eql(175)
+      expect(files.data.first.created_at).to eql(1_613_677_385)
+      expect(files.data.first.filename).to eql('train.jsonl')
+      expect(files.data.first.purpose).to eql('search')
+      expect(files.object).to eql('list')
+    end
+  end
+
+  context 'when deleting a file' do
+    let(:response_body) do
+      {
+        "id": 'file-XjGxS3KTG0uNmNOK362iJua3',
+        "object": 'file',
+        "deleted": true
+      }
+    end
+
+    it 'can delete a file' do
+      file = resource.delete('file-XjGxS3KTG0uNmNOK362iJua3')
+
+      expect(http)
+        .to have_received(:delete)
+        .with('https://api.openai.com/v1/files/file-XjGxS3KTG0uNmNOK362iJua3')
+
+      expect(file.id).to eql('file-XjGxS3KTG0uNmNOK362iJua3')
+      expect(file.object).to eql('file')
+      expect(file.deleted?).to be_truthy
+    end
+  end
+
+  context 'when fetching a file' do
+    let(:response_body) do
+      {
+        "id": 'file-XjGxS3KTG0uNmNOK362iJua3',
+        "object": 'file',
+        "bytes": 140,
+        "created_at": 1_613_779_657,
+        "filename": 'mydata.jsonl',
+        "purpose": 'fine-tune'
+      }
+    end
+
+    it 'can get a file' do
+      file = resource.fetch('file-XjGxS3KTG0uNmNOK362iJua3')
+
+      expect(http)
+        .to have_received(:get)
+        .with('https://api.openai.com/v1/files/file-XjGxS3KTG0uNmNOK362iJua3')
+
+      expect(file.id).to eql('file-XjGxS3KTG0uNmNOK362iJua3')
+      expect(file.object).to eql('file')
+      expect(file.bytes).to eql(140)
+      expect(file.created_at).to eql(1_613_779_657)
+      expect(file.filename).to eql('mydata.jsonl')
+      expect(file.purpose).to eql('fine-tune')
+    end
+  end
+
+  context 'when fetching a file contents' do
+    let(:response) do
+      instance_double(
+        HTTP::Response,
+        status: HTTP::Response::Status.new(200),
+        body: '(raw)'
+      )
+    end
+
+    it 'can get a file contents' do
+      response = resource.get_content('file-XjGxS3KTG0uNmNOK362iJua3')
+
+      expect(http)
+        .to have_received(:get)
+        .with('https://api.openai.com/v1/files/file-XjGxS3KTG0uNmNOK362iJua3/content')
+
+      expect(response).to eql('(raw)')
+    end
+  end
+end
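
The files spec covers the full CRUD-style surface: multipart upload via `create(file:, purpose:)`, plus `list`, `fetch`, `delete`, and `get_content`, which returns the raw response body rather than a wrapped object. A sketch inferred from the expectations above (assumed `api` client; the file id is the spec's sample value):

  # Sketch only: `sample_file` stands in for any Pathname to a .jsonl file.
  file = api.files.create(file: sample_file, purpose: 'fine-tune')   # POST /v1/files (multipart form)
  api.files.list                                                     # GET /v1/files
  api.files.fetch('file-XjGxS3KTG0uNmNOK362iJua3')                   # GET /v1/files/:id
  api.files.get_content('file-XjGxS3KTG0uNmNOK362iJua3')             # GET /v1/files/:id/content, raw body
  api.files.delete('file-XjGxS3KTG0uNmNOK362iJua3')                  # DELETE /v1/files/:id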