llm.rb 0.4.0 → 0.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/LICENSE +17 -0
- data/lib/llm/version.rb +1 -1
- data/llm.gemspec +2 -3
- metadata +2 -24
- data/spec/anthropic/completion_spec.rb +0 -96
- data/spec/anthropic/embedding_spec.rb +0 -25
- data/spec/anthropic/models_spec.rb +0 -21
- data/spec/gemini/completion_spec.rb +0 -85
- data/spec/gemini/conversation_spec.rb +0 -31
- data/spec/gemini/embedding_spec.rb +0 -25
- data/spec/gemini/files_spec.rb +0 -124
- data/spec/gemini/images_spec.rb +0 -39
- data/spec/gemini/models_spec.rb +0 -21
- data/spec/llm/conversation_spec.rb +0 -261
- data/spec/ollama/completion_spec.rb +0 -43
- data/spec/ollama/conversation_spec.rb +0 -31
- data/spec/ollama/embedding_spec.rb +0 -24
- data/spec/ollama/models_spec.rb +0 -20
- data/spec/openai/audio_spec.rb +0 -55
- data/spec/openai/completion_spec.rb +0 -116
- data/spec/openai/embedding_spec.rb +0 -25
- data/spec/openai/files_spec.rb +0 -204
- data/spec/openai/images_spec.rb +0 -91
- data/spec/openai/models_spec.rb +0 -21
- data/spec/openai/responses_spec.rb +0 -51
- data/spec/readme_spec.rb +0 -61
- data/spec/setup.rb +0 -28
    
        checksums.yaml
    CHANGED
    
    | @@ -1,7 +1,7 @@ | |
| 1 1 | 
             
            ---
         | 
| 2 2 | 
             
            SHA256:
         | 
| 3 | 
            -
              metadata.gz:  | 
| 4 | 
            -
              data.tar.gz:  | 
| 3 | 
            +
              metadata.gz: 46af7f9487355b1ad33501bdc2602f2bfbb9ac3a7a3377eb601d367167373b69
         | 
| 4 | 
            +
              data.tar.gz: f1bb4dc1b423e5335d19540c664af714d523ecded97476fb379711f2015fa93a
         | 
| 5 5 | 
             
            SHA512:
         | 
| 6 | 
            -
              metadata.gz:  | 
| 7 | 
            -
              data.tar.gz:  | 
| 6 | 
            +
              metadata.gz: b249df823bedda3041df65b4f17b861d7529737855c2395dbcb978838098eecb0950ec935e06f290d9737f7b780c64a42cd21bf54e8077360947c1e70c3bad04
         | 
| 7 | 
            +
              data.tar.gz: 0a35370e5cd41488dfcbc05259c28ac906e1a3b217f4882aebfb87a8ee5b9d04ca771df9ed77d32efc0782510bc35c33db7a0704a81841ae1b8f9d832d5f3af7
         | 
    
        data/LICENSE
    ADDED
    
    | @@ -0,0 +1,17 @@ | |
| 1 | 
            +
            Copyright (C) 2025
         | 
| 2 | 
            +
            Antar Azri <azantar@proton.me>
         | 
| 3 | 
            +
            0x1eef <0x1eef@proton.me>
         | 
| 4 | 
            +
             | 
| 5 | 
            +
            Permission to use, copy, modify, and/or distribute this
         | 
| 6 | 
            +
            software for any purpose with or without fee is hereby
         | 
| 7 | 
            +
            granted.
         | 
| 8 | 
            +
             | 
| 9 | 
            +
            THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS
         | 
| 10 | 
            +
            ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
         | 
| 11 | 
            +
            IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO
         | 
| 12 | 
            +
            EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
         | 
| 13 | 
            +
            INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
         | 
| 14 | 
            +
            RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
         | 
| 15 | 
            +
            ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,
         | 
| 16 | 
            +
            ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
         | 
| 17 | 
            +
            OF THIS SOFTWARE.
         | 
    
        data/lib/llm/version.rb
    CHANGED
    
    
    
        data/llm.gemspec
    CHANGED
    
    | @@ -21,10 +21,9 @@ Gem::Specification.new do |spec| | |
| 21 21 | 
             
              spec.metadata["source_code_uri"] = "https://github.com/llmrb/llm"
         | 
| 22 22 |  | 
| 23 23 | 
             
              spec.files = Dir[
         | 
| 24 | 
            -
                "README.md", "LICENSE | 
| 24 | 
            +
                "README.md", "LICENSE",
         | 
| 25 25 | 
             
                "lib/*.rb", "lib/**/*.rb",
         | 
| 26 | 
            -
                " | 
| 27 | 
            -
                "share/llm/models/*.yml", "llm.gemspec"
         | 
| 26 | 
            +
                "llm.gemspec"
         | 
| 28 27 | 
             
              ]
         | 
| 29 28 | 
             
              spec.require_paths = ["lib"]
         | 
| 30 29 |  | 
    
        metadata
    CHANGED
    
    | @@ -1,7 +1,7 @@ | |
| 1 1 | 
             
            --- !ruby/object:Gem::Specification
         | 
| 2 2 | 
             
            name: llm.rb
         | 
| 3 3 | 
             
            version: !ruby/object:Gem::Version
         | 
| 4 | 
            -
              version: 0.4. | 
| 4 | 
            +
              version: 0.4.1
         | 
| 5 5 | 
             
            platform: ruby
         | 
| 6 6 | 
             
            authors:
         | 
| 7 7 | 
             
            - Antar Azri
         | 
| @@ -147,6 +147,7 @@ executables: [] | |
| 147 147 | 
             
            extensions: []
         | 
| 148 148 | 
             
            extra_rdoc_files: []
         | 
| 149 149 | 
             
            files:
         | 
| 150 | 
            +
            - LICENSE
         | 
| 150 151 | 
             
            - README.md
         | 
| 151 152 | 
             
            - lib/json/schema.rb
         | 
| 152 153 | 
             
            - lib/json/schema/array.rb
         | 
| @@ -213,29 +214,6 @@ files: | |
| 213 214 | 
             
            - lib/llm/utils.rb
         | 
| 214 215 | 
             
            - lib/llm/version.rb
         | 
| 215 216 | 
             
            - llm.gemspec
         | 
| 216 | 
            -
            - spec/anthropic/completion_spec.rb
         | 
| 217 | 
            -
            - spec/anthropic/embedding_spec.rb
         | 
| 218 | 
            -
            - spec/anthropic/models_spec.rb
         | 
| 219 | 
            -
            - spec/gemini/completion_spec.rb
         | 
| 220 | 
            -
            - spec/gemini/conversation_spec.rb
         | 
| 221 | 
            -
            - spec/gemini/embedding_spec.rb
         | 
| 222 | 
            -
            - spec/gemini/files_spec.rb
         | 
| 223 | 
            -
            - spec/gemini/images_spec.rb
         | 
| 224 | 
            -
            - spec/gemini/models_spec.rb
         | 
| 225 | 
            -
            - spec/llm/conversation_spec.rb
         | 
| 226 | 
            -
            - spec/ollama/completion_spec.rb
         | 
| 227 | 
            -
            - spec/ollama/conversation_spec.rb
         | 
| 228 | 
            -
            - spec/ollama/embedding_spec.rb
         | 
| 229 | 
            -
            - spec/ollama/models_spec.rb
         | 
| 230 | 
            -
            - spec/openai/audio_spec.rb
         | 
| 231 | 
            -
            - spec/openai/completion_spec.rb
         | 
| 232 | 
            -
            - spec/openai/embedding_spec.rb
         | 
| 233 | 
            -
            - spec/openai/files_spec.rb
         | 
| 234 | 
            -
            - spec/openai/images_spec.rb
         | 
| 235 | 
            -
            - spec/openai/models_spec.rb
         | 
| 236 | 
            -
            - spec/openai/responses_spec.rb
         | 
| 237 | 
            -
            - spec/readme_spec.rb
         | 
| 238 | 
            -
            - spec/setup.rb
         | 
| 239 217 | 
             
            homepage: https://github.com/llmrb/llm
         | 
| 240 218 | 
             
            licenses:
         | 
| 241 219 | 
             
            - 0BSD
         | 
| @@ -1,96 +0,0 @@ | |
| 1 | 
            -
            # frozen_string_literal: true
         | 
| 2 | 
            -
             | 
| 3 | 
            -
            require "setup"
         | 
| 4 | 
            -
             | 
| 5 | 
            -
            RSpec.describe "LLM::Anthropic: completions" do
         | 
| 6 | 
            -
              subject(:anthropic) { LLM.anthropic(token) }
         | 
| 7 | 
            -
              let(:token) { ENV["LLM_SECRET"] || "TOKEN" }
         | 
| 8 | 
            -
             | 
| 9 | 
            -
              context "when given a successful response",
         | 
| 10 | 
            -
                      vcr: {cassette_name: "anthropic/completions/successful_response"} do
         | 
| 11 | 
            -
                subject(:response) { anthropic.complete("Hello, world", :user) }
         | 
| 12 | 
            -
             | 
| 13 | 
            -
                it "returns a completion" do
         | 
| 14 | 
            -
                  expect(response).to be_a(LLM::Response::Completion)
         | 
| 15 | 
            -
                end
         | 
| 16 | 
            -
             | 
| 17 | 
            -
                it "returns a model" do
         | 
| 18 | 
            -
                  expect(response.model).to eq("claude-3-5-sonnet-20240620")
         | 
| 19 | 
            -
                end
         | 
| 20 | 
            -
             | 
| 21 | 
            -
                it "includes token usage" do
         | 
| 22 | 
            -
                  expect(response).to have_attributes(
         | 
| 23 | 
            -
                    prompt_tokens: 10,
         | 
| 24 | 
            -
                    completion_tokens: 30,
         | 
| 25 | 
            -
                    total_tokens: 40
         | 
| 26 | 
            -
                  )
         | 
| 27 | 
            -
                end
         | 
| 28 | 
            -
             | 
| 29 | 
            -
                context "with a choice" do
         | 
| 30 | 
            -
                  subject(:choice) { response.choices[0] }
         | 
| 31 | 
            -
             | 
| 32 | 
            -
                  it "has choices" do
         | 
| 33 | 
            -
                    expect(choice).to have_attributes(
         | 
| 34 | 
            -
                      role: "assistant",
         | 
| 35 | 
            -
                      content: "Hello! How can I assist you today? Feel free to ask me any questions or let me know if you need help with anything."
         | 
| 36 | 
            -
                    )
         | 
| 37 | 
            -
                  end
         | 
| 38 | 
            -
             | 
| 39 | 
            -
                  it "includes the response" do
         | 
| 40 | 
            -
                    expect(choice.extra[:response]).to eq(response)
         | 
| 41 | 
            -
                  end
         | 
| 42 | 
            -
                end
         | 
| 43 | 
            -
              end
         | 
| 44 | 
            -
             | 
| 45 | 
            -
              context "when given a URI to an image",
         | 
| 46 | 
            -
                      vcr: {cassette_name: "anthropic/completions/successful_response_uri_image"} do
         | 
| 47 | 
            -
                subject { response.choices[0].content.downcase[0..2] }
         | 
| 48 | 
            -
                let(:response) do
         | 
| 49 | 
            -
                  anthropic.complete([
         | 
| 50 | 
            -
                    "Is this image the flag of brazil ? ",
         | 
| 51 | 
            -
                    "Answer with yes or no. ",
         | 
| 52 | 
            -
                    "Nothing else.",
         | 
| 53 | 
            -
                    uri
         | 
| 54 | 
            -
                  ], :user)
         | 
| 55 | 
            -
                end
         | 
| 56 | 
            -
                let(:uri) { URI("https://upload.wikimedia.org/wikipedia/en/thumb/0/05/Flag_of_Brazil.svg/250px-Flag_of_Brazil.svg.png") }
         | 
| 57 | 
            -
             | 
| 58 | 
            -
                it "describes the image" do
         | 
| 59 | 
            -
                  is_expected.to eq("yes")
         | 
| 60 | 
            -
                end
         | 
| 61 | 
            -
              end
         | 
| 62 | 
            -
             | 
| 63 | 
            -
              context "when given a local reference to an image",
         | 
| 64 | 
            -
                      vcr: {cassette_name: "anthropic/completions/successful_response_file_image"} do
         | 
| 65 | 
            -
                subject { response.choices[0].content.downcase[0..2] }
         | 
| 66 | 
            -
                let(:response) do
         | 
| 67 | 
            -
                  anthropic.complete([
         | 
| 68 | 
            -
                    "Is this image a representation of a blue book ?",
         | 
| 69 | 
            -
                    "Answer with yes or no.",
         | 
| 70 | 
            -
                    "Nothing else.",
         | 
| 71 | 
            -
                    file
         | 
| 72 | 
            -
                  ], :user)
         | 
| 73 | 
            -
                end
         | 
| 74 | 
            -
                let(:file) { LLM::File("spec/fixtures/images/bluebook.png") }
         | 
| 75 | 
            -
             | 
| 76 | 
            -
                it "describes the image" do
         | 
| 77 | 
            -
                  is_expected.to eq("yes")
         | 
| 78 | 
            -
                end
         | 
| 79 | 
            -
              end
         | 
| 80 | 
            -
             | 
| 81 | 
            -
              context "when given an unauthorized response",
         | 
| 82 | 
            -
                      vcr: {cassette_name: "anthropic/completions/unauthorized_response"} do
         | 
| 83 | 
            -
                subject(:response) { anthropic.complete("Hello", :user) }
         | 
| 84 | 
            -
                let(:token) { "BADTOKEN" }
         | 
| 85 | 
            -
             | 
| 86 | 
            -
                it "raises an error" do
         | 
| 87 | 
            -
                  expect { response }.to raise_error(LLM::Error::Unauthorized)
         | 
| 88 | 
            -
                end
         | 
| 89 | 
            -
             | 
| 90 | 
            -
                it "includes the response" do
         | 
| 91 | 
            -
                  response
         | 
| 92 | 
            -
                rescue LLM::Error::Unauthorized => ex
         | 
| 93 | 
            -
                  expect(ex.response).to be_kind_of(Net::HTTPResponse)
         | 
| 94 | 
            -
                end
         | 
| 95 | 
            -
              end
         | 
| 96 | 
            -
            end
         | 
| @@ -1,25 +0,0 @@ | |
| 1 | 
            -
            # frozen_string_literal: true
         | 
| 2 | 
            -
             | 
| 3 | 
            -
            require "setup"
         | 
| 4 | 
            -
             | 
| 5 | 
            -
            RSpec.describe "LLM::Anthropic: embeddings" do
         | 
| 6 | 
            -
              let(:anthropic) { LLM.anthropic(token) }
         | 
| 7 | 
            -
              let(:token) { ENV["LLM_SECRET"] || "TOKEN" }
         | 
| 8 | 
            -
             | 
| 9 | 
            -
              context "when given a successful response",
         | 
| 10 | 
            -
                      vcr: {cassette_name: "anthropic/embeddings/successful_response"} do
         | 
| 11 | 
            -
                subject(:response) { anthropic.embed("Hello, world", token:) }
         | 
| 12 | 
            -
             | 
| 13 | 
            -
                it "returns an embedding" do
         | 
| 14 | 
            -
                  expect(response).to be_instance_of(LLM::Response::Embedding)
         | 
| 15 | 
            -
                end
         | 
| 16 | 
            -
             | 
| 17 | 
            -
                it "returns a model" do
         | 
| 18 | 
            -
                  expect(response.model).to eq("voyage-2")
         | 
| 19 | 
            -
                end
         | 
| 20 | 
            -
             | 
| 21 | 
            -
                it "has embeddings" do
         | 
| 22 | 
            -
                  expect(response.embeddings).to be_instance_of(Array)
         | 
| 23 | 
            -
                end
         | 
| 24 | 
            -
              end
         | 
| 25 | 
            -
            end
         | 
| @@ -1,21 +0,0 @@ | |
| 1 | 
            -
            # frozen_string_literal: true
         | 
| 2 | 
            -
             | 
| 3 | 
            -
            require "setup"
         | 
| 4 | 
            -
             | 
| 5 | 
            -
            RSpec.describe "LLM::Anthropic::Models" do
         | 
| 6 | 
            -
              let(:token) { ENV["LLM_SECRET"] || "TOKEN" }
         | 
| 7 | 
            -
              let(:provider) { LLM.anthropic(token) }
         | 
| 8 | 
            -
             | 
| 9 | 
            -
              context "when given a successful list operation",
         | 
| 10 | 
            -
                      vcr: {cassette_name: "anthropic/models/successful_list"} do
         | 
| 11 | 
            -
                subject { provider.models.all }
         | 
| 12 | 
            -
             | 
| 13 | 
            -
                it "is successful" do
         | 
| 14 | 
            -
                  is_expected.to be_instance_of(LLM::Response::ModelList)
         | 
| 15 | 
            -
                end
         | 
| 16 | 
            -
             | 
| 17 | 
            -
                it "returns a list of models" do
         | 
| 18 | 
            -
                  expect(subject.models).to all(be_a(LLM::Model))
         | 
| 19 | 
            -
                end
         | 
| 20 | 
            -
              end
         | 
| 21 | 
            -
            end
         | 
| @@ -1,85 +0,0 @@ | |
| 1 | 
            -
            # frozen_string_literal: true
         | 
| 2 | 
            -
             | 
| 3 | 
            -
            require "setup"
         | 
| 4 | 
            -
             | 
| 5 | 
            -
            RSpec.describe "LLM::Gemini: completions" do
         | 
| 6 | 
            -
              subject(:gemini) { LLM.gemini(token) }
         | 
| 7 | 
            -
              let(:token) { ENV["LLM_SECRET"] || "TOKEN" }
         | 
| 8 | 
            -
             | 
| 9 | 
            -
              context "when given a successful response",
         | 
| 10 | 
            -
                      vcr: {cassette_name: "gemini/completions/successful_response"} do
         | 
| 11 | 
            -
                subject(:response) { gemini.complete("Hello!", :user) }
         | 
| 12 | 
            -
             | 
| 13 | 
            -
                it "returns a completion" do
         | 
| 14 | 
            -
                  expect(response).to be_a(LLM::Response::Completion)
         | 
| 15 | 
            -
                end
         | 
| 16 | 
            -
             | 
| 17 | 
            -
                it "returns a model" do
         | 
| 18 | 
            -
                  expect(response.model).to eq("gemini-1.5-flash")
         | 
| 19 | 
            -
                end
         | 
| 20 | 
            -
             | 
| 21 | 
            -
                it "includes token usage" do
         | 
| 22 | 
            -
                  expect(response).to have_attributes(
         | 
| 23 | 
            -
                    prompt_tokens: 2,
         | 
| 24 | 
            -
                    completion_tokens: 11,
         | 
| 25 | 
            -
                    total_tokens: 13
         | 
| 26 | 
            -
                  )
         | 
| 27 | 
            -
                end
         | 
| 28 | 
            -
             | 
| 29 | 
            -
                context "with a choice" do
         | 
| 30 | 
            -
                  subject(:choice) { response.choices[0] }
         | 
| 31 | 
            -
             | 
| 32 | 
            -
                  it "has choices" do
         | 
| 33 | 
            -
                    expect(response).to be_a(LLM::Response::Completion).and have_attributes(
         | 
| 34 | 
            -
                      choices: [
         | 
| 35 | 
            -
                        have_attributes(
         | 
| 36 | 
            -
                          role: "model",
         | 
| 37 | 
            -
                          content: "Hello there! How can I help you today?\n"
         | 
| 38 | 
            -
                        )
         | 
| 39 | 
            -
                      ]
         | 
| 40 | 
            -
                    )
         | 
| 41 | 
            -
                  end
         | 
| 42 | 
            -
             | 
| 43 | 
            -
                  it "includes the response" do
         | 
| 44 | 
            -
                    expect(choice.extra[:response]).to eq(response)
         | 
| 45 | 
            -
                  end
         | 
| 46 | 
            -
                end
         | 
| 47 | 
            -
              end
         | 
| 48 | 
            -
             | 
| 49 | 
            -
              context "when given a thread of messages",
         | 
| 50 | 
            -
                      vcr: {cassette_name: "gemini/completions/successful_response_thread"} do
         | 
| 51 | 
            -
                subject(:response) do
         | 
| 52 | 
            -
                  gemini.complete "What is your name? What age are you?", :user, messages: [
         | 
| 53 | 
            -
                    {role: "user", content: "Answer all of my questions"},
         | 
| 54 | 
            -
                    {role: "user", content: "Your name is Pablo, you are 25 years old and you are my amigo"}
         | 
| 55 | 
            -
                  ]
         | 
| 56 | 
            -
                end
         | 
| 57 | 
            -
             | 
| 58 | 
            -
                it "has choices" do
         | 
| 59 | 
            -
                  expect(response).to have_attributes(
         | 
| 60 | 
            -
                    choices: [
         | 
| 61 | 
            -
                      have_attributes(
         | 
| 62 | 
            -
                        role: "model",
         | 
| 63 | 
            -
                        content: "My name is Pablo, and I am 25 years old.  ¡Amigo!\n"
         | 
| 64 | 
            -
                      )
         | 
| 65 | 
            -
                    ]
         | 
| 66 | 
            -
                  )
         | 
| 67 | 
            -
                end
         | 
| 68 | 
            -
              end
         | 
| 69 | 
            -
             | 
| 70 | 
            -
              context "when given an unauthorized response",
         | 
| 71 | 
            -
                      vcr: {cassette_name: "gemini/completions/unauthorized_response"} do
         | 
| 72 | 
            -
                subject(:response) { gemini.complete("Hello!", :user) }
         | 
| 73 | 
            -
                let(:token) { "BADTOKEN" }
         | 
| 74 | 
            -
             | 
| 75 | 
            -
                it "raises an error" do
         | 
| 76 | 
            -
                  expect { response }.to raise_error(LLM::Error::Unauthorized)
         | 
| 77 | 
            -
                end
         | 
| 78 | 
            -
             | 
| 79 | 
            -
                it "includes a response" do
         | 
| 80 | 
            -
                  response
         | 
| 81 | 
            -
                rescue LLM::Error::Unauthorized => ex
         | 
| 82 | 
            -
                  expect(ex.response).to be_kind_of(Net::HTTPResponse)
         | 
| 83 | 
            -
                end
         | 
| 84 | 
            -
              end
         | 
| 85 | 
            -
            end
         | 
| @@ -1,31 +0,0 @@ | |
| 1 | 
            -
            # frozen_string_literal: true
         | 
| 2 | 
            -
             | 
| 3 | 
            -
            require "setup"
         | 
| 4 | 
            -
             | 
| 5 | 
            -
            RSpec.describe "LLM::Chat: gemini" do
         | 
| 6 | 
            -
              let(:described_class) { LLM::Chat }
         | 
| 7 | 
            -
              let(:provider) { LLM.gemini(token) }
         | 
| 8 | 
            -
              let(:token) { ENV["LLM_SECRET"] || "TOKEN" }
         | 
| 9 | 
            -
              let(:conversation) { described_class.new(provider, **params).lazy }
         | 
| 10 | 
            -
             | 
| 11 | 
            -
              context "when asked to describe an image",
         | 
| 12 | 
            -
                      vcr: {cassette_name: "gemini/conversations/multimodal_response"} do
         | 
| 13 | 
            -
                subject { conversation.last_message }
         | 
| 14 | 
            -
             | 
| 15 | 
            -
                let(:params) { {} }
         | 
| 16 | 
            -
                let(:image) { LLM::File("spec/fixtures/images/bluebook.png") }
         | 
| 17 | 
            -
             | 
| 18 | 
            -
                before do
         | 
| 19 | 
            -
                  conversation.chat(image, :user)
         | 
| 20 | 
            -
                  conversation.chat("Describe the image with a short sentance", :user)
         | 
| 21 | 
            -
                end
         | 
| 22 | 
            -
             | 
| 23 | 
            -
                it "describes the image" do
         | 
| 24 | 
            -
                  is_expected.to have_attributes(
         | 
| 25 | 
            -
                    role: "model",
         | 
| 26 | 
            -
                    content: "That's a simple illustration of a book " \
         | 
| 27 | 
            -
                             "resting on a blue, X-shaped book stand.\n"
         | 
| 28 | 
            -
                  )
         | 
| 29 | 
            -
                end
         | 
| 30 | 
            -
              end
         | 
| 31 | 
            -
            end
         | 
| @@ -1,25 +0,0 @@ | |
| 1 | 
            -
            # frozen_string_literal: true
         | 
| 2 | 
            -
             | 
| 3 | 
            -
            require "setup"
         | 
| 4 | 
            -
             | 
| 5 | 
            -
            RSpec.describe "LLM::OpenAI: embeddings" do
         | 
| 6 | 
            -
              let(:gemini) { LLM.gemini(token) }
         | 
| 7 | 
            -
              let(:token) { ENV["LLM_SECRET"] || "TOKEN" }
         | 
| 8 | 
            -
             | 
| 9 | 
            -
              context "when given a successful response",
         | 
| 10 | 
            -
                      vcr: {cassette_name: "gemini/embeddings/successful_response"} do
         | 
| 11 | 
            -
                subject(:response) { gemini.embed("Hello, world") }
         | 
| 12 | 
            -
             | 
| 13 | 
            -
                it "returns an embedding" do
         | 
| 14 | 
            -
                  expect(response).to be_instance_of(LLM::Response::Embedding)
         | 
| 15 | 
            -
                end
         | 
| 16 | 
            -
             | 
| 17 | 
            -
                it "returns a model" do
         | 
| 18 | 
            -
                  expect(response.model).to eq("text-embedding-004")
         | 
| 19 | 
            -
                end
         | 
| 20 | 
            -
             | 
| 21 | 
            -
                it "has embeddings" do
         | 
| 22 | 
            -
                  expect(response.embeddings).to be_instance_of(Array)
         | 
| 23 | 
            -
                end
         | 
| 24 | 
            -
              end
         | 
| 25 | 
            -
            end
         | 
    
        data/spec/gemini/files_spec.rb
    DELETED
    
    | @@ -1,124 +0,0 @@ | |
| 1 | 
            -
            # frozen_string_literal: true
         | 
| 2 | 
            -
             | 
| 3 | 
            -
            require "setup"
         | 
| 4 | 
            -
             | 
| 5 | 
            -
            RSpec.describe "LLM::Gemini::Files" do
         | 
| 6 | 
            -
              let(:token) { ENV["LLM_SECRET"] || "TOKEN" }
         | 
| 7 | 
            -
              let(:provider) { LLM.gemini(token) }
         | 
| 8 | 
            -
             | 
| 9 | 
            -
              context "when given a successful create operation (bismillah.mp3)",
         | 
| 10 | 
            -
                      vcr: {cassette_name: "gemini/files/successful_create_bismillah"} do
         | 
| 11 | 
            -
                subject(:file) { provider.files.create(file: LLM::File("spec/fixtures/audio/bismillah.mp3")) }
         | 
| 12 | 
            -
                after { provider.files.delete(file:) }
         | 
| 13 | 
            -
             | 
| 14 | 
            -
                it "is successful" do
         | 
| 15 | 
            -
                  expect(file).to be_instance_of(LLM::Response::File)
         | 
| 16 | 
            -
                end
         | 
| 17 | 
            -
             | 
| 18 | 
            -
                it "returns a file object" do
         | 
| 19 | 
            -
                  expect(file).to have_attributes(
         | 
| 20 | 
            -
                    name: instance_of(String),
         | 
| 21 | 
            -
                    display_name: "bismillah.mp3"
         | 
| 22 | 
            -
                  )
         | 
| 23 | 
            -
                end
         | 
| 24 | 
            -
              end
         | 
| 25 | 
            -
             | 
| 26 | 
            -
              context "when given a successful delete operation (bismillah.mp3)",
         | 
| 27 | 
            -
                      vcr: {cassette_name: "gemini/files/successful_delete_bismillah"} do
         | 
| 28 | 
            -
                let(:file) { provider.files.create(file: LLM::File("spec/fixtures/audio/bismillah.mp3")) }
         | 
| 29 | 
            -
                subject { provider.files.delete(file:) }
         | 
| 30 | 
            -
             | 
| 31 | 
            -
                it "is successful" do
         | 
| 32 | 
            -
                  is_expected.to be_instance_of(Net::HTTPOK)
         | 
| 33 | 
            -
                end
         | 
| 34 | 
            -
              end
         | 
| 35 | 
            -
             | 
| 36 | 
            -
              context "when given a successful get operation (bismillah.mp3)",
         | 
| 37 | 
            -
                      vcr: {cassette_name: "gemini/files/successful_get_bismillah"} do
         | 
| 38 | 
            -
                let(:file) { provider.files.create(file: LLM::File("spec/fixtures/audio/bismillah.mp3")) }
         | 
| 39 | 
            -
                subject { provider.files.get(file:) }
         | 
| 40 | 
            -
                after { provider.files.delete(file:) }
         | 
| 41 | 
            -
             | 
| 42 | 
            -
                it "is successful" do
         | 
| 43 | 
            -
                  is_expected.to be_instance_of(LLM::Response::File)
         | 
| 44 | 
            -
                end
         | 
| 45 | 
            -
             | 
| 46 | 
            -
                it "returns a file object" do
         | 
| 47 | 
            -
                  is_expected.to have_attributes(
         | 
| 48 | 
            -
                    name: instance_of(String),
         | 
| 49 | 
            -
                    display_name: "bismillah.mp3"
         | 
| 50 | 
            -
                  )
         | 
| 51 | 
            -
                end
         | 
| 52 | 
            -
              end
         | 
| 53 | 
            -
             | 
| 54 | 
            -
              context "when given a successful translation operation (bismillah.mp3)",
         | 
| 55 | 
            -
                      vcr: {cassette_name: "gemini/files/successful_translation_bismillah"} do
         | 
| 56 | 
            -
                subject { bot.last_message.content }
         | 
| 57 | 
            -
                let(:file) { provider.files.create(file: LLM::File("spec/fixtures/audio/bismillah.mp3")) }
         | 
| 58 | 
            -
                let(:bot) { LLM::Chat.new(provider).lazy }
         | 
| 59 | 
            -
                after { provider.files.delete(file:) }
         | 
| 60 | 
            -
             | 
| 61 | 
            -
                before do
         | 
| 62 | 
            -
                  bot.chat file
         | 
| 63 | 
            -
                  bot.chat "Translate the contents of the audio file into English"
         | 
| 64 | 
            -
                  bot.chat "The audio is referenced in the first message I sent to you"
         | 
| 65 | 
            -
                  bot.chat "Provide no other content except the translation"
         | 
| 66 | 
            -
                end
         | 
| 67 | 
            -
             | 
| 68 | 
            -
                it "translates the audio clip" do
         | 
| 69 | 
            -
                  is_expected.to eq("In the name of Allah, the Most Gracious, the Most Merciful.\n")
         | 
| 70 | 
            -
                end
         | 
| 71 | 
            -
              end
         | 
| 72 | 
            -
             | 
| 73 | 
            -
              context "when given a successful translation operation (alhamdullilah.mp3)",
         | 
| 74 | 
            -
                      vcr: {cassette_name: "gemini/files/successful_translation_alhamdullilah"} do
         | 
| 75 | 
            -
                subject { bot.last_message.content }
         | 
| 76 | 
            -
                let(:file) { provider.files.create(file: LLM::File("spec/fixtures/audio/alhamdullilah.mp3")) }
         | 
| 77 | 
            -
                let(:bot) { LLM::Chat.new(provider).lazy }
         | 
| 78 | 
            -
                after { provider.files.delete(file:) }
         | 
| 79 | 
            -
             | 
| 80 | 
            -
                before do
         | 
| 81 | 
            -
                  bot.chat [
         | 
| 82 | 
            -
                    "Translate the contents of the audio file into English",
         | 
| 83 | 
            -
                    "Provide no other content except the translation",
         | 
| 84 | 
            -
                    file
         | 
| 85 | 
            -
                  ]
         | 
| 86 | 
            -
                end
         | 
| 87 | 
            -
             | 
| 88 | 
            -
                it "translates the audio clip" do
         | 
| 89 | 
            -
                  is_expected.to eq("All praise is due to Allah, Lord of the worlds.\n")
         | 
| 90 | 
            -
                end
         | 
| 91 | 
            -
              end
         | 
| 92 | 
            -
             | 
| 93 | 
            -
              context "when given a successful all operation",
         | 
| 94 | 
            -
                      vcr: {cassette_name: "gemini/files/successful_all"} do
         | 
| 95 | 
            -
                let!(:files) do
         | 
| 96 | 
            -
                  [
         | 
| 97 | 
            -
                    provider.files.create(file: LLM::File("spec/fixtures/audio/bismillah.mp3")),
         | 
| 98 | 
            -
                    provider.files.create(file: LLM::File("spec/fixtures/audio/alhamdullilah.mp3"))
         | 
| 99 | 
            -
                  ]
         | 
| 100 | 
            -
                end
         | 
| 101 | 
            -
             | 
| 102 | 
            -
                subject(:response) { provider.files.all }
         | 
| 103 | 
            -
                after { files.each { |file| provider.files.delete(file:) } }
         | 
| 104 | 
            -
             | 
| 105 | 
            -
                it "is successful" do
         | 
| 106 | 
            -
                  expect(response).to be_instance_of(LLM::Response::FileList)
         | 
| 107 | 
            -
                end
         | 
| 108 | 
            -
             | 
| 109 | 
            -
                it "returns an array of file objects" do
         | 
| 110 | 
            -
                  expect(response).to match_array(
         | 
| 111 | 
            -
                    [
         | 
| 112 | 
            -
                      have_attributes(
         | 
| 113 | 
            -
                        name: instance_of(String),
         | 
| 114 | 
            -
                        display_name: "bismillah.mp3"
         | 
| 115 | 
            -
                      ),
         | 
| 116 | 
            -
                      have_attributes(
         | 
| 117 | 
            -
                        name: instance_of(String),
         | 
| 118 | 
            -
                        display_name: "alhamdullilah.mp3"
         | 
| 119 | 
            -
                      )
         | 
| 120 | 
            -
                    ]
         | 
| 121 | 
            -
                  )
         | 
| 122 | 
            -
                end
         | 
| 123 | 
            -
              end
         | 
| 124 | 
            -
            end
         | 
    
        data/spec/gemini/images_spec.rb
    DELETED
    
    | @@ -1,39 +0,0 @@ | |
| 1 | 
            -
            # frozen_string_literal: true
         | 
| 2 | 
            -
             | 
| 3 | 
            -
            require "setup"
         | 
| 4 | 
            -
             | 
| 5 | 
            -
            RSpec.describe "LLM::Gemini::Images" do
         | 
| 6 | 
            -
              let(:token) { ENV["LLM_SECRET"] || "TOKEN" }
         | 
| 7 | 
            -
              let(:provider) { LLM.gemini(token) }
         | 
| 8 | 
            -
             | 
| 9 | 
            -
              context "when given a successful create operation",
         | 
| 10 | 
            -
                    vcr: {cassette_name: "gemini/images/successful_create"} do
         | 
| 11 | 
            -
                subject(:response) { provider.images.create(prompt: "A dog on a rocket to the moon") }
         | 
| 12 | 
            -
             | 
| 13 | 
            -
                it "is successful" do
         | 
| 14 | 
            -
                  expect(response).to be_instance_of(LLM::Response::Image)
         | 
| 15 | 
            -
                end
         | 
| 16 | 
            -
             | 
| 17 | 
            -
                it "returns an IO-like object" do
         | 
| 18 | 
            -
                  expect(response.images[0]).to be_instance_of(StringIO)
         | 
| 19 | 
            -
                end
         | 
| 20 | 
            -
              end
         | 
| 21 | 
            -
             | 
| 22 | 
            -
              context "when given a successful edit operation",
         | 
| 23 | 
            -
                    vcr: {cassette_name: "gemini/images/successful_edit"} do
         | 
| 24 | 
            -
                subject(:response) do
         | 
| 25 | 
            -
                  provider.images.edit(
         | 
| 26 | 
            -
                    image: LLM::File("spec/fixtures/images/bluebook.png"),
         | 
| 27 | 
            -
                    prompt: "Book is floating in the clouds"
         | 
| 28 | 
            -
                  )
         | 
| 29 | 
            -
                end
         | 
| 30 | 
            -
             | 
| 31 | 
            -
                it "is successful" do
         | 
| 32 | 
            -
                  expect(response).to be_instance_of(LLM::Response::Image)
         | 
| 33 | 
            -
                end
         | 
| 34 | 
            -
             | 
| 35 | 
            -
                it "returns an IO-like object" do
         | 
| 36 | 
            -
                  expect(response.images[0]).to be_instance_of(StringIO)
         | 
| 37 | 
            -
                end
         | 
| 38 | 
            -
              end
         | 
| 39 | 
            -
            end
         | 
    
        data/spec/gemini/models_spec.rb
    DELETED
    
    | @@ -1,21 +0,0 @@ | |
| 1 | 
            -
            # frozen_string_literal: true
         | 
| 2 | 
            -
             | 
| 3 | 
            -
            require "setup"
         | 
| 4 | 
            -
             | 
| 5 | 
            -
            RSpec.describe "LLM::Gemini::Models" do
         | 
| 6 | 
            -
              let(:token) { ENV["LLM_SECRET"] || "TOKEN" }
         | 
| 7 | 
            -
              let(:provider) { LLM.gemini(token) }
         | 
| 8 | 
            -
             | 
| 9 | 
            -
              context "when given a successful list operation",
         | 
| 10 | 
            -
                      vcr: {cassette_name: "gemini/models/successful_list"} do
         | 
| 11 | 
            -
                subject { provider.models.all }
         | 
| 12 | 
            -
             | 
| 13 | 
            -
                it "is successful" do
         | 
| 14 | 
            -
                  is_expected.to be_instance_of(LLM::Response::ModelList)
         | 
| 15 | 
            -
                end
         | 
| 16 | 
            -
             | 
| 17 | 
            -
                it "returns a list of models" do
         | 
| 18 | 
            -
                  expect(subject.models).to all(be_a(LLM::Model))
         | 
| 19 | 
            -
                end
         | 
| 20 | 
            -
              end
         | 
| 21 | 
            -
            end
         |