llm.rb 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. checksums.yaml +7 -0
  2. data/LICENSE.txt +21 -0
  3. data/README.md +146 -0
  4. data/lib/llm/conversation.rb +38 -0
  5. data/lib/llm/core_ext/ostruct.rb +37 -0
  6. data/lib/llm/error.rb +28 -0
  7. data/lib/llm/file.rb +66 -0
  8. data/lib/llm/http_client.rb +29 -0
  9. data/lib/llm/lazy_conversation.rb +39 -0
  10. data/lib/llm/message.rb +55 -0
  11. data/lib/llm/message_queue.rb +47 -0
  12. data/lib/llm/provider.rb +114 -0
  13. data/lib/llm/providers/anthropic/error_handler.rb +32 -0
  14. data/lib/llm/providers/anthropic/format.rb +31 -0
  15. data/lib/llm/providers/anthropic/response_parser.rb +29 -0
  16. data/lib/llm/providers/anthropic.rb +63 -0
  17. data/lib/llm/providers/gemini/error_handler.rb +43 -0
  18. data/lib/llm/providers/gemini/format.rb +31 -0
  19. data/lib/llm/providers/gemini/response_parser.rb +31 -0
  20. data/lib/llm/providers/gemini.rb +64 -0
  21. data/lib/llm/providers/ollama/error_handler.rb +32 -0
  22. data/lib/llm/providers/ollama/format.rb +28 -0
  23. data/lib/llm/providers/ollama/response_parser.rb +18 -0
  24. data/lib/llm/providers/ollama.rb +51 -0
  25. data/lib/llm/providers/openai/error_handler.rb +32 -0
  26. data/lib/llm/providers/openai/format.rb +28 -0
  27. data/lib/llm/providers/openai/response_parser.rb +35 -0
  28. data/lib/llm/providers/openai.rb +62 -0
  29. data/lib/llm/response/completion.rb +50 -0
  30. data/lib/llm/response/embedding.rb +23 -0
  31. data/lib/llm/response.rb +24 -0
  32. data/lib/llm/version.rb +5 -0
  33. data/lib/llm.rb +47 -0
  34. data/llm.gemspec +40 -0
  35. data/spec/anthropic/completion_spec.rb +76 -0
  36. data/spec/gemini/completion_spec.rb +80 -0
  37. data/spec/gemini/embedding_spec.rb +33 -0
  38. data/spec/llm/conversation_spec.rb +56 -0
  39. data/spec/llm/lazy_conversation_spec.rb +110 -0
  40. data/spec/ollama/completion_spec.rb +52 -0
  41. data/spec/ollama/embedding_spec.rb +15 -0
  42. data/spec/openai/completion_spec.rb +99 -0
  43. data/spec/openai/embedding_spec.rb +33 -0
  44. data/spec/readme_spec.rb +64 -0
  45. data/spec/setup.rb +29 -0
  46. metadata +194 -0
data/spec/readme_spec.rb ADDED
@@ -0,0 +1,64 @@
+ # frozen_string_literal: true
+
+ require "setup"
+ require "test/cmd"
+
+ RSpec.describe "The README examples" do
+   before { ENV["key"] = key }
+   after { ENV["key"] = nil }
+   let(:key) { "" }
+
+   context "when given the lazy conversation example" do
+     subject(:command) do
+       cmd RbConfig.ruby,
+           "-Ilib",
+           "-r", webmock("lazy_conversation.rb"),
+           readme_example("lazy_conversation.rb")
+     end
+
+     let(:actual_conversation) do
+       command.stdout.each_line.map(&:strip)
+     end
+
+     let(:expected_conversation) do
+       [
+         "[system] You are a friendly chatbot. Sometimes, you like to tell a joke.",
+         "But the joke must be based on the given inputs.",
+         "I will provide you a set of messages. Reply to all of them.",
+         "A message is considered unanswered if there is no corresponding assistant response.",
+
+         "[user] What color is the sky?",
+         "[user] What color is an orange?",
+         "[user] I like Ruby",
+
+         "[assistant] The sky is typically blue during the day, but it can have beautiful",
+         "hues of pink, orange, and purple during sunset! As for an orange,",
+         "it's typically orange in color - funny how that works, right?",
+         "I love Ruby too! Did you know that a Ruby is not only a beautiful",
+         "gemstone, but it's also a programming language that's both elegant",
+         "and powerful! Speaking of colors, why did the orange stop?",
+         "Because it ran out of juice!"
+       ].map(&:strip)
+     end
+
+     it "is successful" do
+       is_expected.to be_success
+     end
+
+     it "emits output" do
+       expect(join(actual_conversation)).to eq(join(expected_conversation))
+     end
+   end
+
+   def webmock(example)
+     File.join(Dir.getwd, "share", "llm", "webmocks", example)
+   end
+
+   def readme_example(example)
+     File.join(Dir.getwd, "share", "llm", "examples", example)
+   end
+
+   def join(lines)
+     lines.reject(&:empty?).join("\n")
+   end
+ end
data/spec/setup.rb ADDED
@@ -0,0 +1,29 @@
+ # frozen_string_literal: true
+
+ require "llm"
+ require "webmock/rspec"
+
+ RSpec.configure do |config|
+   config.disable_monkey_patching!
+
+   config.expect_with :rspec do |c|
+     c.syntax = :expect
+   end
+
+   config.include Module.new {
+     def request_fixture(file)
+       path = File.join(fixtures, "requests", file)
+       File.read(path).chomp
+     end
+
+     def response_fixture(file)
+       path = File.join(fixtures, "responses", file)
+       File.read(path).chomp
+     end
+     alias_method :fixture, :response_fixture
+
+     def fixtures
+       File.join(__dir__, "fixtures")
+     end
+   }
+ end
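The module above is mixed into every example group, so provider specs can read canned HTTP payloads and serve them through WebMock. A minimal sketch of how the response_fixture helper might be combined with WebMock's stub_request follows; the fixture filename and endpoint URL are illustrative assumptions, not files or routes shipped with the gem.

# frozen_string_literal: true

require "setup"
require "net/http"

RSpec.describe "serving a recorded response" do
  # Hypothetical fixture and endpoint, used only to illustrate the
  # response_fixture helper defined in spec/setup.rb.
  let(:endpoint) { URI("http://localhost:11434/api/chat") }
  let(:body) { response_fixture("ollama/completion.json") }

  before do
    # WebMock intercepts the outgoing request and returns the canned payload.
    stub_request(:post, endpoint.to_s)
      .to_return(status: 200, body: body, headers: {"Content-Type" => "application/json"})
  end

  it "returns the fixture body" do
    res = Net::HTTP.post(endpoint, "{}", "Content-Type" => "application/json")
    expect(res.body).to eq(body)
  end
end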
metadata ADDED
@@ -0,0 +1,194 @@
+ --- !ruby/object:Gem::Specification
+ name: llm.rb
+ version: !ruby/object:Gem::Version
+   version: 0.1.0
+ platform: ruby
+ authors:
+ - Antar Azri
+ - '0x1eef'
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2025-03-25 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: net-http
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 0.6.0
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 0.6.0
+ - !ruby/object:Gem::Dependency
+   name: json
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: webmock
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 3.24.0
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 3.24.0
+ - !ruby/object:Gem::Dependency
+   name: yard
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 0.9.37
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 0.9.37
+ - !ruby/object:Gem::Dependency
+   name: kramdown
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '2.4'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '2.4'
+ - !ruby/object:Gem::Dependency
+   name: webrick
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.8'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.8'
+ - !ruby/object:Gem::Dependency
+   name: test-cmd.rb
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 0.12.0
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 0.12.0
+ description: llm.rb is a lightweight Ruby library that provides a common interface
+   and set of functionality for multiple Large Language Models (LLMs). It is designed
+   to be simple, flexible, and easy to use.
+ email:
+ - azantar@proton.me
+ - 0x1eef@proton.me
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - LICENSE.txt
+ - README.md
+ - lib/llm.rb
+ - lib/llm/conversation.rb
+ - lib/llm/core_ext/ostruct.rb
+ - lib/llm/error.rb
+ - lib/llm/file.rb
+ - lib/llm/http_client.rb
+ - lib/llm/lazy_conversation.rb
+ - lib/llm/message.rb
+ - lib/llm/message_queue.rb
+ - lib/llm/provider.rb
+ - lib/llm/providers/anthropic.rb
+ - lib/llm/providers/anthropic/error_handler.rb
+ - lib/llm/providers/anthropic/format.rb
+ - lib/llm/providers/anthropic/response_parser.rb
+ - lib/llm/providers/gemini.rb
+ - lib/llm/providers/gemini/error_handler.rb
+ - lib/llm/providers/gemini/format.rb
+ - lib/llm/providers/gemini/response_parser.rb
+ - lib/llm/providers/ollama.rb
+ - lib/llm/providers/ollama/error_handler.rb
+ - lib/llm/providers/ollama/format.rb
+ - lib/llm/providers/ollama/response_parser.rb
+ - lib/llm/providers/openai.rb
+ - lib/llm/providers/openai/error_handler.rb
+ - lib/llm/providers/openai/format.rb
+ - lib/llm/providers/openai/response_parser.rb
+ - lib/llm/response.rb
+ - lib/llm/response/completion.rb
+ - lib/llm/response/embedding.rb
+ - lib/llm/version.rb
+ - llm.gemspec
+ - spec/anthropic/completion_spec.rb
+ - spec/gemini/completion_spec.rb
+ - spec/gemini/embedding_spec.rb
+ - spec/llm/conversation_spec.rb
+ - spec/llm/lazy_conversation_spec.rb
+ - spec/ollama/completion_spec.rb
+ - spec/ollama/embedding_spec.rb
+ - spec/openai/completion_spec.rb
+ - spec/openai/embedding_spec.rb
+ - spec/readme_spec.rb
+ - spec/setup.rb
+ homepage: https://github.com/llmrb/llm
+ licenses:
+ - MIT
+ metadata:
+   homepage_uri: https://github.com/llmrb/llm
+   source_code_uri: https://github.com/llmrb/llm
+   changelog_uri: https://github.com/llmrb/llm/blob/main/CHANGELOG.md
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: 3.0.0
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubygems_version: 3.5.23
+ signing_key:
+ specification_version: 4
+ summary: llm.rb is a lightweight Ruby library that provides a common interface and
+   set of functionality for multiple Large Language Models (LLMs). It is designed to
+   be simple, flexible, and easy to use.
+ test_files: []
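Given the gemspec metadata above (Ruby >= 3.0.0, runtime dependencies on net-http and json), pulling the release into a project might look like the following sketch; the pessimistic version constraint is an assumption based on the 0.1.0 release, and the library is then loaded with require "llm", just as spec/setup.rb does.

# Gemfile: a minimal sketch, not taken from the gem's own documentation
source "https://rubygems.org"

gem "llm.rb", "~> 0.1"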