llm_ruby 0.3.0 → 0.3.2
- checksums.yaml +4 -4
- data/CLAUDE.md +22 -0
- data/README.md +20 -1
- data/lib/llm/clients/open_ai.rb +5 -1
- data/lib/llm/config.rb +12 -0
- data/lib/llm.rb +6 -0
- data/lib/llm_ruby.rb +1 -0
- metadata +18 -1
checksums.yaml CHANGED

```diff
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 44e89bad9131f49c40cf578fac3790c1e3e5f266aead751ecea606ac2a4ad5aa
+  data.tar.gz: 03ccc6124c9c3d92dda0828100cec0c9beb0d4bb90baa464c47640528265cc3c
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: c5bee46644a205067b9e33a749e042a971b066f978de234bec0841f89cea4aa8b3d2b42e0d890b073ad78ebd770416aaae2fbe5181e5e6926d59bd7b9c7dc58d
+  data.tar.gz: f35f5f988cb54f199d52e324385a0a16e1eee720585a6118d21459e4a1ad7e526f7e77618d0124ea4b35da18a0c078f6f00648cd90203bd5517806289956d2f1
```
data/CLAUDE.md ADDED

```diff
@@ -0,0 +1,22 @@
+# LLM Ruby Development Guide
+
+## Build & Test Commands
+- Setup: `bin/setup`
+- Install locally: `bundle exec rake install`
+- Run all tests: `bundle exec rake spec`
+- Run single test: `bundle exec rspec spec/path/to_spec.rb:LINE_NUMBER`
+- Run linter: `bundle exec rake standard`
+- Interactive console: `bin/console`
+
+## Code Style Guidelines
+- Use `frozen_string_literal: true` at the top of all Ruby files
+- Follow Standard Ruby style (`standard` gem)
+- Naming: snake_case for methods/variables, CamelCase for classes
+- Use double quotes consistently for strings
+- Explicit returns are optional (prefer implicit returns when possible)
+- Class organization: constants first, then class methods, then initialize, then instance methods
+- Keep methods and classes small and focused (single responsibility)
+- Cover all code with tests in the `spec` directory
+- API credentials should be read from environment variables
+- Use proper type inflection (e.g., "llm" => "LLM", "open_ai" => "OpenAI")
+- Use VCR/webmock for testing HTTP interactions
```
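Several of the style rules in the new CLAUDE.md are easiest to see together in one file. A made-up sketch (the class below is illustrative only, not part of the gem) that follows the ordering, quoting, and implicit-return guidelines:

```ruby
# frozen_string_literal: true

# Hypothetical class, written only to illustrate the CLAUDE.md guidelines.
class ExampleClient
  DEFAULT_MODEL = "gpt-4o" # constants first; double-quoted strings

  def self.default # class methods next
    new(model: DEFAULT_MODEL)
  end

  def initialize(model:) # then initialize
    @model = model
  end

  def describe # then instance methods; implicit return
    "client for #{@model}"
  end
end
```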
data/README.md CHANGED

````diff
@@ -5,7 +5,6 @@
 [](https://opensource.org/licenses/MIT)
 
 
-
 LLMRuby is a Ruby gem that provides a consistent interface for interacting with multiple Large Language Model (LLM) APIs. Most OpenAI, Anthropic and Gemini models are currently supported.
 
 ## Installation
@@ -172,6 +171,26 @@ export ANTHROPIC_API_KEY=your_api_key_here
 export GEMINI_API_KEY=your_api_key_here
 ```
 
+## Structured Outputs
+
+OpenAI and Gemini models can be configured to generate responses that adhere to a provided schema. Even though each uses a different format for configuring this schema, `llm_ruby` handles the translation for you, so that you can share a single schema definition across models.
+
+```ruby
+
+llm = LLM.from_string!("gpt-4o")
+
+# Create a client
+client = llm.client
+
+# Send a chat message
+response_format = LLM::Schema.new("test_schema", {"type" => "object", "properties" => {"name" => {"type" => "string"}, "age" => {"type" => "integer"}}, "additionalProperties" => false, "required" => ["name", "age"]})
+# or load the schema from a file: LLM::Schema.from_file('myschema.json')
+response = client.chat([{role: :user, content: "Hello, world!"}], response_format: response_format)
+
+response.structured_output[:name] # Alex
+response.structured_output_object.name # Alex
+```
+
 ## Development
 
 After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake spec` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.
````
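The claim above that a single schema definition can be shared across models invites a quick sketch. The schema shape and the OpenAI model string come straight from the README excerpt; the Gemini model name is an assumption this diff does not confirm:

```ruby
require "llm_ruby"

# One LLM::Schema instance, reused across providers; llm_ruby translates it
# into each provider's native response-schema format.
schema = LLM::Schema.new("person", {
  "type" => "object",
  "properties" => {"name" => {"type" => "string"}, "age" => {"type" => "integer"}},
  "additionalProperties" => false,
  "required" => ["name", "age"]
})

["gpt-4o", "gemini-1.5-flash"].each do |model| # Gemini name is hypothetical
  response = LLM.from_string!(model).client.chat(
    [{role: :user, content: "Describe Alex, who is 30."}],
    response_format: schema
  )
  puts response.structured_output[:name]
end
```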
data/lib/llm/clients/open_ai.rb CHANGED

```diff
@@ -11,6 +11,7 @@ class LLM
 
       def initialize(llm:)
         @llm = llm
+        @logger = LLM.config.logger
       end
 
       def chat(messages, options = {})
@@ -95,7 +96,10 @@ class LLM
       end
 
       def post_url(url, **kwargs)
-        self.class.post(url, **kwargs.merge(headers: default_headers))
+        @logger.debug("Request: #{kwargs.inspect}")
+        resp = self.class.post(url, **kwargs.merge(headers: default_headers))
+        @logger.debug("Response: #{resp.body}")
+        resp
       end
 
       def post_url_streaming(url, **kwargs, &block)
```
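A short sketch of what the new logging hooks enable. It assumes the logger attribute on `LLM.config` is writable, which the config.rb body (not shown in this diff) does not confirm:

```ruby
require "logger"
require "llm_ruby"

# Assumption: LLM::Config exposes logger as a writable attribute.
LLM.config.logger = Logger.new($stdout, level: Logger::DEBUG)

client = LLM.from_string!("gpt-4o").client
client.chat([{role: :user, content: "Hello, world!"}])
# post_url now emits "Request: {...}" and "Response: ..." at debug level.
```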
data/lib/llm/config.rb ADDED
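The diff viewer captured no body for this new file, only the +12 line count in the summary. Going purely on the `LLM.config.logger` call added in open_ai.rb and the new `logger ~> 1.6.0` runtime dependency in the gemspec below, a plausible sketch (a guess, not the actual file contents) might be:

```ruby
# frozen_string_literal: true

# Hypothetical reconstruction: the real lib/llm/config.rb is not shown in
# this diff. Only a readable `logger` attribute is implied by the changeset.
require "logger"

class LLM
  class Config
    attr_accessor :logger

    def initialize
      @logger = Logger.new(File::NULL) # silent default; an assumption
    end
  end
end
```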
data/lib/llm.rb CHANGED

```diff
@@ -1,11 +1,13 @@
 # frozen_string_literal: true
 
 require "zeitwerk"
+
 loader = Zeitwerk::Loader.for_gem
 loader.inflector.inflect(
   "llm" => "LLM",
   "open_ai" => "OpenAI"
 )
+loader.ignore("#{__dir__}/llm_ruby.rb")
 loader.setup
 
 class LLM
@@ -31,6 +33,10 @@ class LLM
     @supports_structured_outputs
   end
 
+  def self.config
+    @config ||= LLM::Config.new
+  end
+
   private
 
   attr_reader :client_class
```
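Because `self.config` memoizes with `||=`, every caller shares one `LLM::Config` instance per process, so settings such as the logger persist across call sites:

```ruby
require "llm_ruby"

# The memoized accessor always hands back the same object:
LLM.config.equal?(LLM.config) # => true
```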
data/lib/llm_ruby.rb ADDED

```diff
@@ -0,0 +1 @@
+require "llm"
```
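This one-line shim exists so the conventional `require` matching the gem name works, while the `loader.ignore` call added in llm.rb keeps Zeitwerk from expecting this file to define an `LlmRuby` constant:

```ruby
# Gemfile: gem "llm_ruby"
# Bundler's default require uses the gem name, so this now resolves:
require "llm_ruby" # lib/llm_ruby.rb in turn requires "llm"
```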
metadata CHANGED

```diff
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: llm_ruby
 version: !ruby/object:Gem::Version
-  version: 0.3.0
+  version: 0.3.2
 platform: ruby
 authors:
 - Alex Gamble
@@ -51,6 +51,20 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: 2.6.0
+- !ruby/object:Gem::Dependency
+  name: logger
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 1.6.0
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 1.6.0
 - !ruby/object:Gem::Dependency
   name: dotenv
   requirement: !ruby/object:Gem::Requirement
@@ -141,6 +155,7 @@ extra_rdoc_files: []
 files:
 - ".rspec"
 - ".standard.yml"
+- CLAUDE.md
 - LICENSE.txt
 - README.md
 - Rakefile
@@ -152,12 +167,14 @@ files:
 - lib/llm/clients/gemini/response.rb
 - lib/llm/clients/open_ai.rb
 - lib/llm/clients/open_ai/response.rb
+- lib/llm/config.rb
 - lib/llm/info.rb
 - lib/llm/provider.rb
 - lib/llm/response.rb
 - lib/llm/schema.rb
 - lib/llm/stop_reason.rb
 - lib/llm/version.rb
+- lib/llm_ruby.rb
 homepage: https://github.com/agamble/llm_ruby
 licenses:
 - MIT
```