ruby_llm 0.1.0.pre49 → 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.github/workflows/cicd.yml +49 -0
- data/.gitignore +2 -1
- data/.overcommit.yml +1 -1
- data/CONTRIBUTING.md +207 -0
- data/Gemfile +1 -0
- data/README.md +50 -18
- data/lib/ruby_llm/chat.rb +1 -1
- data/lib/ruby_llm/models.json +389 -43
- data/lib/ruby_llm/models.rb +22 -16
- data/lib/ruby_llm/provider.rb +12 -2
- data/lib/ruby_llm/providers/gemini/models.rb +0 -8
- data/lib/ruby_llm/providers/openai/capabilities.rb +41 -4
- data/lib/ruby_llm/tool.rb +15 -7
- data/lib/ruby_llm/version.rb +1 -1
- data/lib/ruby_llm.rb +2 -0
- data/lib/tasks/models.rake +6 -10
- data/lib/tasks/vcr.rake +114 -0
- data/ruby_llm.gemspec +1 -0
- metadata +18 -3
- data/.rspec_status +0 -50
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: fd4d6fd8e0894176fff57bd2a3132b965afbb912108d45d94cba3b4b3106ff7c
|
4
|
+
data.tar.gz: 071a8ff4dcffcba66d042d3e822c2297b8a2c4485c4ee1fb48d0a03ec49c0547
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 5d995c552810743910286207c47d2342de7e2ee14b833b627267d4fcec3c02931650b499de048319de293914fd0c19689124f818e994e74de04e8c819b934e91
|
7
|
+
data.tar.gz: 85598c0a2b633092eb5d5fa5b13b59f03b2c3c5effd826f483d64b87f58b8caaf3637b28308d027eaeed3a03d708a0267b420556ca54206de9a9ac69ab97455c
|
data/.github/workflows/cicd.yml
CHANGED
@@ -9,10 +9,23 @@ on:
|
|
9
9
|
- 'Gemfile'
|
10
10
|
- 'Rakefile'
|
11
11
|
- 'ruby_llm.gemspec'
|
12
|
+
- '.github/workflows/cicd.yml'
|
12
13
|
pull_request:
|
13
14
|
branches: [ "main" ]
|
15
|
+
paths:
|
16
|
+
- 'lib/**'
|
17
|
+
- 'spec/**'
|
18
|
+
- 'Gemfile'
|
19
|
+
- 'Rakefile'
|
20
|
+
- 'ruby_llm.gemspec'
|
21
|
+
- '.github/workflows/cicd.yml'
|
14
22
|
workflow_call:
|
15
23
|
|
24
|
+
# Define default permissions for this workflow
|
25
|
+
permissions:
|
26
|
+
contents: read
|
27
|
+
packages: write # Needed for publishing to GitHub Packages
|
28
|
+
|
16
29
|
jobs:
|
17
30
|
test:
|
18
31
|
runs-on: ubuntu-latest
|
@@ -37,6 +50,8 @@ jobs:
|
|
37
50
|
|
38
51
|
- name: Run tests
|
39
52
|
env:
|
53
|
+
# For PRs, we use VCR cassettes
|
54
|
+
# For main branch, we use real API keys for verification
|
40
55
|
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
|
41
56
|
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
|
42
57
|
GEMINI_API_KEY: ${{ secrets.GEMINI_API_KEY }}
|
@@ -69,7 +84,40 @@ jobs:
|
|
69
84
|
ruby-version: '3.3'
|
70
85
|
bundler-cache: true
|
71
86
|
|
87
|
+
- name: Check if version has changed
|
88
|
+
id: check_version
|
89
|
+
run: |
|
90
|
+
VERSION=$(ruby -r ./lib/ruby_llm/version.rb -e "puts RubyLLM::VERSION")
|
91
|
+
echo "Current version: $VERSION"
|
92
|
+
|
93
|
+
# Try to fetch from RubyGems
|
94
|
+
PUBLISHED_VERSION=$(gem list ruby_llm -r | grep -o '[0-9]\+\.[0-9]\+\.[0-9]\+' || echo "0.0.0")
|
95
|
+
echo "Published version: $PUBLISHED_VERSION"
|
96
|
+
|
97
|
+
if [ "$VERSION" = "$PUBLISHED_VERSION" ]; then
|
98
|
+
echo "Version has not changed, skipping publish"
|
99
|
+
echo "version_changed=false" >> $GITHUB_OUTPUT
|
100
|
+
else
|
101
|
+
echo "Version has changed from $PUBLISHED_VERSION to $VERSION"
|
102
|
+
echo "version_changed=true" >> $GITHUB_OUTPUT
|
103
|
+
fi
|
104
|
+
|
105
|
+
- name: Test with real APIs before publishing
|
106
|
+
if: steps.check_version.outputs.version_changed == 'true'
|
107
|
+
run: |
|
108
|
+
echo "Removing all VCR cassettes to test against real APIs..."
|
109
|
+
rm -rf spec/fixtures/vcr_cassettes
|
110
|
+
|
111
|
+
echo "Running tests with real API calls..."
|
112
|
+
env -u CI bundle exec rspec
|
113
|
+
env:
|
114
|
+
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
|
115
|
+
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
|
116
|
+
GEMINI_API_KEY: ${{ secrets.GEMINI_API_KEY }}
|
117
|
+
DEEPSEEK_API_KEY: ${{ secrets.DEEPSEEK_API_KEY }}
|
118
|
+
|
72
119
|
- name: Publish to GPR
|
120
|
+
if: steps.check_version.outputs.version_changed == 'true'
|
73
121
|
run: |
|
74
122
|
mkdir -p $HOME/.gem
|
75
123
|
touch $HOME/.gem/credentials
|
@@ -90,6 +138,7 @@ jobs:
|
|
90
138
|
OWNER: ${{ github.repository_owner }}
|
91
139
|
|
92
140
|
- name: Publish to RubyGems
|
141
|
+
if: steps.check_version.outputs.version_changed == 'true'
|
93
142
|
run: |
|
94
143
|
mkdir -p $HOME/.gem
|
95
144
|
touch $HOME/.gem/credentials
|
data/.gitignore
CHANGED
@@ -9,6 +9,7 @@
|
|
9
9
|
/test/tmp/
|
10
10
|
/test/version_tmp/
|
11
11
|
/tmp/
|
12
|
+
.rspec_status
|
12
13
|
|
13
14
|
# Used by dotenv library to load environment variables.
|
14
15
|
.env
|
@@ -55,4 +56,4 @@ Gemfile.lock
|
|
55
56
|
# Used by RuboCop. Remote config files pulled in from inherit_from directive.
|
56
57
|
# .rubocop-https?--*
|
57
58
|
|
58
|
-
repomix-output
|
59
|
+
repomix-output.*
|
data/.overcommit.yml
CHANGED
data/CONTRIBUTING.md
ADDED
@@ -0,0 +1,207 @@
|
|
1
|
+
# Contributing to RubyLLM
|
2
|
+
|
3
|
+
First off, thank you for considering contributing to RubyLLM! It's people like you that make RubyLLM such a great tool.
|
4
|
+
|
5
|
+
## Development Setup
|
6
|
+
|
7
|
+
Here's how to get started:
|
8
|
+
|
9
|
+
```bash
|
10
|
+
# Clone the repository
|
11
|
+
gh repo clone crmne/ruby_llm
|
12
|
+
cd ruby_llm
|
13
|
+
|
14
|
+
# Install dependencies
|
15
|
+
bundle install
|
16
|
+
|
17
|
+
# Set up git hooks
|
18
|
+
overcommit --install
|
19
|
+
|
20
|
+
# Run the tests (uses VCR cassettes)
|
21
|
+
bundle exec rspec
|
22
|
+
```
|
23
|
+
|
24
|
+
## Development Workflow
|
25
|
+
|
26
|
+
We recommend using GitHub CLI to simplify the workflow:
|
27
|
+
|
28
|
+
```bash
|
29
|
+
# Create a new branch for your feature
|
30
|
+
gh repo fork crmne/ruby_llm --clone
|
31
|
+
cd ruby_llm
|
32
|
+
|
33
|
+
# Find or make an issue for the feature on GitHub and then:
|
34
|
+
gh issue develop 123 --checkout # Substitute 123 with the issue number
|
35
|
+
|
36
|
+
# Make your changes and test them
|
37
|
+
# ...
|
38
|
+
|
39
|
+
# Commit your changes
|
40
|
+
git commit
|
41
|
+
|
42
|
+
# Create a PR
|
43
|
+
gh pr create --web
|
44
|
+
```
|
45
|
+
|
46
|
+
## Model Naming Convention & Provider Strategy
|
47
|
+
|
48
|
+
When adding new providers to RubyLLM, please follow these guidelines:
|
49
|
+
|
50
|
+
### Normalized Model IDs
|
51
|
+
|
52
|
+
We use a consistent approach separating **what** (model) from **where** (provider):
|
53
|
+
|
54
|
+
```ruby
|
55
|
+
# Default way (from the native provider)
|
56
|
+
chat = RubyLLM.chat(model: "claude-3-5-sonnet")
|
57
|
+
|
58
|
+
# Same model via different provider
|
59
|
+
chat = RubyLLM.chat(model: "claude-3-5-sonnet", provider: :bedrock)
|
60
|
+
```
|
61
|
+
|
62
|
+
### Implementing a Provider
|
63
|
+
|
64
|
+
If you're adding a new provider:
|
65
|
+
|
66
|
+
1. **Use normalized model IDs** - Don't include provider prefixes in the model ID itself
|
67
|
+
2. **Add provider mapping** - Map the normalized IDs to your provider's specific format internally
|
68
|
+
3. **Preserve capabilities** - Ensure models accessed through your provider report the same capabilities as their native counterparts
|
69
|
+
4. **Update models.json** - Include your provider's models in models.json
|
70
|
+
5. **Update aliases.json** - Add entries to aliases.json for models accessible through your provider
|
71
|
+
6. **Implement refresh mechanism** - Ensure your provider supports the `list_models` method for refreshing
|
72
|
+
|
73
|
+
### Model Aliases
|
74
|
+
|
75
|
+
For providers that use complex model identifiers (like Bedrock's `anthropic.claude-3-5-sonnet-20241022-v2:0:200k`), add mappings to the global aliases.json file:
|
76
|
+
|
77
|
+
```json
|
78
|
+
{
|
79
|
+
"claude-3-5-sonnet": {
|
80
|
+
"anthropic": "claude-3-5-sonnet-20241022",
|
81
|
+
"bedrock": "anthropic.claude-3-5-sonnet-20241022-v2:0:200k",
|
82
|
+
"openrouter": "anthropic/claude-3.5-sonnet"
|
83
|
+
},
|
84
|
+
"gpt-4o": {
|
85
|
+
"openai": "gpt-4o-2024-05-13",
|
86
|
+
"bedrock": "anthropic.gpt-4o-2024-05-13",
|
87
|
+
"openrouter": "openai/gpt-4o"
|
88
|
+
}
|
89
|
+
}
|
90
|
+
```
|
91
|
+
|
92
|
+
If a model can't be found with the provided ID and provider, a `ModelNotFoundError` will be raised with an informative message. Your implementation should make this error helpful by suggesting available alternatives.
|
93
|
+
|
94
|
+
When the same model has multiple versions and context windows e.g.
|
95
|
+
|
96
|
+
```
|
97
|
+
anthropic.claude-3-5-sonnet-20240620-v1:0
|
98
|
+
anthropic.claude-3-5-sonnet-20240620-v1:0:18k
|
99
|
+
anthropic.claude-3-5-sonnet-20240620-v1:0:200k
|
100
|
+
anthropic.claude-3-5-sonnet-20240620-v1:0:51k
|
101
|
+
anthropic.claude-3-5-sonnet-20241022-v2:0
|
102
|
+
anthropic.claude-3-5-sonnet-20241022-v2:0:18k
|
103
|
+
anthropic.claude-3-5-sonnet-20241022-v2:0:200k
|
104
|
+
anthropic.claude-3-5-sonnet-20241022-v2:0:51k
|
105
|
+
```
|
106
|
+
|
107
|
+
We default all aliases to the biggest context window, and the main alias (without date) to the latest version:
|
108
|
+
|
109
|
+
```json
|
110
|
+
"claude-3-5-sonnet": {
|
111
|
+
"anthropic": "claude-3-5-sonnet-20241022",
|
112
|
+
"bedrock": "anthropic.claude-3-5-sonnet-20241022-v2:0:200k",
|
113
|
+
"openrouter": "anthropic/claude-3.5-sonnet"
|
114
|
+
},
|
115
|
+
"claude-3-5-sonnet-20241022": {
|
116
|
+
"anthropic": "claude-3-5-sonnet-20241022",
|
117
|
+
"bedrock": "anthropic.claude-3-5-sonnet-20241022-v2:0:200k",
|
118
|
+
"openrouter": "anthropic/claude-3.5-sonnet"
|
119
|
+
},
|
120
|
+
"claude-3-5-sonnet-20240620": {
|
121
|
+
"anthropic": "claude-3-5-sonnet-20240620",
|
122
|
+
"bedrock": "anthropic.claude-3-5-sonnet-20240620-v1:0:200k"
|
123
|
+
},
|
124
|
+
```
|
125
|
+
|
126
|
+
## Running Tests
|
127
|
+
|
128
|
+
Tests automatically use VCR to record and replay HTTP interactions, so you don't need real API keys for testing:
|
129
|
+
|
130
|
+
```bash
|
131
|
+
# Run all tests (using existing VCR cassettes)
|
132
|
+
bundle exec rspec
|
133
|
+
|
134
|
+
# Run a specific test file
|
135
|
+
bundle exec rspec spec/ruby_llm/chat_spec.rb
|
136
|
+
```
|
137
|
+
|
138
|
+
### Recording VCR Cassettes
|
139
|
+
|
140
|
+
When you make changes that affect API interactions, you can record new VCR cassettes.
|
141
|
+
|
142
|
+
If you have keys for all providers:
|
143
|
+
|
144
|
+
```bash
|
145
|
+
# Re-record all cassettes
|
146
|
+
bundle exec rake vcr:record[all]
|
147
|
+
```
|
148
|
+
|
149
|
+
If you only have keys for specific providers (e.g., just OpenAI):
|
150
|
+
|
151
|
+
```bash
|
152
|
+
# Set the API keys you have
|
153
|
+
export OPENAI_API_KEY=your_openai_key
|
154
|
+
|
155
|
+
# Find and remove only cassettes for OpenAI, then run tests to re-record them
|
156
|
+
bundle exec rake vcr:record[openai]
|
157
|
+
|
158
|
+
# You can also specify multiple providers
|
159
|
+
bundle exec rake vcr:record[openai,anthropic]
|
160
|
+
```
|
161
|
+
|
162
|
+
Important: After recording new cassettes, please **manually check** them for any sensitive information that might have been missed by the automatic filters.
|
163
|
+
|
164
|
+
## Adding New Tests
|
165
|
+
|
166
|
+
Tests automatically create VCR cassettes based on their descriptions, so make sure your test descriptions are unique and descriptive.
|
167
|
+
|
168
|
+
## Coding Style
|
169
|
+
|
170
|
+
We follow the [Standard Ruby](https://github.com/testdouble/standard) style. Please ensure your contributions adhere to this style.
|
171
|
+
|
172
|
+
```bash
|
173
|
+
# Check your code style
|
174
|
+
bundle exec rubocop
|
175
|
+
|
176
|
+
# Auto-fix style issues where possible
|
177
|
+
bundle exec rubocop -A
|
178
|
+
```
|
179
|
+
|
180
|
+
## Documentation
|
181
|
+
|
182
|
+
When adding new features, please include documentation updates:
|
183
|
+
|
184
|
+
- Update relevant guides in the `docs/guides/` directory
|
185
|
+
- Add inline documentation using YARD comments
|
186
|
+
- Keep the README clean and focused on helping new users get started quickly
|
187
|
+
|
188
|
+
## Philosophy
|
189
|
+
|
190
|
+
RubyLLM follows certain design philosophies and conventions. Please refer to our [Philosophy Guide](https://rubyllm.com/philosophy) to ensure your contributions align with the project's vision.
|
191
|
+
|
192
|
+
## Discussions and Issues
|
193
|
+
|
194
|
+
- For questions and discussions, please use [GitHub Discussions](https://github.com/crmne/ruby_llm/discussions)
|
195
|
+
- For bugs and feature requests, please use [GitHub Issues](https://github.com/crmne/ruby_llm/issues)
|
196
|
+
|
197
|
+
## Release Process
|
198
|
+
|
199
|
+
Gem versioning follows [Semantic Versioning](https://semver.org/):
|
200
|
+
|
201
|
+
1. MAJOR version for incompatible API changes
|
202
|
+
2. MINOR version for backwards-compatible functionality
|
203
|
+
3. PATCH version for backwards-compatible bug fixes
|
204
|
+
|
205
|
+
Releases are handled by the maintainers through the CI/CD pipeline.
|
206
|
+
|
207
|
+
Thanks for helping make RubyLLM better!
|
data/Gemfile
CHANGED
data/README.md
CHANGED
@@ -1,8 +1,8 @@
|
|
1
|
-
|
1
|
+
<img src="/docs/assets/images/logotype.svg" alt="RubyLLM" height="120" width="250">
|
2
2
|
|
3
3
|
A delightful Ruby way to work with AI. No configuration madness, no complex callbacks, no handler hell — just beautiful, expressive Ruby code.
|
4
4
|
|
5
|
-
<
|
5
|
+
<div style="display: flex; align-items: center; flex-wrap: wrap; gap: 4px;">
|
6
6
|
<img src="https://upload.wikimedia.org/wikipedia/commons/4/4d/OpenAI_Logo.svg" alt="OpenAI" height="40" width="120">
|
7
7
|
|
8
8
|
<img src="https://upload.wikimedia.org/wikipedia/commons/7/78/Anthropic_logo.svg" alt="Anthropic" height="40" width="120">
|
@@ -10,14 +10,12 @@ A delightful Ruby way to work with AI. No configuration madness, no complex call
|
|
10
10
|
<img src="https://upload.wikimedia.org/wikipedia/commons/8/8a/Google_Gemini_logo.svg" alt="Google" height="40" width="120">
|
11
11
|
|
12
12
|
<img src="https://upload.wikimedia.org/wikipedia/commons/e/ec/DeepSeek_logo.svg" alt="DeepSeek" height="40" width="120">
|
13
|
-
</
|
13
|
+
</div>
|
14
14
|
|
15
|
-
<
|
16
|
-
|
17
|
-
|
18
|
-
|
19
|
-
<a href="https://codecov.io/gh/crmne/ruby_llm"><img src="https://codecov.io/gh/crmne/ruby_llm/branch/main/graph/badge.svg" alt="codecov" /></a>
|
20
|
-
</p>
|
15
|
+
<a href="https://badge.fury.io/rb/ruby_llm"><img src="https://badge.fury.io/rb/ruby_llm.svg?dummy=unused" alt="Gem Version" /></a>
|
16
|
+
<a href="https://github.com/testdouble/standard"><img src="https://img.shields.io/badge/code_style-standard-brightgreen.svg" alt="Ruby Style Guide" /></a>
|
17
|
+
<a href="https://rubygems.org/gems/ruby_llm"><img alt="Gem Downloads" src="https://img.shields.io/gem/dt/ruby_llm"></a>
|
18
|
+
<a href="https://codecov.io/gh/crmne/ruby_llm"><img src="https://codecov.io/gh/crmne/ruby_llm/branch/main/graph/badge.svg" alt="codecov" /></a>
|
21
19
|
|
22
20
|
🤺 Battle tested at [💬 Chat with Work](https://chatwithwork.com)
|
23
21
|
|
@@ -27,6 +25,17 @@ Every AI provider comes with its own client library, its own response format, it
|
|
27
25
|
|
28
26
|
RubyLLM fixes all that. One beautiful API for everything. One consistent format. Minimal dependencies — just Faraday and Zeitwerk. Because working with AI should be a joy, not a chore.
|
29
27
|
|
28
|
+
## Features
|
29
|
+
|
30
|
+
- 💬 **Chat** with OpenAI, Anthropic, Gemini, and DeepSeek models
|
31
|
+
- 👁️ **Vision and Audio** understanding
|
32
|
+
- 📄 **PDF Analysis** for analyzing documents
|
33
|
+
- 🖼️ **Image generation** with DALL-E and other providers
|
34
|
+
- 📊 **Embeddings** for vector search and semantic analysis
|
35
|
+
- 🔧 **Tools** that let AI use your Ruby code
|
36
|
+
- 🚂 **Rails integration** to persist chats and messages with ActiveRecord
|
37
|
+
- 🌊 **Streaming** responses with proper Ruby patterns
|
38
|
+
|
30
39
|
## What makes it great
|
31
40
|
|
32
41
|
```ruby
|
@@ -43,6 +52,11 @@ chat.ask "Describe this meeting", with: { audio: "meeting.wav" }
|
|
43
52
|
# Analyze documents
|
44
53
|
chat.ask "Summarize this document", with: { pdf: "contract.pdf" }
|
45
54
|
|
55
|
+
# Stream responses in real-time
|
56
|
+
chat.ask "Tell me a story about a Ruby programmer" do |chunk|
|
57
|
+
print chunk.content
|
58
|
+
end
|
59
|
+
|
46
60
|
# Generate images
|
47
61
|
RubyLLM.paint "a sunset over mountains in watercolor style"
|
48
62
|
|
@@ -50,16 +64,22 @@ RubyLLM.paint "a sunset over mountains in watercolor style"
|
|
50
64
|
RubyLLM.embed "Ruby is elegant and expressive"
|
51
65
|
|
52
66
|
# Let AI use your code
|
53
|
-
class
|
54
|
-
description "
|
55
|
-
param :
|
56
|
-
|
57
|
-
|
58
|
-
|
67
|
+
class Weather < RubyLLM::Tool
|
68
|
+
description "Gets current weather for a location"
|
69
|
+
param :latitude, desc: "Latitude (e.g., 52.5200)"
|
70
|
+
param :longitude, desc: "Longitude (e.g., 13.4050)"
|
71
|
+
|
72
|
+
def execute(latitude:, longitude:)
|
73
|
+
url = "https://api.open-meteo.com/v1/forecast?latitude=#{latitude}&longitude=#{longitude}&current=temperature_2m,wind_speed_10m"
|
74
|
+
|
75
|
+
response = Faraday.get(url)
|
76
|
+
data = JSON.parse(response.body)
|
77
|
+
rescue => e
|
78
|
+
{ error: e.message }
|
59
79
|
end
|
60
80
|
end
|
61
81
|
|
62
|
-
chat.with_tool(
|
82
|
+
chat.with_tool(Weather).ask "What's the weather in Berlin? (52.5200, 13.4050)"
|
63
83
|
```
|
64
84
|
|
65
85
|
## Installation
|
@@ -82,7 +102,7 @@ RubyLLM.configure do |config|
|
|
82
102
|
config.openai_api_key = ENV['OPENAI_API_KEY']
|
83
103
|
config.anthropic_api_key = ENV['ANTHROPIC_API_KEY']
|
84
104
|
config.gemini_api_key = ENV['GEMINI_API_KEY']
|
85
|
-
config.deepseek_api_key = ENV['DEEPSEEK_API_KEY']
|
105
|
+
config.deepseek_api_key = ENV['DEEPSEEK_API_KEY']
|
86
106
|
end
|
87
107
|
```
|
88
108
|
|
@@ -173,6 +193,18 @@ chat.with_tool(Search).ask "Find documents about Ruby 3.3 features"
|
|
173
193
|
|
174
194
|
Check out the guides at https://rubyllm.com for deeper dives into conversations with tools, streaming responses, embedding generations, and more.
|
175
195
|
|
196
|
+
## Contributing
|
197
|
+
|
198
|
+
We welcome contributions to RubyLLM!
|
199
|
+
|
200
|
+
See [CONTRIBUTING.md](CONTRIBUTING.md) for detailed instructions on how to:
|
201
|
+
- Run the test suite
|
202
|
+
- Add new features
|
203
|
+
- Update documentation
|
204
|
+
- Re-record VCR cassettes when needed
|
205
|
+
|
206
|
+
We appreciate your help making RubyLLM better!
|
207
|
+
|
176
208
|
## License
|
177
209
|
|
178
|
-
Released under the MIT License.
|
210
|
+
Released under the MIT License.
|
data/lib/ruby_llm/chat.rb
CHANGED