ruby_llm 1.6.0 → 1.6.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +8 -34
- data/lib/ruby_llm/active_record/acts_as.rb +19 -5
- data/lib/ruby_llm/aliases.json +0 -4
- data/lib/ruby_llm/chat.rb +4 -11
- data/lib/ruby_llm/configuration.rb +0 -2
- data/lib/ruby_llm/content.rb +1 -1
- data/lib/ruby_llm/error.rb +0 -2
- data/lib/ruby_llm/models.json +369 -708
- data/lib/ruby_llm/models.rb +1 -5
- data/lib/ruby_llm/providers/bedrock/streaming/base.rb +1 -1
- data/lib/ruby_llm/providers/gemini/chat.rb +7 -1
- data/lib/ruby_llm/providers/gemini/streaming.rb +4 -1
- data/lib/ruby_llm/providers/openai/capabilities.rb +3 -3
- data/lib/ruby_llm/providers/openai/chat.rb +1 -4
- data/lib/ruby_llm/stream_accumulator.rb +2 -2
- data/lib/ruby_llm/streaming.rb +1 -1
- data/lib/ruby_llm/version.rb +1 -1
- data/lib/tasks/aliases.rake +2 -2
- data/lib/tasks/models_docs.rake +4 -4
- metadata +9 -8
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: 1b210b6577ab40b05d222354a9cb4c9dd115d7237e4b88be3cd63ee42aa2e122
|
4
|
+
data.tar.gz: 58d0df110e469fef90f5c5591bed4dd3a2d4369afa982217793c11fecb00e320
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 3979e1be85c20e8961eb54bcc1d3cf0e8f49dc10fbb5bbbf57e292a098b4c09b053101473c59aef2186c5583276ea5c909b461ba34ac4f8c8eb0f0a5e69cc76d
|
7
|
+
data.tar.gz: 452f86034599f6d8b2d200c3357137f0ec93087b410c4eef5e5393976414b8a986871fa532c6149fb69b081d66f1a823891ab27bd186fb9a31a1ca232707a13d
|
data/README.md
CHANGED
@@ -1,44 +1,18 @@
|
|
1
|
-
<
|
2
|
-
|
3
|
-
|
4
|
-
|
5
|
-
|
6
|
-
|
7
|
-
|
8
|
-
<img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/bedrock-color.svg" alt="Bedrock" class="logo-medium">
|
9
|
-
<img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/bedrock-text.svg" alt="Bedrock" class="logo-small">
|
10
|
-
|
11
|
-
<img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/deepseek-color.svg" alt="DeepSeek" class="logo-medium">
|
12
|
-
<img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/deepseek-text.svg" alt="DeepSeek" class="logo-small">
|
13
|
-
|
14
|
-
<img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/gemini-brand-color.svg" alt="Gemini" class="logo-large">
|
15
|
-
<br>
|
16
|
-
<img src="https://raw.githubusercontent.com/gpustack/gpustack/main/docs/assets/gpustack-logo.png" alt="GPUStack" class="logo-medium" height="16">
|
17
|
-
|
18
|
-
<img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/mistral-color.svg" alt="Mistral" class="logo-medium">
|
19
|
-
<img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/mistral-text.svg" alt="Mistral" class="logo-small">
|
20
|
-
|
21
|
-
<img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/ollama.svg" alt="Ollama" class="logo-medium">
|
22
|
-
<img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/ollama-text.svg" alt="Ollama" class="logo-medium">
|
23
|
-
|
24
|
-
<img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openai.svg" alt="OpenAI" class="logo-medium">
|
25
|
-
<img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openai-text.svg" alt="OpenAI" class="logo-medium">
|
26
|
-
|
27
|
-
<img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openrouter.svg" alt="OpenRouter" class="logo-medium">
|
28
|
-
<img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/openrouter-text.svg" alt="OpenRouter" class="logo-small">
|
29
|
-
|
30
|
-
<img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/perplexity-color.svg" alt="Perplexity" class="logo-medium">
|
31
|
-
<img src="https://registry.npmmirror.com/@lobehub/icons-static-svg/latest/files/icons/perplexity-text.svg" alt="Perplexity" class="logo-small">
|
32
|
-
</div>
|
1
|
+
<picture>
|
2
|
+
<source media="(prefers-color-scheme: dark)" srcset="/docs/assets/images/logotype_dark.svg">
|
3
|
+
<img src="/docs/assets/images/logotype.svg" alt="RubyLLM" height="120" width="250">
|
4
|
+
</picture>
|
5
|
+
|
6
|
+
**One *beautiful* Ruby API for GPT, Claude, Gemini, and more.** Easily build chatbots, AI agents, RAG applications, and content generators. Features chat (text, images, audio, PDFs), image generation, embeddings, tools (function calling), structured output, Rails integration, and streaming. Works with OpenAI, Anthropic, Google Gemini, AWS Bedrock, DeepSeek, Mistral, Ollama (local models), OpenRouter, Perplexity, GPUStack, and any OpenAI-compatible API.
|
33
7
|
|
34
8
|
<div class="badge-container">
|
35
|
-
<a href="https://badge.fury.io/rb/ruby_llm"><img src="https://badge.fury.io/rb/ruby_llm.svg?a=
|
9
|
+
<a href="https://badge.fury.io/rb/ruby_llm"><img src="https://badge.fury.io/rb/ruby_llm.svg?a=4" alt="Gem Version" /></a>
|
36
10
|
<a href="https://github.com/testdouble/standard"><img src="https://img.shields.io/badge/code_style-standard-brightgreen.svg" alt="Ruby Style Guide" /></a>
|
37
11
|
<a href="https://rubygems.org/gems/ruby_llm"><img alt="Gem Downloads" src="https://img.shields.io/gem/dt/ruby_llm"></a>
|
38
12
|
<a href="https://codecov.io/gh/crmne/ruby_llm"><img src="https://codecov.io/gh/crmne/ruby_llm/branch/main/graph/badge.svg" alt="codecov" /></a>
|
39
13
|
</div>
|
40
14
|
|
41
|
-
|
15
|
+
Battle tested at [<picture><source media="(prefers-color-scheme: dark)" srcset="https://chatwithwork.com/logotype-dark.svg"><img src="https://chatwithwork.com/logotype.svg" alt="Chat with Work" height="30" align="absmiddle"></picture>](https://chatwithwork.com) — *Claude Code for your documents*
|
42
16
|
|
43
17
|
## The problem with AI libraries
|
44
18
|
|
@@ -198,15 +198,29 @@ module RubyLLM
|
|
198
198
|
def complete(...)
|
199
199
|
to_llm.complete(...)
|
200
200
|
rescue RubyLLM::Error => e
|
201
|
-
if @message&.persisted? && @message.content.blank?
|
202
|
-
|
203
|
-
@message.destroy
|
204
|
-
end
|
201
|
+
cleanup_failed_messages if @message&.persisted? && @message.content.blank?
|
202
|
+
cleanup_orphaned_tool_results
|
205
203
|
raise e
|
206
204
|
end
|
207
205
|
|
208
206
|
private
|
209
207
|
|
208
|
+
def cleanup_failed_messages
|
209
|
+
RubyLLM.logger.debug "RubyLLM: API call failed, destroying message: #{@message.id}"
|
210
|
+
@message.destroy
|
211
|
+
end
|
212
|
+
|
213
|
+
def cleanup_orphaned_tool_results
|
214
|
+
loop do
|
215
|
+
messages.reload
|
216
|
+
last = messages.order(:id).last
|
217
|
+
|
218
|
+
break unless last&.tool_call? || last&.tool_result?
|
219
|
+
|
220
|
+
last.destroy
|
221
|
+
end
|
222
|
+
end
|
223
|
+
|
210
224
|
def setup_persistence_callbacks
|
211
225
|
# Only set up once per chat instance
|
212
226
|
return @chat if @chat.instance_variable_get(:@_persistence_callbacks_setup)
|
@@ -220,7 +234,7 @@ module RubyLLM
|
|
220
234
|
end
|
221
235
|
|
222
236
|
def persist_new_message
|
223
|
-
@message = messages.create!(role: :assistant, content:
|
237
|
+
@message = messages.create!(role: :assistant, content: '')
|
224
238
|
end
|
225
239
|
|
226
240
|
def persist_message_completion(message)
|
data/lib/ruby_llm/aliases.json
CHANGED
data/lib/ruby_llm/chat.rb
CHANGED
@@ -51,17 +51,14 @@ module RubyLLM
|
|
51
51
|
end
|
52
52
|
|
53
53
|
def with_tool(tool)
|
54
|
-
unless @model.supports_functions?
|
55
|
-
raise UnsupportedFunctionsError, "Model #{@model.id} doesn't support function calling"
|
56
|
-
end
|
57
|
-
|
58
54
|
tool_instance = tool.is_a?(Class) ? tool.new : tool
|
59
55
|
@tools[tool_instance.name.to_sym] = tool_instance
|
60
56
|
self
|
61
57
|
end
|
62
58
|
|
63
|
-
def with_tools(*tools)
|
64
|
-
tools.
|
59
|
+
def with_tools(*tools, replace: false)
|
60
|
+
@tools.clear if replace
|
61
|
+
tools.compact.each { |tool| with_tool tool }
|
65
62
|
self
|
66
63
|
end
|
67
64
|
|
@@ -93,11 +90,7 @@ module RubyLLM
|
|
93
90
|
self
|
94
91
|
end
|
95
92
|
|
96
|
-
def with_schema(schema
|
97
|
-
unless force || @model.structured_output?
|
98
|
-
raise UnsupportedStructuredOutputError, "Model #{@model.id} doesn't support structured output"
|
99
|
-
end
|
100
|
-
|
93
|
+
def with_schema(schema)
|
101
94
|
schema_instance = schema.is_a?(Class) ? schema.new : schema
|
102
95
|
|
103
96
|
# Accept both RubyLLM::Schema instances and plain JSON schemas
|
@@ -44,7 +44,6 @@ module RubyLLM
|
|
44
44
|
:logger,
|
45
45
|
:log_file,
|
46
46
|
:log_level,
|
47
|
-
:log_assume_model_exists,
|
48
47
|
:log_stream_debug
|
49
48
|
|
50
49
|
def initialize
|
@@ -64,7 +63,6 @@ module RubyLLM
|
|
64
63
|
# Logging configuration
|
65
64
|
@log_file = $stdout
|
66
65
|
@log_level = ENV['RUBYLLM_DEBUG'] ? Logger::DEBUG : Logger::INFO
|
67
|
-
@log_assume_model_exists = true
|
68
66
|
@log_stream_debug = ENV['RUBYLLM_STREAM_DEBUG'] == 'true'
|
69
67
|
end
|
70
68
|
|
data/lib/ruby_llm/content.rb
CHANGED
@@ -43,7 +43,7 @@ module RubyLLM
|
|
43
43
|
def process_attachments(attachments)
|
44
44
|
if attachments.is_a?(Hash)
|
45
45
|
# Ignores types (like :image, :audio, :text, :pdf) since we have robust MIME type detection
|
46
|
-
attachments.each_value(
|
46
|
+
attachments.each_value { |attachment| process_attachments_array_or_string(attachment) }
|
47
47
|
else
|
48
48
|
process_attachments_array_or_string attachments
|
49
49
|
end
|
data/lib/ruby_llm/error.rb
CHANGED
@@ -23,9 +23,7 @@ module RubyLLM
|
|
23
23
|
class ConfigurationError < StandardError; end
|
24
24
|
class InvalidRoleError < StandardError; end
|
25
25
|
class ModelNotFoundError < StandardError; end
|
26
|
-
class UnsupportedFunctionsError < StandardError; end
|
27
26
|
class UnsupportedAttachmentError < StandardError; end
|
28
|
-
class UnsupportedStructuredOutputError < StandardError; end
|
29
27
|
|
30
28
|
# Error classes for different HTTP status codes
|
31
29
|
class BadRequestError < Error; end
|