ruby_llm 0.1.0.pre31 → 0.1.0.pre34
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.github/workflows/cicd.yml +26 -2
- data/.rspec_status +35 -27
- data/README.md +59 -5
- data/lib/ruby_llm/configuration.rb +2 -0
- data/lib/ruby_llm/content.rb +4 -2
- data/lib/ruby_llm/image.rb +24 -0
- data/lib/ruby_llm/provider.rb +21 -5
- data/lib/ruby_llm/providers/openai/chat.rb +1 -1
- data/lib/ruby_llm/providers/openai/images.rb +38 -0
- data/lib/ruby_llm/providers/openai/media.rb +52 -0
- data/lib/ruby_llm/providers/openai.rb +4 -0
- data/lib/ruby_llm/version.rb +1 -1
- data/lib/ruby_llm.rb +4 -0
- data/ruby_llm.gemspec +7 -5
- metadata +40 -8
- /data/lib/ruby_llm/providers/deepseek/{capabilites.rb → capabilities.rb} +0 -0
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 77ec20e57439d352e965de0e435d0c43b3acd53de2d6035345f6ac6e716e7fef
+  data.tar.gz: 51d838bc1411303fd96c2f28d160ad99657f44846bd12ad2967ee329880e8922
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 9b0ca7a80113ed498125c9e23460879328926e6f3de1e6fe7e7a63453deb94fbdebbe7961d529e6cd50db7ad91776cb92ffeaed29f396aa523f931b12f835f7d
+  data.tar.gz: bcb30504074335dbdea39d171a3d810cfb14c7f5f9a3cc5b10e1a80970ce06f37775c7b1c5352a2e5fef0a14ea0d6603ae63197be6d2a866c7003b091036c842
data/.github/workflows/cicd.yml
CHANGED
@@ -37,6 +37,14 @@ jobs:
           DEEPSEEK_API_KEY: ${{ secrets.DEEPSEEK_API_KEY }}
         run: bundle exec rspec
 
+      - name: Upload coverage to Codecov
+        uses: codecov/codecov-action@v5
+        env:
+          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
+        with:
+          files: ./coverage/coverage.xml
+          fail_ci_if_error: false
+
   publish:
     name: Build + Publish
     needs: test
@@ -62,7 +70,15 @@ jobs:
           chmod 0600 $HOME/.gem/credentials
           printf -- "---\n:github: ${GEM_HOST_API_KEY}\n" > $HOME/.gem/credentials
           gem build *.gemspec
-          gem push --KEY github --host https://rubygems.pkg.github.com/${OWNER} *.gem
+          output=$(gem push --KEY github --host https://rubygems.pkg.github.com/${OWNER} *.gem 2>&1) || {
+            echo "$output"
+            if echo "$output" | grep -q "already been pushed"; then
+              echo "Version already exists, skipping"
+              exit 0
+            else
+              exit 1
+            fi
+          }
         env:
           GEM_HOST_API_KEY: "Bearer ${{secrets.GITHUB_TOKEN}}"
           OWNER: ${{ github.repository_owner }}
@@ -74,6 +90,14 @@ jobs:
          chmod 0600 $HOME/.gem/credentials
          printf -- "---\n:rubygems_api_key: ${GEM_HOST_API_KEY}\n" > $HOME/.gem/credentials
          gem build *.gemspec
-          gem push *.gem
+          output=$(gem push *.gem 2>&1) || {
+            echo "$output"
+            if echo "$output" | grep -q "Repushing of gem versions is not allowed"; then
+              echo "Version already exists, skipping"
+              exit 0
+            else
+              exit 1
+            fi
+          }
         env:
           GEM_HOST_API_KEY: "${{secrets.RUBYGEMS_AUTH_TOKEN}}"
data/.rspec_status
CHANGED
@@ -1,27 +1,35 @@
-example_id
-
-./spec/integration/chat_spec.rb[1:1:1:1]
-./spec/integration/chat_spec.rb[1:1:1:2]
-./spec/integration/chat_spec.rb[1:1:2:1]
-./spec/integration/chat_spec.rb[1:1:2:2]
-./spec/integration/chat_spec.rb[1:1:3:1]
-./spec/integration/chat_spec.rb[1:1:3:2]
-./spec/integration/
-./spec/integration/
-./spec/integration/
-./spec/integration/
-./spec/integration/
-./spec/integration/
-./spec/integration/
-./spec/integration/
-./spec/integration/
-./spec/integration/
-./spec/integration/
-./spec/integration/
-./spec/integration/
-./spec/integration/
-./spec/integration/
-./spec/integration/
-./spec/integration/
-./spec/integration/
-./spec/integration/
+example_id | status | run_time |
+-------------------------------------------------- | ------ | --------------- |
+./spec/integration/chat_spec.rb[1:1:1:1] | passed | 0.84333 seconds |
+./spec/integration/chat_spec.rb[1:1:1:2] | passed | 3.81 seconds |
+./spec/integration/chat_spec.rb[1:1:2:1] | passed | 0.54605 seconds |
+./spec/integration/chat_spec.rb[1:1:2:2] | passed | 1.25 seconds |
+./spec/integration/chat_spec.rb[1:1:3:1] | passed | 0.66439 seconds |
+./spec/integration/chat_spec.rb[1:1:3:2] | passed | 2.84 seconds |
+./spec/integration/content_spec.rb[1:1:1] | passed | 3.51 seconds |
+./spec/integration/content_spec.rb[1:1:2] | passed | 1.11 seconds |
+./spec/integration/content_spec.rb[1:1:3] | passed | 1.77 seconds |
+./spec/integration/content_spec.rb[1:2:1] | passed | 1.68 seconds |
+./spec/integration/content_spec.rb[1:2:2] | passed | 2.01 seconds |
+./spec/integration/embeddings_spec.rb[1:1:1:1] | passed | 0.28694 seconds |
+./spec/integration/embeddings_spec.rb[1:1:1:2] | passed | 0.32456 seconds |
+./spec/integration/embeddings_spec.rb[1:1:2:1] | passed | 0.85006 seconds |
+./spec/integration/embeddings_spec.rb[1:1:2:2] | passed | 0.82832 seconds |
+./spec/integration/error_handling_spec.rb[1:1] | passed | 0.19746 seconds |
+./spec/integration/image_generation_spec.rb[1:1:1] | passed | 10.73 seconds |
+./spec/integration/image_generation_spec.rb[1:1:2] | passed | 16.95 seconds |
+./spec/integration/image_generation_spec.rb[1:1:3] | passed | 0.00024 seconds |
+./spec/integration/rails_spec.rb[1:1] | passed | 3.39 seconds |
+./spec/integration/rails_spec.rb[1:2] | passed | 1.74 seconds |
+./spec/integration/streaming_spec.rb[1:1:1:1] | passed | 0.56418 seconds |
+./spec/integration/streaming_spec.rb[1:1:1:2] | passed | 6.33 seconds |
+./spec/integration/streaming_spec.rb[1:1:2:1] | passed | 0.51911 seconds |
+./spec/integration/streaming_spec.rb[1:1:2:2] | passed | 2.31 seconds |
+./spec/integration/streaming_spec.rb[1:1:3:1] | passed | 0.78299 seconds |
+./spec/integration/streaming_spec.rb[1:1:3:2] | passed | 3.82 seconds |
+./spec/integration/tools_spec.rb[1:1:1:1] | passed | 3.89 seconds |
+./spec/integration/tools_spec.rb[1:1:1:2] | passed | 7.78 seconds |
+./spec/integration/tools_spec.rb[1:1:2:1] | passed | 1.25 seconds |
+./spec/integration/tools_spec.rb[1:1:2:2] | passed | 2.1 seconds |
+./spec/integration/tools_spec.rb[1:1:3:1] | passed | 1.59 seconds |
+./spec/integration/tools_spec.rb[1:1:3:2] | passed | 3.05 seconds |
data/README.md
CHANGED
@@ -1,19 +1,37 @@
 # RubyLLM
 
-A delightful Ruby way to work with AI
+A delightful Ruby way to work with AI. Chat in text, analyze and generate images, understand audio, and use tools through a unified interface to OpenAI, Anthropic, Google, and DeepSeek. Built for developer happiness with automatic token counting, proper streaming, and Rails integration. No wrapping your head around multiple APIs - just clean Ruby code that works.
 
 <p align="center">
   <img src="https://upload.wikimedia.org/wikipedia/commons/4/4d/OpenAI_Logo.svg" alt="OpenAI" height="40" width="120">
 
   <img src="https://upload.wikimedia.org/wikipedia/commons/7/78/Anthropic_logo.svg" alt="Anthropic" height="40" width="120">
 
-  <img src="https://upload.wikimedia.org/wikipedia/commons/8/8a/Google_Gemini_logo.svg" alt="Google" height="40" width="120">
+  <img src="https://upload.wikimedia.org/wikipedia/commons/8/8a/Google_Gemini_logo.svg" alt="Google" height="40" width="120">
 
-  <img src="https://upload.wikimedia.org/wikipedia/commons/e/ec/DeepSeek_logo.svg" alt="DeepSeek" height="40" width="120"
+  <img src="https://upload.wikimedia.org/wikipedia/commons/e/ec/DeepSeek_logo.svg" alt="DeepSeek" height="40" width="120">
 </p>
 
-
-
+<p align="center">
+  <a href="https://badge.fury.io/rb/ruby_llm"><img src="https://badge.fury.io/rb/ruby_llm.svg" alt="Gem Version" /></a>
+  <a href="https://github.com/testdouble/standard"><img src="https://img.shields.io/badge/code_style-standard-brightgreen.svg" alt="Ruby Style Guide" /></a>
+  <a href="https://rubygems.org/gems/ruby_llm"><img alt="Gem Total Downloads" src="https://img.shields.io/gem/dt/ruby_llm"></a>
+  <a href="https://github.com/crmne/ruby_llm/actions/workflows/cicd.yml"><img src="https://github.com/crmne/ruby_llm/actions/workflows/cicd.yml/badge.svg" alt="CI" /></a>
+  <a href="https://codecov.io/gh/crmne/ruby_llm"><img src="https://codecov.io/gh/crmne/ruby_llm/branch/main/graph/badge.svg" alt="codecov" /></a>
+</p>
+
+## Features
+
+- 💬 **Beautiful Chat Interface** - Converse with AI models as easily as `RubyLLM.chat.ask "teach me Ruby"`
+- 🎵 **Audio Analysis** - Get audio transcription and understanding with `chat.ask "what's said here?", with: { audio: "clip.wav" }`
+- 👁️ **Vision Understanding** - Let AIs analyze images with a simple `chat.ask "what's in this?", with: { image: "photo.jpg" }`
+- 🌊 **Streaming** - Real-time responses with proper Ruby streaming with `chat.ask "hello" do |chunk| puts chunk.content end`
+- 🚂 **Rails Integration** - Persist chats and messages with ActiveRecord with `acts_as_{chat|message|tool_call}`
+- 🛠️ **Tool Support** - Give AIs access to your Ruby code with `chat.with_tool(Calculator).ask "what's 2+2?"`
+- 🎨 **Paint with AI** - Create images as easily as `RubyLLM.paint "a sunset over mountains"`
+- 📊 **Embeddings** - Generate vector embeddings for your text with `RubyLLM.embed "hello"`
+- 🔄 **Multi-Provider Support** - Works with OpenAI, Anthropic, Google, and DeepSeek
+- 🎯 **Token Tracking** - Automatic usage tracking across providers
 
 ## Installation
 
@@ -87,11 +105,47 @@ chat.ask "Tell me a story about a Ruby programmer" do |chunk|
   print chunk.content
 end
 
+# Ask about images
+chat.ask "What do you see in this image?", with: { image: "ruby_logo.png" }
+
+# Get analysis of audio content
+chat.ask "What's being said in this recording?", with: { audio: "meeting.wav" }
+
+# Combine multiple pieces of content
+chat.ask "Compare these diagrams", with: { image: ["diagram1.png", "diagram2.png"] }
+
 # Check token usage
 last_message = chat.messages.last
 puts "Conversation used #{last_message.input_tokens} input tokens and #{last_message.output_tokens} output tokens"
 ```
 
+You can provide content as local files or URLs - RubyLLM handles the rest. Vision and audio capabilities are available with compatible models. The API stays clean and consistent whether you're working with text, images, or audio.
+
+## Image Generation
+
+Want to create AI-generated images? RubyLLM makes it super simple:
+
+```ruby
+# Paint a picture!
+image = RubyLLM.paint "a starry night over San Francisco in Van Gogh's style"
+image.url # => "https://..."
+image.revised_prompt # Shows how DALL-E interpreted your prompt
+
+# Choose size and model
+image = RubyLLM.paint(
+  "a cyberpunk cityscape at sunset",
+  model: "dall-e-3",
+  size: "1792x1024"
+)
+
+# Set your default model
+RubyLLM.configure do |config|
+  config.default_image_model = "dall-e-3"
+end
+```
+
+RubyLLM automatically handles all the complexities of the DALL-E API, token/credit management, and error handling, so you can focus on being creative.
+
 ## Text Embeddings
 
 Need vector embeddings for your text? RubyLLM makes it simple:
data/lib/ruby_llm/configuration.rb
CHANGED
@@ -16,6 +16,7 @@ module RubyLLM
       :deepseek_api_key,
       :default_model,
       :default_embedding_model,
+      :default_image_model,
       :request_timeout,
       :max_retries
 
@@ -24,6 +25,7 @@ module RubyLLM
       @max_retries = 3
       @default_model = 'gpt-4o-mini'
       @default_embedding_model = 'text-embedding-3-small'
+      @default_image_model = 'dall-e-3'
    end
  end
 end
data/lib/ruby_llm/content.rb
CHANGED
@@ -1,7 +1,9 @@
 # frozen_string_literal: true
 
 module RubyLLM
-  # Represents the content received from
+  # Represents the content sent to or received from an LLM.
+  # Stores data in a standard internal format, letting providers
+  # handle their own formatting needs.
   class Content
     def initialize(text = nil, attachments = {})
       @parts = []
@@ -33,7 +35,7 @@ module RubyLLM
     def attach_image(source) # rubocop:disable Metrics/MethodLength
       source = File.expand_path(source) unless source.start_with?('http')
 
-      return { type: '
+      return { type: 'image', source: { url: source } } if source.start_with?('http')
 
       data = Base64.strict_encode64(File.read(source))
       mime_type = mime_type_for(source)
data/lib/ruby_llm/image.rb
ADDED
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  # Represents a generated image from an AI model.
+  # Provides an interface to image generation capabilities
+  # from providers like DALL-E.
+  class Image
+    attr_reader :url, :revised_prompt, :model_id
+
+    def initialize(url:, revised_prompt: nil, model_id: nil)
+      @url = url
+      @revised_prompt = revised_prompt
+      @model_id = model_id
+    end
+
+    def self.paint(prompt, model: nil, size: '1024x1024')
+      model_id = model || RubyLLM.config.default_image_model
+      Models.find(model_id) # Validate model exists
+
+      provider = Provider.for(model_id)
+      provider.paint(prompt, model: model_id, size: size)
+    end
+  end
+end
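For orientation, a hedged usage sketch of the class added above; the prompt and model name are illustrative, and `Image.paint` simply validates the model, then hands off to the provider's `paint` implementation:

```ruby
require 'ruby_llm'

# Illustration only: exercise the new RubyLLM::Image.paint entry point.
# The prompt is made up; "dall-e-3" matches the new default_image_model.
image = RubyLLM::Image.paint(
  "a watercolor of the Golden Gate Bridge",
  model: "dall-e-3",   # omit to fall back to RubyLLM.config.default_image_model
  size: "1024x1024"    # default declared in the method signature
)

image.url            # URL of the generated image
image.revised_prompt # how the provider rewrote the prompt, if it did
image.model_id       # model reported back by the provider
```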
data/lib/ruby_llm/provider.rb
CHANGED
@@ -7,7 +7,7 @@ module RubyLLM
   module Provider
     # Common functionality for all LLM providers. Implements the core provider
     # interface so specific providers only need to implement a few key methods.
-    module Methods
+    module Methods # rubocop:disable Metrics/ModuleLength
      def complete(messages, tools:, temperature:, model:, &block)
        payload = render_payload messages, tools: tools, temperature: temperature, model: model, stream: block_given?
 
@@ -32,6 +32,13 @@ module RubyLLM
        parse_embedding_response response
      end
 
+      def paint(prompt, model:, size:)
+        payload = render_image_payload(prompt, model:, size:)
+
+        response = post(images_url, payload)
+        parse_image_response(response)
+      end
+
      private
 
      def sync_response(payload)
@@ -59,11 +66,21 @@ module RubyLLM
        end
      end
 
-      def connection # rubocop:disable Metrics/MethodLength
-        @connection ||= Faraday.new(api_base) do |f|
+      def connection # rubocop:disable Metrics/MethodLength,Metrics/AbcSize
+        @connection ||= Faraday.new(api_base) do |f| # rubocop:disable Metrics/BlockLength
          f.options.timeout = RubyLLM.config.request_timeout
 
-
+          f.response :logger,
+                     RubyLLM.logger,
+                     bodies: true,
+                     response: true,
+                     errors: true,
+                     headers: false,
+                     log_level: :debug do |logger|
+            logger.filter(%r{"[A-Za-z0-9+/=]{100,}"}, 'data":"[BASE64 DATA]"')
+            logger.filter(/[-\d.e,\s]{100,}/, '[EMBEDDINGS ARRAY]')
+          end
+
          f.request :retry, {
            max: RubyLLM.config.max_retries,
            interval: 0.05,
@@ -86,7 +103,6 @@ module RubyLLM
          f.response :json
          f.adapter Faraday.default_adapter
          f.use :llm_errors, provider: self
-          f.response :logger, RubyLLM.logger, { headers: false, bodies: true, errors: true, log_level: :debug }
        end
      end
 
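The two `logger.filter` calls above are the substantive part of the logging change: debug logging now runs before retries and redacts large payloads. A standalone sketch (not part of the gem) that applies the same regexes with `String#gsub` to made-up request bodies shows what gets masked:

```ruby
# Illustration only: the sample bodies below are fabricated, but the two
# regexes and replacement strings are copied verbatim from the filters above.
base64_body    = %({"data":"#{'A' * 120}"})
embedding_body = "[#{Array.new(60) { '0.123' }.join(', ')}]"

# Quoted runs of 100+ base64-looking characters are masked...
puts base64_body.gsub(%r{"[A-Za-z0-9+/=]{100,}"}, 'data":"[BASE64 DATA]"')

# ...and long runs of digits, dots, commas, and whitespace (embedding vectors)
# are collapsed to a placeholder.
puts embedding_body.gsub(/[-\d.e,\s]{100,}/, '[EMBEDDINGS ARRAY]')
```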
data/lib/ruby_llm/providers/openai/images.rb
ADDED
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  module Providers
+    module OpenAI
+      # Image generation methods for the OpenAI API integration
+      module Images
+        module_function
+
+        def images_url
+          'images/generations'
+        end
+
+        def render_image_payload(prompt, model:, size:)
+          {
+            model: model,
+            prompt: prompt,
+            n: 1,
+            size: size
+          }
+        end
+
+        private
+
+        def parse_image_response(response)
+          data = response.body
+          image_data = data['data'].first
+
+          Image.new(
+            url: image_data['url'],
+            revised_prompt: image_data['revised_prompt'],
+            model_id: data['model']
+          )
+        end
+      end
+    end
+  end
+end
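A hedged sketch of the request body this module now builds for OpenAI's `images/generations` endpoint; the prompt is a placeholder, and `render_image_payload` is a module function, so it can be called directly:

```ruby
require 'ruby_llm'

# Illustration of the payload the OpenAI provider sends for image generation.
RubyLLM::Providers::OpenAI::Images.render_image_payload(
  "a red bicycle leaning against a brick wall",
  model: "dall-e-3",
  size: "1024x1024"
)
# => { model: "dall-e-3", prompt: "a red bicycle leaning against a brick wall", n: 1, size: "1024x1024" }
```

On the way back, `parse_image_response` takes the first entry of the API's `data` array and wraps it in the `RubyLLM::Image` added earlier.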
data/lib/ruby_llm/providers/openai/media.rb
ADDED
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  module Providers
+    module OpenAI
+      # Handles formatting of media content (images, audio) for OpenAI APIs
+      module Media
+        module_function
+
+        def format_content(content) # rubocop:disable Metrics/MethodLength
+          return content unless content.is_a?(Array)
+
+          content.map do |part|
+            case part[:type]
+            when 'image'
+              format_image(part)
+            when 'input_audio'
+              format_audio(part)
+            else
+              part
+            end
+          end
+        end
+
+        def format_image(part)
+          {
+            type: 'image_url',
+            image_url: {
+              url: format_data_url(part[:source]),
+              detail: 'auto'
+            }
+          }
+        end
+
+        def format_audio(part)
+          {
+            type: 'input_audio',
+            input_audio: part[:input_audio]
+          }
+        end
+
+        def format_data_url(source)
+          if source[:type] == 'base64'
+            "data:#{source[:media_type]};base64,#{source[:data]}"
+          else
+            source[:url]
+          end
+        end
+      end
+    end
+  end
+end
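A hedged example of the translation this module performs: internal image parts (the shape `Content#attach_image` returns and `format_data_url` expects) become the structures the OpenAI chat API accepts. The URL and base64 string below are placeholders.

```ruby
require 'ruby_llm'

# Placeholder parts in the gem's internal shape: a URL-sourced image and a
# base64-sourced one (the data string here is fake).
parts = [
  { type: 'image', source: { url: 'https://example.com/photo.jpg' } },
  { type: 'image', source: { type: 'base64', media_type: 'image/png', data: 'iVBORw0KGgo=' } }
]

RubyLLM::Providers::OpenAI::Media.format_content(parts)
# => [
#   { type: 'image_url', image_url: { url: 'https://example.com/photo.jpg', detail: 'auto' } },
#   { type: 'image_url', image_url: { url: 'data:image/png;base64,iVBORw0KGgo=', detail: 'auto' } }
# ]
```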
data/lib/ruby_llm/providers/openai.rb
CHANGED
@@ -12,6 +12,8 @@ module RubyLLM
       extend OpenAI::Models
       extend OpenAI::Streaming
       extend OpenAI::Tools
+      extend OpenAI::Images
+      extend OpenAI::Media
 
       def self.extended(base)
        base.extend(Provider)
@@ -20,6 +22,8 @@ module RubyLLM
        base.extend(OpenAI::Models)
        base.extend(OpenAI::Streaming)
        base.extend(OpenAI::Tools)
+        base.extend(OpenAI::Images)
+        base.extend(OpenAI::Media)
      end
 
      module_function
data/lib/ruby_llm/version.rb
CHANGED
data/lib/ruby_llm.rb
CHANGED
data/ruby_llm.gemspec
CHANGED
@@ -8,11 +8,11 @@ Gem::Specification.new do |spec|
   spec.authors = ['Carmine Paolino']
   spec.email = ['carmine@paolino.me']
 
-  spec.summary = '
-  spec.description = 'A delightful Ruby way to work with AI
-  '
-  '
-  ' - just clean Ruby code that works.'
+  spec.summary = 'Beautiful Ruby interface to modern AI'
+  spec.description = 'A delightful Ruby way to work with AI. Chat in text, analyze and generate images, understand' \
+                     ' audio, and use tools through a unified interface to OpenAI, Anthropic, Google, and DeepSeek.' \
+                     ' Built for developer happiness with automatic token counting, proper streaming, and Rails' \
+                     ' integration. No wrapping your head around multiple APIs - just clean Ruby code that works.'
   spec.homepage = 'https://github.com/crmne/ruby_llm'
   spec.license = 'MIT'
   spec.required_ruby_version = Gem::Requirement.new('>= 3.1.0')
@@ -45,6 +45,7 @@ Gem::Specification.new do |spec|
 
   # Development dependencies
   spec.add_development_dependency 'bundler', '>= 2.0'
+  spec.add_development_dependency 'codecov'
   spec.add_development_dependency 'dotenv'
   spec.add_development_dependency 'irb'
   spec.add_development_dependency 'nokogiri'
@@ -57,6 +58,7 @@ Gem::Specification.new do |spec|
   spec.add_development_dependency 'rubocop', '>= 1.0'
   spec.add_development_dependency 'rubocop-rake', '>= 0.6'
   spec.add_development_dependency 'simplecov', '>= 0.21'
+  spec.add_development_dependency 'simplecov-cobertura'
   spec.add_development_dependency 'sqlite3'
   spec.add_development_dependency 'webmock', '~> 3.18'
   spec.add_development_dependency 'yard', '>= 0.9'
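The new `codecov` and `simplecov-cobertura` development dependencies pair with the Codecov upload step added to the workflow earlier, which expects `coverage/coverage.xml`. The spec helper wiring is not part of this diff; a typical setup under these dependencies might look like the following, purely as an illustration:

```ruby
# Hypothetical spec/spec_helper.rb excerpt -- not shown in this diff.
# simplecov-cobertura writes coverage/coverage.xml, the file the
# codecov/codecov-action step in cicd.yml uploads.
require 'simplecov'
require 'simplecov-cobertura'

SimpleCov.formatter = SimpleCov::Formatter::CoberturaFormatter
SimpleCov.start do
  add_filter '/spec/'
end
```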
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: ruby_llm
 version: !ruby/object:Gem::Version
-  version: 0.1.0.
+  version: 0.1.0.pre34
 platform: ruby
 authors:
 - Carmine Paolino
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2025-02-
+date: 2025-02-21 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: event_stream_parser
@@ -140,6 +140,20 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '2.0'
+- !ruby/object:Gem::Dependency
+  name: codecov
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 - !ruby/object:Gem::Dependency
   name: dotenv
   requirement: !ruby/object:Gem::Requirement
@@ -308,6 +322,20 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0.21'
+- !ruby/object:Gem::Dependency
+  name: simplecov-cobertura
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 - !ruby/object:Gem::Dependency
   name: sqlite3
   requirement: !ruby/object:Gem::Requirement
@@ -350,10 +378,11 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0.9'
-description: A delightful Ruby way to work with AI
-
-
-
+description: A delightful Ruby way to work with AI. Chat in text, analyze and generate
+  images, understand audio, and use tools through a unified interface to OpenAI, Anthropic,
+  Google, and DeepSeek. Built for developer happiness with automatic token counting,
+  proper streaming, and Rails integration. No wrapping your head around multiple APIs
+  - just clean Ruby code that works.
 email:
 - carmine@paolino.me
 executables: []
@@ -380,6 +409,7 @@ files:
 - lib/ruby_llm/content.rb
 - lib/ruby_llm/embedding.rb
 - lib/ruby_llm/error.rb
+- lib/ruby_llm/image.rb
 - lib/ruby_llm/message.rb
 - lib/ruby_llm/model_info.rb
 - lib/ruby_llm/models.json
@@ -393,7 +423,7 @@ files:
 - lib/ruby_llm/providers/anthropic/streaming.rb
 - lib/ruby_llm/providers/anthropic/tools.rb
 - lib/ruby_llm/providers/deepseek.rb
-- lib/ruby_llm/providers/deepseek/
+- lib/ruby_llm/providers/deepseek/capabilities.rb
 - lib/ruby_llm/providers/gemini.rb
 - lib/ruby_llm/providers/gemini/capabilities.rb
 - lib/ruby_llm/providers/gemini/models.rb
@@ -401,6 +431,8 @@ files:
 - lib/ruby_llm/providers/openai/capabilities.rb
 - lib/ruby_llm/providers/openai/chat.rb
 - lib/ruby_llm/providers/openai/embeddings.rb
+- lib/ruby_llm/providers/openai/images.rb
+- lib/ruby_llm/providers/openai/media.rb
 - lib/ruby_llm/providers/openai/models.rb
 - lib/ruby_llm/providers/openai/streaming.rb
 - lib/ruby_llm/providers/openai/tools.rb
@@ -438,5 +470,5 @@ requirements: []
 rubygems_version: 3.5.22
 signing_key:
 specification_version: 4
-summary:
+summary: Beautiful Ruby interface to modern AI
 test_files: []
data/lib/ruby_llm/providers/deepseek/{capabilites.rb → capabilities.rb}
RENAMED
File without changes