ruby_llm 0.1.0.pre30 → 0.1.0.pre33
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.github/workflows/{gem-push.yml → cicd.yml} +32 -4
- data/.rspec_status +38 -0
- data/README.md +52 -3
- data/lib/ruby_llm/active_record/acts_as.rb +5 -5
- data/lib/ruby_llm/chat.rb +2 -2
- data/lib/ruby_llm/configuration.rb +5 -1
- data/lib/ruby_llm/content.rb +81 -0
- data/lib/ruby_llm/embedding.rb +9 -3
- data/lib/ruby_llm/image.rb +24 -0
- data/lib/ruby_llm/message.rb +9 -1
- data/lib/ruby_llm/models.json +14 -14
- data/lib/ruby_llm/provider.rb +57 -16
- data/lib/ruby_llm/providers/anthropic/capabilities.rb +81 -0
- data/lib/ruby_llm/providers/anthropic/chat.rb +86 -0
- data/lib/ruby_llm/providers/anthropic/embeddings.rb +20 -0
- data/lib/ruby_llm/providers/anthropic/models.rb +48 -0
- data/lib/ruby_llm/providers/anthropic/streaming.rb +37 -0
- data/lib/ruby_llm/providers/anthropic/tools.rb +97 -0
- data/lib/ruby_llm/providers/anthropic.rb +8 -234
- data/lib/ruby_llm/providers/deepseek/capabilites.rb +101 -0
- data/lib/ruby_llm/providers/deepseek.rb +4 -2
- data/lib/ruby_llm/providers/gemini/capabilities.rb +191 -0
- data/lib/ruby_llm/providers/gemini/models.rb +20 -0
- data/lib/ruby_llm/providers/gemini.rb +5 -10
- data/lib/ruby_llm/providers/openai/capabilities.rb +191 -0
- data/lib/ruby_llm/providers/openai/chat.rb +68 -0
- data/lib/ruby_llm/providers/openai/embeddings.rb +39 -0
- data/lib/ruby_llm/providers/openai/images.rb +38 -0
- data/lib/ruby_llm/providers/openai/media.rb +52 -0
- data/lib/ruby_llm/providers/openai/models.rb +40 -0
- data/lib/ruby_llm/providers/openai/streaming.rb +31 -0
- data/lib/ruby_llm/providers/openai/tools.rb +69 -0
- data/lib/ruby_llm/providers/openai.rb +22 -200
- data/lib/ruby_llm/version.rb +1 -1
- data/lib/ruby_llm.rb +8 -2
- data/ruby_llm.gemspec +7 -5
- metadata +57 -13
- data/.github/workflows/test.yml +0 -35
- data/lib/ruby_llm/model_capabilities/anthropic.rb +0 -79
- data/lib/ruby_llm/model_capabilities/deepseek.rb +0 -132
- data/lib/ruby_llm/model_capabilities/gemini.rb +0 -190
- data/lib/ruby_llm/model_capabilities/openai.rb +0 -189
data/lib/ruby_llm/providers/openai/media.rb
ADDED
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  module Providers
+    module OpenAI
+      # Handles formatting of media content (images, audio) for OpenAI APIs
+      module Media
+        module_function
+
+        def format_content(content) # rubocop:disable Metrics/MethodLength
+          return content unless content.is_a?(Array)
+
+          content.map do |part|
+            case part[:type]
+            when 'image'
+              format_image(part)
+            when 'input_audio'
+              format_audio(part)
+            else
+              part
+            end
+          end
+        end
+
+        def format_image(part)
+          {
+            type: 'image_url',
+            image_url: {
+              url: format_data_url(part[:source]),
+              detail: 'auto'
+            }
+          }
+        end
+
+        def format_audio(part)
+          {
+            type: 'input_audio',
+            input_audio: part[:input_audio]
+          }
+        end
+
+        def format_data_url(source)
+          if source[:type] == 'base64'
+            "data:#{source[:media_type]};base64,#{source[:data]}"
+          else
+            source[:url]
+          end
+        end
+      end
+    end
+  end
+end
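To illustrate what the new formatter does (the `image_part` hash below is an assumed example input, not taken from the gem's specs), a base64 image part becomes an OpenAI-style `image_url` entry:

    # Assumed input shape for an image part, matching what format_image expects
    image_part = {
      type: 'image',
      source: { type: 'base64', media_type: 'image/png', data: 'iVBORw0KGgo...' }
    }

    RubyLLM::Providers::OpenAI::Media.format_content([image_part])
    # => [{ type: 'image_url',
    #       image_url: { url: 'data:image/png;base64,iVBORw0KGgo...', detail: 'auto' } }]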
data/lib/ruby_llm/providers/openai/models.rb
ADDED
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  module Providers
+    module OpenAI
+      # Models methods of the OpenAI API integration
+      module Models
+        module_function
+
+        def models_url
+          'models'
+        end
+
+        def parse_list_models_response(response) # rubocop:disable Metrics/AbcSize,Metrics/MethodLength
+          (response.body['data'] || []).map do |model|
+            ModelInfo.new(
+              id: model['id'],
+              created_at: model['created'] ? Time.at(model['created']) : nil,
+              display_name: capabilities.format_display_name(model['id']),
+              provider: slug,
+              type: capabilities.model_type(model['id']),
+              family: capabilities.model_family(model['id']),
+              metadata: {
+                object: model['object'],
+                owned_by: model['owned_by']
+              },
+              context_window: capabilities.context_window_for(model['id']),
+              max_tokens: capabilities.max_tokens_for(model['id']),
+              supports_vision: capabilities.supports_vision?(model['id']),
+              supports_functions: capabilities.supports_functions?(model['id']),
+              supports_json_mode: capabilities.supports_json_mode?(model['id']),
+              input_price_per_million: capabilities.input_price_for(model['id']),
+              output_price_per_million: capabilities.output_price_for(model['id'])
+            )
+          end
+        end
+      end
+    end
+  end
+end
data/lib/ruby_llm/providers/openai/streaming.rb
ADDED
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  module Providers
+    module OpenAI
+      # Streaming methods of the OpenAI API integration
+      module Streaming
+        module_function
+
+        def stream_url
+          completion_url
+        end
+
+        def handle_stream(&block) # rubocop:disable Metrics/MethodLength
+          to_json_stream do |data|
+            block.call(
+              Chunk.new(
+                role: :assistant,
+                model_id: data['model'],
+                content: data.dig('choices', 0, 'delta', 'content'),
+                tool_calls: parse_tool_calls(data.dig('choices', 0, 'delta', 'tool_calls'), parse_arguments: false),
+                input_tokens: data.dig('usage', 'prompt_tokens'),
+                output_tokens: data.dig('usage', 'completion_tokens')
+              )
+            )
+          end
+        end
+      end
+    end
+  end
+end
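As a rough sketch of how this module consumes events (the `data` hash below is an assumed, abbreviated OpenAI-style streaming delta, not output captured from the gem), each decoded JSON event is wrapped in a Chunk:

    # Assumed shape of one decoded SSE event from chat/completions with stream: true
    data = {
      'model' => 'gpt-4o-mini',                                # assumed model id
      'choices' => [{ 'delta' => { 'content' => 'Hello' } }],
      'usage' => nil                                           # usage arrives only on the final event
    }
    # handle_stream would yield:
    # Chunk.new(role: :assistant, model_id: 'gpt-4o-mini', content: 'Hello',
    #           tool_calls: nil, input_tokens: nil, output_tokens: nil)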
data/lib/ruby_llm/providers/openai/tools.rb
ADDED
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  module Providers
+    module OpenAI
+      # Tools methods of the OpenAI API integration
+      module Tools
+        module_function
+
+        def tool_for(tool) # rubocop:disable Metrics/MethodLength
+          {
+            type: 'function',
+            function: {
+              name: tool.name,
+              description: tool.description,
+              parameters: {
+                type: 'object',
+                properties: tool.parameters.transform_values { |param| param_schema(param) },
+                required: tool.parameters.select { |_, p| p.required }.keys
+              }
+            }
+          }
+        end
+
+        def param_schema(param)
+          {
+            type: param.type,
+            description: param.description
+          }.compact
+        end
+
+        def format_tool_calls(tool_calls) # rubocop:disable Metrics/MethodLength
+          return nil unless tool_calls&.any?
+
+          tool_calls.map do |_, tc|
+            {
+              id: tc.id,
+              type: 'function',
+              function: {
+                name: tc.name,
+                arguments: JSON.generate(tc.arguments)
+              }
+            }
+          end
+        end
+
+        def parse_tool_calls(tool_calls, parse_arguments: true) # rubocop:disable Metrics/MethodLength
+          return nil unless tool_calls&.any?
+
+          tool_calls.to_h do |tc|
+            [
+              tc['id'],
+              ToolCall.new(
+                id: tc['id'],
+                name: tc.dig('function', 'name'),
+                arguments: if parse_arguments
+                             JSON.parse(tc.dig('function',
+                                               'arguments'))
+                           else
+                             tc.dig('function', 'arguments')
+                           end
+              )
+            ]
+          end
+        end
+      end
+    end
+  end
+end
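A small usage sketch for the extracted tools module (the `raw` array below is an assumed OpenAI-style tool-call payload; ToolCall is the gem's own value object):

    # Assumed wire format for a tool call as returned by the chat completions API
    raw = [{ 'id' => 'call_123',
             'function' => { 'name' => 'get_weather', 'arguments' => '{"city":"Berlin"}' } }]

    RubyLLM::Providers::OpenAI::Tools.parse_tool_calls(raw)
    # => { 'call_123' => #<ToolCall id: 'call_123', name: 'get_weather',
    #                              arguments: { 'city' => 'Berlin' }> }
    # format_tool_calls performs the reverse mapping for outgoing assistant messages,
    # serializing arguments back to a JSON string with JSON.generate.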
data/lib/ruby_llm/providers/openai.rb
CHANGED
@@ -5,17 +5,28 @@ module RubyLLM
     # OpenAI API integration. Handles chat completion, function calling,
     # and OpenAI's unique streaming format. Supports GPT-4, GPT-3.5,
     # and other OpenAI models.
-    [content of the eleven removed lines (original lines 8-18) is not captured in this view]
+    module OpenAI
+      extend Provider
+      extend OpenAI::Chat
+      extend OpenAI::Embeddings
+      extend OpenAI::Models
+      extend OpenAI::Streaming
+      extend OpenAI::Tools
+      extend OpenAI::Images
+      extend OpenAI::Media
+
+      def self.extended(base)
+        base.extend(Provider)
+        base.extend(OpenAI::Chat)
+        base.extend(OpenAI::Embeddings)
+        base.extend(OpenAI::Models)
+        base.extend(OpenAI::Streaming)
+        base.extend(OpenAI::Tools)
+        base.extend(OpenAI::Images)
+        base.extend(OpenAI::Media)
+      end
+
+      module_function
 
       def api_base
         'https://api.openai.com/v1'
@@ -26,195 +37,6 @@ module RubyLLM
           'Authorization' => "Bearer #{RubyLLM.config.openai_api_key}"
         }
       end
-
-      def completion_url
-        'chat/completions'
-      end
-
-      def stream_url
-        completion_url
-      end
-
-      def models_url
-        'models'
-      end
-
-      def embedding_url
-        'embeddings'
-      end
-
-      def build_payload(messages, tools:, temperature:, model:, stream: false) # rubocop:disable Metrics/MethodLength
-        {
-          model: model,
-          messages: format_messages(messages),
-          temperature: temperature,
-          stream: stream
-        }.tap do |payload|
-          if tools.any?
-            payload[:tools] = tools.map { |_, tool| tool_for(tool) }
-            payload[:tool_choice] = 'auto'
-          end
-          payload[:stream_options] = { include_usage: true } if stream
-        end
-      end
-
-      def format_messages(messages)
-        messages.map do |msg|
-          {
-            role: format_role(msg.role),
-            content: msg.content,
-            tool_calls: format_tool_calls(msg.tool_calls),
-            tool_call_id: msg.tool_call_id
-          }.compact
-        end
-      end
-
-      def format_role(role)
-        case role
-        when :system
-          'developer'
-        else
-          role.to_s
-        end
-      end
-
-      def build_embedding_payload(text, model:)
-        {
-          model: model,
-          input: text
-        }
-      end
-
-      def parse_embedding_response(response)
-        embeddings = response.body['data'].map { |d| d['embedding'] }
-        embeddings.size == 1 ? embeddings.first : embeddings
-      end
-
-      def format_tool_calls(tool_calls) # rubocop:disable Metrics/MethodLength
-        return nil unless tool_calls&.any?
-
-        tool_calls.map do |_, tc|
-          {
-            id: tc.id,
-            type: 'function',
-            function: {
-              name: tc.name,
-              arguments: JSON.generate(tc.arguments)
-            }
-          }
-        end
-      end
-
-      def tool_for(tool) # rubocop:disable Metrics/MethodLength
-        {
-          type: 'function',
-          function: {
-            name: tool.name,
-            description: tool.description,
-            parameters: {
-              type: 'object',
-              properties: tool.parameters.transform_values { |param| param_schema(param) },
-              required: tool.parameters.select { |_, p| p.required }.keys
-            }
-          }
-        }
-      end
-
-      def param_schema(param)
-        {
-          type: param.type,
-          description: param.description
-        }.compact
-      end
-
-      def parse_completion_response(response) # rubocop:disable Metrics/MethodLength
-        data = response.body
-        return if data.empty?
-
-        message_data = data.dig('choices', 0, 'message')
-        return unless message_data
-
-        Message.new(
-          role: :assistant,
-          content: message_data['content'],
-          tool_calls: parse_tool_calls(message_data['tool_calls']),
-          input_tokens: data['usage']['prompt_tokens'],
-          output_tokens: data['usage']['completion_tokens'],
-          model_id: data['model']
-        )
-      end
-
-      def parse_tool_calls(tool_calls, parse_arguments: true) # rubocop:disable Metrics/MethodLength
-        return nil unless tool_calls&.any?
-
-        tool_calls.to_h do |tc|
-          [
-            tc['id'],
-            ToolCall.new(
-              id: tc['id'],
-              name: tc.dig('function', 'name'),
-              arguments: parse_arguments ? JSON.parse(tc.dig('function', 'arguments')) : tc.dig('function', 'arguments')
-            )
-          ]
-        end
-      end
-
-      def parse_models_response(response) # rubocop:disable Metrics/MethodLength
-        (response.body['data'] || []).map do |model|
-          model_info = begin
-            Models.find(model['id'])
-          rescue StandardError
-            nil
-          end
-          next unless model_info
-
-          model_info.tap do |info|
-            info.metadata.merge!(
-              object: model['object'],
-              owned_by: model['owned_by']
-            )
-          end
-        end.compact
-      end
-
-      def handle_stream(&block) # rubocop:disable Metrics/MethodLength
-        to_json_stream do |data|
-          block.call(
-            Chunk.new(
-              role: :assistant,
-              model_id: data['model'],
-              content: data.dig('choices', 0, 'delta', 'content'),
-              tool_calls: parse_tool_calls(data.dig('choices', 0, 'delta', 'tool_calls'), parse_arguments: false),
-              input_tokens: data.dig('usage', 'prompt_tokens'),
-              output_tokens: data.dig('usage', 'completion_tokens')
-            )
-          )
-        end
-      end
-
-      def parse_list_models_response(response) # rubocop:disable Metrics/AbcSize,Metrics/MethodLength
-        (response.body['data'] || []).map do |model|
-          ModelInfo.new(
-            id: model['id'],
-            created_at: model['created'] ? Time.at(model['created']) : nil,
-            display_name: capabilities.format_display_name(model['id']),
-            provider: slug,
-            type: capabilities.model_type(model['id']),
-            family: capabilities.model_family(model['id']),
-            metadata: {
-              object: model['object'],
-              owned_by: model['owned_by']
-            },
-            context_window: capabilities.context_window_for(model['id']),
-            max_tokens: capabilities.max_tokens_for(model['id']),
-            supports_vision: capabilities.supports_vision?(model['id']),
-            supports_functions: capabilities.supports_functions?(model['id']),
-            supports_json_mode: capabilities.supports_json_mode?(model['id']),
-            input_price_per_million: capabilities.input_price_for(model['id']),
-            output_price_per_million: capabilities.output_price_for(model['id'])
-          )
-        end
-      end
     end
   end
 end
data/lib/ruby_llm/version.rb
CHANGED
data/lib/ruby_llm.rb
CHANGED
@@ -1,11 +1,13 @@
 # frozen_string_literal: true
 
-require '
+require 'base64'
+require 'event_stream_parser'
 require 'faraday'
+require 'faraday/retry'
 require 'json'
 require 'logger'
-require 'event_stream_parser'
 require 'securerandom'
+require 'zeitwerk'
 
 loader = Zeitwerk::Loader.for_gem
 loader.inflector.inflect(
@@ -32,6 +34,10 @@ module RubyLLM
     Embedding.embed(...)
   end
 
+  def paint(...)
+    Image.paint(...)
+  end
+
   def models
     Models
   end
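The new top-level `paint` helper simply forwards its arguments to `Image.paint`, which lives in the added lib/ruby_llm/image.rb (only listed, not shown, in this diff). A hypothetical call, assuming a prompt-string argument, might look like:

    # Hypothetical usage; the real Image.paint signature is not visible in this diff.
    RubyLLM.paint('a watercolor of the Bay of Naples')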
data/ruby_llm.gemspec
CHANGED
@@ -8,11 +8,11 @@ Gem::Specification.new do |spec|
   spec.authors = ['Carmine Paolino']
   spec.email = ['carmine@paolino.me']
 
-  spec.summary = '
-  spec.description = 'A delightful Ruby way to work with AI
-  '
-  '
-  ' - just clean Ruby code that works.'
+  spec.summary = 'Beautiful Ruby interface to modern AI'
+  spec.description = 'A delightful Ruby way to work with AI. Chat in text, analyze and generate images, understand' \
+                     ' audio, and use tools through a unified interface to OpenAI, Anthropic, Google, and DeepSeek.' \
+                     ' Built for developer happiness with automatic token counting, proper streaming, and Rails' \
+                     ' integration. No wrapping your head around multiple APIs - just clean Ruby code that works.'
   spec.homepage = 'https://github.com/crmne/ruby_llm'
   spec.license = 'MIT'
   spec.required_ruby_version = Gem::Requirement.new('>= 3.1.0')
@@ -36,6 +36,7 @@ Gem::Specification.new do |spec|
   spec.add_dependency 'event_stream_parser', '>= 0.3.0', '< 2.0.0'
   spec.add_dependency 'faraday', '>= 2.0'
   spec.add_dependency 'faraday-multipart', '>= 1.0'
+  spec.add_dependency 'faraday-retry', '>= 2.0'
   spec.add_dependency 'zeitwerk', '>= 2.6'
 
   # Rails integration dependencies
@@ -56,6 +57,7 @@ Gem::Specification.new do |spec|
   spec.add_development_dependency 'rubocop', '>= 1.0'
   spec.add_development_dependency 'rubocop-rake', '>= 0.6'
   spec.add_development_dependency 'simplecov', '>= 0.21'
+  spec.add_development_dependency 'sqlite3'
   spec.add_development_dependency 'webmock', '~> 3.18'
   spec.add_development_dependency 'yard', '>= 0.9'
 end
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: ruby_llm
 version: !ruby/object:Gem::Version
-  version: 0.1.0.
+  version: 0.1.0.pre33
 platform: ruby
 authors:
 - Carmine Paolino
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2025-02-
+date: 2025-02-20 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: event_stream_parser
@@ -58,6 +58,20 @@ dependencies:
   - - ">="
     - !ruby/object:Gem::Version
       version: '1.0'
+- !ruby/object:Gem::Dependency
+  name: faraday-retry
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '2.0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '2.0'
 - !ruby/object:Gem::Dependency
   name: zeitwerk
   requirement: !ruby/object:Gem::Requirement
@@ -294,6 +308,20 @@ dependencies:
   - - ">="
     - !ruby/object:Gem::Version
      version: '0.21'
+- !ruby/object:Gem::Dependency
+  name: sqlite3
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 - !ruby/object:Gem::Dependency
   name: webmock
   requirement: !ruby/object:Gem::Requirement
@@ -322,21 +350,22 @@ dependencies:
   - - ">="
     - !ruby/object:Gem::Version
       version: '0.9'
-description: A delightful Ruby way to work with AI
-
-
-
+description: A delightful Ruby way to work with AI. Chat in text, analyze and generate
+  images, understand audio, and use tools through a unified interface to OpenAI, Anthropic,
+  Google, and DeepSeek. Built for developer happiness with automatic token counting,
+  proper streaming, and Rails integration. No wrapping your head around multiple APIs
+  - just clean Ruby code that works.
 email:
 - carmine@paolino.me
 executables: []
 extensions: []
 extra_rdoc_files: []
 files:
-- ".github/workflows/
-- ".github/workflows/test.yml"
+- ".github/workflows/cicd.yml"
 - ".gitignore"
 - ".overcommit.yml"
 - ".rspec"
+- ".rspec_status"
 - ".rubocop.yml"
 - Gemfile
 - LICENSE
@@ -349,21 +378,36 @@ files:
 - lib/ruby_llm/chat.rb
 - lib/ruby_llm/chunk.rb
 - lib/ruby_llm/configuration.rb
+- lib/ruby_llm/content.rb
 - lib/ruby_llm/embedding.rb
 - lib/ruby_llm/error.rb
+- lib/ruby_llm/image.rb
 - lib/ruby_llm/message.rb
-- lib/ruby_llm/model_capabilities/anthropic.rb
-- lib/ruby_llm/model_capabilities/deepseek.rb
-- lib/ruby_llm/model_capabilities/gemini.rb
-- lib/ruby_llm/model_capabilities/openai.rb
 - lib/ruby_llm/model_info.rb
 - lib/ruby_llm/models.json
 - lib/ruby_llm/models.rb
 - lib/ruby_llm/provider.rb
 - lib/ruby_llm/providers/anthropic.rb
+- lib/ruby_llm/providers/anthropic/capabilities.rb
+- lib/ruby_llm/providers/anthropic/chat.rb
+- lib/ruby_llm/providers/anthropic/embeddings.rb
+- lib/ruby_llm/providers/anthropic/models.rb
+- lib/ruby_llm/providers/anthropic/streaming.rb
+- lib/ruby_llm/providers/anthropic/tools.rb
 - lib/ruby_llm/providers/deepseek.rb
+- lib/ruby_llm/providers/deepseek/capabilites.rb
 - lib/ruby_llm/providers/gemini.rb
+- lib/ruby_llm/providers/gemini/capabilities.rb
+- lib/ruby_llm/providers/gemini/models.rb
 - lib/ruby_llm/providers/openai.rb
+- lib/ruby_llm/providers/openai/capabilities.rb
+- lib/ruby_llm/providers/openai/chat.rb
+- lib/ruby_llm/providers/openai/embeddings.rb
+- lib/ruby_llm/providers/openai/images.rb
+- lib/ruby_llm/providers/openai/media.rb
+- lib/ruby_llm/providers/openai/models.rb
+- lib/ruby_llm/providers/openai/streaming.rb
+- lib/ruby_llm/providers/openai/tools.rb
 - lib/ruby_llm/railtie.rb
 - lib/ruby_llm/stream_accumulator.rb
 - lib/ruby_llm/tool.rb
@@ -398,5 +442,5 @@ requirements: []
 rubygems_version: 3.5.22
 signing_key:
 specification_version: 4
-summary:
+summary: Beautiful Ruby interface to modern AI
 test_files: []
data/.github/workflows/test.yml
DELETED
@@ -1,35 +0,0 @@
|
|
1
|
-
name: Test
|
2
|
-
|
3
|
-
|
4
|
-
|
5
|
-
on:
|
6
|
-
push:
|
7
|
-
branches: [ "main" ]
|
8
|
-
pull_request:
|
9
|
-
branches: [ "main" ]
|
10
|
-
workflow_call:
|
11
|
-
|
12
|
-
jobs:
|
13
|
-
test:
|
14
|
-
runs-on: ubuntu-latest
|
15
|
-
strategy:
|
16
|
-
matrix:
|
17
|
-
ruby-version: ['3.1', '3.2', '3.3']
|
18
|
-
|
19
|
-
steps:
|
20
|
-
- uses: actions/checkout@v4
|
21
|
-
|
22
|
-
- name: Set up Ruby
|
23
|
-
uses: ruby/setup-ruby@v1
|
24
|
-
with:
|
25
|
-
ruby-version: ${{ matrix.ruby-version }}
|
26
|
-
bundler-cache: true
|
27
|
-
|
28
|
-
- name: Install dependencies
|
29
|
-
run: bundle install
|
30
|
-
|
31
|
-
- name: Check code format
|
32
|
-
run: bundle exec rubocop
|
33
|
-
|
34
|
-
# - name: Run tests
|
35
|
-
# run: bundle exec rspec
|