ruby_llm_community 1.2.0 → 1.3.1
This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +13 -9
- data/lib/generators/ruby_llm/chat_ui/chat_ui_generator.rb +127 -67
- data/lib/generators/ruby_llm/chat_ui/templates/controllers/chats_controller.rb.tt +12 -12
- data/lib/generators/ruby_llm/chat_ui/templates/controllers/messages_controller.rb.tt +7 -7
- data/lib/generators/ruby_llm/chat_ui/templates/controllers/models_controller.rb.tt +4 -4
- data/lib/generators/ruby_llm/chat_ui/templates/jobs/chat_response_job.rb.tt +6 -6
- data/lib/generators/ruby_llm/chat_ui/templates/views/chats/_chat.html.erb.tt +4 -4
- data/lib/generators/ruby_llm/chat_ui/templates/views/chats/_form.html.erb.tt +5 -5
- data/lib/generators/ruby_llm/chat_ui/templates/views/chats/index.html.erb.tt +5 -5
- data/lib/generators/ruby_llm/chat_ui/templates/views/chats/new.html.erb.tt +4 -4
- data/lib/generators/ruby_llm/chat_ui/templates/views/chats/show.html.erb.tt +8 -8
- data/lib/generators/ruby_llm/chat_ui/templates/views/messages/_content.html.erb.tt +1 -0
- data/lib/generators/ruby_llm/chat_ui/templates/views/messages/_form.html.erb.tt +5 -5
- data/lib/generators/ruby_llm/chat_ui/templates/views/messages/_message.html.erb.tt +9 -6
- data/lib/generators/ruby_llm/chat_ui/templates/views/messages/_tool_calls.html.erb.tt +7 -0
- data/lib/generators/ruby_llm/chat_ui/templates/views/messages/create.turbo_stream.erb.tt +5 -5
- data/lib/generators/ruby_llm/chat_ui/templates/views/models/_model.html.erb.tt +9 -9
- data/lib/generators/ruby_llm/chat_ui/templates/views/models/index.html.erb.tt +4 -6
- data/lib/generators/ruby_llm/chat_ui/templates/views/models/show.html.erb.tt +11 -11
- data/lib/generators/ruby_llm/generator_helpers.rb +152 -87
- data/lib/generators/ruby_llm/install/install_generator.rb +75 -79
- data/lib/generators/ruby_llm/install/templates/create_messages_migration.rb.tt +3 -0
- data/lib/generators/ruby_llm/install/templates/create_models_migration.rb.tt +5 -0
- data/lib/generators/ruby_llm/install/templates/create_tool_calls_migration.rb.tt +7 -1
- data/lib/generators/ruby_llm/install/templates/initializer.rb.tt +1 -1
- data/lib/generators/ruby_llm/upgrade_to_v1_7/upgrade_to_v1_7_generator.rb +88 -85
- data/lib/generators/ruby_llm/upgrade_to_v1_9/templates/add_v1_9_message_columns.rb.tt +15 -0
- data/lib/generators/ruby_llm/upgrade_to_v1_9/upgrade_to_v1_9_generator.rb +49 -0
- data/lib/ruby_llm/active_record/acts_as.rb +23 -16
- data/lib/ruby_llm/active_record/chat_methods.rb +41 -13
- data/lib/ruby_llm/active_record/message_methods.rb +11 -2
- data/lib/ruby_llm/active_record/model_methods.rb +1 -1
- data/lib/ruby_llm/aliases.json +61 -32
- data/lib/ruby_llm/attachment.rb +42 -11
- data/lib/ruby_llm/chat.rb +13 -2
- data/lib/ruby_llm/configuration.rb +6 -1
- data/lib/ruby_llm/connection.rb +4 -4
- data/lib/ruby_llm/content.rb +23 -0
- data/lib/ruby_llm/message.rb +17 -9
- data/lib/ruby_llm/model/info.rb +4 -0
- data/lib/ruby_llm/models.json +7157 -6089
- data/lib/ruby_llm/models.rb +14 -22
- data/lib/ruby_llm/provider.rb +27 -5
- data/lib/ruby_llm/providers/anthropic/chat.rb +18 -5
- data/lib/ruby_llm/providers/anthropic/content.rb +44 -0
- data/lib/ruby_llm/providers/anthropic/media.rb +6 -5
- data/lib/ruby_llm/providers/anthropic/models.rb +9 -2
- data/lib/ruby_llm/providers/anthropic/tools.rb +20 -18
- data/lib/ruby_llm/providers/bedrock/media.rb +2 -1
- data/lib/ruby_llm/providers/bedrock/streaming/content_extraction.rb +9 -2
- data/lib/ruby_llm/providers/gemini/chat.rb +353 -72
- data/lib/ruby_llm/providers/gemini/media.rb +59 -1
- data/lib/ruby_llm/providers/gemini/tools.rb +146 -25
- data/lib/ruby_llm/providers/gemini/transcription.rb +116 -0
- data/lib/ruby_llm/providers/gemini.rb +2 -1
- data/lib/ruby_llm/providers/gpustack/media.rb +1 -0
- data/lib/ruby_llm/providers/ollama/media.rb +1 -0
- data/lib/ruby_llm/providers/openai/capabilities.rb +15 -7
- data/lib/ruby_llm/providers/openai/chat.rb +7 -3
- data/lib/ruby_llm/providers/openai/media.rb +2 -1
- data/lib/ruby_llm/providers/openai/streaming.rb +7 -3
- data/lib/ruby_llm/providers/openai/tools.rb +34 -12
- data/lib/ruby_llm/providers/openai/transcription.rb +70 -0
- data/lib/ruby_llm/providers/openai_base.rb +1 -0
- data/lib/ruby_llm/providers/vertexai/transcription.rb +16 -0
- data/lib/ruby_llm/providers/vertexai.rb +11 -11
- data/lib/ruby_llm/railtie.rb +24 -22
- data/lib/ruby_llm/stream_accumulator.rb +8 -12
- data/lib/ruby_llm/tool.rb +126 -0
- data/lib/ruby_llm/transcription.rb +35 -0
- data/lib/ruby_llm/utils.rb +46 -0
- data/lib/ruby_llm/version.rb +1 -1
- data/lib/ruby_llm_community.rb +7 -1
- metadata +27 -3
data/lib/ruby_llm/aliases.json
CHANGED
```diff
@@ -8,16 +8,17 @@
     "openrouter": "anthropic/claude-3.5-haiku",
     "bedrock": "anthropic.claude-3-5-haiku-20241022-v1:0"
   },
-  "claude-3-5-
-    "anthropic": "claude-3-5-
-    "openrouter": "anthropic/claude-3.5-sonnet",
-    "bedrock": "anthropic.claude-3-5-sonnet-20240620-v1:0:200k"
+  "claude-3-5-haiku-latest": {
+    "anthropic": "claude-3-5-haiku-latest"
   },
   "claude-3-7-sonnet": {
     "anthropic": "claude-3-7-sonnet-20250219",
     "openrouter": "anthropic/claude-3.7-sonnet",
     "bedrock": "us.anthropic.claude-3-7-sonnet-20250219-v1:0"
   },
+  "claude-3-7-sonnet-latest": {
+    "anthropic": "claude-3-7-sonnet-latest"
+  },
   "claude-3-haiku": {
     "anthropic": "claude-3-haiku-20240307",
     "openrouter": "anthropic/claude-3-haiku",
@@ -31,11 +32,19 @@
   "claude-3-sonnet": {
     "bedrock": "anthropic.claude-3-sonnet-20240229-v1:0"
   },
+  "claude-haiku-4-5": {
+    "anthropic": "claude-haiku-4-5-20251001",
+    "openrouter": "anthropic/claude-haiku-4.5",
+    "bedrock": "us.anthropic.claude-haiku-4-5-20251001-v1:0"
+  },
   "claude-opus-4": {
     "anthropic": "claude-opus-4-20250514",
     "openrouter": "anthropic/claude-opus-4",
     "bedrock": "us.anthropic.claude-opus-4-1-20250805-v1:0"
   },
+  "claude-opus-4-0": {
+    "anthropic": "claude-opus-4-0"
+  },
   "claude-opus-4-1": {
     "anthropic": "claude-opus-4-1-20250805",
     "openrouter": "anthropic/claude-opus-4.1",
@@ -46,30 +55,18 @@
     "openrouter": "anthropic/claude-sonnet-4",
     "bedrock": "us.anthropic.claude-sonnet-4-20250514-v1:0"
   },
+  "claude-sonnet-4-0": {
+    "anthropic": "claude-sonnet-4-0"
+  },
+  "claude-sonnet-4-5": {
+    "anthropic": "claude-sonnet-4-5-20250929",
+    "openrouter": "anthropic/claude-sonnet-4.5",
+    "bedrock": "us.anthropic.claude-sonnet-4-5-20250929-v1:0"
+  },
   "deepseek-chat": {
     "deepseek": "deepseek-chat",
     "openrouter": "deepseek/deepseek-chat"
   },
-  "gemini-1.5-flash": {
-    "gemini": "gemini-1.5-flash",
-    "vertexai": "gemini-1.5-flash"
-  },
-  "gemini-1.5-flash-002": {
-    "gemini": "gemini-1.5-flash-002",
-    "vertexai": "gemini-1.5-flash-002"
-  },
-  "gemini-1.5-flash-8b": {
-    "gemini": "gemini-1.5-flash-8b",
-    "vertexai": "gemini-1.5-flash-8b"
-  },
-  "gemini-1.5-pro": {
-    "gemini": "gemini-1.5-pro",
-    "vertexai": "gemini-1.5-pro"
-  },
-  "gemini-1.5-pro-002": {
-    "gemini": "gemini-1.5-pro-002",
-    "vertexai": "gemini-1.5-pro-002"
-  },
   "gemini-2.0-flash": {
     "gemini": "gemini-2.0-flash",
     "vertexai": "gemini-2.0-flash"
@@ -93,6 +90,10 @@
     "openrouter": "google/gemini-2.5-flash",
     "vertexai": "gemini-2.5-flash"
   },
+  "gemini-2.5-flash-image": {
+    "gemini": "gemini-2.5-flash-image",
+    "openrouter": "google/gemini-2.5-flash-image"
+  },
   "gemini-2.5-flash-image-preview": {
     "gemini": "gemini-2.5-flash-image-preview",
     "openrouter": "google/gemini-2.5-flash-image-preview"
@@ -106,6 +107,14 @@
     "gemini": "gemini-2.5-flash-lite-preview-06-17",
     "openrouter": "google/gemini-2.5-flash-lite-preview-06-17"
   },
+  "gemini-2.5-flash-lite-preview-09-2025": {
+    "gemini": "gemini-2.5-flash-lite-preview-09-2025",
+    "openrouter": "google/gemini-2.5-flash-lite-preview-09-2025"
+  },
+  "gemini-2.5-flash-preview-09-2025": {
+    "gemini": "gemini-2.5-flash-preview-09-2025",
+    "openrouter": "google/gemini-2.5-flash-preview-09-2025"
+  },
   "gemini-2.5-pro": {
     "gemini": "gemini-2.5-pro",
     "openrouter": "google/gemini-2.5-pro",
@@ -219,6 +228,10 @@
     "openai": "gpt-5",
     "openrouter": "openai/gpt-5"
   },
+  "gpt-5-codex": {
+    "openai": "gpt-5-codex",
+    "openrouter": "openai/gpt-5-codex"
+  },
   "gpt-5-mini": {
     "openai": "gpt-5-mini",
     "openrouter": "openai/gpt-5-mini"
@@ -227,6 +240,22 @@
     "openai": "gpt-5-nano",
     "openrouter": "openai/gpt-5-nano"
   },
+  "gpt-5-pro": {
+    "openai": "gpt-5-pro",
+    "openrouter": "openai/gpt-5-pro"
+  },
+  "gpt-oss-120b": {
+    "openai": "gpt-oss-120b",
+    "openrouter": "openai/gpt-oss-120b"
+  },
+  "gpt-oss-20b": {
+    "openai": "gpt-oss-20b",
+    "openrouter": "openai/gpt-oss-20b"
+  },
+  "imagen-4.0-generate-001": {
+    "gemini": "imagen-4.0-generate-001",
+    "vertexai": "imagen-4.0-generate-001"
+  },
   "grok-2": {
     "xai": "grok-2-1212"
   },
@@ -305,14 +334,6 @@
     "openai": "o1",
     "openrouter": "openai/o1"
   },
-  "o1-mini": {
-    "openai": "o1-mini",
-    "openrouter": "openai/o1-mini"
-  },
-  "o1-mini-2024-09-12": {
-    "openai": "o1-mini-2024-09-12",
-    "openrouter": "openai/o1-mini-2024-09-12"
-  },
   "o1-pro": {
     "openai": "o1-pro",
     "openrouter": "openai/o1-pro"
@@ -321,6 +342,10 @@
     "openai": "o3",
     "openrouter": "openai/o3"
   },
+  "o3-deep-research": {
+    "openai": "o3-deep-research",
+    "openrouter": "openai/o3-deep-research"
+  },
   "o3-mini": {
     "openai": "o3-mini",
     "openrouter": "openai/o3-mini"
@@ -333,6 +358,10 @@
     "openai": "o4-mini",
     "openrouter": "openai/o4-mini"
   },
+  "o4-mini-deep-research": {
+    "openai": "o4-mini-deep-research",
+    "openrouter": "openai/o4-mini-deep-research"
+  },
   "text-embedding-004": {
     "gemini": "text-embedding-004",
     "vertexai": "text-embedding-004"
```
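The alias table gains the newer Claude 4.5, Gemini 2.5 preview, GPT-5, and gpt-oss names and drops the retired Gemini 1.5 and o1-mini entries. A hedged sketch of how an alias is typically consumed, assuming the gem's standard `RubyLLM.chat` entry point and that alias keys resolve to the provider-specific ids listed above (the model name is illustrative):

```ruby
# Sketch only: "claude-sonnet-4-5" is an alias key from aliases.json; on the
# Anthropic provider it should resolve to the dated id shown in the table.
chat = RubyLLM.chat(model: 'claude-sonnet-4-5')
chat.ask('Give me one sentence on what an alias file is for.')
```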
data/lib/ruby_llm/attachment.rb
CHANGED
```diff
@@ -7,17 +7,8 @@ module RubyLLM
 
     def initialize(source, filename: nil)
       @source = source
-
-
-        @filename = filename || File.basename(@source.path).to_s
-      elsif path?
-        @source = Pathname.new source
-        @filename = filename || @source.basename.to_s
-      elsif active_storage?
-        @filename = filename || extract_filename_from_active_storage
-      else
-        @filename = filename
-      end
+      @source = source_type_cast
+      @filename = filename || source_filename
 
       determine_mime_type
     end
@@ -67,6 +58,14 @@ module RubyLLM
       Base64.strict_encode64(content)
     end
 
+    def save(path)
+      return unless io_like?
+
+      File.open(path, 'w') do |f|
+        f.puts(@source.read)
+      end
+    end
+
     def for_llm
       case type
       when :text
@@ -160,6 +159,38 @@ module RubyLLM
       end
     end
 
+    def source_type_cast
+      if url?
+        URI(@source)
+      elsif path?
+        Pathname.new(@source)
+      else
+        @source
+      end
+    end
+
+    def source_filename
+      if url?
+        File.basename(@source.path).to_s
+      elsif path?
+        @source.basename.to_s
+      elsif io_like?
+        extract_filename_from_io
+      elsif active_storage?
+        extract_filename_from_active_storage
+      end
+    end
+
+    def extract_filename_from_io
+      if defined?(ActionDispatch::Http::UploadedFile) && @source.is_a?(ActionDispatch::Http::UploadedFile)
+        @source.original_filename.to_s
+      elsif @source.respond_to?(:path)
+        File.basename(@source.path).to_s
+      else
+        'attachment'
+      end
+    end
+
     def extract_filename_from_active_storage # rubocop:disable Metrics/PerceivedComplexity
       return 'attachment' unless defined?(ActiveStorage)
 
```
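A minimal sketch of how the reworked constructor and the new `save` method could be exercised, assuming an IO-like source so the `io_like?`, `source_type_cast`, and `extract_filename_from_io` paths shown above apply (the file names are placeholders):

```ruby
# Sketch: the filename now comes from source_filename, which handles URLs,
# paths, IO objects (including ActionDispatch uploads), and Active Storage.
attachment = RubyLLM::Attachment.new(File.open('report.pdf'), filename: 'report.pdf')

# save only writes when the source is io_like?; it copies the IO's bytes to path.
attachment.save('/tmp/report-copy.pdf')
```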
data/lib/ruby_llm/chat.rb
CHANGED
```diff
@@ -32,7 +32,7 @@ module RubyLLM
     end
 
     def ask(message = nil, with: nil, &)
-      add_message role: :user, content:
+      add_message role: :user, content: build_content(message, with)
       complete(&)
     end
 
@@ -200,7 +200,8 @@ module RubyLLM
       @on[:tool_call]&.call(tool_call)
       result = execute_tool tool_call
       @on[:tool_result]&.call(result)
-
+      tool_payload = result.is_a?(Tool::Halt) ? result.content : result
+      content = content_like?(tool_payload) ? tool_payload : tool_payload.to_s
       message = add_message role: :tool, content:, tool_call_id: tool_call.id
       @on[:end_message]&.call(message)
 
@@ -215,5 +216,15 @@ module RubyLLM
       args = tool_call.arguments
       tool.call(args)
     end
+
+    def build_content(message, attachments)
+      return message if content_like?(message)
+
+      Content.new(message, attachments)
+    end
+
+    def content_like?(object)
+      object.is_a?(Content) || object.is_a?(Content::Raw)
+    end
   end
 end
```
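A hedged usage sketch of the new `build_content` path: plain strings (optionally with attachments) are wrapped in `Content`, while `Content` and `Content::Raw` objects, including tool results that return them, pass through untouched. The model name, file name, and raw payload shape are placeholders, not values taken from the gem.

```ruby
chat = RubyLLM.chat(model: 'gemini-2.5-flash')

# String plus attachments -> wrapped via Content.new(message, with)
chat.ask('Summarize this file', with: 'notes.pdf')

# Content::Raw -> forwarded to the provider without RubyLLM formatting
chat.ask(RubyLLM::Content::Raw.new({ parts: [{ text: 'hello' }] }))
```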
data/lib/ruby_llm/configuration.rb
CHANGED

```diff
@@ -10,6 +10,7 @@ module RubyLLM
       :openai_use_system_role,
       :anthropic_api_key,
       :gemini_api_key,
+      :gemini_api_base,
       :vertexai_project_id,
       :vertexai_location,
       :deepseek_api_key,
@@ -31,7 +32,9 @@ module RubyLLM
       :default_embedding_model,
       :default_moderation_model,
       :default_image_model,
+      :default_transcription_model,
       # Model registry
+      :model_registry_file,
       :model_registry_class,
       # Rails integration
       :use_new_acts_as,
@@ -49,7 +52,7 @@ module RubyLLM
       :log_stream_debug
 
     def initialize
-      @request_timeout =
+      @request_timeout = 300
       @max_retries = 3
       @retry_interval = 0.1
       @retry_backoff_factor = 2
@@ -60,7 +63,9 @@ module RubyLLM
       @default_embedding_model = 'text-embedding-3-small'
       @default_moderation_model = 'omni-moderation-latest'
      @default_image_model = 'gpt-image-1'
+      @default_transcription_model = 'whisper-1'
 
+      @model_registry_file = File.expand_path('models.json', __dir__)
       @model_registry_class = 'Model'
       @use_new_acts_as = false
 
```
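A sketch of wiring up the new configuration attributes, assuming the gem's usual `RubyLLM.configure` block; the values are placeholders except the `whisper-1` default visible in the diff.

```ruby
RubyLLM.configure do |config|
  config.gemini_api_base = ENV['GEMINI_API_BASE']              # new: custom Gemini endpoint
  config.default_transcription_model = 'whisper-1'             # new default per the diff
  config.model_registry_file = 'config/ruby_llm/models.json'   # overrides the bundled models.json
end
```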
data/lib/ruby_llm/connection.rb
CHANGED
```diff
@@ -34,8 +34,7 @@ module RubyLLM
     end
 
     def post(url, payload, &)
-
-      @connection.post url, body do |req|
+      @connection.post url, payload do |req|
         req.headers.merge! @provider.headers if @provider.respond_to?(:headers)
         yield req if block_given?
       end
@@ -66,7 +65,7 @@ module RubyLLM
                        errors: true,
                        headers: false,
                        log_level: :debug do |logger|
-        logger.filter(%r{[A-Za-z0-9+/=]{100,}}, '
+        logger.filter(%r{[A-Za-z0-9+/=]{100,}}, '[BASE64 DATA]')
        logger.filter(/[-\d.e,\s]{100,}/, '[EMBEDDINGS ARRAY]')
       end
     end
@@ -83,9 +82,10 @@ module RubyLLM
     end
 
     def setup_middleware(faraday)
+      faraday.request :multipart
       faraday.request :json
       faraday.response :json
-      faraday.adapter
+      faraday.adapter :net_http
       faraday.use :llm_errors, provider: @provider
     end
 
```
data/lib/ruby_llm/content.rb
CHANGED
```diff
@@ -53,3 +53,26 @@ module RubyLLM
     end
   end
 end
+
+module RubyLLM
+  class Content
+    # Represents provider-specific payloads that should bypass RubyLLM formatting.
+    class Raw
+      attr_reader :value
+
+      def initialize(value)
+        raise ArgumentError, 'Raw content payload cannot be nil' if value.nil?
+
+        @value = value
+      end
+
+      def format
+        @value
+      end
+
+      def to_h
+        @value
+      end
+    end
+  end
+end
```
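The new `Content::Raw` wrapper hands back exactly the payload it was given, both when formatting for a provider and when serializing. A tiny sketch based only on the methods shown above (the payload itself is a placeholder):

```ruby
raw = RubyLLM::Content::Raw.new([{ type: 'text', text: 'pre-formatted block' }])
raw.format # => [{ type: 'text', text: 'pre-formatted block' }]
raw.to_h   # => the same object, untouched

RubyLLM::Content::Raw.new(nil) # => raises ArgumentError
```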
data/lib/ruby_llm/message.rb
CHANGED
```diff
@@ -5,22 +5,22 @@ module RubyLLM
   class Message
     ROLES = %i[system user assistant tool].freeze
 
-    attr_reader :role, :tool_calls, :tool_call_id, :input_tokens, :output_tokens,
-                :cached_tokens, :cache_creation_tokens, :reasoning_id
+    attr_reader :role, :model_id, :tool_calls, :tool_call_id, :input_tokens, :output_tokens,
+                :cached_tokens, :cache_creation_tokens, :raw, :reasoning_id
     attr_writer :content
 
     def initialize(options = {})
       @role = options.fetch(:role).to_sym
       @content = normalize_content(options.fetch(:content))
+      @model_id = options[:model_id]
       @tool_calls = options[:tool_calls]
+      @tool_call_id = options[:tool_call_id]
       @input_tokens = options[:input_tokens]
       @output_tokens = options[:output_tokens]
-      @model_id = options[:model_id]
-      @tool_call_id = options[:tool_call_id]
       @cached_tokens = options[:cached_tokens]
       @cache_creation_tokens = options[:cache_creation_tokens]
-      @reasoning_id = options[:reasoning_id]
       @raw = options[:raw]
+      @reasoning_id = options[:reasoning_id]
 
       ensure_valid_role
     end
@@ -46,16 +46,24 @@ module RubyLLM
     end
 
     def to_h
+      content_value = content
+      content_value = content_value.to_h if content_value.is_a?(Content) || content_value.is_a?(Content::Raw)
+
+      tool_calls_value = tool_calls
+      if tool_calls_value.is_a?(Hash) && tool_calls_value.values.any?(ToolCall)
+        tool_calls_value = tool_calls_value&.transform_values(&:to_h)
+      end
+
       {
         role: role,
-        content:
-
+        content: content_value,
+        model_id: model_id,
+        tool_calls: tool_calls_value,
         tool_call_id: tool_call_id,
         input_tokens: input_tokens,
         output_tokens: output_tokens,
-        model_id: model_id,
-        cache_creation_tokens: cache_creation_tokens,
         cached_tokens: cached_tokens,
+        cache_creation_tokens: cache_creation_tokens,
         reasoning_id: reasoning_id
       }.compact
     end
```
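With the changes above, `Message#to_h` serializes nested `Content`/`Content::Raw` objects and `ToolCall` hashes instead of returning them verbatim, and `model_id` and `raw` gain public readers. A hedged sketch, assuming a plain string passes through `normalize_content` unchanged (the values are illustrative):

```ruby
message = RubyLLM::Message.new(
  role: :assistant,
  content: 'Done.',
  model_id: 'gpt-5-mini',
  input_tokens: 12,
  output_tokens: 3
)

message.model_id # => "gpt-5-mini" (newly exposed reader)
message.to_h     # => { role: :assistant, content: "Done.", model_id: "gpt-5-mini",
                 #      input_tokens: 12, output_tokens: 3 } — nil fields are compacted away
```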
data/lib/ruby_llm/model/info.rb
CHANGED
```diff
@@ -72,6 +72,10 @@ module RubyLLM
       pricing.text_tokens.output
     end
 
+    def provider_class
+      RubyLLM::Provider.resolve provider
+    end
+
     def type # rubocop:disable Metrics/PerceivedComplexity
       if modalities.output.include?('embeddings') && !modalities.output.include?('text')
         'embedding'
```