ruby_llm 1.3.0rc1 → 1.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +13 -9
- data/lib/ruby_llm/active_record/acts_as.rb +67 -148
- data/lib/ruby_llm/aliases.json +178 -42
- data/lib/ruby_llm/attachment.rb +164 -0
- data/lib/ruby_llm/chat.rb +12 -4
- data/lib/ruby_llm/configuration.rb +6 -1
- data/lib/ruby_llm/connection.rb +28 -2
- data/lib/ruby_llm/content.rb +9 -40
- data/lib/ruby_llm/error.rb +1 -0
- data/lib/ruby_llm/image.rb +2 -3
- data/lib/ruby_llm/message.rb +2 -2
- data/lib/ruby_llm/mime_type.rb +67 -0
- data/lib/ruby_llm/model/info.rb +101 -0
- data/lib/ruby_llm/model/modalities.rb +22 -0
- data/lib/ruby_llm/model/pricing.rb +51 -0
- data/lib/ruby_llm/model/pricing_category.rb +48 -0
- data/lib/ruby_llm/model/pricing_tier.rb +34 -0
- data/lib/ruby_llm/model.rb +7 -0
- data/lib/ruby_llm/models.json +2646 -2201
- data/lib/ruby_llm/models.rb +20 -20
- data/lib/ruby_llm/provider.rb +1 -1
- data/lib/ruby_llm/providers/anthropic/media.rb +14 -3
- data/lib/ruby_llm/providers/anthropic/models.rb +1 -1
- data/lib/ruby_llm/providers/anthropic/tools.rb +5 -4
- data/lib/ruby_llm/providers/bedrock/media.rb +7 -4
- data/lib/ruby_llm/providers/bedrock/models.rb +2 -2
- data/lib/ruby_llm/providers/bedrock/streaming/prelude_handling.rb +3 -3
- data/lib/ruby_llm/providers/gemini/images.rb +3 -2
- data/lib/ruby_llm/providers/gemini/media.rb +12 -24
- data/lib/ruby_llm/providers/gemini/models.rb +1 -1
- data/lib/ruby_llm/providers/ollama/media.rb +8 -4
- data/lib/ruby_llm/providers/openai/capabilities.rb +5 -2
- data/lib/ruby_llm/providers/openai/chat.rb +12 -8
- data/lib/ruby_llm/providers/openai/images.rb +3 -2
- data/lib/ruby_llm/providers/openai/media.rb +18 -8
- data/lib/ruby_llm/providers/openai/models.rb +1 -1
- data/lib/ruby_llm/providers/openrouter/models.rb +1 -1
- data/lib/ruby_llm/streaming.rb +46 -11
- data/lib/ruby_llm/tool.rb +8 -8
- data/lib/ruby_llm/utils.rb +14 -9
- data/lib/ruby_llm/version.rb +1 -1
- data/lib/ruby_llm.rb +1 -1
- data/lib/tasks/aliases.rake +235 -0
- data/lib/tasks/models_docs.rake +13 -7
- data/lib/tasks/release.rake +32 -0
- metadata +40 -25
- data/lib/ruby_llm/attachments/audio.rb +0 -12
- data/lib/ruby_llm/attachments/image.rb +0 -9
- data/lib/ruby_llm/attachments/pdf.rb +0 -9
- data/lib/ruby_llm/attachments.rb +0 -78
- data/lib/ruby_llm/mime_types.rb +0 -713
- data/lib/ruby_llm/model_info.rb +0 -237
- data/lib/tasks/{models.rake → models_update.rake} +13 -13
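
The file list shows the shape of this release: the per-type attachment classes (attachments/audio.rb, image.rb, pdf.rb) and the 713-line mime_types.rb table are removed in favor of a single attachment.rb and a much smaller mime_type.rb, while model_info.rb is split into the new model/info.rb, modalities.rb, and pricing files. A minimal sketch of the attachment API this refactor enables, based on the README changes shown further down (file names are illustrative):

```ruby
require "ruby_llm"

chat = RubyLLM.chat

# Attachment types are now detected automatically from the file itself,
# so callers no longer pass { image: ... } / { pdf: ... } hashes.
chat.ask "What's in this image?", with: "ruby_conf.jpg"
chat.ask "Summarize this document", with: "contract.pdf"
chat.ask "Analyze these files", with: ["diagram.png", "report.pdf", "notes.txt"]
```
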
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: a2287ea41c7591e593a315a35fd34ef8e32968c1b7337042c0b08338caeda784
+  data.tar.gz: 55a2690e1faa46d9ecdda3e8115ae64549be8d52c80fb5ffdb2aadd6fa8887dd
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 8d5c7d2b3b73f289bdad5c5faac487a03221101302567a1d36b073de84e97e3650cc4d103ce6f9e641659c0ecc69484593535ed964b933f7d14b19b5dce34e81
+  data.tar.gz: 1515124ad74198461fdbc8105f7173392df7bee2038c701ad85d6da258107bb3ddd366878fa140bd5de354b41dfe8abc5887a78adf2c7cb3beb0cf6caa64f764
data/README.md CHANGED
@@ -37,7 +37,7 @@
 
 Every AI provider comes with its own client library, its own response format, its own conventions for streaming, and its own way of handling errors. Want to use multiple providers? Prepare to juggle incompatible APIs and bloated dependencies.
 
-RubyLLM fixes all that. One beautiful API for everything. One consistent format. Minimal dependencies — just Faraday and
+RubyLLM fixes all that. One beautiful API for everything. One consistent format. Minimal dependencies — just Faraday, Zeitwerk, and Marcel. Because working with AI should be a joy, not a chore.
 
 ## What makes it great
 
@@ -46,14 +46,14 @@ RubyLLM fixes all that. One beautiful API for everything. One consistent format.
 chat = RubyLLM.chat
 chat.ask "What's the best way to learn Ruby?"
 
-# Analyze images
-chat.ask "What's in this image?", with:
+# Analyze images, audio, documents, and text files
+chat.ask "What's in this image?", with: "ruby_conf.jpg"
+chat.ask "Describe this meeting", with: "meeting.wav"
+chat.ask "Summarize this document", with: "contract.pdf"
+chat.ask "Explain this code", with: "app.rb"
 
-#
-chat.ask "
-
-# Analyze documents
-chat.ask "Summarize this document", with: { pdf: "contract.pdf" }
+# Multiple files at once - types automatically detected
+chat.ask "Analyze these files", with: ["diagram.png", "report.pdf", "notes.txt"]
 
 # Stream responses in real-time
 chat.ask "Tell me a story about a Ruby programmer" do |chunk|
@@ -90,7 +90,7 @@ chat.with_tool(Weather).ask "What's the weather in Berlin? (52.5200, 13.4050)"
 * 💬 **Unified Chat:** Converse with models from OpenAI, Anthropic, Gemini, Bedrock, OpenRouter, DeepSeek, Ollama, or any OpenAI-compatible API using `RubyLLM.chat`.
 * 👁️ **Vision:** Analyze images within chats.
 * 🔊 **Audio:** Transcribe and understand audio content.
-* 📄 **
+* 📄 **Document Analysis:** Extract information from PDFs, text files, and other documents.
 * 🖼️ **Image Generation:** Create images with `RubyLLM.paint`.
 * 📊 **Embeddings:** Generate text embeddings for vector search with `RubyLLM.embed`.
 * 🔧 **Tools (Function Calling):** Let AI models call your Ruby code using `RubyLLM::Tool`.
@@ -143,6 +143,10 @@ end
 # Now interacting with a Chat record persists the conversation:
 chat_record = Chat.create!(model_id: "gpt-4.1-nano")
 chat_record.ask("Explain Active Record callbacks.") # User & Assistant messages saved
+
+# Works seamlessly with file attachments - types automatically detected
+chat_record.ask("What's in this file?", with: "report.pdf")
+chat_record.ask("Analyze these", with: ["image.jpg", "data.csv", "notes.txt"])
 ```
 Check the [Rails Integration Guide](https://rubyllm.com/guides/rails) for more.
 
data/lib/ruby_llm/active_record/acts_as.rb CHANGED
@@ -18,10 +18,10 @@ module RubyLLM
   has_many :messages,
            -> { order(created_at: :asc) },
            class_name: @message_class,
+           inverse_of: :chat,
            dependent: :destroy
 
-  delegate :add_message,
-           to: :to_llm
+  delegate :add_message, to: :to_llm
 end
 
 def acts_as_message(chat_class: 'Chat',
@@ -88,26 +88,18 @@ module RubyLLM
   @chat ||= RubyLLM.chat(model: model_id)
   @chat.reset_messages!
 
-  # Load existing messages into chat
   messages.each do |msg|
     @chat.add_message(msg.to_llm)
   end
 
-  # Set up message persistence
   @chat.on_new_message { persist_new_message }
        .on_end_message { |msg| persist_message_completion(msg) }
 end
 
 def with_instructions(instructions, replace: false)
   transaction do
-    # If replace is true, remove existing system messages
     messages.where(role: :system).destroy_all if replace
-
-    # Create the new system message
-    messages.create!(
-      role: :system,
-      content: instructions
-    )
+    messages.create!(role: :system, content: instructions)
   end
   to_llm.with_instructions(instructions)
   self
@@ -124,7 +116,7 @@ module RubyLLM
 end
 
 def with_model(...)
-  to_llm.with_model(...)
+  update(model_id: to_llm.with_model(...).model.id)
   self
 end
 
@@ -133,6 +125,11 @@ module RubyLLM
   self
 end
 
+def with_context(...)
+  to_llm.with_context(...)
+  self
+end
+
 def on_new_message(...)
   to_llm.on_new_message(...)
   self
@@ -144,21 +141,8 @@ module RubyLLM
 end
 
 def create_user_message(content, with: nil)
-  message_record = messages.create!(
-
-    content: content
-  )
-
-  if with.present?
-    files = Array(with).reject(&:blank?)
-
-    if files.any? && files.first.is_a?(ActionDispatch::Http::UploadedFile)
-      message_record.attachments.attach(files)
-    else
-      attach_files(message_record, process_attachments(with))
-    end
-  end
-
+  message_record = messages.create!(role: :user, content: content)
+  persist_content(message_record, with) if with.present?
   message_record
 end
 
@@ -182,21 +166,16 @@ module RubyLLM
 private
 
 def persist_new_message
-  @message = messages.create!(
-    role: :assistant,
-    content: String.new
-  )
+  @message = messages.create!(role: :assistant, content: String.new)
 end
 
 def persist_message_completion(message)
   return unless message
 
-  if message.tool_call_id
-    tool_call_id = self.class.tool_call_class.constantize.find_by(tool_call_id: message.tool_call_id)&.id
-  end
+  tool_call_id = find_tool_call_id(message.tool_call_id) if message.tool_call_id
 
   transaction do
-    @message.update(
+    @message.update!(
      role: message.role,
      content: message.content,
      model_id: message.model_id,
@@ -217,84 +196,46 @@ module RubyLLM
   end
 end
 
-def
-
-
-  result = {}
-  files = Array(attachments)
-
-  files.each do |file|
-    content_type = if file.respond_to?(:content_type)
-      file.content_type
-    elsif file.is_a?(ActiveStorage::Attachment)
-      file.blob.content_type
-    else
-      RubyLLM::MimeTypes.detect_from_path(file.to_s)
-    end
-
-    if RubyLLM::MimeTypes.image?(content_type)
-      result[:image] ||= []
-      result[:image] << file
-    elsif RubyLLM::MimeTypes.audio?(content_type)
-      result[:audio] ||= []
-      result[:audio] << file
-    else
-      # Default to PDF for unknown types
-      result[:pdf] ||= []
-      result[:pdf] << file
-    end
-  end
-
-  result
+def find_tool_call_id(tool_call_id)
+  self.class.tool_call_class.constantize.find_by(tool_call_id: tool_call_id)&.id
 end
 
-def
-  return unless
+def persist_content(message_record, attachments)
+  return unless message_record.respond_to?(:attachments)
 
-
-
-      attach_file(message, file_source)
-    end
-  end
+  attachables = prepare_for_active_storage(attachments)
+  message_record.attachments.attach(attachables) if attachables.any?
 end
 
-def
-
-
-
-
-
-
-
-
-
-
-
-
-  elsif file_source.respond_to?(:read)
-    # Handle various file source types
-    message.attachments.attach(
-      io: file_source,
-      filename: extract_filename(file_source),
-      content_type: RubyLLM::MimeTypes.detect_from_path(extract_filename(file_source))
-    ) # Already a file-like object
-  elsif file_source.is_a?(::ActiveStorage::Attachment)
-    # Copy from existing ActiveStorage attachment
-    message.attachments.attach(file_source.blob)
-  elsif file_source.is_a?(::ActiveStorage::Blob)
-    message.attachments.attach(file_source)
-  else
-    # Local file path
-    message.attachments.attach(
-      io: File.open(file_source),
-      filename: File.basename(file_source),
-      content_type: RubyLLM::MimeTypes.detect_from_path(file_source)
-    )
-  end
+def prepare_for_active_storage(attachments)
+  Utils.to_safe_array(attachments).filter_map do |attachment|
+    case attachment
+    when ActionDispatch::Http::UploadedFile, ActiveStorage::Blob
+      attachment
+    when ActiveStorage::Attached::One, ActiveStorage::Attached::Many
+      attachment.blobs
+    when Hash
+      attachment.values.map { |v| prepare_for_active_storage(v) }
+    else
+      convert_to_active_storage_format(attachment)
+    end
+  end.flatten.compact
 end
 
-def
-
+def convert_to_active_storage_format(source)
+  return if source.blank?
+
+  # Let RubyLLM::Attachment handle the heavy lifting
+  attachment = RubyLLM::Attachment.new(source)
+
+  {
+    io: StringIO.new(attachment.content),
+    filename: attachment.filename,
+    content_type: attachment.mime_type
+  }
+rescue StandardError => e
+  RubyLLM.logger.warn "Failed to process attachment #{source}: #{e.message}"
+  nil
 end
 end
 
@@ -319,6 +260,8 @@ module RubyLLM
   )
 end
 
+private
+
 def extract_tool_calls
   tool_calls.to_h do |tool_call|
     [
@@ -336,55 +279,31 @@ module RubyLLM
   parent_tool_call&.tool_call_id
 end
 
-def extract_content
+def extract_content
   return content unless respond_to?(:attachments) && attachments.attached?
 
-
-
-
-
-
-
-    attachment_data = if attachment.metadata&.key?('original_url')
-      attachment.metadata['original_url']
-    elsif defined?(ActiveJob) && caller.any? { |c| c.include?('active_job') }
-      # We're in a background job - need to download the data
-      temp_file = Tempfile.new([File.basename(attachment.filename.to_s, '.*'),
-                                File.extname(attachment.filename.to_s)])
-      temp_file.binmode
-      temp_file.write(attachment.download)
-      temp_file.flush
-      temp_file.rewind
-
-      # Store the tempfile reference in the instance variable to prevent GC
-      @_tempfiles << temp_file
-
-      # Return the file object itself, not just the path
-      temp_file
-    else
-      blob_path_for(attachment)
-    end
-
-    if RubyLLM::MimeTypes.image?(attachment.content_type)
-      content_obj.add_image(attachment_data)
-    elsif RubyLLM::MimeTypes.audio?(attachment.content_type)
-      content_obj.add_audio(attachment_data)
-    elsif RubyLLM::MimeTypes.pdf?(attachment.content_type)
-      content_obj.add_pdf(attachment_data)
+  RubyLLM::Content.new(content).tap do |content_obj|
+    @_tempfiles = []
+
+    attachments.each do |attachment|
+      tempfile = download_attachment(attachment)
+      content_obj.add_attachment(tempfile, filename: attachment.filename.to_s)
     end
   end
-
-  content_obj
 end
 
-
+def download_attachment(attachment)
+  ext = File.extname(attachment.filename.to_s)
+  basename = File.basename(attachment.filename.to_s, ext)
+  tempfile = Tempfile.new([basename, ext])
+  tempfile.binmode
 
-
-
-
-
-
-
+  attachment.download { |chunk| tempfile.write(chunk) }
+
+  tempfile.flush
+  tempfile.rewind
+  @_tempfiles << tempfile
+  tempfile
 end
 end
 end
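
Net effect of the acts_as.rb changes: the Rails integration now normalizes every attachment source through prepare_for_active_storage and RubyLLM::Attachment instead of branching on MIME type. A hedged sketch of what the new path accepts, assuming the Chat/Message models from the README's Rails setup; upload, blob, and user.avatar are illustrative stand-ins for an ActionDispatch::Http::UploadedFile, an ActiveStorage::Blob, and an ActiveStorage::Attached::One:

```ruby
chat_record = Chat.create!(model_id: "gpt-4.1-nano")

# A plain path falls through to RubyLLM::Attachment via convert_to_active_storage_format.
chat_record.ask("What's in this file?", with: "report.pdf")

# Uploaded files and blobs are attached as-is; Attached::One/Many contribute their blobs.
chat_record.ask("Compare these", with: [upload, blob, user.avatar])
```
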
data/lib/ruby_llm/aliases.json CHANGED
@@ -1,80 +1,216 @@
 {
-  "
-  "
-  "
-
+  "chatgpt-4o": {
+    "openai": "chatgpt-4o-latest",
+    "openrouter": "openai/chatgpt-4o-latest"
+  },
+  "claude-2.0": {
+    "anthropic": "claude-2.0",
+    "openrouter": "anthropic/claude-2.0",
+    "bedrock": "anthropic.claude-v2:1:200k"
+  },
+  "claude-2.1": {
+    "anthropic": "claude-2.1",
+    "openrouter": "anthropic/claude-2.1",
+    "bedrock": "anthropic.claude-v2:1:200k"
   },
   "claude-3-5-haiku": {
     "anthropic": "claude-3-5-haiku-20241022",
-    "
-    "
+    "openrouter": "anthropic/claude-3.5-haiku",
+    "bedrock": "anthropic.claude-3-5-haiku-20241022-v1:0"
+  },
+  "claude-3-5-sonnet": {
+    "anthropic": "claude-3-5-sonnet-20241022",
+    "openrouter": "anthropic/claude-3.5-sonnet",
+    "bedrock": "anthropic.claude-3-5-sonnet-20240620-v1:0:200k"
   },
   "claude-3-7-sonnet": {
     "anthropic": "claude-3-7-sonnet-20250219",
-    "
-    "
+    "openrouter": "anthropic/claude-3.7-sonnet",
+    "bedrock": "us.anthropic.claude-3-7-sonnet-20250219-v1:0"
+  },
+  "claude-3-haiku": {
+    "anthropic": "claude-3-haiku-20240307",
+    "openrouter": "anthropic/claude-3-haiku",
+    "bedrock": "anthropic.claude-3-haiku-20240307-v1:0:200k"
   },
   "claude-3-opus": {
     "anthropic": "claude-3-opus-20240229",
-    "
+    "openrouter": "anthropic/claude-3-opus",
+    "bedrock": "anthropic.claude-3-opus-20240229-v1:0:200k"
   },
   "claude-3-sonnet": {
     "anthropic": "claude-3-sonnet-20240229",
-    "
+    "openrouter": "anthropic/claude-3-sonnet",
+    "bedrock": "anthropic.claude-3-sonnet-20240229-v1:0:200k"
   },
-  "claude-
-    "anthropic": "claude-
-    "
+  "claude-opus-4": {
+    "anthropic": "claude-opus-4-20250514",
+    "openrouter": "anthropic/claude-opus-4",
+    "bedrock": "us.anthropic.claude-opus-4-20250514-v1:0"
   },
-  "claude-
-    "anthropic": "claude-
-    "
-    "
+  "claude-sonnet-4": {
+    "anthropic": "claude-sonnet-4-20250514",
+    "openrouter": "anthropic/claude-sonnet-4",
+    "bedrock": "us.anthropic.claude-sonnet-4-20250514-v1:0"
   },
-  "
-  "
-  "
-    "openrouter": "anthropic/claude-2"
+  "deepseek-chat": {
+    "deepseek": "deepseek-chat",
+    "openrouter": "deepseek/deepseek-chat"
   },
-  "
-  "
-  "
-
+  "gemini-2.0-flash-001": {
+    "gemini": "gemini-2.0-flash-001",
+    "openrouter": "google/gemini-2.0-flash-001"
+  },
+  "gemini-2.0-flash-lite-001": {
+    "gemini": "gemini-2.0-flash-lite-001",
+    "openrouter": "google/gemini-2.0-flash-lite-001"
+  },
+  "gemini-2.5-flash-preview-05-20": {
+    "gemini": "gemini-2.5-flash-preview-05-20",
+    "openrouter": "google/gemini-2.5-flash-preview-05-20"
+  },
+  "gemini-2.5-pro-exp-03-25": {
+    "gemini": "gemini-2.5-pro-exp-03-25",
+    "openrouter": "google/gemini-2.5-pro-exp-03-25"
+  },
+  "gemini-2.5-pro-preview-05-06": {
+    "gemini": "gemini-2.5-pro-preview-05-06",
+    "openrouter": "google/gemini-2.5-pro-preview-05-06"
+  },
+  "gemma-3-12b-it": {
+    "gemini": "gemma-3-12b-it",
+    "openrouter": "google/gemma-3-12b-it"
+  },
+  "gemma-3-27b-it": {
+    "gemini": "gemma-3-27b-it",
+    "openrouter": "google/gemma-3-27b-it"
+  },
+  "gemma-3-4b-it": {
+    "gemini": "gemma-3-4b-it",
+    "openrouter": "google/gemma-3-4b-it"
+  },
+  "gpt-3.5-turbo": {
+    "openai": "gpt-3.5-turbo",
+    "openrouter": "openai/gpt-3.5-turbo"
+  },
+  "gpt-3.5-turbo-0125": {
+    "openai": "gpt-3.5-turbo-0125",
+    "openrouter": "openai/gpt-3.5-turbo-0125"
+  },
+  "gpt-3.5-turbo-1106": {
+    "openai": "gpt-3.5-turbo-1106",
+    "openrouter": "openai/gpt-3.5-turbo-1106"
+  },
+  "gpt-3.5-turbo-16k": {
+    "openai": "gpt-3.5-turbo-16k",
+    "openrouter": "openai/gpt-3.5-turbo-16k"
+  },
+  "gpt-3.5-turbo-instruct": {
+    "openai": "gpt-3.5-turbo-instruct",
+    "openrouter": "openai/gpt-3.5-turbo-instruct"
+  },
+  "gpt-4": {
+    "openai": "gpt-4",
+    "openrouter": "openai/gpt-4"
+  },
+  "gpt-4-1106-preview": {
+    "openai": "gpt-4-1106-preview",
+    "openrouter": "openai/gpt-4-1106-preview"
+  },
+  "gpt-4-turbo": {
+    "openai": "gpt-4-turbo",
+    "openrouter": "openai/gpt-4-turbo"
+  },
+  "gpt-4-turbo-preview": {
+    "openai": "gpt-4-turbo-preview",
+    "openrouter": "openai/gpt-4-turbo-preview"
+  },
+  "gpt-4.1": {
+    "openai": "gpt-4.1",
+    "openrouter": "openai/gpt-4.1"
+  },
+  "gpt-4.1-mini": {
+    "openai": "gpt-4.1-mini",
+    "openrouter": "openai/gpt-4.1-mini"
+  },
+  "gpt-4.1-nano": {
+    "openai": "gpt-4.1-nano",
+    "openrouter": "openai/gpt-4.1-nano"
+  },
+  "gpt-4.5-preview": {
+    "openai": "gpt-4.5-preview",
+    "openrouter": "openai/gpt-4.5-preview"
   },
   "gpt-4o": {
     "openai": "gpt-4o",
     "openrouter": "openai/gpt-4o"
   },
+  "gpt-4o-2024-05-13": {
+    "openai": "gpt-4o-2024-05-13",
+    "openrouter": "openai/gpt-4o-2024-05-13"
+  },
+  "gpt-4o-2024-08-06": {
+    "openai": "gpt-4o-2024-08-06",
+    "openrouter": "openai/gpt-4o-2024-08-06"
+  },
+  "gpt-4o-2024-11-20": {
+    "openai": "gpt-4o-2024-11-20",
+    "openrouter": "openai/gpt-4o-2024-11-20"
+  },
   "gpt-4o-mini": {
     "openai": "gpt-4o-mini",
     "openrouter": "openai/gpt-4o-mini"
   },
-  "gpt-
-    "openai": "gpt-
-    "openrouter": "openai/gpt-
-  },
-  "gemini-1.5-flash": {
-    "gemini": "gemini-1.5-flash",
-    "openrouter": "google/gemini-flash-1.5"
+  "gpt-4o-mini-2024-07-18": {
+    "openai": "gpt-4o-mini-2024-07-18",
+    "openrouter": "openai/gpt-4o-mini-2024-07-18"
   },
-  "
-  "
-    "openrouter": "
+  "gpt-4o-mini-search-preview": {
+    "openai": "gpt-4o-mini-search-preview",
+    "openrouter": "openai/gpt-4o-mini-search-preview"
   },
-  "
-  "
-    "openrouter": "
-  },
-  "gemini-2.0-flash": {
-    "gemini": "gemini-2.0-flash",
-    "openrouter": "google/gemini-2.0-flash-001"
+  "gpt-4o-search-preview": {
+    "openai": "gpt-4o-search-preview",
+    "openrouter": "openai/gpt-4o-search-preview"
   },
   "o1": {
     "openai": "o1",
     "openrouter": "openai/o1"
   },
+  "o1-mini": {
+    "openai": "o1-mini",
+    "openrouter": "openai/o1-mini"
+  },
+  "o1-mini-2024-09-12": {
+    "openai": "o1-mini-2024-09-12",
+    "openrouter": "openai/o1-mini-2024-09-12"
+  },
+  "o1-preview": {
+    "openai": "o1-preview",
+    "openrouter": "openai/o1-preview"
+  },
+  "o1-preview-2024-09-12": {
+    "openai": "o1-preview-2024-09-12",
+    "openrouter": "openai/o1-preview-2024-09-12"
+  },
+  "o1-pro": {
+    "openai": "o1-pro",
+    "openrouter": "openai/o1-pro"
+  },
+  "o3": {
+    "openai": "o3",
+    "openrouter": "openai/o3"
+  },
   "o3-mini": {
     "openai": "o3-mini",
     "openrouter": "openai/o3-mini"
+  },
+  "o3-pro": {
+    "openai": "o3-pro",
+    "openrouter": "openai/o3-pro"
+  },
+  "o4-mini": {
+    "openai": "o4-mini",
+    "openrouter": "openai/o4-mini"
   }
 }
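
The rewritten aliases.json maps one canonical name to provider-specific model IDs (for example, "claude-3-5-sonnet" above resolves to claude-3-5-sonnet-20241022 on Anthropic, anthropic/claude-3.5-sonnet on OpenRouter, and anthropic.claude-3-5-sonnet-20240620-v1:0:200k on Bedrock). A hedged usage sketch, assuming RubyLLM.chat accepts a provider: option as documented for the gem; this diff itself only shows the alias table:

```ruby
# The alias is resolved through aliases.json to the provider's concrete model ID.
chat = RubyLLM.chat(model: "claude-3-5-sonnet")
bedrock_chat = RubyLLM.chat(model: "claude-3-5-sonnet", provider: :bedrock)
```
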