ruby_llm 1.11.0 → 1.12.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +12 -0
- data/lib/ruby_llm/active_record/acts_as.rb +0 -2
- data/lib/ruby_llm/active_record/acts_as_legacy.rb +97 -27
- data/lib/ruby_llm/active_record/chat_methods.rb +73 -19
- data/lib/ruby_llm/agent.rb +326 -0
- data/lib/ruby_llm/aliases.json +47 -29
- data/lib/ruby_llm/chat.rb +27 -3
- data/lib/ruby_llm/configuration.rb +3 -0
- data/lib/ruby_llm/content.rb +6 -0
- data/lib/ruby_llm/models.json +19090 -5190
- data/lib/ruby_llm/models.rb +35 -6
- data/lib/ruby_llm/provider.rb +8 -0
- data/lib/ruby_llm/providers/azure/chat.rb +29 -0
- data/lib/ruby_llm/providers/azure/embeddings.rb +24 -0
- data/lib/ruby_llm/providers/azure/media.rb +45 -0
- data/lib/ruby_llm/providers/azure/models.rb +14 -0
- data/lib/ruby_llm/providers/azure.rb +56 -0
- data/lib/ruby_llm/providers/bedrock/auth.rb +122 -0
- data/lib/ruby_llm/providers/bedrock/chat.rb +296 -64
- data/lib/ruby_llm/providers/bedrock/media.rb +62 -33
- data/lib/ruby_llm/providers/bedrock/models.rb +88 -65
- data/lib/ruby_llm/providers/bedrock/streaming.rb +305 -8
- data/lib/ruby_llm/providers/bedrock.rb +61 -52
- data/lib/ruby_llm/version.rb +1 -1
- data/lib/ruby_llm.rb +4 -0
- data/lib/tasks/models.rake +10 -5
- data/lib/tasks/vcr.rake +32 -0
- metadata +17 -17
- data/lib/ruby_llm/providers/bedrock/capabilities.rb +0 -167
- data/lib/ruby_llm/providers/bedrock/signing.rb +0 -831
- data/lib/ruby_llm/providers/bedrock/streaming/base.rb +0 -51
- data/lib/ruby_llm/providers/bedrock/streaming/content_extraction.rb +0 -128
- data/lib/ruby_llm/providers/bedrock/streaming/message_processing.rb +0 -67
- data/lib/ruby_llm/providers/bedrock/streaming/payload_processing.rb +0 -85
- data/lib/ruby_llm/providers/bedrock/streaming/prelude_handling.rb +0 -78
|
@@ -0,0 +1,326 @@
|
|
|
1
|
+
# frozen_string_literal: true

require 'erb'
require 'forwardable'
require 'pathname'

module RubyLLM
  # Base class for simple, class-configured agents.
  #
  # Subclasses declare their configuration with class-level DSL methods
  # (`model`, `tools`, `instructions`, `temperature`, ...). Each DSL method
  # doubles as a reader when called without arguments. Configuration is
  # copied (not shared) into subclasses via `inherited`, so children can
  # diverge from their parent without mutating it.
  class Agent
    extend Forwardable
    include Enumerable

    class << self
      # Copies all DSL configuration into the subclass. Mutable values
      # (hashes/arrays) are dup'ed so parent and child stay independent.
      def inherited(subclass)
        super
        subclass.instance_variable_set(:@chat_kwargs, (@chat_kwargs || {}).dup)
        subclass.instance_variable_set(:@tools, (@tools || []).dup)
        subclass.instance_variable_set(:@instructions, @instructions)
        subclass.instance_variable_set(:@temperature, @temperature)
        subclass.instance_variable_set(:@thinking, @thinking)
        subclass.instance_variable_set(:@params, (@params || {}).dup)
        subclass.instance_variable_set(:@headers, (@headers || {}).dup)
        subclass.instance_variable_set(:@schema, @schema)
        subclass.instance_variable_set(:@context, @context)
        subclass.instance_variable_set(:@chat_model, @chat_model)
        subclass.instance_variable_set(:@input_names, (@input_names || []).dup)
      end

      # Sets the model (and any extra chat options) used when building chats.
      # With no arguments, acts as a reader and returns the current kwargs —
      # consistent with the other DSL methods; previously a bare `model` call
      # destructively reset the configuration to an empty hash.
      def model(model_id = nil, **options)
        return chat_kwargs if model_id.nil? && options.empty?

        options[:model] = model_id unless model_id.nil?
        @chat_kwargs = options
      end

      # Declares the tools attached to every chat. Accepts a list of tools or
      # a block evaluated lazily against the runtime context. Reader with no args.
      def tools(*tools, &block)
        return @tools || [] if tools.empty? && !block_given?

        @tools = block_given? ? block : tools.flatten
      end

      # Declares the system instructions: a literal string, a lazy block, or —
      # by default — the `instructions` ERB prompt template with the given locals.
      def instructions(text = nil, **prompt_locals, &block)
        if text.nil? && prompt_locals.empty? && !block_given?
          @instructions ||= { prompt: 'instructions', locals: {} }
          return @instructions
        end

        @instructions = block || text || { prompt: 'instructions', locals: prompt_locals }
      end

      # Sets (or reads, with no argument) the sampling temperature.
      def temperature(value = nil)
        return @temperature if value.nil?

        @temperature = value
      end

      # Sets (or reads, with no arguments) the thinking configuration.
      def thinking(effort: nil, budget: nil)
        return @thinking if effort.nil? && budget.nil?

        @thinking = { effort: effort, budget: budget }
      end

      # Extra provider params: literal hash or lazy block. Reader with no args.
      def params(**params, &block)
        return @params || {} if params.empty? && !block_given?

        @params = block_given? ? block : params
      end

      # Extra HTTP headers: literal hash or lazy block. Reader with no args.
      def headers(**headers, &block)
        return @headers || {} if headers.empty? && !block_given?

        @headers = block_given? ? block : headers
      end

      # Structured-output schema: literal value or lazy block. Reader with no args.
      def schema(value = nil, &block)
        return @schema if value.nil? && !block_given?

        @schema = block_given? ? block : value
      end

      # RubyLLM context the chats should run in. Reader with no argument.
      def context(value = nil)
        return @context if value.nil?

        @context = value
      end

      # ActiveRecord-backed chat model (class or class name string) used by
      # `create`/`create!`/`find`. Changing it invalidates the resolved cache.
      def chat_model(value = nil)
        return @chat_model if value.nil?

        @chat_model = value
        remove_instance_variable(:@resolved_chat_model) if instance_variable_defined?(:@resolved_chat_model)
      end

      # Declares which keyword arguments are agent inputs (exposed to prompts
      # and runtime blocks) rather than chat options. Reader with no args.
      def inputs(*names)
        return @input_names || [] if names.empty?

        @input_names = names.flatten.map(&:to_sym)
      end

      # Options forwarded to RubyLLM.chat / the chat model on creation.
      def chat_kwargs
        @chat_kwargs || {}
      end

      # Builds a plain (non-persisted) RubyLLM chat configured for this agent.
      def chat(**kwargs)
        input_values, chat_options = partition_inputs(kwargs)
        chat = RubyLLM.chat(**chat_kwargs, **chat_options)
        apply_configuration(chat, input_values:, persist_instructions: true)
        chat
      end

      # Creates a persisted chat record (requires `chat_model`).
      def create(**kwargs)
        with_rails_chat_record(:create, **kwargs)
      end

      # Like `create`, but raises on validation failure.
      def create!(**kwargs)
        with_rails_chat_record(:create!, **kwargs)
      end

      # Loads an existing chat record and re-applies the agent configuration
      # without persisting a fresh copy of the instructions.
      def find(id, **kwargs)
        raise ArgumentError, 'chat_model must be configured to use find' unless resolved_chat_model

        input_values, = partition_inputs(kwargs)
        record = resolved_chat_model.find(id)
        apply_configuration(record, input_values:, persist_instructions: false)
        record
      end

      # Re-renders the agent's instructions and persists them onto an existing
      # chat record (accepts a record or an id).
      def sync_instructions!(chat_or_id, **kwargs)
        raise ArgumentError, 'chat_model must be configured to use sync_instructions!' unless resolved_chat_model

        input_values, = partition_inputs(kwargs)
        record = chat_or_id.is_a?(resolved_chat_model) ? chat_or_id : resolved_chat_model.find(chat_or_id)
        runtime = runtime_context(chat: record, inputs: input_values)
        instructions_value = resolved_instructions_value(record, runtime, inputs: input_values)
        return record if instructions_value.nil?

        record.with_instructions(instructions_value)
        record
      end

      # Renders the named ERB prompt template, or nil when the file is absent.
      # Locals may be Procs, which are evaluated in the runtime context.
      def render_prompt(name, chat:, inputs:, locals:)
        path = prompt_path_for(name)
        return nil unless File.exist?(path)

        resolved_locals = resolve_prompt_locals(locals, runtime: runtime_context(chat:, inputs:), chat:, inputs:)
        ERB.new(File.read(path)).result_with_hash(resolved_locals)
      end

      private

      # Shared implementation for create/create!.
      def with_rails_chat_record(method_name, **kwargs)
        raise ArgumentError, 'chat_model must be configured to use create/create!' unless resolved_chat_model

        input_values, chat_options = partition_inputs(kwargs)
        record = resolved_chat_model.public_send(method_name, **chat_kwargs, **chat_options)
        apply_configuration(record, input_values:, persist_instructions: true) if record
        record
      end

      # Applies every configured DSL setting onto a chat (plain or persisted).
      def apply_configuration(chat_object, input_values:, persist_instructions:)
        runtime = runtime_context(chat: chat_object, inputs: input_values)
        llm_chat = llm_chat_for(chat_object)

        apply_context(llm_chat)
        apply_instructions(chat_object, runtime, inputs: input_values, persist: persist_instructions)
        apply_tools(llm_chat, runtime)
        apply_temperature(llm_chat)
        apply_thinking(llm_chat)
        apply_params(llm_chat, runtime)
        apply_headers(llm_chat, runtime)
        apply_schema(llm_chat, runtime)
      end

      def apply_context(llm_chat)
        llm_chat.with_context(context) if context
      end

      # Instructions go to the record when persisting, to the raw LLM chat otherwise.
      def apply_instructions(chat_object, runtime, inputs:, persist:)
        value = resolved_instructions_value(chat_object, runtime, inputs:)
        return if value.nil?

        instruction_target(chat_object, persist:).with_instructions(value)
      end

      def apply_tools(llm_chat, runtime)
        tools_to_apply = Array(evaluate(tools, runtime))
        llm_chat.with_tools(*tools_to_apply) unless tools_to_apply.empty?
      end

      def apply_temperature(llm_chat)
        llm_chat.with_temperature(temperature) unless temperature.nil?
      end

      def apply_thinking(llm_chat)
        llm_chat.with_thinking(**thinking) if thinking
      end

      def apply_params(llm_chat, runtime)
        value = evaluate(params, runtime)
        llm_chat.with_params(**value) if value && !value.empty?
      end

      def apply_headers(llm_chat, runtime)
        value = evaluate(headers, runtime)
        llm_chat.with_headers(**value) if value && !value.empty?
      end

      def apply_schema(llm_chat, runtime)
        value = evaluate(schema, runtime)
        llm_chat.with_schema(value) if value
      end

      # Unwraps an ActiveRecord chat into the underlying RubyLLM chat.
      def llm_chat_for(chat_object)
        chat_object.respond_to?(:to_llm) ? chat_object.to_llm : chat_object
      end

      # Lazy DSL values are Procs evaluated against the runtime context.
      def evaluate(value, runtime)
        value.is_a?(Proc) ? runtime.instance_exec(&value) : value
      end

      # Resolves the instructions DSL value: plain string, Proc result, or a
      # rendered prompt template when configured as { prompt:, locals: }.
      def resolved_instructions_value(chat_object, runtime, inputs:)
        value = evaluate(instructions, runtime)
        return value unless prompt_instruction?(value)

        runtime.prompt(
          value[:prompt],
          **resolve_prompt_locals(value[:locals] || {}, runtime:, chat: chat_object, inputs:)
        )
      end

      def prompt_instruction?(value)
        value.is_a?(Hash) && value[:prompt]
      end

      def instruction_target(chat_object, persist:)
        if persist || !chat_object.respond_to?(:to_llm)
          chat_object
        else
          chat_object.to_llm
        end
      end

      # Prompt locals always include `chat` and the declared inputs; explicit
      # locals (possibly Procs) win over those defaults.
      def resolve_prompt_locals(locals, runtime:, chat:, inputs:)
        base = { chat: chat }.merge(inputs)
        evaluated = locals.each_with_object({}) do |(key, value), acc|
          acc[key.to_sym] = value.is_a?(Proc) ? runtime.instance_exec(&value) : value
        end
        base.merge(evaluated)
      end

      # Splits kwargs into declared agent inputs vs. chat options.
      def partition_inputs(kwargs)
        input_values = {}
        chat_options = {}

        kwargs.each do |key, value|
          symbolized_key = key.to_sym
          if inputs.include?(symbolized_key)
            input_values[symbolized_key] = value
          else
            chat_options[symbolized_key] = value
          end
        end

        [input_values, chat_options]
      end

      # Builds the object lazy blocks and prompt locals are evaluated against.
      # Exposes `chat`, `prompt(name, **locals)`, and one reader per input.
      def runtime_context(chat:, inputs:)
        agent_class = self
        Object.new.tap do |runtime|
          runtime.define_singleton_method(:chat) { chat }
          runtime.define_singleton_method(:prompt) do |name, **locals|
            agent_class.render_prompt(name, chat:, inputs:, locals:)
          end

          inputs.each do |name, value|
            runtime.define_singleton_method(name) { value }
          end
        end
      end

      def prompt_path_for(name)
        filename = name.to_s
        filename += '.txt.erb' unless filename.end_with?('.txt.erb')
        prompt_root.join(prompt_agent_path, filename)
      end

      # Converts the class name to a snake_cased directory path
      # (e.g. My::CoolAgent -> my/cool_agent); anonymous classes use 'agent'.
      def prompt_agent_path
        class_name = name || 'agent'
        class_name.gsub('::', '/')
                  .gsub(/([A-Z]+)([A-Z][a-z])/, '\1_\2')
                  .gsub(/([a-z\d])([A-Z])/, '\1_\2')
                  .tr('-', '_')
                  .downcase
      end

      # Rails apps read prompts from Rails.root/app/prompts; otherwise cwd.
      def prompt_root
        if defined?(Rails) && Rails.respond_to?(:root) && Rails.root
          Rails.root.join('app/prompts')
        else
          Pathname.new(Dir.pwd).join('app/prompts')
        end
      end

      # Memoizes the chat model, constantizing a String lazily so the agent
      # can be defined before the model class is loaded.
      def resolved_chat_model
        return @resolved_chat_model if defined?(@resolved_chat_model)

        @resolved_chat_model = case @chat_model
                               when String then Object.const_get(@chat_model)
                               else @chat_model
                               end
      end
    end

    # Wraps a configured chat instance. Extra kwargs are split into agent
    # inputs vs. chat options exactly like the class-level `chat` method.
    def initialize(chat: nil, inputs: nil, persist_instructions: true, **kwargs)
      input_values, chat_options = self.class.send(:partition_inputs, kwargs)
      @chat = chat || RubyLLM.chat(**self.class.chat_kwargs, **chat_options)
      self.class.send(:apply_configuration, @chat, input_values: input_values.merge(inputs || {}),
                                                   persist_instructions:)
    end

    attr_reader :chat

    # Instances behave like their underlying chat.
    def_delegators :chat, :model, :messages, :tools, :params, :headers, :schema, :ask, :say, :with_tool, :with_tools,
                   :with_model, :with_temperature, :with_thinking, :with_context, :with_params, :with_headers,
                   :with_schema, :on_new_message, :on_end_message, :on_tool_call, :on_tool_result, :each, :complete,
                   :add_message, :reset_messages!
  end
end
|
data/lib/ruby_llm/aliases.json
CHANGED
|
@@ -40,7 +40,8 @@
|
|
|
40
40
|
"claude-haiku-4-5": {
|
|
41
41
|
"anthropic": "claude-haiku-4-5-20251001",
|
|
42
42
|
"openrouter": "anthropic/claude-haiku-4.5",
|
|
43
|
-
"bedrock": "anthropic.claude-haiku-4-5-20251001-v1:0"
|
|
43
|
+
"bedrock": "anthropic.claude-haiku-4-5-20251001-v1:0",
|
|
44
|
+
"azure": "claude-haiku-4-5-20251001"
|
|
44
45
|
},
|
|
45
46
|
"claude-opus-4": {
|
|
46
47
|
"anthropic": "claude-opus-4-20250514",
|
|
@@ -53,12 +54,19 @@
|
|
|
53
54
|
"claude-opus-4-1": {
|
|
54
55
|
"anthropic": "claude-opus-4-1-20250805",
|
|
55
56
|
"openrouter": "anthropic/claude-opus-4.1",
|
|
56
|
-
"bedrock": "anthropic.claude-opus-4-1-20250805-v1:0"
|
|
57
|
+
"bedrock": "anthropic.claude-opus-4-1-20250805-v1:0",
|
|
58
|
+
"azure": "claude-opus-4-1-20250805"
|
|
57
59
|
},
|
|
58
60
|
"claude-opus-4-5": {
|
|
59
61
|
"anthropic": "claude-opus-4-5-20251101",
|
|
60
62
|
"openrouter": "anthropic/claude-opus-4.5",
|
|
61
|
-
"bedrock": "anthropic.claude-opus-4-5-20251101-v1:0"
|
|
63
|
+
"bedrock": "anthropic.claude-opus-4-5-20251101-v1:0",
|
|
64
|
+
"azure": "claude-opus-4-5-20251101"
|
|
65
|
+
},
|
|
66
|
+
"claude-opus-4-6": {
|
|
67
|
+
"anthropic": "claude-opus-4-6",
|
|
68
|
+
"openrouter": "anthropic/claude-opus-4.6",
|
|
69
|
+
"bedrock": "anthropic.claude-opus-4-6-v1"
|
|
62
70
|
},
|
|
63
71
|
"claude-sonnet-4": {
|
|
64
72
|
"anthropic": "claude-sonnet-4-20250514",
|
|
@@ -71,7 +79,8 @@
|
|
|
71
79
|
"claude-sonnet-4-5": {
|
|
72
80
|
"anthropic": "claude-sonnet-4-5-20250929",
|
|
73
81
|
"openrouter": "anthropic/claude-sonnet-4.5",
|
|
74
|
-
"bedrock": "anthropic.claude-sonnet-4-5-20250929-v1:0"
|
|
82
|
+
"bedrock": "anthropic.claude-sonnet-4-5-20250929-v1:0",
|
|
83
|
+
"azure": "claude-sonnet-4-5-20250929"
|
|
75
84
|
},
|
|
76
85
|
"deepseek-chat": {
|
|
77
86
|
"deepseek": "deepseek-chat",
|
|
@@ -98,10 +107,6 @@
|
|
|
98
107
|
"openrouter": "google/gemini-2.0-flash-001",
|
|
99
108
|
"vertexai": "gemini-2.0-flash-001"
|
|
100
109
|
},
|
|
101
|
-
"gemini-2.0-flash-exp": {
|
|
102
|
-
"gemini": "gemini-2.0-flash-exp",
|
|
103
|
-
"vertexai": "gemini-2.0-flash-exp"
|
|
104
|
-
},
|
|
105
110
|
"gemini-2.0-flash-lite": {
|
|
106
111
|
"gemini": "gemini-2.0-flash-lite",
|
|
107
112
|
"vertexai": "gemini-2.0-flash-lite"
|
|
@@ -222,7 +227,8 @@
|
|
|
222
227
|
},
|
|
223
228
|
"gpt-4": {
|
|
224
229
|
"openai": "gpt-4",
|
|
225
|
-
"openrouter": "openai/gpt-4"
|
|
230
|
+
"openrouter": "openai/gpt-4",
|
|
231
|
+
"azure": "gpt-4"
|
|
226
232
|
},
|
|
227
233
|
"gpt-4-1106-preview": {
|
|
228
234
|
"openai": "gpt-4-1106-preview",
|
|
@@ -238,31 +244,38 @@
|
|
|
238
244
|
},
|
|
239
245
|
"gpt-4.1": {
|
|
240
246
|
"openai": "gpt-4.1",
|
|
241
|
-
"openrouter": "openai/gpt-4.1"
|
|
247
|
+
"openrouter": "openai/gpt-4.1",
|
|
248
|
+
"azure": "gpt-4.1"
|
|
242
249
|
},
|
|
243
250
|
"gpt-4.1-mini": {
|
|
244
251
|
"openai": "gpt-4.1-mini",
|
|
245
|
-
"openrouter": "openai/gpt-4.1-mini"
|
|
252
|
+
"openrouter": "openai/gpt-4.1-mini",
|
|
253
|
+
"azure": "gpt-4.1-mini"
|
|
246
254
|
},
|
|
247
255
|
"gpt-4.1-nano": {
|
|
248
256
|
"openai": "gpt-4.1-nano",
|
|
249
|
-
"openrouter": "openai/gpt-4.1-nano"
|
|
257
|
+
"openrouter": "openai/gpt-4.1-nano",
|
|
258
|
+
"azure": "gpt-4.1-nano"
|
|
250
259
|
},
|
|
251
260
|
"gpt-4o": {
|
|
252
261
|
"openai": "gpt-4o",
|
|
253
|
-
"openrouter": "openai/gpt-4o"
|
|
262
|
+
"openrouter": "openai/gpt-4o",
|
|
263
|
+
"azure": "gpt-4o"
|
|
254
264
|
},
|
|
255
265
|
"gpt-4o-2024-05-13": {
|
|
256
266
|
"openai": "gpt-4o-2024-05-13",
|
|
257
|
-
"openrouter": "openai/gpt-4o-2024-05-13"
|
|
267
|
+
"openrouter": "openai/gpt-4o-2024-05-13",
|
|
268
|
+
"azure": "gpt-4o-2024-05-13"
|
|
258
269
|
},
|
|
259
270
|
"gpt-4o-2024-08-06": {
|
|
260
271
|
"openai": "gpt-4o-2024-08-06",
|
|
261
|
-
"openrouter": "openai/gpt-4o-2024-08-06"
|
|
272
|
+
"openrouter": "openai/gpt-4o-2024-08-06",
|
|
273
|
+
"azure": "gpt-4o-2024-08-06"
|
|
262
274
|
},
|
|
263
275
|
"gpt-4o-2024-11-20": {
|
|
264
276
|
"openai": "gpt-4o-2024-11-20",
|
|
265
|
-
"openrouter": "openai/gpt-4o-2024-11-20"
|
|
277
|
+
"openrouter": "openai/gpt-4o-2024-11-20",
|
|
278
|
+
"azure": "gpt-4o-2024-11-20"
|
|
266
279
|
},
|
|
267
280
|
"gpt-4o-audio-preview": {
|
|
268
281
|
"openai": "gpt-4o-audio-preview",
|
|
@@ -270,11 +283,13 @@
|
|
|
270
283
|
},
|
|
271
284
|
"gpt-4o-mini": {
|
|
272
285
|
"openai": "gpt-4o-mini",
|
|
273
|
-
"openrouter": "openai/gpt-4o-mini"
|
|
286
|
+
"openrouter": "openai/gpt-4o-mini",
|
|
287
|
+
"azure": "gpt-4o-mini"
|
|
274
288
|
},
|
|
275
289
|
"gpt-4o-mini-2024-07-18": {
|
|
276
290
|
"openai": "gpt-4o-mini-2024-07-18",
|
|
277
|
-
"openrouter": "openai/gpt-4o-mini-2024-07-18"
|
|
291
|
+
"openrouter": "openai/gpt-4o-mini-2024-07-18",
|
|
292
|
+
"azure": "gpt-4o-mini-2024-07-18"
|
|
278
293
|
},
|
|
279
294
|
"gpt-4o-mini-search-preview": {
|
|
280
295
|
"openai": "gpt-4o-mini-search-preview",
|
|
@@ -324,10 +339,6 @@
|
|
|
324
339
|
"openai": "gpt-5.2",
|
|
325
340
|
"openrouter": "openai/gpt-5.2"
|
|
326
341
|
},
|
|
327
|
-
"gpt-5.2-chat": {
|
|
328
|
-
"openai": "gpt-5.2-chat-latest",
|
|
329
|
-
"openrouter": "openai/gpt-5.2-chat-latest"
|
|
330
|
-
},
|
|
331
342
|
"gpt-5.2-codex": {
|
|
332
343
|
"openai": "gpt-5.2-codex",
|
|
333
344
|
"openrouter": "openai/gpt-5.2-codex"
|
|
@@ -336,13 +347,22 @@
|
|
|
336
347
|
"openai": "gpt-5.2-pro",
|
|
337
348
|
"openrouter": "openai/gpt-5.2-pro"
|
|
338
349
|
},
|
|
350
|
+
"gpt-audio": {
|
|
351
|
+
"openai": "gpt-audio",
|
|
352
|
+
"openrouter": "openai/gpt-audio"
|
|
353
|
+
},
|
|
354
|
+
"gpt-audio-mini": {
|
|
355
|
+
"openai": "gpt-audio-mini",
|
|
356
|
+
"openrouter": "openai/gpt-audio-mini"
|
|
357
|
+
},
|
|
339
358
|
"o1": {
|
|
340
359
|
"openai": "o1",
|
|
341
360
|
"openrouter": "openai/o1"
|
|
342
361
|
},
|
|
343
362
|
"o1-pro": {
|
|
344
363
|
"openai": "o1-pro",
|
|
345
|
-
"openrouter": "openai/o1-pro"
|
|
364
|
+
"openrouter": "openai/o1-pro",
|
|
365
|
+
"azure": "o1-pro"
|
|
346
366
|
},
|
|
347
367
|
"o3": {
|
|
348
368
|
"openai": "o3",
|
|
@@ -354,7 +374,8 @@
|
|
|
354
374
|
},
|
|
355
375
|
"o3-mini": {
|
|
356
376
|
"openai": "o3-mini",
|
|
357
|
-
"openrouter": "openai/o3-mini"
|
|
377
|
+
"openrouter": "openai/o3-mini",
|
|
378
|
+
"azure": "o3-mini"
|
|
358
379
|
},
|
|
359
380
|
"o3-pro": {
|
|
360
381
|
"openai": "o3-pro",
|
|
@@ -362,14 +383,11 @@
|
|
|
362
383
|
},
|
|
363
384
|
"o4-mini": {
|
|
364
385
|
"openai": "o4-mini",
|
|
365
|
-
"openrouter": "openai/o4-mini"
|
|
386
|
+
"openrouter": "openai/o4-mini",
|
|
387
|
+
"azure": "o4-mini"
|
|
366
388
|
},
|
|
367
389
|
"o4-mini-deep-research": {
|
|
368
390
|
"openai": "o4-mini-deep-research",
|
|
369
391
|
"openrouter": "openai/o4-mini-deep-research"
|
|
370
|
-
},
|
|
371
|
-
"text-embedding-004": {
|
|
372
|
-
"gemini": "text-embedding-004",
|
|
373
|
-
"vertexai": "text-embedding-004"
|
|
374
392
|
}
|
|
375
393
|
}
|
data/lib/ruby_llm/chat.rb
CHANGED
|
@@ -38,10 +38,15 @@ module RubyLLM
|
|
|
38
38
|
|
|
39
39
|
alias say ask
|
|
40
40
|
|
|
41
|
-
def with_instructions(instructions, replace:
|
|
42
|
-
|
|
41
|
+
# Sets the chat's system instructions.
#
# By default the existing system message (if any) is replaced; pass
# `append: true` to add a second system message instead. The deprecated
# `replace: false` keyword is honoured as an alias for `append: true`.
# Returns self for chaining.
def with_instructions(instructions, append: false, replace: nil)
  # Legacy callers may still pass replace: false to mean "append".
  append = true if replace == false && !append

  append ? append_system_instruction(instructions) : replace_system_instruction(instructions)

  self
end
|
|
47
52
|
|
|
@@ -222,5 +227,24 @@ module RubyLLM
|
|
|
222
227
|
def content_like?(object)
|
|
223
228
|
object.is_a?(Content) || object.is_a?(Content::Raw)
|
|
224
229
|
end
|
|
230
|
+
|
|
231
|
+
# Adds a new system message after any existing ones, keeping all system
# messages grouped at the front of the conversation.
def append_system_instruction(instructions)
  system_messages = @messages.select { |msg| msg.role == :system }
  other_messages = @messages.reject { |msg| msg.role == :system }

  system_messages.push(Message.new(role: :system, content: instructions))
  @messages = system_messages + other_messages
end
|
|
236
|
+
|
|
237
|
+
# Collapses all system messages down to a single one carrying the given
# instructions (reusing the first existing system message when present),
# placed ahead of the non-system messages.
def replace_system_instruction(instructions)
  other_messages = @messages.reject { |msg| msg.role == :system }
  existing = @messages.find { |msg| msg.role == :system }

  system_message =
    if existing
      existing.content = instructions
      existing
    else
      Message.new(role: :system, content: instructions)
    end

  @messages = [system_message] + other_messages
end
|
|
225
249
|
end
|
|
226
250
|
end
|
data/lib/ruby_llm/content.rb
CHANGED
|
@@ -35,10 +35,16 @@ module RubyLLM
|
|
|
35
35
|
|
|
36
36
|
def process_attachments_array_or_string(attachments)
|
|
37
37
|
Utils.to_safe_array(attachments).each do |file|
|
|
38
|
+
next if blank_attachment_entry?(file)
|
|
39
|
+
|
|
38
40
|
add_attachment(file)
|
|
39
41
|
end
|
|
40
42
|
end
|
|
41
43
|
|
|
44
|
+
# True for entries that carry no attachment data: nil, or a string that is
# empty or whitespace-only. Non-string objects are never considered blank.
def blank_attachment_entry?(file)
  return true if file.nil?

  file.is_a?(String) && file.strip.empty?
end
|
|
47
|
+
|
|
42
48
|
def process_attachments(attachments)
|
|
43
49
|
if attachments.is_a?(Hash)
|
|
44
50
|
attachments.each_value { |attachment| process_attachments_array_or_string(attachment) }
|