ruby_llm 1.2.0 → 1.3.0rc1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. checksums.yaml +4 -4
  2. data/README.md +80 -133
  3. data/lib/ruby_llm/active_record/acts_as.rb +212 -33
  4. data/lib/ruby_llm/aliases.json +48 -6
  5. data/lib/ruby_llm/attachments/audio.rb +12 -0
  6. data/lib/ruby_llm/attachments/image.rb +9 -0
  7. data/lib/ruby_llm/attachments/pdf.rb +9 -0
  8. data/lib/ruby_llm/attachments.rb +78 -0
  9. data/lib/ruby_llm/chat.rb +22 -19
  10. data/lib/ruby_llm/configuration.rb +30 -1
  11. data/lib/ruby_llm/connection.rb +95 -0
  12. data/lib/ruby_llm/content.rb +51 -72
  13. data/lib/ruby_llm/context.rb +30 -0
  14. data/lib/ruby_llm/embedding.rb +13 -5
  15. data/lib/ruby_llm/error.rb +1 -1
  16. data/lib/ruby_llm/image.rb +13 -5
  17. data/lib/ruby_llm/message.rb +12 -4
  18. data/lib/ruby_llm/mime_types.rb +713 -0
  19. data/lib/ruby_llm/model_info.rb +208 -27
  20. data/lib/ruby_llm/models.json +25766 -2154
  21. data/lib/ruby_llm/models.rb +95 -14
  22. data/lib/ruby_llm/provider.rb +48 -90
  23. data/lib/ruby_llm/providers/anthropic/capabilities.rb +76 -13
  24. data/lib/ruby_llm/providers/anthropic/chat.rb +7 -14
  25. data/lib/ruby_llm/providers/anthropic/media.rb +44 -34
  26. data/lib/ruby_llm/providers/anthropic/models.rb +15 -15
  27. data/lib/ruby_llm/providers/anthropic/tools.rb +2 -2
  28. data/lib/ruby_llm/providers/anthropic.rb +3 -3
  29. data/lib/ruby_llm/providers/bedrock/capabilities.rb +61 -2
  30. data/lib/ruby_llm/providers/bedrock/chat.rb +30 -73
  31. data/lib/ruby_llm/providers/bedrock/media.rb +56 -0
  32. data/lib/ruby_llm/providers/bedrock/models.rb +50 -58
  33. data/lib/ruby_llm/providers/bedrock/streaming/base.rb +16 -0
  34. data/lib/ruby_llm/providers/bedrock.rb +14 -25
  35. data/lib/ruby_llm/providers/deepseek/capabilities.rb +35 -2
  36. data/lib/ruby_llm/providers/deepseek.rb +3 -3
  37. data/lib/ruby_llm/providers/gemini/capabilities.rb +84 -3
  38. data/lib/ruby_llm/providers/gemini/chat.rb +8 -37
  39. data/lib/ruby_llm/providers/gemini/embeddings.rb +18 -34
  40. data/lib/ruby_llm/providers/gemini/images.rb +2 -2
  41. data/lib/ruby_llm/providers/gemini/media.rb +39 -110
  42. data/lib/ruby_llm/providers/gemini/models.rb +16 -22
  43. data/lib/ruby_llm/providers/gemini/tools.rb +1 -1
  44. data/lib/ruby_llm/providers/gemini.rb +3 -3
  45. data/lib/ruby_llm/providers/ollama/chat.rb +28 -0
  46. data/lib/ruby_llm/providers/ollama/media.rb +44 -0
  47. data/lib/ruby_llm/providers/ollama.rb +34 -0
  48. data/lib/ruby_llm/providers/openai/capabilities.rb +78 -3
  49. data/lib/ruby_llm/providers/openai/chat.rb +6 -4
  50. data/lib/ruby_llm/providers/openai/embeddings.rb +8 -12
  51. data/lib/ruby_llm/providers/openai/media.rb +38 -21
  52. data/lib/ruby_llm/providers/openai/models.rb +16 -17
  53. data/lib/ruby_llm/providers/openai/tools.rb +9 -5
  54. data/lib/ruby_llm/providers/openai.rb +7 -5
  55. data/lib/ruby_llm/providers/openrouter/models.rb +88 -0
  56. data/lib/ruby_llm/providers/openrouter.rb +31 -0
  57. data/lib/ruby_llm/stream_accumulator.rb +4 -4
  58. data/lib/ruby_llm/streaming.rb +3 -3
  59. data/lib/ruby_llm/utils.rb +22 -0
  60. data/lib/ruby_llm/version.rb +1 -1
  61. data/lib/ruby_llm.rb +15 -5
  62. data/lib/tasks/models.rake +69 -33
  63. data/lib/tasks/models_docs.rake +164 -121
  64. data/lib/tasks/vcr.rake +4 -2
  65. metadata +23 -14
  66. data/lib/tasks/browser_helper.rb +0 -97
  67. data/lib/tasks/capability_generator.rb +0 -123
  68. data/lib/tasks/capability_scraper.rb +0 -224
  69. data/lib/tasks/cli_helper.rb +0 -22
  70. data/lib/tasks/code_validator.rb +0 -29
  71. data/lib/tasks/model_updater.rb +0 -66
data/lib/ruby_llm/providers/openai/capabilities.rb CHANGED
@@ -3,7 +3,8 @@
 module RubyLLM
   module Providers
     module OpenAI
-      module Capabilities # rubocop:disable Metrics/ModuleLength,Style/Documentation
+      # Determines capabilities and pricing for OpenAI models
+      module Capabilities
         module_function
 
         MODEL_PATTERNS = {
@@ -40,7 +41,7 @@ module RubyLLM
           moderation: /^(?:omni|text)-moderation/
         }.freeze
 
-        def context_window_for(model_id) # rubocop:disable Metrics/CyclomaticComplexity,Metrics/MethodLength
+        def context_window_for(model_id)
           case model_family(model_id)
           when 'gpt41', 'gpt41_mini', 'gpt41_nano' then 1_047_576
           when 'chatgpt4o', 'gpt4_turbo', 'gpt4o', 'gpt4o_audio', 'gpt4o_mini',
@@ -56,7 +57,7 @@ module RubyLLM
           end
         end
 
-        def max_tokens_for(model_id) # rubocop:disable Metrics/CyclomaticComplexity,Metrics/MethodLength
+        def max_tokens_for(model_id)
           case model_family(model_id)
           when 'gpt41', 'gpt41_mini', 'gpt41_nano' then 32_768
           when 'chatgpt4o', 'gpt4o', 'gpt4o_mini', 'gpt4o_mini_search' then 16_384
@@ -222,6 +223,80 @@ module RubyLLM
             temperature
           end
         end
+
+        def modalities_for(model_id)
+          modalities = {
+            input: ['text'],
+            output: ['text']
+          }
+
+          # Vision support
+          modalities[:input] << 'image' if supports_vision?(model_id)
+
+          # Audio support
+          modalities[:input] << 'audio' if model_id.match?(/whisper|audio|tts|transcribe/)
+
+          # PDF support
+          modalities[:input] << 'pdf' if supports_vision?(model_id)
+
+          # Output modalities
+          modalities[:output] << 'audio' if model_id.match?(/tts|audio/)
+
+          modalities[:output] << 'image' if model_id.match?(/dall-e|image/)
+
+          modalities[:output] << 'embeddings' if model_id.match?(/embedding/)
+
+          modalities[:output] << 'moderation' if model_id.match?(/moderation/)
+
+          modalities
+        end
+
+        def capabilities_for(model_id) # rubocop:disable Metrics/PerceivedComplexity
+          capabilities = []
+
+          # Common capabilities
+          capabilities << 'streaming' unless model_id.match?(/moderation|embedding/)
+          capabilities << 'function_calling' if supports_functions?(model_id)
+          capabilities << 'structured_output' if supports_json_mode?(model_id)
+          capabilities << 'batch' if model_id.match?(/embedding|batch/)
+
+          # Advanced capabilities
+          capabilities << 'reasoning' if model_id.match?(/o1/)
+
+          if model_id.match?(/gpt-4-turbo|gpt-4o|claude/)
+            capabilities << 'image_generation' if model_id.match?(/vision/)
+            capabilities << 'speech_generation' if model_id.match?(/audio/)
+            capabilities << 'transcription' if model_id.match?(/audio/)
+          end
+
+          capabilities
+        end
+
+        def pricing_for(model_id)
+          standard_pricing = {
+            input_per_million: input_price_for(model_id),
+            output_per_million: output_price_for(model_id)
+          }
+
+          # Add cached pricing if available
+          if respond_to?(:cached_input_price_for)
+            cached_price = cached_input_price_for(model_id)
+            standard_pricing[:cached_input_per_million] = cached_price if cached_price
+          end
+
+          # Pricing structure
+          pricing = { text_tokens: { standard: standard_pricing } }
+
+          # Add batch pricing if applicable
+          if model_id.match?(/embedding|batch/)
+            pricing[:text_tokens][:batch] = {
+              input_per_million: standard_pricing[:input_per_million] * 0.5,
+              output_per_million: standard_pricing[:output_per_million] * 0.5
+            }
+          end
+
+          pricing
+        end
       end
     end
   end
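For reference, the new helpers return plain hashes and arrays. A hypothetical call; whether 'image' and 'pdf' appear depends on supports_vision? matching the id, and the price figures are left as placeholders rather than actual catalog values:

  RubyLLM::Providers::OpenAI::Capabilities.modalities_for('gpt-4o')
  # => { input: ['text', 'image', 'pdf'], output: ['text'] }   (assuming supports_vision? matches 'gpt-4o')
  RubyLLM::Providers::OpenAI::Capabilities.pricing_for('gpt-4o')
  # => { text_tokens: { standard: { input_per_million: ..., output_per_million: ... } } }
  # cached_input_per_million is added only when cached_input_price_for returns a value.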
data/lib/ruby_llm/providers/openai/chat.rb CHANGED
@@ -5,13 +5,13 @@ module RubyLLM
     module OpenAI
       # Chat methods of the OpenAI API integration
       module Chat
-        module_function
-
         def completion_url
           'chat/completions'
         end
 
-        def render_payload(messages, tools:, temperature:, model:, stream: false) # rubocop:disable Metrics/MethodLength
+        module_function
+
+        def render_payload(messages, tools:, temperature:, model:, stream: false)
           {
             model: model,
             messages: format_messages(messages),
@@ -26,10 +26,12 @@ module RubyLLM
           end
         end
 
-        def parse_completion_response(response) # rubocop:disable Metrics/MethodLength
+        def parse_completion_response(response)
           data = response.body
           return if data.empty?
 
+          raise Error.new(response, data.dig('error', 'message')) if data.dig('error', 'message')
+
           message_data = data.dig('choices', 0, 'message')
           return unless message_data
 
data/lib/ruby_llm/providers/openai/embeddings.rb CHANGED
@@ -7,31 +7,27 @@ module RubyLLM
       module Embeddings
         module_function
 
-        def embedding_url
+        def embedding_url(...)
           'embeddings'
         end
 
-        def render_embedding_payload(text, model:)
+        def render_embedding_payload(text, model:, dimensions:)
           {
             model: model,
-            input: text
-          }
+            input: text,
+            dimensions: dimensions
+          }.compact
         end
 
-        def parse_embedding_response(response)
+        def parse_embedding_response(response, model:)
           data = response.body
-          model_id = data['model']
           input_tokens = data.dig('usage', 'prompt_tokens') || 0
           vectors = data['data'].map { |d| d['embedding'] }
 
           # If we only got one embedding, return it as a single vector
-          vectors = vectors.first if vectors.size == 1
+          vectors in [vectors]
 
-          Embedding.new(
-            vectors: vectors,
-            model: model_id,
-            input_tokens: input_tokens
-          )
+          Embedding.new(vectors:, model:, input_tokens:)
         end
       end
     end
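The new dimensions: keyword is merged into the payload and .compact drops it when nil, so callers that do not request a custom dimension count send the same body as before. A minimal sketch; the model name and dimension value are illustrative:

  RubyLLM::Providers::OpenAI::Embeddings.render_embedding_payload(
    'Hello world', model: 'text-embedding-3-small', dimensions: 512
  )
  # => { model: 'text-embedding-3-small', input: 'Hello world', dimensions: 512 }
  # with dimensions: nil, .compact removes the key entirely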
data/lib/ruby_llm/providers/openai/media.rb CHANGED
@@ -7,44 +7,61 @@ module RubyLLM
      module Media
        module_function
 
-        def format_content(content) # rubocop:disable Metrics/MethodLength
-          return content unless content.is_a?(Array)
-
-          content.map do |part|
-            case part[:type]
-            when 'image'
-              format_image(part)
-            when 'input_audio'
-              format_audio(part)
-            else
-              part
+        def format_content(content)
+          return content unless content.is_a?(Content)
+
+          parts = []
+          parts << format_text(content.text) if content.text
+
+          content.attachments.each do |attachment|
+            case attachment
+            when Attachments::Image
+              parts << format_image(attachment)
+            when Attachments::PDF
+              parts << format_pdf(attachment)
+            when Attachments::Audio
+              parts << format_audio(attachment)
            end
          end
+
+          parts
        end
 
-        def format_image(part)
+        def format_image(image)
          {
            type: 'image_url',
            image_url: {
-              url: format_data_url(part[:source]),
+              url: image.url? ? image.source : "data:#{image.mime_type};base64,#{image.encoded}",
              detail: 'auto'
            }
          }
        end
 
-        def format_audio(part)
+        def format_pdf(pdf)
+          {
+            type: 'file',
+            file: {
+              filename: File.basename(pdf.source),
+              file_data: "data:#{pdf.mime_type};base64,#{pdf.encoded}"
+            }
+          }
+        end
+
+        def format_audio(audio)
          {
            type: 'input_audio',
-            input_audio: part[:input_audio]
+            input_audio: {
+              data: audio.encoded,
+              format: audio.format
+            }
          }
        end
 
-        def format_data_url(source)
-          if source[:type] == 'base64'
-            "data:#{source[:media_type]};base64,#{source[:data]}"
-          else
-            source[:url]
-          end
+        def format_text(text)
+          {
+            type: 'text',
+            text: text
+          }
        end
      end
    end
data/lib/ruby_llm/providers/openai/models.rb CHANGED
@@ -11,26 +11,25 @@ module RubyLLM
          'models'
        end
 
-        def parse_list_models_response(response, slug, capabilities) # rubocop:disable Metrics/AbcSize,Metrics/MethodLength
-          (response.body['data'] || []).map do |model|
+        def parse_list_models_response(response, slug, capabilities)
+          Array(response.body['data']).map do |model_data|
+            model_id = model_data['id']
+
            ModelInfo.new(
-              id: model['id'],
-              created_at: model['created'] ? Time.at(model['created']) : nil,
-              display_name: capabilities.format_display_name(model['id']),
+              id: model_id,
+              name: capabilities.format_display_name(model_id),
              provider: slug,
-              type: capabilities.model_type(model['id']),
-              family: capabilities.model_family(model['id']),
+              family: capabilities.model_family(model_id),
+              created_at: model_data['created'] ? Time.at(model_data['created']) : nil,
+              context_window: capabilities.context_window_for(model_id),
+              max_output_tokens: capabilities.max_tokens_for(model_id),
+              modalities: capabilities.modalities_for(model_id),
+              capabilities: capabilities.capabilities_for(model_id),
+              pricing: capabilities.pricing_for(model_id),
              metadata: {
-                object: model['object'],
-                owned_by: model['owned_by']
-              },
-              context_window: capabilities.context_window_for(model['id']),
-              max_tokens: capabilities.max_tokens_for(model['id']),
-              supports_vision: capabilities.supports_vision?(model['id']),
-              supports_functions: capabilities.supports_functions?(model['id']),
-              supports_json_mode: capabilities.supports_json_mode?(model['id']),
-              input_price_per_million: capabilities.input_price_for(model['id']),
-              output_price_per_million: capabilities.output_price_for(model['id'])
+                object: model_data['object'],
+                owned_by: model_data['owned_by']
+              }
            )
          end
        end
data/lib/ruby_llm/providers/openai/tools.rb CHANGED
@@ -7,7 +7,7 @@ module RubyLLM
      module Tools
        module_function
 
-        def tool_for(tool) # rubocop:disable Metrics/MethodLength
+        def tool_for(tool)
          {
            type: 'function',
            function: {
@@ -29,7 +29,7 @@ module RubyLLM
          }.compact
        end
 
-        def format_tool_calls(tool_calls) # rubocop:disable Metrics/MethodLength
+        def format_tool_calls(tool_calls)
          return nil unless tool_calls&.any?
 
          tool_calls.map do |_, tc|
@@ -44,7 +44,7 @@ module RubyLLM
          end
        end
 
-        def parse_tool_calls(tool_calls, parse_arguments: true) # rubocop:disable Metrics/MethodLength
+        def parse_tool_calls(tool_calls, parse_arguments: true)
          return nil unless tool_calls&.any?
 
          tool_calls.to_h do |tc|
@@ -54,8 +54,12 @@ module RubyLLM
            id: tc['id'],
            name: tc.dig('function', 'name'),
            arguments: if parse_arguments
-                         JSON.parse(tc.dig('function',
-                                           'arguments'))
+                         if tc.dig('function', 'arguments').empty?
+                           {}
+                         else
+                           JSON.parse(tc.dig('function',
+                                             'arguments'))
+                         end
                       else
                         tc.dig('function', 'arguments')
                       end
data/lib/ruby_llm/providers/openai.rb CHANGED
@@ -28,14 +28,16 @@ module RubyLLM
 
      module_function
 
-      def api_base
-        RubyLLM.config.openai_api_base || 'https://api.openai.com/v1'
+      def api_base(config)
+        config.openai_api_base || 'https://api.openai.com/v1'
      end
 
-      def headers
+      def headers(config)
        {
-          'Authorization' => "Bearer #{RubyLLM.config.openai_api_key}"
-        }
+          'Authorization' => "Bearer #{config.openai_api_key}",
+          'OpenAI-Organization' => config.openai_organization_id,
+          'OpenAI-Project' => config.openai_project_id
+        }.compact
      end
 
      def capabilities
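api_base and headers now read from the config object passed in rather than the global RubyLLM.config, and the organization/project headers are only sent when set (.compact). A configuration sketch, assuming the matching accessors are among the additions to data/lib/ruby_llm/configuration.rb in this release; the ENV variable names are illustrative:

  RubyLLM.configure do |config|
    config.openai_api_key         = ENV['OPENAI_API_KEY']
    config.openai_organization_id = ENV['OPENAI_ORG_ID']     # optional; header omitted when nil
    config.openai_project_id     = ENV['OPENAI_PROJECT_ID']  # optional; header omitted when nil
  end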
data/lib/ruby_llm/providers/openrouter/models.rb ADDED
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  module Providers
+    module OpenRouter
+      # Models methods of the OpenRouter API integration
+      module Models
+        module_function
+
+        def models_url
+          'models'
+        end
+
+        def parse_list_models_response(response, slug, _capabilities)
+          Array(response.body['data']).map do |model_data| # rubocop:disable Metrics/BlockLength
+            # Extract modalities directly from architecture
+            modalities = {
+              input: Array(model_data.dig('architecture', 'input_modalities')),
+              output: Array(model_data.dig('architecture', 'output_modalities'))
+            }
+
+            # Construct pricing from API data, only adding non-zero values
+            pricing = { text_tokens: { standard: {} } }
+
+            pricing_types = {
+              prompt: :input_per_million,
+              completion: :output_per_million,
+              input_cache_read: :cached_input_per_million,
+              internal_reasoning: :reasoning_output_per_million
+            }
+
+            pricing_types.each do |source_key, target_key|
+              value = model_data.dig('pricing', source_key.to_s).to_f
+              pricing[:text_tokens][:standard][target_key] = value * 1_000_000 if value.positive?
+            end
+
+            # Convert OpenRouter's supported parameters to our capability format
+            capabilities = supported_parameters_to_capabilities(model_data['supported_parameters'])
+
+            ModelInfo.new(
+              id: model_data['id'],
+              name: model_data['name'],
+              provider: slug,
+              family: model_data['id'].split('/').first,
+              created_at: model_data['created'] ? Time.at(model_data['created']) : nil,
+              context_window: model_data['context_length'],
+              max_output_tokens: model_data.dig('top_provider', 'max_completion_tokens'),
+              modalities: modalities,
+              capabilities: capabilities,
+              pricing: pricing,
+              metadata: {
+                description: model_data['description'],
+                architecture: model_data['architecture'],
+                top_provider: model_data['top_provider'],
+                per_request_limits: model_data['per_request_limits'],
+                supported_parameters: model_data['supported_parameters']
+              }
+            )
+          end
+        end
+
+        def supported_parameters_to_capabilities(params)
+          return [] unless params
+
+          capabilities = []
+
+          # Standard capabilities mapping
+          capabilities << 'streaming' # Assume all OpenRouter models support streaming
+
+          # Function calling capability
+          capabilities << 'function_calling' if params.include?('tools') || params.include?('tool_choice')
+
+          # Structured output capability
+          capabilities << 'structured_output' if params.include?('response_format')
+
+          # Batch capability
+          capabilities << 'batch' if params.include?('batch')
+
+          # Additional mappings based on params
+          # Handles advanced model capabilities that might be inferred from supported params
+          capabilities << 'predicted_outputs' if params.include?('logit_bias') && params.include?('top_k')
+
+          capabilities
+        end
+      end
+    end
+  end
+end
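OpenRouter reports prices per token as strings, so the loop above converts each positive value into a per-million-token figure and drops zero prices. A worked example with an illustrative API value:

  value = '0.000003'.to_f   # per-token price string as returned by the API
  value * 1_000_000         # ≈ 3.0 per million tokens
  # A '0' price fails value.positive? and is omitted from the pricing hash.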
data/lib/ruby_llm/providers/openrouter.rb ADDED
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  module Providers
+    # OpenRouter API integration.
+    module OpenRouter
+      extend OpenAI
+      extend OpenRouter::Models
+
+      module_function
+
+      def api_base(_config)
+        'https://openrouter.ai/api/v1'
+      end
+
+      def headers(config)
+        {
+          'Authorization' => "Bearer #{config.openrouter_api_key}"
+        }
+      end
+
+      def slug
+        'openrouter'
+      end
+
+      def configuration_requirements
+        %i[openrouter_api_key]
+      end
+    end
+  end
+end
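Together with the RubyLLM::Provider.register :openrouter line added in data/lib/ruby_llm.rb below, this makes OpenRouter selectable like any other provider. A usage sketch, assuming the existing chat API; the model id is illustrative:

  RubyLLM.configure do |config|
    config.openrouter_api_key = ENV['OPENROUTER_API_KEY']
  end

  chat = RubyLLM.chat(model: 'anthropic/claude-3.5-sonnet', provider: :openrouter)
  chat.ask('Hello!')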
data/lib/ruby_llm/stream_accumulator.rb CHANGED
@@ -42,7 +42,7 @@ module RubyLLM
 
    private
 
-    def tool_calls_from_stream # rubocop:disable Metrics/MethodLength
+    def tool_calls_from_stream
      tool_calls.transform_values do |tc|
        arguments = if tc.arguments.is_a?(String) && !tc.arguments.empty?
                      JSON.parse(tc.arguments)
@@ -60,7 +60,7 @@ module RubyLLM
      end
    end
 
-    def accumulate_tool_calls(new_tool_calls) # rubocop:disable Metrics/MethodLength,Metrics/AbcSize
+    def accumulate_tool_calls(new_tool_calls)
      RubyLLM.logger.debug "Accumulating tool calls: #{new_tool_calls}"
      new_tool_calls.each_value do |tool_call|
        if tool_call.id
@@ -89,8 +89,8 @@ module RubyLLM
    end
 
    def count_tokens(chunk)
-      @input_tokens += chunk.input_tokens if chunk.input_tokens
-      @output_tokens += chunk.output_tokens if chunk.output_tokens
+      @input_tokens = chunk.input_tokens if chunk.input_tokens
+      @output_tokens = chunk.output_tokens if chunk.output_tokens
    end
  end
end
data/lib/ruby_llm/streaming.rb CHANGED
@@ -8,10 +8,10 @@ module RubyLLM
  module Streaming
    module_function
 
-    def stream_response(payload, &block)
+    def stream_response(connection, payload, &block)
      accumulator = StreamAccumulator.new
 
-      post stream_url, payload do |req|
+      connection.post stream_url, payload do |req|
        req.options.on_data = handle_stream do |chunk|
          accumulator.add chunk
          block.call chunk
@@ -29,7 +29,7 @@ module RubyLLM
 
    private
 
-    def to_json_stream(&block) # rubocop:disable Metrics/MethodLength
+    def to_json_stream(&block)
      buffer = String.new
      parser = EventStreamParser::Parser.new
 
data/lib/ruby_llm/utils.rb ADDED
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  # Provides utility functions for data manipulation within the RubyLLM library
+  module Utils
+    module_function
+
+    def deep_symbolize_keys(value)
+      case value
+      when Hash
+        value.each_with_object({}) do |(k, v), new_hash|
+          new_key = k.is_a?(String) ? k.to_sym : k
+          new_hash[new_key] = deep_symbolize_keys(v)
+        end
+      when Array
+        value.map { |v| deep_symbolize_keys(v) }
+      else
+        value
+      end
+    end
+  end
+end
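A quick example of the new helper: it recursively converts String keys to symbols, recurses into arrays, and leaves non-String keys and scalar values untouched.

  RubyLLM::Utils.deep_symbolize_keys(
    'model' => 'gpt-4o', 'usage' => { 'prompt_tokens' => 10 }, 'tags' => [{ 'a' => 1 }]
  )
  # => { model: 'gpt-4o', usage: { prompt_tokens: 10 }, tags: [{ a: 1 }] }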
data/lib/ruby_llm/version.rb CHANGED
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module RubyLLM
-  VERSION = '1.2.0'
+  VERSION = '1.3.0rc1'
 end
data/lib/ruby_llm.rb CHANGED
@@ -16,7 +16,9 @@ loader.inflector.inflect(
  'openai' => 'OpenAI',
  'api' => 'API',
  'deepseek' => 'DeepSeek',
-  'bedrock' => 'Bedrock'
+  'bedrock' => 'Bedrock',
+  'openrouter' => 'OpenRouter',
+  'pdf' => 'PDF'
 )
 loader.ignore("#{__dir__}/tasks")
 loader.ignore("#{__dir__}/ruby_llm/railtie")
@@ -30,8 +32,14 @@ module RubyLLM
  class Error < StandardError; end
 
  class << self
-    def chat(model: nil, provider: nil, assume_model_exists: false)
-      Chat.new(model:, provider:, assume_model_exists:)
+    def context
+      context_config = config.dup
+      yield context_config if block_given?
+      Context.new(context_config)
+    end
+
+    def chat(...)
+      Chat.new(...)
    end
 
    def embed(...)
@@ -60,9 +68,9 @@ module RubyLLM
 
    def logger
      @logger ||= Logger.new(
-        $stdout,
+        config.log_file,
        progname: 'RubyLLM',
-        level: ENV['RUBYLLM_DEBUG'] ? Logger::DEBUG : Logger::INFO
+        level: config.log_level
      )
    end
  end
@@ -73,6 +81,8 @@ RubyLLM::Provider.register :anthropic, RubyLLM::Providers::Anthropic
 RubyLLM::Provider.register :gemini, RubyLLM::Providers::Gemini
 RubyLLM::Provider.register :deepseek, RubyLLM::Providers::DeepSeek
 RubyLLM::Provider.register :bedrock, RubyLLM::Providers::Bedrock
+RubyLLM::Provider.register :openrouter, RubyLLM::Providers::OpenRouter
+RubyLLM::Provider.register :ollama, RubyLLM::Providers::Ollama
 
 if defined?(Rails::Railtie)
   require 'ruby_llm/railtie'
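The new RubyLLM.context helper duplicates the global configuration, yields the copy, and wraps it in a Context (see data/lib/ruby_llm/context.rb in the file list), so per-request overrides never touch the global config. A minimal sketch; the overridden setting is illustrative, and Context's own methods beyond construction are not shown in this diff:

  isolated = RubyLLM.context do |config|
    config.openai_api_key = ENV['OPENAI_API_KEY_ALT'] # illustrative override
  end
  # `isolated` is a RubyLLM::Context built from a copy of the global config;
  # RubyLLM.config itself is unchanged.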