ruby_llm 1.1.0rc2 → 1.1.1

This diff shows the changes between publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 76bc09be7c31380ca96740730c8d0124847936f26c8067be26d431c670d81855
4
- data.tar.gz: 280026eba72f99a48ecf1660d8a0b4f6b07ceff08cdb05d94ede589aa2d597fc
3
+ metadata.gz: d07eaf11ea6e6cc923921ebaa92341c91c0aab15021c7347cee4db960defea3f
4
+ data.tar.gz: e0a024fe5f82ecada1ecb6d3bc9ab3e3b24f8488cf10cf44ddaf8c1621746255
5
5
  SHA512:
6
- metadata.gz: 4bae50d060f691e26508a24b51bf5b022a65ce2707426b2616ceeec6895e22eab1663c1b4687a6830566622a100530ecdc569b6f6ec7cdc1aaeadb0324cad69a
7
- data.tar.gz: 0d824847a5ebc619339830cc48ac4d48038d52b180d7cb4f6c90812c6899fc09dd416b67f6d0f9e9cb820ff177a9afc8898e2b27f7201e14d497fd092d9b5a48
6
+ metadata.gz: 33667bbaf9573ed597f29580a7f61855ddddad0d844891aa0b0085ed444a4bd5f36d78d930f9069ca09bee8f89f957eee1570a8df6f547a5ea34cb0b0c332787
7
+ data.tar.gz: bbcd322f99ee88b1a588743bd0e3f29baf461a1b7548b4c0024b52afc918ffe03ac413e82fc66fc476ed3b5dd8448ae724c97bd81189b1cacb2bbd1ca0db0abb
data/README.md CHANGED
@@ -135,7 +135,7 @@ chat.ask "Tell me a story about a Ruby programmer" do |chunk|
135
135
  print chunk.content
136
136
  end
137
137
 
138
- # Set personality or behavior with instructions (aka system prompts) - available from 1.1.0
138
+ # Set personality or behavior with instructions (aka system prompts)
139
139
  chat.with_instructions "You are a friendly Ruby expert who loves to help beginners"
140
140
 
141
141
  # Understand content in multiple forms
@@ -171,7 +171,7 @@ end
171
171
  # In a background job
172
172
  chat = Chat.create! model_id: "gpt-4o-mini"
173
173
 
174
- # Set personality or behavior with instructions (aka system prompts) - they're persisted too! - available from 1.1.0
174
+ # Set personality or behavior with instructions (aka system prompts) - they're persisted too!
175
175
  chat.with_instructions "You are a friendly Ruby expert who loves to help beginners"
176
176
 
177
177
  chat.ask("What's your favorite Ruby gem?") do |chunk|
@@ -25,13 +25,13 @@ module RubyLLM
25
25
  to: :to_llm
26
26
  end
27
27
 
28
- def acts_as_message(chat_class: 'Chat', tool_call_class: 'ToolCall') # rubocop:disable Metrics/MethodLength
28
+ def acts_as_message(chat_class: 'Chat', tool_call_class: 'ToolCall', touch_chat: false) # rubocop:disable Metrics/MethodLength
29
29
  include MessageMethods
30
30
 
31
31
  @chat_class = chat_class.to_s
32
32
  @tool_call_class = tool_call_class.to_s
33
33
 
34
- belongs_to :chat, class_name: @chat_class
34
+ belongs_to :chat, class_name: @chat_class, touch: touch_chat
35
35
  has_many :tool_calls, class_name: @tool_call_class, dependent: :destroy
36
36
 
37
37
  belongs_to :parent_tool_call,
@@ -34,32 +34,5 @@
34
34
  "claude-2-1": {
35
35
  "anthropic": "claude-2.1",
36
36
  "bedrock": "anthropic.claude-2.1"
37
- },
38
- "gpt-4o": {
39
- "openai": "gpt-4o-2024-11-20"
40
- },
41
- "gpt-4o-mini": {
42
- "openai": "gpt-4o-mini-2024-07-18"
43
- },
44
- "gpt-4-turbo": {
45
- "openai": "gpt-4-turbo-2024-04-09"
46
- },
47
- "gemini-1.5-flash": {
48
- "gemini": "gemini-1.5-flash-002"
49
- },
50
- "gemini-1.5-flash-8b": {
51
- "gemini": "gemini-1.5-flash-8b-001"
52
- },
53
- "gemini-1.5-pro": {
54
- "gemini": "gemini-1.5-pro-002"
55
- },
56
- "gemini-2.0-flash": {
57
- "gemini": "gemini-2.0-flash-001"
58
- },
59
- "o1": {
60
- "openai": "o1-2024-12-17"
61
- },
62
- "o3-mini": {
63
- "openai": "o3-mini-2025-01-31"
64
37
  }
65
38
  }
@@ -10,6 +10,7 @@ module RubyLLM
10
10
  # config.anthropic_api_key = ENV['ANTHROPIC_API_KEY']
11
11
  # end
12
12
  class Configuration
13
+ # Provider-specific configuration
13
14
  attr_accessor :openai_api_key,
14
15
  :anthropic_api_key,
15
16
  :gemini_api_key,
@@ -18,15 +19,26 @@ module RubyLLM
18
19
  :bedrock_secret_key,
19
20
  :bedrock_region,
20
21
  :bedrock_session_token,
22
+ # Default models
21
23
  :default_model,
22
24
  :default_embedding_model,
23
25
  :default_image_model,
26
+ # Connection configuration
24
27
  :request_timeout,
25
- :max_retries
28
+ :max_retries,
29
+ :retry_interval,
30
+ :retry_backoff_factor,
31
+ :retry_interval_randomness
26
32
 
27
33
  def initialize
34
+ # Connection configuration
28
35
  @request_timeout = 120
29
36
  @max_retries = 3
37
+ @retry_interval = 0.1
38
+ @retry_backoff_factor = 2
39
+ @retry_interval_randomness = 0.5
40
+
41
+ # Default models
30
42
  @default_model = 'gpt-4o-mini'
31
43
  @default_embedding_model = 'text-embedding-3-small'
32
44
  @default_image_model = 'dall-e-3'
@@ -7,7 +7,7 @@ module RubyLLM
7
7
  module Provider
8
8
  # Common functionality for all LLM providers. Implements the core provider
9
9
  # interface so specific providers only need to implement a few key methods.
10
- module Methods
10
+ module Methods # rubocop:disable Metrics/ModuleLength
11
11
  extend Streaming
12
12
 
13
13
  def complete(messages, tools:, temperature:, model:, &block) # rubocop:disable Metrics/MethodLength
@@ -108,9 +108,9 @@ module RubyLLM
108
108
 
109
109
  f.request :retry, {
110
110
  max: RubyLLM.config.max_retries,
111
- interval: 0.05,
112
- interval_randomness: 0.5,
113
- backoff_factor: 2,
111
+ interval: RubyLLM.config.retry_interval,
112
+ interval_randomness: RubyLLM.config.retry_interval_randomness,
113
+ backoff_factor: RubyLLM.config.retry_backoff_factor,
114
114
  exceptions: [
115
115
  Errno::ETIMEDOUT,
116
116
  Timeout::Error,
@@ -119,9 +119,10 @@ module RubyLLM
119
119
  Faraday::RetriableResponse,
120
120
  RubyLLM::RateLimitError,
121
121
  RubyLLM::ServerError,
122
- RubyLLM::ServiceUnavailableError
122
+ RubyLLM::ServiceUnavailableError,
123
+ RubyLLM::OverloadedError
123
124
  ],
124
- retry_statuses: [429, 500, 502, 503, 504]
125
+ retry_statuses: [429, 500, 502, 503, 504, 529]
125
126
  }
126
127
 
127
128
  f.request :json
@@ -42,12 +42,20 @@ module RubyLLM
42
42
 
43
43
  private
44
44
 
45
- def tool_calls_from_stream
45
+ def tool_calls_from_stream # rubocop:disable Metrics/MethodLength
46
46
  tool_calls.transform_values do |tc|
47
+ arguments = if tc.arguments.is_a?(String) && !tc.arguments.empty?
48
+ JSON.parse(tc.arguments)
49
+ elsif tc.arguments.is_a?(String)
50
+ {} # Return empty hash for empty string arguments
51
+ else
52
+ tc.arguments
53
+ end
54
+
47
55
  ToolCall.new(
48
56
  id: tc.id,
49
57
  name: tc.name,
50
- arguments: tc.arguments.is_a?(String) ? JSON.parse(tc.arguments) : tc.arguments
58
+ arguments: arguments
51
59
  )
52
60
  end
53
61
  end
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module RubyLLM
4
- VERSION = '1.1.0rc2'
4
+ VERSION = '1.1.1'
5
5
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: ruby_llm
3
3
  version: !ruby/object:Gem::Version
4
- version: 1.1.0rc2
4
+ version: 1.1.1
5
5
  platform: ruby
6
6
  authors:
7
7
  - Carmine Paolino
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2025-04-04 00:00:00.000000000 Z
11
+ date: 2025-04-11 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: base64
@@ -66,6 +66,20 @@ dependencies:
66
66
  - - "~>"
67
67
  - !ruby/object:Gem::Version
68
68
  version: '1'
69
+ - !ruby/object:Gem::Dependency
70
+ name: faraday-net_http
71
+ requirement: !ruby/object:Gem::Requirement
72
+ requirements:
73
+ - - "~>"
74
+ - !ruby/object:Gem::Version
75
+ version: '3'
76
+ type: :runtime
77
+ prerelease: false
78
+ version_requirements: !ruby/object:Gem::Requirement
79
+ requirements:
80
+ - - "~>"
81
+ - !ruby/object:Gem::Version
82
+ version: '3'
69
83
  - !ruby/object:Gem::Dependency
70
84
  name: faraday-retry
71
85
  requirement: !ruby/object:Gem::Requirement