rach 0.2.7 → 0.2.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 6def772228179c4cb7342ba70486cf07d0438805dd1595f6e40bfaf60d71a5c4
4
- data.tar.gz: f356a968c75213658a27063713c66b12afcfa7584d27bfe517bf3191851a52c1
3
+ metadata.gz: d38fee5588b8410bd344b1848429f4cd5534dbfc46bdb3c7c55193156b3f78b5
4
+ data.tar.gz: 8a8b353426c367c4b0a7663d66a0a82a91c594a23384f12ed17f3484513aa105
5
5
  SHA512:
6
- metadata.gz: 5e8898c1990f62bd29e807a84f9a5ae9dd3a1482c31169e5c82fc80b5e9eae86674005ef94ed867db71f6336cf6829189333beccb4eb20eb062fbece5e510e98
7
- data.tar.gz: 9407df8d6215c215109e9ae221f166641d3385353ca76662719a7b9c0f6953f09327419aebb4977a493049341c502a280f6037fb300435b7379425d7acfc4baf
6
+ metadata.gz: 0c963bd5389c85b39ac2f2fb8ef94f3e35167bd265e2dfa77c6a63642ae42d156954e9d151fbea994b3c12fe5d5a7db15f2671501449a74df17e4e26a1e3383d
7
+ data.tar.gz: 8f3e9700244b3fc29414a512b1cbf84818b72d73cc44d60a6dc581835a77d680f0616008477bbaf54f68e2fcf2dbfa540fc104aa346cd3882c47826c578a90d9
data/README.md CHANGED
@@ -41,26 +41,32 @@ require 'rach'
41
41
 
42
42
  client = Rach::Client.new(access_token: YOUR_OPENAI_API_KEY, model: "gpt-4o")
43
43
  convo = Rach::Conversation.new
44
- convo.system "You teach the German language."
45
- convo.user "Translate: There are two birds singing outside my window."
46
44
 
45
+ convo.system "You are a helpful historian."
46
+
47
+ # 1. First user question
48
+ convo.user "When was the Statue of Liberty dedicated? Provide only the year."
47
49
  response = client.chat(convo)
48
- response.content
49
- # => "Es gibt zwei Vögel, die draußen vor meinem Fenster singen."
50
+ puts response.content
51
+ # => "1886"
50
52
 
51
- # Continue the conversation...
53
+ # 2. Add the response to the conversation
52
54
  convo.add_response(response)
53
- convo.user "What are the verbs in your translation?"
55
+
56
+ # 3. Continue the conversation (the answer references the original fact: 1886)
57
+ convo.user "Name a major city that was incorporated in that year"
54
58
  response = client.chat(convo)
55
- response.content
56
- # => "The verbs in the translation \"Es gibt zwei Vögel, die vor meinem Fenster singen\" are \"gibt\" and \"singen.\""
59
+ puts response.content
60
+ # => "Vancouver"
57
61
 
58
- # Remove the last message from the conversation history and continue
62
+ # 4. Remove the most recent user message ("Name a major city...")
59
63
  convo.pop
60
- convo.user "Explain the structure of your translation."
64
+
65
+ # 5. Ask a new question that references the Statue of Liberty, not Vancouver
66
+ convo.user "Why was it built?"
61
67
  response = client.chat(convo)
62
- response.content
63
- # => "Your last message to me was: \"Translate: There are two birds singing outside my window.\""
68
+ puts response.content
69
+ # => "The Statue of Liberty was built to commemorate the 100th anniversary of the signing of the Declaration of Independence."
64
70
  ```
65
71
 
66
72
  ### Response Formatting
@@ -148,6 +154,29 @@ response = client.chat("Hi there!", model: "claude-3-5-sonnet-20241022")
148
154
  puts response.content
149
155
  ```
150
156
 
157
+ Rach uses the `ruby-openai` and `anthropic` gems to make API calls to each provider. You can configure each provider separately in the `providers` hash, and each configuration will be passed through to the corresponding client:
158
+
159
+ ```ruby
160
+ client = Rach::Client.new(
161
+ providers: {
162
+ openai: {
163
+ access_token: YOUR_OPENAI_API_KEY,
164
+ uri_base: "https://oai.hconeai.com/",
165
+ extra_headers: {
166
+ "X-Proxy-TTL" => "43200",
167
+ "X-Proxy-Refresh": "true",
168
+ "Helicone-Auth": "Bearer HELICONE_API_KEY",
169
+ "helicone-stream-force-format" => "true",
170
+ }
171
+ },
172
+ anthropic: {
173
+ access_token: YOUR_ANTHROPIC_API_KEY,
174
+ request_timeout: 240
175
+ }
176
+ }
177
+ )
178
+ ```
179
+
151
180
  ### Logging
152
181
 
153
182
  Rach supports logging of API calls and their parameters. You can provide any logger that responds to the `info` method:
@@ -83,11 +83,13 @@ module Rach
83
83
 
84
84
  private
85
85
 
86
- def create_client(access_token, **kwargs)
87
- ::Anthropic::Client.new(
88
- access_token: access_token,
89
- **kwargs
90
- )
86
+ def create_client(access_token, **config)
87
+ client_config = {
88
+ access_token: access_token
89
+ }
90
+ client_config.merge!(config)
91
+
92
+ ::Anthropic::Client.new(**client_config)
91
93
  end
92
94
 
93
95
  def convert_tools(functions)
@@ -1,8 +1,11 @@
1
1
  module Rach
2
2
  module Provider
3
3
  class Base
4
- def initialize(access_token, **kwargs)
5
- @client = create_client(access_token, **kwargs)
4
+ attr_reader :client, :logger
5
+
6
+ def initialize(access_token: nil, logger: nil, **config)
7
+ @logger = logger
8
+ @client = create_client(access_token, **config)
6
9
  end
7
10
 
8
11
  def self.key
@@ -19,7 +22,7 @@ module Rach
19
22
 
20
23
  private
21
24
 
22
- def create_client(access_token, **kwargs)
25
+ def create_client(access_token, **config)
23
26
  raise NotImplementedError
24
27
  end
25
28
  end
@@ -0,0 +1,77 @@
1
require 'gemini-ai'

module Rach
  module Provider
    # Provider adapter for Google's Gemini models, backed by the `gemini-ai` gem.
    # Inherits Base#initialize, which stores the logger and builds the client
    # via #create_client — matching the Anthropic and OpenAI providers.
    class Google < Base

      # Sends a chat request to the Gemini API.
      #
      # @param parameters [Hash] expects { parameters: { messages: [...] } },
      #   where each message is { role: "system"|"user"|"assistant", content: String }
      # @return [Response] a provider-agnostic Rach response
      def chat(**parameters)
        messages = parameters.dig(:parameters, :messages) || []

        # Gemini has no first-class system role: pull the system message out
        # and fold it into the first user turn below.
        system_message = messages.find { |msg| msg[:role] == "system" }
        messages = messages.reject { |msg| msg[:role] == "system" } if system_message

        # Convert messages to Gemini's contents format; the API names the
        # assistant role "model" and treats everything else as "user".
        contents = messages.map do |msg|
          {
            role: msg[:role] == "assistant" ? "model" : "user",
            parts: { text: msg[:content] }
          }
        end

        # Prepend the system prompt to the first user message, if any.
        if system_message
          first_user_message = contents.find { |msg| msg[:role] == "user" }
          if first_user_message
            first_user_message[:parts][:text] =
              "#{system_message[:content]}\n\n#{first_user_message[:parts][:text]}"
          end
        end

        if @logger
          @logger.info("Making API call to Google Gemini")
          @logger.info("Request parameters: #{contents.inspect}")
        end

        raw_response = @client.generate_content({ contents: contents })

        @logger.info("Response: #{raw_response.inspect}") if @logger

        build_response(raw_response)
      end

      def self.supports?(model)
        model.start_with?("gemini")
      end

      private

      # Maps a raw Gemini payload onto Rach's Response.
      # NOTE(review): uses dig throughout so a malformed/blocked response
      # yields nil fields instead of raising NoMethodError.
      def build_response(raw_response)
        usage = raw_response["usageMetadata"] || {}

        Response.new(
          # FIX: the response *text* was previously copy-pasted into :id.
          # Use the API-supplied response id when present (nil otherwise).
          id: raw_response["responseId"],
          model: raw_response["modelVersion"],
          content: raw_response.dig("candidates", 0, "content", "parts", 0, "text"),
          usage: {
            "prompt_tokens" => usage["promptTokenCount"],
            "completion_tokens" => usage["candidatesTokenCount"],
            "total_tokens" => usage["totalTokenCount"]
          },
          raw_response: raw_response
        )
      end

      # Builds the underlying Gemini client. Extra keyword config is merged
      # into the client options (e.g. to override the default model).
      def create_client(access_token, **config)
        client_config = {
          credentials: {
            service: 'generative-language-api',
            api_key: access_token
          },
          options: { model: 'gemini-pro' }
        }
        client_config[:options].merge!(config)

        ::Gemini.new(**client_config)
      end
    end
  end
end
@@ -34,12 +34,14 @@ module Rach
34
34
 
35
35
  private
36
36
 
37
- def create_client(access_token, **kwargs)
38
- ::OpenAI::Client.new(
37
+ def create_client(access_token, **config)
38
+ client_config = {
39
39
  access_token: access_token,
40
- log_errors: true,
41
- **kwargs
42
- )
40
+ log_errors: true
41
+ }
42
+ client_config.merge!(config)
43
+
44
+ ::OpenAI::Client.new(**client_config)
43
45
  end
44
46
 
45
47
  def convert_params(parameters)
data/lib/rach/provider.rb CHANGED
@@ -3,7 +3,8 @@ module Rach
3
3
 
4
4
  AVAILABLE_PROVIDERS = [
5
5
  Provider::OpenAI,
6
- Provider::Anthropic
6
+ Provider::Anthropic,
7
+ Provider::Google
7
8
  ].to_h { |p| [p.key, p] }.freeze
8
9
 
9
10
  def self.for(model)
data/lib/rach/response.rb CHANGED
@@ -19,14 +19,19 @@ module Rach
19
19
  !tool_calls.nil? && !tool_calls.empty?
20
20
  end
21
21
 
22
- def function_name
23
- return nil unless function_call?
24
- tool_calls.first.dig("function", "name")
22
+ def function_name(tool_call)
23
+ tool_call.dig("function", "name")
25
24
  end
26
25
 
27
- def function_arguments
26
+ def each_tool_call
28
27
  return nil unless function_call?
29
- JSON.parse(tool_calls.first.dig("function", "arguments"))
28
+ tool_calls.each do |tool_call|
29
+ yield tool_call
30
+ end
31
+ end
32
+
33
+ def function_arguments(tool_call)
34
+ JSON.parse(tool_call.dig("function", "arguments"))
30
35
  rescue JSON::ParserError
31
36
  raise ParseError, "Function arguments are not valid JSON"
32
37
  end
@@ -46,12 +51,15 @@ module Rach
46
51
  def on_function(function_class = nil, &block)
47
52
  return self unless function_call?
48
53
 
49
- function = function_class.new
50
- return self unless function.function_name == function_name
54
+ each_tool_call do |tool_call|
55
+ function = function_class.new
56
+ next unless function.function_name == function_name(tool_call)
57
+
58
+ args = function_arguments(tool_call).transform_keys(&:to_sym)
59
+ function.validate_arguments!(args)
60
+ block.call(function, args)
61
+ end
51
62
 
52
- args = function_arguments.transform_keys(&:to_sym)
53
- function.validate_arguments!(args)
54
- block.call(function, args)
55
63
  self
56
64
  end
57
65
 
data/lib/rach/version.rb CHANGED
@@ -1,4 +1,4 @@
1
1
  module Rach
2
- VERSION = "0.2.7"
2
+ VERSION = "0.2.9"
3
3
  end
4
4
 
data/lib/rach.rb CHANGED
@@ -14,6 +14,7 @@ require_relative "rach/function"
14
14
  require_relative "rach/provider/base"
15
15
  require_relative "rach/provider/openai"
16
16
  require_relative "rach/provider/anthropic"
17
+ require_relative "rach/provider/google"
17
18
  require_relative "rach/provider"
18
19
  require_relative "rach/prompt"
19
20
 
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: rach
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.2.7
4
+ version: 0.2.9
5
5
  platform: ruby
6
6
  authors:
7
7
  - Roger Garcia
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2025-01-26 00:00:00.000000000 Z
11
+ date: 2025-02-05 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: rspec
@@ -100,6 +100,20 @@ dependencies:
100
100
  - - "~>"
101
101
  - !ruby/object:Gem::Version
102
102
  version: '7.3'
103
+ - !ruby/object:Gem::Dependency
104
+ name: gemini-ai
105
+ requirement: !ruby/object:Gem::Requirement
106
+ requirements:
107
+ - - "~>"
108
+ - !ruby/object:Gem::Version
109
+ version: 4.2.0
110
+ type: :runtime
111
+ prerelease: false
112
+ version_requirements: !ruby/object:Gem::Requirement
113
+ requirements:
114
+ - - "~>"
115
+ - !ruby/object:Gem::Version
116
+ version: 4.2.0
103
117
  description: Rach is a lightweight framework for orchestrating AI agents
104
118
  email:
105
119
  - rach@rogergarcia.me
@@ -119,6 +133,7 @@ files:
119
133
  - lib/rach/provider.rb
120
134
  - lib/rach/provider/anthropic.rb
121
135
  - lib/rach/provider/base.rb
136
+ - lib/rach/provider/google.rb
122
137
  - lib/rach/provider/openai.rb
123
138
  - lib/rach/response.rb
124
139
  - lib/rach/response_format.rb
@@ -143,7 +158,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
143
158
  - !ruby/object:Gem::Version
144
159
  version: '0'
145
160
  requirements: []
146
- rubygems_version: 3.5.9
161
+ rubygems_version: 3.5.3
147
162
  signing_key:
148
163
  specification_version: 4
149
164
  summary: Orchestrate AI agents like a virtuoso