helicone-rb 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.rspec +3 -0
- data/.tool-versions +1 -0
- data/LICENSE +21 -0
- data/README.md +263 -0
- data/Rakefile +8 -0
- data/helicone.gemspec +38 -0
- data/lib/helicone/agent.rb +134 -0
- data/lib/helicone/agent_result.rb +50 -0
- data/lib/helicone/client.rb +129 -0
- data/lib/helicone/configuration.rb +44 -0
- data/lib/helicone/message.rb +151 -0
- data/lib/helicone/response.rb +128 -0
- data/lib/helicone/tool.rb +102 -0
- data/lib/helicone/tool_call.rb +96 -0
- data/lib/helicone/version.rb +5 -0
- data/lib/helicone.rb +18 -0
- metadata +117 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
+---
+SHA256:
+  metadata.gz: 2be72b17bc38b9d451b2888a29407c7af83212d26c580ec22de03cdb9a97c0fb
+  data.tar.gz: 17682bc18311ebc1c2be4e3607db90b3e9d7c2fc90bfeedae38e8bef67e79321
+SHA512:
+  metadata.gz: 5641f51284e72fdcc5a74f68b38fb9afe0cd11607dda855e6193ec555c6c8aae5d2507b4ce0a7e39161ec5a6fd2e22d0a0d19200e895641167a93cffd50914ef
+  data.tar.gz: 5ea90b79077f3ffc5b19c98ae8ab998790bd8580bb63ecf70fc7f3ee74d9e447f61a4d518007bcc2c17a988adcda0df26cfa0f6e1ae110838b2037f8dab3ff80
data/.rspec
ADDED
data/.tool-versions
ADDED
@@ -0,0 +1 @@
+ruby 3.4.6
data/LICENSE
ADDED
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2024 Genevere
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
data/README.md
ADDED
@@ -0,0 +1,263 @@
+# Helicone Ruby Client
+
+A Ruby client for the [Helicone AI Gateway](https://helicone.ai), wrapping the OpenAI API with built-in session tracking, cost attribution, and an agentic framework for building AI applications with tool/function calling.
+
+## Installation
+
+Add this line to your application's Gemfile:
+
+```ruby
+gem 'helicone-rb'
+```
+
+And then execute:
+
+```bash
+bundle install
+```
+
+Or install it yourself as:
+
+```bash
+gem install helicone-rb
+```
+
+## Configuration
+
+Configure the gem with your API key and optional settings:
+
+```ruby
+Helicone.configure do |config|
+  config.api_key = ENV['OPENAI_API_KEY']
+  config.uri_base = "https://oai.helicone.ai/v1" # default
+  config.default_model = "gpt-4" # default
+  config.logger = Logger.new($stdout) # default
+end
+```
+
+## Basic Usage
+
+### Simple Chat
+
+```ruby
+client = Helicone::Client.new
+
+# Single-turn conversation
+response = client.ask("What is the capital of France?")
+# => "The capital of France is Paris."
+
+# With system prompt
+response = client.ask(
+  "Explain quantum computing",
+  system: "You are a physics teacher. Explain concepts simply."
+)
+```
+
+### Multi-turn Conversations
+
+```ruby
+client = Helicone::Client.new
+
+messages = [
+  Helicone::Message.system("You are a helpful assistant."),
+  Helicone::Message.user_text("My name is Alice."),
+]
+
+response = client.chat(messages: messages)
+puts response.content
+# => "Nice to meet you, Alice!"
+
+# Continue the conversation
+messages << response.to_message
+messages << Helicone::Message.user_text("What's my name?")
+
+response = client.chat(messages: messages)
+puts response.content
+# => "Your name is Alice."
+```
+
+### Vision (Images)
+
+```ruby
+client = Helicone::Client.new
+
+# Single image
+response = client.ask_with_image(
+  "What's in this image?",
+  "https://example.com/image.jpg"
+)
+
+# Multiple images
+message = Helicone::Message.user_with_images(
+  "Compare these two images",
+  ["https://example.com/a.jpg", "https://example.com/b.jpg"],
+  detail: "high"
+)
+response = client.chat(messages: [message])
+```
+
+## Session and Account Tracking
+
+Track conversations and costs in the Helicone dashboard:
+
+```ruby
+client = Helicone::Client.new(
+  session_id: conversation.id,
+  session_name: "Support Chat ##{conversation.id}",
+  account_id: user.account_id,
+  account_name: user.account.name
+)
+```
+
+## Agentic Framework
+
+Build AI agents that can use tools to accomplish tasks.
+
+### Defining Tools
+
+Tools use JSON Schema for parameter definitions. The schema is passed through to the underlying [ruby-openai](https://github.com/alexrudall/ruby-openai) gem, so any schema format that OpenAI's function calling API accepts will work:
+
+```ruby
+class WeatherTool < Helicone::Tool
+  description "Get current weather for a location"
+
+  parameters(
+    type: "object",
+    properties: {
+      location: {
+        type: "string",
+        description: "City and state, e.g. 'San Francisco, CA'"
+      },
+      unit: {
+        type: "string",
+        enum: ["celsius", "fahrenheit"],
+        description: "Temperature unit"
+      }
+    },
+    required: ["location"]
+  )
+
+  def execute(location:, unit: "fahrenheit")
+    # Your implementation here
+    weather_api.get(location, unit: unit)
+  end
+end
+```
+
+Complex nested schemas are supported:
+
+```ruby
+class CreateOrderTool < Helicone::Tool
+  description "Create a new order"
+
+  parameters(
+    type: "object",
+    properties: {
+      customer: {
+        type: "object",
+        properties: {
+          name: { type: "string" },
+          email: { type: "string", format: "email" }
+        },
+        required: ["name", "email"]
+      },
+      items: {
+        type: "array",
+        items: {
+          type: "object",
+          properties: {
+            product_id: { type: "string" },
+            quantity: { type: "integer", minimum: 1 }
+          },
+          required: ["product_id", "quantity"]
+        }
+      }
+    },
+    required: ["customer", "items"]
+  )
+
+  def execute(customer:, items:)
+    Order.create!(customer: customer, items: items)
+  end
+end
+```
+
+### Running an Agent
+
+```ruby
+agent = Helicone::Agent.new(
+  tools: [WeatherTool, CalendarTool],
+  system: "You are a helpful assistant with access to weather and calendar tools.",
+  context: current_user # Passed to tool#initialize
+)
+
+result = agent.run("What's the weather in Tokyo and do I have any meetings today?")
+
+puts result.content # Final response text
+puts result.iterations # Number of tool execution loops
+puts result.tool_calls_made # Total tool calls executed
+puts result.success? # Whether it completed successfully
+```
+
+### Continuing Conversations
+
+```ruby
+agent = Helicone::Agent.new(tools: [WeatherTool])
+
+result = agent.run("What's the weather in Paris?")
+puts result.content
+
+# Continue with follow-up
+result = agent.continue("What about London?")
+puts result.content
+```
+
+## Message Types
+
+```ruby
+# Text messages
+Helicone::Message.user_text("Hello")
+Helicone::Message.assistant_text("Hi there!")
+Helicone::Message.system("You are helpful")
+
+# Images
+Helicone::Message.user_image("https://example.com/img.jpg", text: "Describe this")
+Helicone::Message.user_with_images("Compare", ["url1", "url2"])
+
+# Tool results (internal use)
+Helicone::Message.tool_result(tool_call_id: "call_123", content: { data: "result" })
+```
+
+## Response Object
+
+```ruby
+response = client.chat(messages: messages)
+
+response.content # Text content
+response.role # "assistant"
+response.finish_reason # "stop", "length", "tool_calls"
+response.tool_calls # Array of tool calls if any
+response.model # Model used
+response.usage # Token usage stats
+response.prompt_tokens
+response.completion_tokens
+response.total_tokens
+response.success? # Quick success check
+```
+
+## Development
+
+After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake spec` to run the tests.
+
+```bash
+bundle install
+bundle exec rspec
+```
+
+## Contributing
+
+Bug reports and pull requests are welcome on GitHub at https://github.com/genevere-inc/helicone-rb.
+
+## License
+
+The gem is available as open source under the terms of the [MIT License](https://opensource.org/licenses/MIT).
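The README shows each feature in isolation. As one picture of how the pieces are meant to fit together, here is a minimal end-to-end sketch based only on the API documented above; `WeatherTool`, its stubbed lookup, and the environment variable are illustrative placeholders, not part of the gem.

```ruby
# Illustrative end-to-end flow assembled from the README above (not shipped with the gem).
require "helicone"

Helicone.configure do |config|
  config.api_key = ENV["OPENAI_API_KEY"] # assumed to be set
end

# A minimal tool; the weather lookup is stubbed out.
class WeatherTool < Helicone::Tool
  description "Get current weather for a location"
  parameters(
    type: "object",
    properties: { location: { type: "string" } },
    required: ["location"]
  )

  def execute(location:)
    { location: location, forecast: "sunny" } # stand-in for a real API call
  end
end

agent = Helicone::Agent.new(
  tools: [WeatherTool],
  system: "You are a helpful assistant."
)

result = agent.run("What's the weather in Tokyo?")
puts result.content
puts "tool calls: #{result.tool_calls_made}, iterations: #{result.iterations}"
```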
data/Rakefile
ADDED
data/helicone.gemspec
ADDED
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require_relative "lib/helicone/version"
+
+Gem::Specification.new do |spec|
+  spec.name = "helicone-rb"
+  spec.version = Helicone::VERSION
+  spec.authors = ["Genevere"]
+  spec.email = ["hello@genevere.com"]
+
+  spec.summary = "Ruby client for Helicone AI Gateway with agentic tool support"
+  spec.description = "A Ruby client that wraps the OpenAI API through the Helicone AI Gateway, " \
+                     "providing session tracking, cost attribution, and an agentic framework " \
+                     "for building AI applications with tool/function calling."
+  spec.homepage = "https://github.com/genevere-inc/helicone-rb"
+  spec.license = "MIT"
+  spec.required_ruby_version = ">= 3.0"
+
+  spec.metadata["homepage_uri"] = spec.homepage
+  spec.metadata["source_code_uri"] = "https://github.com/genevere-inc/helicone-rb"
+  spec.metadata["changelog_uri"] = "https://github.com/genevere-inc/helicone-rb/blob/main/CHANGELOG.md"
+
+  spec.files = Dir.chdir(__dir__) do
+    `git ls-files -z`.split("\x0").reject do |f|
+      (File.expand_path(f) == __FILE__) ||
+        f.start_with?(*%w[bin/ test/ spec/ features/ .git .github appveyor Gemfile])
+    end
+  end
+  spec.bindir = "exe"
+  spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) }
+  spec.require_paths = ["lib"]
+
+  spec.add_dependency "ruby-openai", "~> 7.0"
+
+  spec.add_development_dependency "rspec", "~> 3.0"
+  spec.add_development_dependency "rake", "~> 13.0"
+  spec.add_development_dependency "rubocop", "~> 1.0"
+end
data/lib/helicone/agent.rb
ADDED
@@ -0,0 +1,134 @@
+# frozen_string_literal: true
+
+module Helicone
+  class Agent
+    MAX_ITERATIONS = 10
+
+    attr_reader :client, :tools, :messages, :context
+
+    # Create an agent with tools and optional context
+    #
+    # @param client [Helicone::Client] Optional client (creates new one if not provided)
+    # @param tools [Array<Class>] Array of Tool subclasses
+    # @param context [Object] Context object passed to tool#initialize
+    # @param system [String] System prompt
+    # @param messages [Array<Helicone::Message>] Initial messages (for continuing conversations)
+    def initialize(client: nil, tools: [], context: nil, system: nil, messages: [])
+      @client = client || Client.new
+      @tools = tools
+      @context = context
+      @messages = messages.dup
+
+      # Add system message at the start if provided and not already present
+      if system && @messages.none? { |m| m.respond_to?(:role) && m.role == "system" }
+        @messages.unshift(Message.system(system))
+      end
+    end
+
+    # Run the agent with a prompt, executing tools until done
+    #
+    # @param prompt [String] User prompt to start with
+    # @param max_iterations [Integer] Maximum tool execution loops
+    # @return [AgentResult]
+    def run(prompt, max_iterations: MAX_ITERATIONS)
+      @messages << Message.user_text(prompt)
+
+      iterations = 0
+      while iterations < max_iterations
+        response = call_llm
+
+        tool_calls = response.tool_calls
+        if tool_calls && !tool_calls.empty?
+          # Add assistant message with tool calls to conversation
+          @messages << Message.assistant_with_tool_calls(response.message)
+
+          # Execute each tool and add results
+          response.tool_calls.each do |tc|
+            tool_call = ToolCall.from_response([tc]).first
+            result = execute_tool(tool_call)
+            @messages << Message.tool_result(
+              tool_call_id: tool_call.id,
+              content: result
+            )
+          end
+
+          iterations += 1
+        else
+          # No tool calls - we're done
+          return AgentResult.new(
+            content: response.content,
+            messages: @messages,
+            iterations: iterations,
+            response: response
+          )
+        end
+      end
+
+      # Max iterations reached - make one final call without tools to get a response
+      final_response = @client.chat(messages: @messages)
+
+      AgentResult.new(
+        content: final_response.content,
+        messages: @messages,
+        iterations: iterations,
+        response: final_response,
+        max_iterations_reached: true
+      )
+    end
+
+    # Continue the conversation with a new prompt
+    #
+    # @param prompt [String] User prompt to continue with
+    # @param max_iterations [Integer] Maximum tool execution loops
+    # @return [AgentResult]
+    def continue(prompt, max_iterations: MAX_ITERATIONS)
+      run(prompt, max_iterations: max_iterations)
+    end
+
+    private
+
+    def logger
+      Helicone.configuration.logger
+    end
+
+    def call_llm
+      @client.chat(
+        messages: @messages,
+        tools: tools_for_api,
+        tool_choice: "auto"
+      )
+    end
+
+    def tools_for_api
+      return nil if @tools.empty?
+      @tools.map(&:to_openai_tool)
+    end
+
+    def execute_tool(tool_call)
+      tool_class = find_tool_class(tool_call.name)
+
+      unless tool_class
+        logger.warn("[Helicone::Agent] Unknown tool: #{tool_call.name}")
+        return { error: "Unknown tool: #{tool_call.name}" }
+      end
+
+      logger.info("[Helicone::Agent] Executing tool: #{tool_call.name} with #{tool_call.arguments}")
+
+      tool_instance = tool_class.new(@context)
+      result = tool_instance.execute(**tool_call.arguments)
+
+      result_preview = result.inspect
+      result_preview = result_preview[0, 197] + "..." if result_preview.length > 200
+      logger.info("[Helicone::Agent] Tool result: #{result_preview}")
+      result
+    rescue => e
+      logger.error("[Helicone::Agent] Tool execution error: #{e.message}")
+      logger.error(e.backtrace.first(5).join("\n"))
+      { error: e.message }
+    end
+
+    def find_tool_class(name)
+      @tools.find { |t| t.function_name == name }
+    end
+  end
+end
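One behaviour of `Agent#run` worth calling out: once `max_iterations` tool rounds have executed, it makes a final call without tools and flags the result via `AgentResult#max_iterations_reached?`. A small sketch of that contract, with a placeholder tool class:

```ruby
# Hypothetical tool list; only the iteration-cap behaviour is being illustrated.
agent = Helicone::Agent.new(tools: [WeatherTool], system: "Answer briefly.")

result = agent.run("Check the weather in five different cities.", max_iterations: 2)

if result.max_iterations_reached?
  # The agent ran 2 tool rounds, then answered once more without tools.
  warn "Hit the iteration cap after #{result.iterations} rounds"
end
puts result.content
```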
data/lib/helicone/agent_result.rb
ADDED
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+module Helicone
+  class AgentResult
+    attr_reader :content, :messages, :iterations, :response
+
+    # Initialize an agent result
+    #
+    # @param content [String] Final text response from the agent
+    # @param messages [Array<Helicone::Message>] Full conversation history
+    # @param iterations [Integer] Number of tool execution loops
+    # @param response [Helicone::Response] The final API response
+    # @param max_iterations_reached [Boolean] Whether the agent hit the iteration limit
+    def initialize(content:, messages:, iterations:, response: nil, max_iterations_reached: false)
+      @content = content
+      @messages = messages
+      @iterations = iterations
+      @response = response
+      @max_iterations_reached = max_iterations_reached
+    end
+
+    # Check if the agent hit the iteration limit
+    #
+    # @return [Boolean]
+    def max_iterations_reached?
+      @max_iterations_reached
+    end
+
+    # Check if the agent completed successfully
+    #
+    # @return [Boolean]
+    def success?
+      !@max_iterations_reached && !@content.nil? && !@content.empty?
+    end
+
+    # Count of tool calls executed during the run
+    #
+    # @return [Integer]
+    def tool_calls_made
+      @messages.count { |m| m.respond_to?(:role) && m.role == "tool" }
+    end
+
+    # Get all tool result messages from the conversation
+    #
+    # @return [Array<Helicone::Message>] Messages with role "tool"
+    def tool_results
+      @messages.select { |m| m.respond_to?(:role) && m.role == "tool" }
+    end
+  end
+end
data/lib/helicone/client.rb
ADDED
@@ -0,0 +1,129 @@
+# frozen_string_literal: true
+
+module Helicone
+  class Client
+    attr_reader :client
+
+    # Initialize with optional session/account context for Helicone tracking
+    #
+    # @param api_key [String] OpenAI API key (defaults to Helicone.configuration.api_key)
+    # @param session_id [String, Integer] Conversation/session ID for Helicone grouping
+    # @param session_name [String] Human-readable session name
+    # @param account_id [String, Integer] Account ID for cost tracking per account
+    # @param account_name [String] Human-readable account name
+    def initialize(api_key: nil, session_id: nil, session_name: nil, account_id: nil, account_name: nil)
+      config = Helicone.configuration
+
+      @client = OpenAI::Client.new(
+        access_token: api_key || config.api_key,
+        uri_base: config.uri_base
+      )
+
+      # Add Helicone session headers if provided
+      if session_id
+        @client.add_headers(
+          "Helicone-Session-Id" => session_id.to_s,
+          "Helicone-Session-Name" => session_name || "Conversation ##{session_id}"
+        )
+      end
+
+      # Add Helicone account/user headers if provided
+      if account_id
+        @client.add_headers(
+          "Helicone-User-Id" => account_id.to_s,
+          "Helicone-Property-Account" => account_name || account_id.to_s
+        )
+      end
+    end
+
+    # Send a chat completion request
+    #
+    # @param messages [Array<Helicone::Message, Hash>] Array of messages (Message objects or hashes)
+    # @param model [String] Model ID to use for completion
+    # @param tools [Array<Hash>] OpenAI tool definitions for function calling
+    # @param tool_choice [String, Hash] Tool choice strategy ("auto", "none", or specific tool)
+    # @param options [Hash] Additional options passed to the API
+    # @return [Helicone::Response] Wrapped API response
+    def chat(messages:, model: nil, tools: nil, tool_choice: nil, **options)
+      model ||= Helicone.configuration.default_model
+
+      # Convert Message objects to hashes if needed
+      message_hashes = messages.map { |m| m.respond_to?(:to_h) ? m.to_h : m }
+
+      params = {
+        model: model,
+        messages: message_hashes,
+        **options
+      }
+
+      # Add tools if provided
+      params[:tools] = tools if tools && !tools.empty?
+      params[:tool_choice] = tool_choice if tool_choice
+
+      raw_response = @client.chat(parameters: params)
+
+      Response.new(deep_symbolize_keys(raw_response))
+    end
+
+    # Convenience method for simple single-turn requests
+    #
+    # @param prompt [String] User prompt text
+    # @param model [String] Model ID to use for completion
+    # @param system [String] Optional system prompt
+    # @param options [Hash] Additional options passed to chat
+    # @return [String] The text content of the response
+    def ask(prompt, model: nil, system: nil, **options)
+      messages = []
+      messages << Message.system(system) if system
+      messages << Message.user_text(prompt)
+
+      response = chat(messages: messages, model: model, **options)
+      response.content
+    end
+
+    # Ask with an image
+    #
+    # @param prompt [String] User prompt text
+    # @param image_url [String] URL or base64 data URI of the image
+    # @param model [String] Model ID to use for completion
+    # @param system [String] Optional system prompt
+    # @param detail [String] Image detail level: "auto", "low", or "high"
+    # @param options [Hash] Additional options passed to chat
+    # @return [String] The text content of the response
+    def ask_with_image(prompt, image_url, model: nil, system: nil, detail: "auto", **options)
+      messages = []
+      messages << Message.system(system) if system
+      messages << Message.user_with_images(prompt, image_url, detail: detail)
+
+      response = chat(messages: messages, model: model, **options)
+      response.content
+    end
+
+    # Add additional headers at any time
+    #
+    # @param headers [Hash] Headers to add to subsequent requests
+    # @return [void]
+    def add_headers(headers)
+      @client.add_headers(headers)
+    end
+
+    private
+
+    # Recursively symbolize keys in a hash
+    #
+    # @param obj [Object] Object to process
+    # @return [Object] Object with symbolized keys
+    def deep_symbolize_keys(obj)
+      case obj
+      when Hash
+        obj.each_with_object({}) do |(key, value), result|
+          result[key.to_sym] = deep_symbolize_keys(value)
+        end
+      when Array
+        obj.map { |item| deep_symbolize_keys(item) }
+      else
+        obj
+      end
+    end
+  end
+end
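For clarity on what the constructor and `add_headers` above send to Helicone, a short sketch follows; the `Helicone-Property-Feature` header is an illustrative custom property added by the caller, not something the gem sets itself.

```ruby
# Sketch of the header behaviour above; values are examples only.
client = Helicone::Client.new(
  session_id: 42,                      # sends Helicone-Session-Id: "42"
  account_id: 7, account_name: "Acme"  # sends Helicone-User-Id / Helicone-Property-Account
)

# Extra headers can be layered on later and apply to subsequent requests.
client.add_headers("Helicone-Property-Feature" => "onboarding")

response = client.chat(messages: [Helicone::Message.user_text("Hi")])
puts response.content
```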
data/lib/helicone/configuration.rb
ADDED
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require "logger"
+
+module Helicone
+  class Configuration
+    attr_accessor :api_key, :uri_base, :logger, :default_model
+
+    # Initialize configuration with defaults
+    #
+    # @return [Configuration]
+    def initialize
+      @uri_base = "https://oai.helicone.ai/v1"
+      @default_model = "gpt-4"
+      @logger = Logger.new($stdout, level: Logger::INFO)
+    end
+  end
+
+  class << self
+    attr_writer :configuration
+
+    # Get the current configuration
+    #
+    # @return [Configuration]
+    def configuration
+      @configuration ||= Configuration.new
+    end
+
+    # Configure the gem
+    #
+    # @yield [Configuration] configuration object
+    # @return [void]
+    def configure
+      yield(configuration)
+    end
+
+    # Reset configuration to defaults
+    #
+    # @return [Configuration]
+    def reset_configuration!
+      @configuration = Configuration.new
+    end
+  end
+end
data/lib/helicone/message.rb
ADDED
@@ -0,0 +1,151 @@
+# frozen_string_literal: true
+
+module Helicone
+  class Message
+    attr_reader :role, :content, :tool_call_id
+
+    # Initialize a message
+    #
+    # @param role [String, Symbol] Message role ("user", "assistant", "system", "tool")
+    # @param content [String, Array<Hash>] Message content (text or structured content)
+    # @param tool_call_id [String] Tool call ID (required for tool result messages)
+    def initialize(role:, content:, tool_call_id: nil)
+      @role = role.to_s
+      @content = content
+      @tool_call_id = tool_call_id
+    end
+
+    # Build a user message with text
+    #
+    # @param text [String] The text content
+    # @return [Helicone::Message]
+    def self.user_text(text)
+      new(role: "user", content: text)
+    end
+
+    # Build an assistant message with text
+    #
+    # @param text [String] The text content
+    # @return [Helicone::Message]
+    def self.assistant_text(text)
+      new(role: "assistant", content: text)
+    end
+
+    # Build a system message
+    #
+    # @param text [String] The system prompt text
+    # @return [Helicone::Message]
+    def self.system(text)
+      new(role: "system", content: text)
+    end
+
+    # Build a user message with text and images
+    #
+    # @param text [String] The text content
+    # @param images [Array<String>, String] Image URL(s) or base64 data URI(s)
+    # @param detail [String] Image detail level: "auto", "low", or "high"
+    # @return [Helicone::Message]
+    def self.user_with_images(text, images, detail: "auto")
+      content = []
+      content << { type: "text", text: text }
+
+      Array(images).each do |image|
+        content << {
+          type: "image_url",
+          image_url: {
+            url: image,
+            detail: detail
+          }
+        }
+      end
+
+      new(role: "user", content: content)
+    end
+
+    # Build a user message with a single image
+    #
+    # @param image_url [String] URL or base64 data URI of the image
+    # @param text [String] Optional text content to include
+    # @param detail [String] Image detail level: "auto", "low", or "high"
+    # @return [Helicone::Message]
+    def self.user_image(image_url, text: nil, detail: "auto")
+      content = []
+      content << { type: "text", text: text } if text
+      content << {
+        type: "image_url",
+        image_url: {
+          url: image_url,
+          detail: detail
+        }
+      }
+
+      new(role: "user", content: content)
+    end
+
+    # Build a tool result message
+    #
+    # @param tool_call_id [String] The ID of the tool call being responded to
+    # @param content [String, Hash] The tool result (will be JSON-encoded if not a string)
+    # @return [Helicone::Message]
+    def self.tool_result(tool_call_id:, content:)
+      content_str = content.is_a?(String) ? content : content.to_json
+      new(role: "tool", content: content_str, tool_call_id: tool_call_id)
+    end
+
+    # Build an assistant message that contains tool_calls (from API response)
+    # This stores the raw message so it can be returned as-is for the API
+    # Note: We transform null content to empty string as the API rejects null
+    #
+    # @param raw_message [Hash] The raw message hash from the API response
+    # @return [Helicone::Message]
+    def self.assistant_with_tool_calls(raw_message)
+      msg = new(role: "assistant", content: nil)
+      # Deep duplicate the message to avoid mutating the original
+      sanitized_message = deep_dup(raw_message)
+      if sanitized_message[:content].nil?
+        sanitized_message[:content] = ""
+      end
+      msg.instance_variable_set(:@raw_message, sanitized_message)
+      msg
+    end
+
+    # Convert to hash for API request
+    #
+    # @return [Hash] Message formatted for the API
+    def to_h
+      # If this is a raw message (assistant with tool_calls), return it as-is
+      if @raw_message
+        @raw_message
+      else
+        hash = { role: role, content: content }
+        hash[:tool_call_id] = tool_call_id if tool_call_id
+        hash
+      end
+    end
+
+    # Check if this message has tool calls
+    #
+    # @return [Boolean]
+    def tool_calls?
+      tool_calls = @raw_message&.dig(:tool_calls)
+      !tool_calls.nil? && !tool_calls.empty?
+    end
+
+    alias_method :to_hash, :to_h
+
+    # Deep duplicate a hash/array structure
+    #
+    # @param obj [Object] Object to duplicate
+    # @return [Object] Deep copy of the object
+    def self.deep_dup(obj)
+      case obj
+      when Hash
+        obj.each_with_object({}) { |(k, v), h| h[k] = deep_dup(v) }
+      when Array
+        obj.map { |v| deep_dup(v) }
+      else
+        obj.respond_to?(:dup) ? obj.dup : obj
+      end
+    end
+  end
+end
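To make the `#to_h` conversion above concrete, two tiny examples of what the class produces for the API; the return values shown in comments are inferred from the code above.

```ruby
# Plain user text keeps role and content; tool_call_id is omitted when nil.
Helicone::Message.user_text("Hello").to_h
# => { role: "user", content: "Hello" }

# Non-string tool results are JSON-encoded and carry the tool_call_id.
Helicone::Message.tool_result(tool_call_id: "call_123", content: { ok: true }).to_h
# => { role: "tool", content: "{\"ok\":true}", tool_call_id: "call_123" }
```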
data/lib/helicone/response.rb
ADDED
@@ -0,0 +1,128 @@
+# frozen_string_literal: true
+
+module Helicone
+  class Response
+    attr_reader :raw
+
+    # Initialize a response wrapper
+    #
+    # @param raw [Hash] The raw API response (symbolized keys)
+    def initialize(raw)
+      @raw = raw
+    end
+
+    # The assistant's text response content
+    #
+    # @return [String, nil]
+    def content
+      message&.dig(:content)
+    end
+
+    # The full message object from the first choice
+    #
+    # @return [Hash, nil]
+    def message
+      raw.dig(:choices, 0, :message)
+    end
+
+    # The role of the response (usually "assistant")
+    #
+    # @return [String, nil]
+    def role
+      message&.dig(:role)
+    end
+
+    # All choices returned (for n > 1)
+    #
+    # @return [Array<Hash>]
+    def choices
+      raw[:choices] || []
+    end
+
+    # The finish reason: "stop", "length", "tool_calls", etc.
+    #
+    # @return [String, nil]
+    def finish_reason
+      raw.dig(:choices, 0, :finish_reason)
+    end
+
+    # Usage statistics
+    #
+    # @return [Hash, nil]
+    def usage
+      raw[:usage]
+    end
+
+    # Number of tokens in the prompt
+    #
+    # @return [Integer, nil]
+    def prompt_tokens
+      usage&.dig(:prompt_tokens)
+    end
+
+    # Number of tokens in the completion
+    #
+    # @return [Integer, nil]
+    def completion_tokens
+      usage&.dig(:completion_tokens)
+    end
+
+    # Total tokens used (prompt + completion)
+    #
+    # @return [Integer, nil]
+    def total_tokens
+      usage&.dig(:total_tokens)
+    end
+
+    # Model used for the completion
+    #
+    # @return [String, nil]
+    def model
+      raw[:model]
+    end
+
+    # Unique ID for this completion
+    #
+    # @return [String, nil]
+    def id
+      raw[:id]
+    end
+
+    # Whether the response completed successfully
+    #
+    # @return [Boolean]
+    def success?
+      (content && !content.empty?) || finish_reason == "stop"
+    end
+
+    # Tool calls if any were made
+    #
+    # @return [Array<Hash>, nil]
+    def tool_calls
+      message&.dig(:tool_calls)
+    end
+
+    # Convert response content back to a Message for conversation history
+    #
+    # @return [Helicone::Message]
+    def to_message
+      Message.new(role: role, content: content)
+    end
+
+    # Delegate hash-like access to raw response
+    #
+    # @param key [Symbol] Key to access
+    # @return [Object, nil]
+    def [](key)
+      raw[key]
+    end
+
+    # Dig into nested data in raw response
+    #
+    # @param keys [Array<Symbol>] Keys to dig through
+    # @return [Object, nil]
+    def dig(*keys)
+      raw.dig(*keys)
+    end
+  end
+end
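A quick sketch of the accessors above against a hand-built payload; a real chat completion carries more fields, and this hash is only the minimum the wrapper reads.

```ruby
# Minimal symbolized payload, shaped like the fields Response digs into above.
raw = {
  id: "chatcmpl-1",
  model: "gpt-4",
  choices: [{ message: { role: "assistant", content: "Hi!" }, finish_reason: "stop" }],
  usage: { prompt_tokens: 5, completion_tokens: 2, total_tokens: 7 }
}

response = Helicone::Response.new(raw)
response.content       # => "Hi!"
response.finish_reason # => "stop"
response.total_tokens  # => 7
response.success?      # => true
```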
data/lib/helicone/tool.rb
ADDED
@@ -0,0 +1,102 @@
+# frozen_string_literal: true
+
+module Helicone
+  class Tool
+    class << self
+      attr_reader :tool_description, :tool_parameters
+
+      def inherited(subclass)
+        super
+        # Ensure subclasses get their own class instance variables
+        subclass.instance_variable_set(:@tool_description, nil)
+        subclass.instance_variable_set(:@tool_parameters, nil)
+      end
+
+      # Set the tool description
+      #
+      # @param text [String] Description of what this tool does
+      # @return [void]
+      def description(text)
+        @tool_description = text.strip
+      end
+
+      # Set the tool parameters schema
+      #
+      # @param schema [Hash] JSON Schema for the tool parameters
+      # @return [void]
+      def parameters(schema)
+        @tool_parameters = schema
+      end
+
+      # Get or set a custom tool name
+      #
+      # @param custom_name [String, nil] Custom name to set, or nil to get current name
+      # @return [String] The tool name
+      def tool_name(custom_name = nil)
+        if custom_name
+          @tool_name = custom_name
+        else
+          @tool_name || derive_function_name
+        end
+      end
+
+      # Generate the function name from class name
+      #
+      # @return [String] The function name for API calls
+      def function_name
+        @tool_name || derive_function_name
+      end
+
+      # Generate OpenAI tool definition
+      #
+      # @return [Hash] Tool definition formatted for OpenAI API
+      def to_openai_tool
+        {
+          type: "function",
+          function: {
+            name: function_name,
+            description: tool_description,
+            parameters: tool_parameters || { type: "object", properties: {}, required: [] }
+          }
+        }
+      end
+
+      private
+
+      # Derive function name from class name without Rails dependencies
+      # Converts "MyModule::SomeToolClass" to "some_tool_class" (without _tool suffix)
+      #
+      # @return [String]
+      def derive_function_name
+        # Get the class name without module prefix (like demodulize)
+        class_name = name.to_s.split("::").last
+
+        # Convert CamelCase to snake_case (like underscore)
+        snake_case = class_name
+          .gsub(/([A-Z]+)([A-Z][a-z])/, '\1_\2')
+          .gsub(/([a-z\d])([A-Z])/, '\1_\2')
+          .downcase
+
+        # Remove _tool suffix if present
+        snake_case.sub(/_tool$/, "")
+      end
+    end
+
+    attr_reader :context
+
+    # Initialize a tool instance
+    #
+    # @param context [Object] Context object passed from the agent
+    def initialize(context = nil)
+      @context = context
+    end
+
+    # Execute the tool with the given arguments
+    #
+    # @param args [Hash] Arguments parsed from the tool call
+    # @return [Hash] Result to be returned to the LLM
+    def execute(**args)
+      raise NotImplementedError, "Subclasses must implement #execute"
+    end
+  end
+end
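The name derivation above is easiest to see on an example. Using the `WeatherTool` class name from the README (a placeholder), the class name is demodulized, snake_cased, and stripped of a trailing `_tool`:

```ruby
# Illustration of function_name and to_openai_tool for a bare tool subclass.
class WeatherTool < Helicone::Tool
  description "Get current weather"
end

WeatherTool.function_name
# => "weather"

WeatherTool.to_openai_tool
# => { type: "function",
#      function: { name: "weather", description: "Get current weather",
#                  parameters: { type: "object", properties: {}, required: [] } } }
```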
data/lib/helicone/tool_call.rb
ADDED
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+module Helicone
+  class ToolCall
+    attr_reader :id, :name, :arguments
+
+    # Initialize a tool call
+    #
+    # @param id [String] The unique ID for this tool call
+    # @param name [String] The name of the function to call
+    # @param arguments [String, Hash, nil] Arguments (JSON string or Hash)
+    def initialize(id:, name:, arguments:)
+      @id = id
+      @name = name
+      @arguments = if arguments.nil?
+                     {}
+                   elsif arguments.is_a?(String)
+                     deep_symbolize_keys(JSON.parse(arguments))
+                   else
+                     deep_symbolize_keys(arguments)
+                   end
+    end
+
+    # Parse tool calls from an API response (expects symbolized keys)
+    #
+    # @param response [Helicone::Response, Array<Hash>] Response object or tool_calls array
+    # @return [Array<Helicone::ToolCall>]
+    def self.from_response(response)
+      tool_calls = response.is_a?(Response) ? response.tool_calls : response
+      return [] if tool_calls.nil?
+
+      tool_calls.map do |tc|
+        new(
+          id: tc[:id],
+          name: tc.dig(:function, :name),
+          arguments: tc.dig(:function, :arguments)
+        )
+      end
+    end
+
+    # Build a tool result message to send back to the API
+    #
+    # @param tool_call_id [String] The ID of the tool call being responded to
+    # @param content [String, Hash] The tool result
+    # @return [Helicone::Message]
+    def self.result(tool_call_id:, content:)
+      Message.tool_result(tool_call_id: tool_call_id, content: content)
+    end
+
+    # Convert to hash for inspection
+    #
+    # @return [Hash]
+    def to_h
+      {
+        id: id,
+        name: name,
+        arguments: arguments
+      }
+    end
+
+    # Access arguments like a hash (supports both string and symbol keys)
+    #
+    # @param key [String, Symbol] Key to access
+    # @return [Object, nil]
+    def [](key)
+      arguments[key.to_sym]
+    end
+
+    # Dig into nested data in arguments
+    #
+    # @param keys [Array<String, Symbol>] Keys to dig through
+    # @return [Object, nil]
+    def dig(*keys)
+      arguments.dig(*keys.map(&:to_sym))
+    end
+
+    private
+
+    # Recursively symbolize keys in a hash
+    #
+    # @param obj [Object] Object to process
+    # @return [Object] Object with symbolized keys
+    def deep_symbolize_keys(obj)
+      case obj
+      when Hash
+        obj.each_with_object({}) do |(key, value), result|
+          result[key.to_sym] = deep_symbolize_keys(value)
+        end
+      when Array
+        obj.map { |item| deep_symbolize_keys(item) }
+      else
+        obj
+      end
+    end
+  end
+end
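To illustrate the parsing above: given a tool_calls array in the symbolized shape the client produces, `from_response` JSON-decodes the arguments and exposes them with symbol keys. The values below are illustrative.

```ruby
# A single tool call as it would appear (symbolized) in an API response.
raw_tool_calls = [
  { id: "call_123", function: { name: "weather", arguments: '{"location":"Tokyo"}' } }
]

tool_call = Helicone::ToolCall.from_response(raw_tool_calls).first
tool_call.name       # => "weather"
tool_call.arguments  # => { location: "Tokyo" }
tool_call[:location] # => "Tokyo" (string or symbol keys both work via #[])
```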
data/lib/helicone.rb
ADDED
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require "openai"
+require "json"
+
+require_relative "helicone/version"
+require_relative "helicone/configuration"
+require_relative "helicone/message"
+require_relative "helicone/response"
+require_relative "helicone/tool_call"
+require_relative "helicone/tool"
+require_relative "helicone/agent"
+require_relative "helicone/agent_result"
+require_relative "helicone/client"
+
+module Helicone
+  class Error < StandardError; end
+end
metadata
ADDED
@@ -0,0 +1,117 @@
+--- !ruby/object:Gem::Specification
+name: helicone-rb
+version: !ruby/object:Gem::Version
+  version: 0.0.1
+platform: ruby
+authors:
+- Genevere
+bindir: exe
+cert_chain: []
+date: 1980-01-02 00:00:00.000000000 Z
+dependencies:
+- !ruby/object:Gem::Dependency
+  name: ruby-openai
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '7.0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '7.0'
+- !ruby/object:Gem::Dependency
+  name: rspec
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '3.0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '3.0'
+- !ruby/object:Gem::Dependency
+  name: rake
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '13.0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '13.0'
+- !ruby/object:Gem::Dependency
+  name: rubocop
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.0'
+description: A Ruby client that wraps the OpenAI API through the Helicone AI Gateway,
+  providing session tracking, cost attribution, and an agentic framework for building
+  AI applications with tool/function calling.
+email:
+- hello@genevere.com
+executables: []
+extensions: []
+extra_rdoc_files: []
+files:
+- ".rspec"
+- ".tool-versions"
+- LICENSE
+- README.md
+- Rakefile
+- helicone.gemspec
+- lib/helicone.rb
+- lib/helicone/agent.rb
+- lib/helicone/agent_result.rb
+- lib/helicone/client.rb
+- lib/helicone/configuration.rb
+- lib/helicone/message.rb
+- lib/helicone/response.rb
+- lib/helicone/tool.rb
+- lib/helicone/tool_call.rb
+- lib/helicone/version.rb
+homepage: https://github.com/genevere-inc/helicone-rb
+licenses:
+- MIT
+metadata:
+  homepage_uri: https://github.com/genevere-inc/helicone-rb
+  source_code_uri: https://github.com/genevere-inc/helicone-rb
+  changelog_uri: https://github.com/genevere-inc/helicone-rb/blob/main/CHANGELOG.md
+rdoc_options: []
+require_paths:
+- lib
+required_ruby_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - ">="
+    - !ruby/object:Gem::Version
+      version: '3.0'
+required_rubygems_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - ">="
+    - !ruby/object:Gem::Version
+      version: '0'
+requirements: []
+rubygems_version: 3.6.9
+specification_version: 4
+summary: Ruby client for Helicone AI Gateway with agentic tool support
+test_files: []