llm_gateway 0.1.2 → 0.1.3

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 98a42a5943a3b9797fa7ea4d782785e4932c65f7183eab6a452a86b2ca57e585
-  data.tar.gz: cbe9e03dc5d76ebfdc8ba911b6c077b803113922f3c470c67fdadcac1b779ff8
+  metadata.gz: 21b3998df57de474c78626d8267db572418f80426ddaf82730cf8738e181d96c
+  data.tar.gz: 9a37ff5a3907a8b0d48dbd0fc83be2e50cd5757a60cacd051e93ff1dc63d734e
 SHA512:
-  metadata.gz: f58cb7936e3dcc9d643c1747f2d7404b2c1ca7474ad444f31178ce403351fba5ba1c981f91fabd1971097e7cb639b17709c9b62be9f784444d47d516801404b4
-  data.tar.gz: 45de5da177dbdd30d1490d1f5f2115a5b5d4e7adeb213d5a4a9c874bf99ed84d557e0e9988a77194bdcac2c491eb5903e4b66659d985284032bb6bfccba6cd1f
+  metadata.gz: 1be591a45a6fbee0b89846c679e0a9709e3a5493757eac3196be8194f8e592615b177c15f20d6763cf6a444bf8dbf3d899cf6e9fd9b11de7e2f02845f53ab1ff
+  data.tar.gz: ed5a981d3e7ff311d26fe4924760e286e7370da598464e4b00ad004c940015432d873be73afae7067d5fa659adbe7af701b5b6f4163432c10cdf7f950532d690
data/CHANGELOG.md CHANGED
@@ -6,6 +6,7 @@
 
 **Merged pull requests:**
 
+- feat: add tool base class [\#4](https://github.com/Hyper-Unearthing/llm_gateway/pull/4) ([billybonks](https://github.com/billybonks))
 - feat: add prompt base class [\#3](https://github.com/Hyper-Unearthing/llm_gateway/pull/3) ([billybonks](https://github.com/billybonks))
 - lint files and add coverage [\#2](https://github.com/Hyper-Unearthing/llm_gateway/pull/2) ([billybonks](https://github.com/billybonks))
 - test: vcr lookup was not working when using different commands [\#1](https://github.com/Hyper-Unearthing/llm_gateway/pull/1) ([billybonks](https://github.com/billybonks))
data/README.md CHANGED
@@ -90,6 +90,24 @@ result = teacher_prompt.run
 You can combine the Prompt class with tools for more complex interactions:
 
 ```ruby
+# Define a tool class
+class GetWeatherTool < LlmGateway::Tool
+  name 'get_weather'
+  description 'Get current weather for a location'
+  input_schema({
+    type: 'object',
+    properties: {
+      location: { type: 'string', description: 'City name' }
+    },
+    required: ['location']
+  })
+
+  def execute(input, login = nil)
+    # Your weather API implementation here
+    "The weather in #{input['location']} is sunny and 25°C"
+  end
+end
+
 class WeatherAssistantPrompt < LlmGateway::Prompt
   def initialize(model, location)
     super(model)
@@ -105,17 +123,7 @@ class WeatherAssistantPrompt < LlmGateway::Prompt
   end
 
   def tools
-    [{
-      name: 'get_weather',
-      description: 'Get current weather for a location',
-      input_schema: {
-        type: 'object',
-        properties: {
-          location: { type: 'string', description: 'City name' }
-        },
-        required: ['location']
-      }
-    }]
+    [GetWeatherTool]
   end
 end
 
@@ -127,7 +135,25 @@ result = weather_prompt.run
 ### Tool Usage (Function Calling)
 
 ```ruby
-# Define a tool
+# Define a tool class
+class GetWeatherTool < LlmGateway::Tool
+  name 'get_weather'
+  description 'Get current weather for a location'
+  input_schema({
+    type: 'object',
+    properties: {
+      location: { type: 'string', description: 'City name' }
+    },
+    required: ['location']
+  })
+
+  def execute(input, login = nil)
+    # Your weather API implementation here
+    "The weather in #{input['location']} is sunny and 25°C"
+  end
+end
+
+# Use the tool
 weather_tool = {
   name: 'get_weather',
   description: 'Get current weather for a location',
@@ -140,7 +166,6 @@ weather_tool = {
   }
 }
 
-# Use the tool
 result = LlmGateway::Client.chat(
   'claude-sonnet-4-20250514',
   'What\'s the weather in Singapore?',
@@ -155,8 +180,21 @@ result = LlmGateway::Client.chat(
 class WeatherAssistant
   def initialize
     @transcript = []
+    @weather_tool = {
+      name: 'get_weather',
+      description: 'Get current weather for a location',
+      input_schema: {
+        type: 'object',
+        properties: {
+          location: { type: 'string', description: 'City name' }
+        },
+        required: ['location']
+      }
+    }
   end
 
+  attr_reader :weather_tool
+
   def process_message(content)
     # Add user message to transcript
     @transcript << { role: 'user', content: [{ type: 'text', text: content }] }
@@ -164,7 +202,7 @@ class WeatherAssistant
     result = LlmGateway::Client.chat(
       'claude-sonnet-4-20250514',
       @transcript,
-      tools: [weather_tool],
+      tools: [@weather_tool],
       system: 'You are a helpful weather assistant.'
     )
 
@@ -195,7 +233,7 @@ class WeatherAssistant
     follow_up = LlmGateway::Client.chat(
       'claude-sonnet-4-20250514',
       @transcript,
-      tools: [weather_tool],
+      tools: [@weather_tool],
       system: 'You are a helpful weather assistant.'
     )
 
data/lib/llm_gateway/tool.rb ADDED
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+module LlmGateway
+  class Tool
+    def initialize(*args)
+      # Empty constructor to allow subclasses to call super
+    end
+
+    def self.name(value = nil)
+      @name = value if value
+      @name
+    end
+
+    def self.description(value = nil)
+      @description = value if value
+      @description
+    end
+
+    def self.input_schema(value = nil)
+      @input_schema = value if value
+      @input_schema
+    end
+
+    def self.cache(value = nil)
+      @cache = value if value
+      @cache
+    end
+
+    def self.definition
+      {
+        name: @name,
+        description: @description,
+        input_schema: @input_schema,
+        cache_control: @cache ? { type: "ephemeral" } : nil
+      }.compact
+    end
+
+    def self.tool_name
+      definition[:name]
+    end
+
+    def execute(input, login)
+      raise NotImplementedError, "Subclasses must implement execute"
+    end
+  end
+end
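For orientation, here is a minimal sketch of how the new `LlmGateway::Tool` base class is meant to be subclassed, mirroring the `GetWeatherTool` example from the README diff above. The `cache true` call and the commented return values are inferences from the `definition` method in this diff, not output captured from the released gem:

```ruby
# Illustrative only: GetWeatherTool mirrors the README example; `cache true`
# and the commented results are inferred from the Tool base class above.
require "llm_gateway"

class GetWeatherTool < LlmGateway::Tool
  name 'get_weather'
  description 'Get current weather for a location'
  input_schema({
    type: 'object',
    properties: { location: { type: 'string', description: 'City name' } },
    required: ['location']
  })
  cache true # adds cache_control: { type: "ephemeral" } to the definition

  def execute(input, login = nil)
    # A real implementation would call a weather API here
    "The weather in #{input['location']} is sunny and 25°C"
  end
end

# The class-level DSL values are collected into a provider-ready hash:
GetWeatherTool.definition
# => { name: "get_weather",
#      description: "Get current weather for a location",
#      input_schema: { type: "object", ... },
#      cache_control: { type: "ephemeral" } }

GetWeatherTool.new.execute({ 'location' => 'Singapore' })
# => "The weather in Singapore is sunny and 25°C"
```

This is presumably why the README's `tools` method can now return `[GetWeatherTool]` instead of an inline schema hash: the gateway can expand each class through `definition` and dispatch tool calls to `execute`.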
data/lib/llm_gateway/version.rb CHANGED
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module LlmGateway
-  VERSION = "0.1.2"
+  VERSION = "0.1.3"
 end
data/lib/llm_gateway.rb CHANGED
@@ -7,6 +7,7 @@ require_relative "llm_gateway/fluent_mapper"
 require_relative "llm_gateway/base_client"
 require_relative "llm_gateway/client"
 require_relative "llm_gateway/prompt"
+require_relative "llm_gateway/tool"
 
 # Load adapters - order matters for inheritance
 require_relative "llm_gateway/adapters/claude/client"
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: llm_gateway
 version: !ruby/object:Gem::Version
-  version: 0.1.2
+  version: 0.1.3
 platform: ruby
 authors:
 - billybonks
@@ -40,6 +40,7 @@ files:
 - lib/llm_gateway/errors.rb
 - lib/llm_gateway/fluent_mapper.rb
 - lib/llm_gateway/prompt.rb
+- lib/llm_gateway/tool.rb
 - lib/llm_gateway/utils.rb
 - lib/llm_gateway/version.rb
 - sig/llm_gateway.rbs