riffer 0.6.1 → 0.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. checksums.yaml +4 -4
  2. data/.agents/architecture.md +113 -0
  3. data/.agents/code-style.md +42 -0
  4. data/.agents/providers.md +46 -0
  5. data/.agents/rdoc.md +51 -0
  6. data/.agents/testing.md +56 -0
  7. data/.release-please-manifest.json +1 -1
  8. data/AGENTS.md +28 -0
  9. data/CHANGELOG.md +17 -0
  10. data/README.md +26 -36
  11. data/Rakefile +1 -1
  12. data/docs/01_OVERVIEW.md +106 -0
  13. data/docs/02_GETTING_STARTED.md +128 -0
  14. data/docs/03_AGENTS.md +226 -0
  15. data/docs/04_TOOLS.md +251 -0
  16. data/docs/05_MESSAGES.md +173 -0
  17. data/docs/06_STREAM_EVENTS.md +191 -0
  18. data/docs/07_CONFIGURATION.md +195 -0
  19. data/docs_providers/01_PROVIDERS.md +168 -0
  20. data/docs_providers/02_AMAZON_BEDROCK.md +196 -0
  21. data/docs_providers/03_ANTHROPIC.md +211 -0
  22. data/docs_providers/04_OPENAI.md +157 -0
  23. data/docs_providers/05_TEST_PROVIDER.md +163 -0
  24. data/docs_providers/06_CUSTOM_PROVIDERS.md +304 -0
  25. data/lib/riffer/agent.rb +220 -57
  26. data/lib/riffer/config.rb +20 -12
  27. data/lib/riffer/core.rb +7 -7
  28. data/lib/riffer/helpers/class_name_converter.rb +6 -3
  29. data/lib/riffer/helpers/dependencies.rb +18 -0
  30. data/lib/riffer/helpers/validations.rb +9 -0
  31. data/lib/riffer/messages/assistant.rb +23 -1
  32. data/lib/riffer/messages/base.rb +15 -0
  33. data/lib/riffer/messages/converter.rb +15 -5
  34. data/lib/riffer/messages/system.rb +8 -1
  35. data/lib/riffer/messages/tool.rb +58 -4
  36. data/lib/riffer/messages/user.rb +8 -1
  37. data/lib/riffer/messages.rb +7 -0
  38. data/lib/riffer/providers/amazon_bedrock.rb +128 -13
  39. data/lib/riffer/providers/anthropic.rb +209 -0
  40. data/lib/riffer/providers/base.rb +23 -18
  41. data/lib/riffer/providers/open_ai.rb +119 -39
  42. data/lib/riffer/providers/repository.rb +9 -4
  43. data/lib/riffer/providers/test.rb +78 -24
  44. data/lib/riffer/providers.rb +6 -0
  45. data/lib/riffer/stream_events/base.rb +13 -1
  46. data/lib/riffer/stream_events/reasoning_delta.rb +15 -1
  47. data/lib/riffer/stream_events/reasoning_done.rb +15 -1
  48. data/lib/riffer/stream_events/text_delta.rb +14 -1
  49. data/lib/riffer/stream_events/text_done.rb +14 -1
  50. data/lib/riffer/stream_events/tool_call_delta.rb +35 -0
  51. data/lib/riffer/stream_events/tool_call_done.rb +40 -0
  52. data/lib/riffer/stream_events.rb +9 -0
  53. data/lib/riffer/tool.rb +120 -0
  54. data/lib/riffer/tools/param.rb +68 -0
  55. data/lib/riffer/tools/params.rb +118 -0
  56. data/lib/riffer/tools.rb +9 -0
  57. data/lib/riffer/version.rb +1 -1
  58. data/lib/riffer.rb +23 -19
  59. metadata +41 -2
  60. data/CLAUDE.md +0 -73
data/docs_providers/04_OPENAI.md
@@ -0,0 +1,157 @@
# OpenAI Provider

The OpenAI provider connects to OpenAI's API for GPT models.

## Installation

Add the OpenAI gem to your Gemfile:

```ruby
gem 'openai'
```

## Configuration

Set your API key globally:

```ruby
Riffer.configure do |config|
  config.openai.api_key = ENV['OPENAI_API_KEY']
end
```

Or per-agent:

```ruby
class MyAgent < Riffer::Agent
  model 'openai/gpt-4o'
  provider_options api_key: ENV['CUSTOM_API_KEY']
end
```

## Supported Models

Use any OpenAI model in the `openai/model` format:

```ruby
model 'openai/gpt-4o'
model 'openai/gpt-4o-mini'
model 'openai/gpt-4-turbo'
model 'openai/gpt-3.5-turbo'
```

## Model Options

### temperature

Controls randomness (0.0-2.0):

```ruby
model_options temperature: 0.7
```

### max_tokens

The maximum number of tokens in the response:

```ruby
model_options max_tokens: 4096
```

### reasoning

Enables extended thinking (for supported models):

```ruby
model_options reasoning: 'medium' # 'low', 'medium', or 'high'
```

When reasoning is enabled, you'll receive `ReasoningDelta` and `ReasoningDone` events during streaming.

## Example

```ruby
Riffer.configure do |config|
  config.openai.api_key = ENV['OPENAI_API_KEY']
end

class CodeReviewAgent < Riffer::Agent
  model 'openai/gpt-4o'
  instructions 'You are a code reviewer. Provide constructive feedback.'
  model_options temperature: 0.3, reasoning: 'medium'
end

agent = CodeReviewAgent.new
puts agent.generate("Review this code: def add(a,b); a+b; end")
```

## Streaming

```ruby
agent.stream("Explain Ruby blocks").each do |event|
  case event
  when Riffer::StreamEvents::TextDelta
    print event.content
  when Riffer::StreamEvents::ReasoningDelta
    # Extended thinking content
    print "[thinking] #{event.content}"
  when Riffer::StreamEvents::ReasoningDone
    puts "\n[reasoning complete]"
  end
end
```

## Tool Calling

The OpenAI provider converts tools to OpenAI's function-calling format with strict mode enabled:

```ruby
class CalculatorTool < Riffer::Tool
  description "Performs basic math operations"

  params do
    required :operation, String, enum: ["add", "subtract", "multiply", "divide"]
    required :a, Float, description: "First number"
    required :b, Float, description: "Second number"
  end

  def call(context:, operation:, a:, b:)
    case operation
    when "add" then a + b
    when "subtract" then a - b
    when "multiply" then a * b
    when "divide" then a / b
    end.to_s
  end
end

class MathAgent < Riffer::Agent
  model 'openai/gpt-4o'
  uses_tools [CalculatorTool]
end
```
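
With the tool wired in, the agent is used like any other; a minimal sketch, where the prompt is illustrative and the model decides whether to invoke `CalculatorTool`:

```ruby
# Illustrative usage only -- the model may or may not call the tool.
agent = MathAgent.new
puts agent.generate("What is 12.5 multiplied by 4?")
```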

## Message Format

The provider converts Riffer messages to OpenAI format:

| Riffer Message | OpenAI Role            |
| -------------- | ---------------------- |
| `System`       | `developer`            |
| `User`         | `user`                 |
| `Assistant`    | `assistant`            |
| `Tool`         | `function_call_output` |

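As a rough illustration of that mapping, a converted conversation might be shaped like the array below. Only the roles come from the table; the remaining field names (especially for the tool output item) are assumptions, so treat this as a sketch rather than the provider's literal output:

```ruby
# Hypothetical shapes only -- roles per the table above, other keys assumed.
[
  {role: "developer", content: "You are a code reviewer."},         # Riffer::Messages::System
  {role: "user", content: "Review this diff."},                     # Riffer::Messages::User
  {role: "assistant", content: "Calling the calculator tool."},     # Riffer::Messages::Assistant
  {type: "function_call_output", call_id: "call_123", output: "50"} # Riffer::Messages::Tool
]
```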
## Direct Provider Usage

```ruby
provider = Riffer::Providers::OpenAI.new(api_key: ENV['OPENAI_API_KEY'])

response = provider.generate_text(
  prompt: "Hello!",
  model: "gpt-4o",
  temperature: 0.7
)

puts response.content
```
data/docs_providers/05_TEST_PROVIDER.md
@@ -0,0 +1,163 @@
# Test Provider

The Test provider is a mock provider for testing agents without making real API calls.

## Usage

No additional gems required. Use the `test` provider identifier:

```ruby
class TestableAgent < Riffer::Agent
  model 'test/any' # The model name doesn't matter for test provider
  instructions 'You are helpful.'
  uses_tools [MyTool]
end
```

## Stubbing Responses

Use `stub_response` to queue responses:

```ruby
# Get the provider instance from the agent
agent = TestableAgent.new
provider = agent.send(:provider_instance)

# Stub a simple text response
provider.stub_response("Hello, I'm here to help!")

# Now generate will return the stubbed response
response = agent.generate("Hi")
# => "Hello, I'm here to help!"
```

## Stubbing Tool Calls

Stub responses that trigger tool execution:

```ruby
provider.stub_response("", tool_calls: [
  {name: "my_tool", arguments: '{"query":"test"}'}
])

# Queue the response after tool execution
provider.stub_response("Based on the tool result, here's my answer.")

response = agent.generate("Use the tool")
```

## Queueing Multiple Responses

Responses are consumed in order:

```ruby
provider.stub_response("First response")
provider.stub_response("Second response")
provider.stub_response("Third response")

agent.generate("Message 1") # => "First response"
agent.generate("Message 2") # => "Second response"
agent.generate("Message 3") # => "Third response"
agent.generate("Message 4") # => "Test response" (default)
```

## Inspecting Calls

Access recorded calls for assertions:

```ruby
provider.calls
# => [
#   {messages: [...], model: "any", tools: [...], ...},
#   {messages: [...], model: "any", tools: [...], ...}
# ]

# Check what was sent
expect(provider.calls.last[:messages].last[:content]).to eq("Hi")
```
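
The `expect(...).to eq` call above is RSpec syntax; since the example test further down uses Minitest, an equivalent assertion under the same recorded-call shape might look like this:

```ruby
# Minitest equivalent of the RSpec expectation above.
assert_equal "Hi", provider.calls.last[:messages].last[:content]
```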

## Clearing State

Reset stubbed responses:

```ruby
provider.clear_stubs
```

## Example Test

```ruby
require 'minitest/autorun'

class MyAgentTest < Minitest::Test
  def setup
    @agent = TestableAgent.new
    @provider = @agent.send(:provider_instance)
  end

  def test_generates_response
    @provider.stub_response("Hello!")

    response = @agent.generate("Hi")

    assert_equal "Hello!", response
  end

  def test_executes_tool
    @provider.stub_response("", tool_calls: [
      {name: "weather_tool", arguments: '{"city":"Tokyo"}'}
    ])
    @provider.stub_response("The weather is sunny.")

    response = @agent.generate("What's the weather?")

    assert_equal "The weather is sunny.", response
    assert_equal 2, @provider.calls.length
  end

  def test_passes_context_to_tools
    @provider.stub_response("", tool_calls: [
      {name: "user_tool", arguments: '{}'}
    ])
    @provider.stub_response("Done.")

    @agent.generate("Do something", tool_context: {user_id: 123})

    # Tool receives the context
  end
end
```

## Streaming

The test provider also supports streaming:

```ruby
provider.stub_response("Hello world.")

events = []
agent.stream("Hi").each { |e| events << e }

# Events include TextDelta and TextDone
text_deltas = events.select { |e| e.is_a?(Riffer::StreamEvents::TextDelta) }
text_done = events.find { |e| e.is_a?(Riffer::StreamEvents::TextDone) }
```

## Initial Responses

Pass responses during initialization:

```ruby
provider = Riffer::Providers::Test.new(responses: [
  {content: "First"},
  {content: "Second"}
])
```

## Default Response

When no stubs are queued and initial responses are exhausted, the provider returns:

```ruby
{role: "assistant", content: "Test response"}
```
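
In other words, an entirely unstubbed call falls back to the default text, consistent with the queueing example above; a minimal sketch:

```ruby
# Nothing stubbed, no initial responses queued.
agent = TestableAgent.new
agent.generate("Anything at all")
# => "Test response"
```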
data/docs_providers/06_CUSTOM_PROVIDERS.md
@@ -0,0 +1,304 @@
# Creating Custom Providers

You can create custom providers to connect Riffer to other LLM services.

## Basic Structure

Extend `Riffer::Providers::Base` and implement the required methods:

```ruby
class Riffer::Providers::MyProvider < Riffer::Providers::Base
  def initialize(**options)
    # Initialize your client
    @api_key = options[:api_key] || ENV['MY_PROVIDER_API_KEY']
    @client = MyProviderClient.new(api_key: @api_key)
  end

  private

  def perform_generate_text(messages, model:, **options)
    # Convert messages to provider format
    formatted = convert_messages(messages)

    # Call your provider's API
    response = @client.generate(
      model: model,
      messages: formatted,
      **options
    )

    # Return a Riffer::Messages::Assistant
    Riffer::Messages::Assistant.new(
      response.text,
      tool_calls: extract_tool_calls(response)
    )
  end

  def perform_stream_text(messages, model:, **options)
    Enumerator.new do |yielder|
      formatted = convert_messages(messages)

      @client.stream(model: model, messages: formatted, **options) do |chunk|
        # Yield appropriate stream events
        case chunk.type
        when :text
          yielder << Riffer::StreamEvents::TextDelta.new(chunk.content)
        when :text_done
          yielder << Riffer::StreamEvents::TextDone.new(chunk.content)
        when :tool_call
          yielder << Riffer::StreamEvents::ToolCallDone.new(
            item_id: chunk.id,
            call_id: chunk.id,
            name: chunk.name,
            arguments: chunk.arguments
          )
        end
      end
    end
  end

  def convert_messages(messages)
    messages.map do |msg|
      case msg
      when Riffer::Messages::System
        {role: "system", content: msg.content}
      when Riffer::Messages::User
        {role: "user", content: msg.content}
      when Riffer::Messages::Assistant
        convert_assistant(msg)
      when Riffer::Messages::Tool
        {role: "tool", tool_call_id: msg.tool_call_id, content: msg.content}
      end
    end
  end

  def convert_assistant(msg)
    # Handle tool calls if present
    {role: "assistant", content: msg.content, tool_calls: msg.tool_calls}
  end

  def extract_tool_calls(response)
    return [] unless response.tool_calls

    response.tool_calls.map do |tc|
      {
        id: tc.id,
        call_id: tc.id,
        name: tc.name,
        arguments: tc.arguments
      }
    end
  end
end
```

## Using depends_on

For lazy loading of external gems:

```ruby
class Riffer::Providers::MyProvider < Riffer::Providers::Base
  def initialize(**options)
    depends_on "my_provider_gem" # Only loaded when provider is used

    @client = ::MyProviderGem::Client.new(**options)
  end
end
```

## Registering Your Provider

Add your provider to the repository:

```ruby
# In lib/riffer/providers/repository.rb or your own code

Riffer::Providers::Repository::REPO[:my_provider] = -> { Riffer::Providers::MyProvider }
```

Or create a custom repository:

```ruby
module MyApp
  module Providers
    def self.find(identifier)
      case identifier.to_sym
      when :my_provider
        Riffer::Providers::MyProvider
      else
        Riffer::Providers::Repository.find(identifier)
      end
    end
  end
end
```

## Using Your Provider

```ruby
class MyAgent < Riffer::Agent
  model 'my_provider/model-name'
end
```
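
Once the `my_provider/` prefix resolves to your provider, the agent behaves like any other Riffer agent; a minimal usage sketch (the prompt is illustrative):

```ruby
# Routed through Riffer::Providers::MyProvider via the registered identifier.
agent = MyAgent.new
puts agent.generate("Hello from my custom provider")
```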

## Tool Support

Convert tools to your provider's format:

```ruby
def perform_generate_text(messages, model:, tools: nil, **options)
  params = {
    model: model,
    messages: convert_messages(messages)
  }

  if tools && !tools.empty?
    params[:tools] = tools.map { |t| convert_tool(t) }
  end

  response = @client.generate(**params)
  # ...
end

def convert_tool(tool)
  {
    name: tool.name,
    description: tool.description,
    parameters: tool.parameters_schema
  }
end
```

## Stream Events

Use the appropriate stream event classes:

```ruby
# Text streaming
Riffer::StreamEvents::TextDelta.new("chunk of text")
Riffer::StreamEvents::TextDone.new("complete text")

# Tool calls
Riffer::StreamEvents::ToolCallDelta.new(
  item_id: "id",
  name: "tool_name",
  arguments_delta: '{"partial":'
)
Riffer::StreamEvents::ToolCallDone.new(
  item_id: "id",
  call_id: "call_id",
  name: "tool_name",
  arguments: '{"complete":"args"}'
)

# Reasoning (if supported)
Riffer::StreamEvents::ReasoningDelta.new("thinking...")
Riffer::StreamEvents::ReasoningDone.new("complete reasoning")
```

## Error Handling

Raise appropriate Riffer errors:

```ruby
def perform_generate_text(messages, model:, **options)
  response = @client.generate(...)

  if response.error?
    raise Riffer::Error, "Provider error: #{response.error_message}"
  end

  # ...
rescue MyProviderGem::AuthError => e
  raise Riffer::ArgumentError, "Authentication failed: #{e.message}"
end
```

## Complete Example

```ruby
# lib/riffer/providers/anthropic.rb

class Riffer::Providers::Anthropic < Riffer::Providers::Base
  def initialize(**options)
    depends_on "anthropic"

    api_key = options[:api_key] || ENV['ANTHROPIC_API_KEY']
    @client = ::Anthropic::Client.new(api_key: api_key)
  end

  private

  def perform_generate_text(messages, model:, tools: nil, **options)
    system_message = extract_system(messages)
    conversation = messages.reject { |m| m.is_a?(Riffer::Messages::System) }

    params = {
      model: model,
      messages: convert_messages(conversation),
      system: system_message,
      max_tokens: options[:max_tokens] || 4096
    }

    if tools && !tools.empty?
      params[:tools] = tools.map { |t| convert_tool(t) }
    end

    response = @client.messages.create(**params)
    extract_assistant_message(response)
  end

  def perform_stream_text(messages, model:, tools: nil, **options)
    Enumerator.new do |yielder|
      # Similar implementation with streaming
    end
  end

  def extract_system(messages)
    system_msg = messages.find { |m| m.is_a?(Riffer::Messages::System) }
    system_msg&.content
  end

  def convert_messages(messages)
    messages.map do |msg|
      case msg
      when Riffer::Messages::User
        {role: "user", content: msg.content}
      when Riffer::Messages::Assistant
        {role: "assistant", content: msg.content}
      when Riffer::Messages::Tool
        {role: "user", content: [{type: "tool_result", tool_use_id: msg.tool_call_id, content: msg.content}]}
      end
    end
  end

  def convert_tool(tool)
    {
      name: tool.name,
      description: tool.description,
      input_schema: tool.parameters_schema
    }
  end

  def extract_assistant_message(response)
    text = ""
    tool_calls = []

    response.content.each do |block|
      case block.type
      when "text"
        text = block.text
      when "tool_use"
        tool_calls << {
          id: block.id,
          call_id: block.id,
          name: block.name,
          arguments: block.input.to_json
        }
      end
    end

    Riffer::Messages::Assistant.new(text, tool_calls: tool_calls)
  end
end
```