riffer 0.7.0 → 0.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. checksums.yaml +4 -4
  2. data/.agents/architecture.md +113 -0
  3. data/.agents/code-style.md +42 -0
  4. data/.agents/providers.md +46 -0
  5. data/.agents/rdoc.md +51 -0
  6. data/.agents/testing.md +56 -0
  7. data/.release-please-manifest.json +1 -1
  8. data/AGENTS.md +21 -308
  9. data/CHANGELOG.md +17 -0
  10. data/README.md +21 -112
  11. data/Rakefile +1 -1
  12. data/docs/01_OVERVIEW.md +106 -0
  13. data/docs/02_GETTING_STARTED.md +128 -0
  14. data/docs/03_AGENTS.md +226 -0
  15. data/docs/04_TOOLS.md +342 -0
  16. data/docs/05_MESSAGES.md +173 -0
  17. data/docs/06_STREAM_EVENTS.md +191 -0
  18. data/docs/07_CONFIGURATION.md +195 -0
  19. data/docs_providers/01_PROVIDERS.md +168 -0
  20. data/docs_providers/02_AMAZON_BEDROCK.md +196 -0
  21. data/docs_providers/03_ANTHROPIC.md +211 -0
  22. data/docs_providers/04_OPENAI.md +157 -0
  23. data/docs_providers/05_TEST_PROVIDER.md +163 -0
  24. data/docs_providers/06_CUSTOM_PROVIDERS.md +304 -0
  25. data/lib/riffer/agent.rb +103 -63
  26. data/lib/riffer/config.rb +20 -12
  27. data/lib/riffer/core.rb +7 -7
  28. data/lib/riffer/helpers/class_name_converter.rb +6 -3
  29. data/lib/riffer/helpers/dependencies.rb +18 -0
  30. data/lib/riffer/helpers/validations.rb +9 -0
  31. data/lib/riffer/messages/assistant.rb +23 -1
  32. data/lib/riffer/messages/base.rb +15 -0
  33. data/lib/riffer/messages/converter.rb +15 -5
  34. data/lib/riffer/messages/system.rb +8 -1
  35. data/lib/riffer/messages/tool.rb +45 -2
  36. data/lib/riffer/messages/user.rb +8 -1
  37. data/lib/riffer/messages.rb +7 -0
  38. data/lib/riffer/providers/amazon_bedrock.rb +8 -4
  39. data/lib/riffer/providers/anthropic.rb +209 -0
  40. data/lib/riffer/providers/base.rb +17 -12
  41. data/lib/riffer/providers/open_ai.rb +7 -1
  42. data/lib/riffer/providers/repository.rb +9 -4
  43. data/lib/riffer/providers/test.rb +25 -7
  44. data/lib/riffer/providers.rb +6 -0
  45. data/lib/riffer/stream_events/base.rb +13 -1
  46. data/lib/riffer/stream_events/reasoning_delta.rb +15 -1
  47. data/lib/riffer/stream_events/reasoning_done.rb +15 -1
  48. data/lib/riffer/stream_events/text_delta.rb +14 -1
  49. data/lib/riffer/stream_events/text_done.rb +14 -1
  50. data/lib/riffer/stream_events/tool_call_delta.rb +18 -11
  51. data/lib/riffer/stream_events/tool_call_done.rb +22 -12
  52. data/lib/riffer/stream_events.rb +9 -0
  53. data/lib/riffer/tool.rb +92 -25
  54. data/lib/riffer/tools/param.rb +19 -16
  55. data/lib/riffer/tools/params.rb +28 -22
  56. data/lib/riffer/tools/response.rb +90 -0
  57. data/lib/riffer/tools.rb +6 -0
  58. data/lib/riffer/version.rb +1 -1
  59. data/lib/riffer.rb +21 -21
  60. metadata +35 -1

data/lib/riffer/helpers/dependencies.rb
@@ -1,9 +1,27 @@
  # frozen_string_literal: true

+ # Helper module for lazy loading gem dependencies.
+ #
+ # Used by providers to load their required gems only when needed.
  module Riffer::Helpers::Dependencies
+   # Raised when a required gem cannot be loaded.
    class LoadError < ::LoadError; end
+
+   # Raised when a gem version requirement is not satisfied.
    class VersionError < ScriptError; end

+   # Declares a dependency on a gem.
+   #
+   # Verifies the gem is installed and satisfies version requirements,
+   # then requires it.
+   #
+   # gem_name:: String - the gem name
+   # req:: Boolean or String - true to require the gem, false to skip, or String to require a different lib
+   #
+   # Returns true if successful.
+   #
+   # Raises LoadError if the gem is not installed.
+   # Raises VersionError if the gem version does not satisfy requirements.
    def depends_on(gem_name, req: true)
      gem(gem_name)
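
For context, a minimal sketch of how a provider might use this helper. Riffer::Providers::Base includes Riffer::Helpers::Dependencies (see its diff further down), so subclasses can call depends_on directly; the gem name below is purely illustrative.

    class MyProvider < Riffer::Providers::Base
      def initialize(**options)
        # Raises Riffer::Helpers::Dependencies::LoadError if the gem is missing,
        # or VersionError if its version requirement is not satisfied.
        depends_on "some-client-gem"
      end
    end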
 

data/lib/riffer/helpers/validations.rb
@@ -1,6 +1,15 @@
  # frozen_string_literal: true

+ # Helper module for input validation.
  module Riffer::Helpers::Validations
+   # Validates that a value is a non-empty string.
+   #
+   # value:: Object - the value to validate
+   # name:: String - the name of the value for error messages
+   #
+   # Returns true if valid.
+   #
+   # Raises Riffer::ArgumentError if the value is not a string or is empty.
    def validate_is_string!(value, name = "value")
      raise Riffer::ArgumentError, "#{name} must be a String" unless value.is_a?(String)
      raise Riffer::ArgumentError, "#{name} cannot be empty" if value.strip.empty?
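
The documented behaviour in brief; the including class here is hypothetical.

    class NameChecker
      include Riffer::Helpers::Validations
    end

    checker = NameChecker.new
    checker.validate_is_string!("Ada", "name")  # => true
    checker.validate_is_string!("   ", "name")  # raises Riffer::ArgumentError, "name cannot be empty"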

data/lib/riffer/messages/assistant.rb
@@ -1,17 +1,39 @@
  # frozen_string_literal: true

+ # Represents an assistant (LLM) message in a conversation.
+ #
+ # May include tool calls when the LLM requests tool execution.
+ #
+ # msg = Riffer::Messages::Assistant.new("Hello!")
+ # msg.role # => :assistant
+ # msg.content # => "Hello!"
+ # msg.tool_calls # => []
+ #
  class Riffer::Messages::Assistant < Riffer::Messages::Base
+   # Array of tool calls requested by the assistant.
+   #
+   # Each tool call is a Hash with +:id+, +:call_id+, +:name+, and +:arguments+ keys.
+   #
+   # Returns Array of Hash.
    attr_reader :tool_calls

+   # Creates a new assistant message.
+   #
+   # content:: String - the message content
+   # tool_calls:: Array of Hash - optional tool calls
    def initialize(content, tool_calls: [])
      super(content)
      @tool_calls = tool_calls
    end

+   # Returns :assistant.
    def role
-     "assistant"
+     :assistant
    end

+   # Converts the message to a hash.
+   #
+   # Returns Hash with +:role+, +:content+, and optionally +:tool_calls+.
    def to_h
      hash = {role: role, content: content}
      hash[:tool_calls] = tool_calls unless tool_calls.empty?
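
To illustrate the documented to_h behaviour with tool calls (the values are illustrative):

    msg = Riffer::Messages::Assistant.new(
      "",
      tool_calls: [{id: "call_1", call_id: "call_1", name: "weather_tool", arguments: '{"city":"Paris"}'}]
    )
    msg.to_h[:role]        # => :assistant
    msg.to_h[:tool_calls]  # => the array passed in; the key is omitted when tool_calls is empty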

data/lib/riffer/messages/base.rb
@@ -1,16 +1,31 @@
  # frozen_string_literal: true

+ # Base class for all message types in the Riffer framework.
+ #
+ # Subclasses must implement the +role+ method.
  class Riffer::Messages::Base
+   # The message content.
+   #
+   # Returns String.
    attr_reader :content

+   # Creates a new message.
+   #
+   # content:: String - the message content
    def initialize(content)
      @content = content
    end

+   # Converts the message to a hash.
+   #
+   # Returns Hash with +:role+ and +:content+ keys.
    def to_h
      {role: role, content: content}
    end

+   # Returns the message role.
+   #
+   # Raises NotImplementedError if not implemented by subclass.
    def role
      raise NotImplementedError, "Subclasses must implement #role"
    end

data/lib/riffer/messages/converter.rb
@@ -1,6 +1,16 @@
  # frozen_string_literal: true

+ # Module for converting hashes to message objects.
+ #
+ # Included in Agent and Provider classes to handle message normalization.
  module Riffer::Messages::Converter
+   # Converts a hash or message object to a Riffer::Messages::Base subclass.
+   #
+   # msg:: Hash or Riffer::Messages::Base - the message to convert
+   #
+   # Returns Riffer::Messages::Base subclass.
+   #
+   # Raises Riffer::ArgumentError if the message format is invalid.
    def convert_to_message_object(msg)
      return msg if msg.is_a?(Riffer::Messages::Base)

@@ -21,15 +31,15 @@ module Riffer::Messages::Converter
        raise Riffer::ArgumentError, "Message hash must include a 'role' key"
      end

-     case role
-     when "user"
+     case role.to_sym
+     when :user
        Riffer::Messages::User.new(content)
-     when "assistant"
+     when :assistant
        tool_calls = hash[:tool_calls] || hash["tool_calls"] || []
        Riffer::Messages::Assistant.new(content, tool_calls: tool_calls)
-     when "system"
+     when :system
        Riffer::Messages::System.new(content)
-     when "tool"
+     when :tool
        tool_call_id = hash[:tool_call_id] || hash["tool_call_id"]
        name = hash[:name] || hash["name"]
        Riffer::Messages::Tool.new(content, tool_call_id: tool_call_id, name: name)
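
A quick sketch of the normalization this change enables, with string and symbol roles converging on the same message classes; the including class name is hypothetical.

    class Normalizer
      include Riffer::Messages::Converter
    end

    n = Normalizer.new
    n.convert_to_message_object({role: "user", content: "Hi"})            # => Riffer::Messages::User
    n.convert_to_message_object({role: :system, content: "Be concise."})  # => Riffer::Messages::System
    n.convert_to_message_object(Riffer::Messages::User.new("Hi"))         # returned unchanged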

data/lib/riffer/messages/system.rb
@@ -1,7 +1,14 @@
  # frozen_string_literal: true

+ # Represents a system message (instructions) in a conversation.
+ #
+ # msg = Riffer::Messages::System.new("You are a helpful assistant.")
+ # msg.role # => :system
+ # msg.content # => "You are a helpful assistant."
+ #
  class Riffer::Messages::System < Riffer::Messages::Base
+   # Returns :system.
    def role
-     "system"
+     :system
    end
  end

data/lib/riffer/messages/tool.rb
@@ -1,8 +1,44 @@
  # frozen_string_literal: true

+ # Represents a tool execution result in a conversation.
+ #
+ # msg = Riffer::Messages::Tool.new(
+ # "The weather is sunny.",
+ # tool_call_id: "call_123",
+ # name: "weather_tool"
+ # )
+ # msg.role # => :tool
+ # msg.tool_call_id # => "call_123"
+ # msg.error? # => false
+ #
  class Riffer::Messages::Tool < Riffer::Messages::Base
-   attr_reader :tool_call_id, :name, :error, :error_type
+   # The ID of the tool call this result responds to.
+   #
+   # Returns String.
+   attr_reader :tool_call_id

+   # The name of the tool that was called.
+   #
+   # Returns String.
+   attr_reader :name
+
+   # The error message if the tool execution failed.
+   #
+   # Returns String or nil.
+   attr_reader :error
+
+   # The type of error (:unknown_tool, :validation_error, :execution_error, :timeout_error).
+   #
+   # Returns Symbol or nil.
+   attr_reader :error_type
+
+   # Creates a new tool result message.
+   #
+   # content:: String - the tool execution result
+   # tool_call_id:: String - the ID of the tool call
+   # name:: String - the tool name
+   # error:: String or nil - optional error message
+   # error_type:: Symbol or nil - optional error type
    def initialize(content, tool_call_id:, name:, error: nil, error_type: nil)
      super(content)
      @tool_call_id = tool_call_id
@@ -11,14 +47,21 @@ class Riffer::Messages::Tool < Riffer::Messages::Base
      @error_type = error_type
    end

+   # Returns true if the tool execution resulted in an error.
+   #
+   # Returns Boolean.
    def error?
      !@error.nil?
    end

+   # Returns :tool.
    def role
-     "tool"
+     :tool
    end

+   # Converts the message to a hash.
+   #
+   # Returns Hash with message data including error info if present.
    def to_h
      hash = {role: role, content: content, tool_call_id: tool_call_id, name: name}
      if error?
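
A sketch of an error result built only from the constructor and readers documented above (values illustrative):

    failure = Riffer::Messages::Tool.new(
      "Tool execution failed",
      tool_call_id: "call_456",
      name: "weather_tool",
      error: "timed out after 30s",
      error_type: :timeout_error
    )
    failure.error?      # => true
    failure.error_type  # => :timeout_error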

data/lib/riffer/messages/user.rb
@@ -1,7 +1,14 @@
  # frozen_string_literal: true

+ # Represents a user message in a conversation.
+ #
+ # msg = Riffer::Messages::User.new("Hello!")
+ # msg.role # => :user
+ # msg.content # => "Hello!"
+ #
  class Riffer::Messages::User < Riffer::Messages::Base
+   # Returns :user.
    def role
-     "user"
+     :user
    end
  end

data/lib/riffer/messages.rb
@@ -1,4 +1,11 @@
  # frozen_string_literal: true

+ # Namespace for message types in the Riffer framework.
+ #
+ # Message objects represent the conversation between users and the assistant:
+ # - Riffer::Messages::System - System instructions
+ # - Riffer::Messages::User - User input
+ # - Riffer::Messages::Assistant - LLM responses
+ # - Riffer::Messages::Tool - Tool execution results
  module Riffer::Messages
  end

data/lib/riffer/providers/amazon_bedrock.rb
@@ -2,13 +2,17 @@

  require "json"

+ # Amazon Bedrock provider for Claude and other foundation models.
+ #
+ # Requires the +aws-sdk-bedrockruntime+ gem to be installed.
+ #
+ # See https://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/BedrockRuntime/Client.html
  class Riffer::Providers::AmazonBedrock < Riffer::Providers::Base
    # Initializes the Amazon Bedrock provider.
    #
-   # @param options [Hash] options passed to Aws::BedrockRuntime::Client
-   # @option options [String] :api_token Bearer token for API authentication (requires :region)
-   # @option options [String] :region AWS region
-   # @see https://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/BedrockRuntime/Client.html
+   # api_token:: String or nil - Bearer token for API authentication
+   # region:: String or nil - AWS region
+   # options:: Hash - additional options passed to Aws::BedrockRuntime::Client
    def initialize(api_token: nil, region: nil, **options)
      depends_on "aws-sdk-bedrockruntime"
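
A minimal initialization sketch based on the signature above; the region, environment variable name, and model identifier are illustrative assumptions.

    provider = Riffer::Providers::AmazonBedrock.new(
      api_token: ENV["BEDROCK_API_TOKEN"],  # assumed env var name
      region: "us-east-1"
    )
    provider.generate_text(prompt: "Hello", model: "anthropic.claude-3-5-sonnet-20240620-v1:0")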
 

data/lib/riffer/providers/anthropic.rb
@@ -0,0 +1,209 @@
+ # frozen_string_literal: true
+
+ require "json"
+
+ # Anthropic provider for Claude models via the Anthropic API.
+ #
+ # Requires the +anthropic+ gem to be installed.
+ #
+ # See https://github.com/anthropics/anthropic-sdk-ruby
+ class Riffer::Providers::Anthropic < Riffer::Providers::Base
+   # Initializes the Anthropic provider.
+   #
+   # api_key:: String or nil - Anthropic API key
+   # options:: Hash - additional options passed to Anthropic::Client
+   def initialize(api_key: nil, **options)
+     depends_on "anthropic"
+
+     api_key ||= Riffer.config.anthropic.api_key
+
+     @client = Anthropic::Client.new(api_key: api_key, **options)
+   end
+
+   private
+
+   def perform_generate_text(messages, model:, **options)
+     partitioned_messages = partition_messages(messages)
+     tools = options[:tools]
+
+     max_tokens = options.fetch(:max_tokens, 4096)
+
+     params = {
+       model: model,
+       messages: partitioned_messages[:conversation],
+       max_tokens: max_tokens,
+       **options.except(:tools, :max_tokens)
+     }
+
+     params[:system] = partitioned_messages[:system] if partitioned_messages[:system]
+
+     if tools && !tools.empty?
+       params[:tools] = tools.map { |t| convert_tool_to_anthropic_format(t) }
+     end
+
+     response = @client.messages.create(**params)
+     extract_assistant_message(response)
+   end
+
+   def perform_stream_text(messages, model:, **options)
+     Enumerator.new do |yielder|
+       partitioned_messages = partition_messages(messages)
+       tools = options[:tools]
+
+       max_tokens = options.fetch(:max_tokens, 4096)
+
+       params = {
+         model: model,
+         messages: partitioned_messages[:conversation],
+         max_tokens: max_tokens,
+         **options.except(:tools, :max_tokens)
+       }
+
+       params[:system] = partitioned_messages[:system] if partitioned_messages[:system]
+
+       if tools && !tools.empty?
+         params[:tools] = tools.map { |t| convert_tool_to_anthropic_format(t) }
+       end
+
+       accumulated_text = ""
+       accumulated_reasoning = ""
+       current_tool_use = nil
+
+       stream = @client.messages.stream(**params)
+       stream.each do |event|
+         case event
+         when Anthropic::Streaming::TextEvent
+           accumulated_text += event.text
+           yielder << Riffer::StreamEvents::TextDelta.new(event.text)
+
+         when Anthropic::Streaming::ThinkingEvent
+           accumulated_reasoning += event.thinking
+           yielder << Riffer::StreamEvents::ReasoningDelta.new(event.thinking)
+
+         when Anthropic::Streaming::InputJsonEvent
+           # Tool call JSON delta - we need to track the tool use block
+           if current_tool_use.nil?
+             # Find the current tool use block being built
+             current_tool_use = {id: nil, name: nil, arguments: ""}
+           end
+           current_tool_use[:arguments] += event.partial_json
+           yielder << Riffer::StreamEvents::ToolCallDelta.new(
+             item_id: current_tool_use[:id] || "pending",
+             name: current_tool_use[:name],
+             arguments_delta: event.partial_json
+           )
+
+         when Anthropic::Streaming::ContentBlockStopEvent
+           content_block = event.content_block
+           if content_block.respond_to?(:type)
+             block_type = content_block.type.to_s
+             if block_type == "tool_use"
+               # content_block.input is already a JSON string when streaming
+               arguments = content_block.input.is_a?(String) ? content_block.input : content_block.input.to_json
+               yielder << Riffer::StreamEvents::ToolCallDone.new(
+                 item_id: content_block.id,
+                 call_id: content_block.id,
+                 name: content_block.name,
+                 arguments: arguments
+               )
+               current_tool_use = nil
+             elsif block_type == "thinking" && !accumulated_reasoning.empty?
+               yielder << Riffer::StreamEvents::ReasoningDone.new(accumulated_reasoning)
+             end
+           end
+
+         when Anthropic::Streaming::MessageStopEvent
+           yielder << Riffer::StreamEvents::TextDone.new(accumulated_text)
+         end
+       end
+     end
+   end
+
+   def partition_messages(messages)
+     system_prompts = []
+     conversation_messages = []
+
+     messages.each do |message|
+       case message
+       when Riffer::Messages::System
+         system_prompts << {type: "text", text: message.content}
+       when Riffer::Messages::User
+         conversation_messages << {role: "user", content: message.content}
+       when Riffer::Messages::Assistant
+         conversation_messages << convert_assistant_to_anthropic_format(message)
+       when Riffer::Messages::Tool
+         conversation_messages << {
+           role: "user",
+           content: [{
+             type: "tool_result",
+             tool_use_id: message.tool_call_id,
+             content: message.content
+           }]
+         }
+       end
+     end
+
+     {
+       system: system_prompts.empty? ? nil : system_prompts,
+       conversation: conversation_messages
+     }
+   end
+
+   def convert_assistant_to_anthropic_format(message)
+     content = []
+     content << {type: "text", text: message.content} if message.content && !message.content.empty?
+
+     message.tool_calls.each do |tc|
+       content << {
+         type: "tool_use",
+         id: tc[:id] || tc[:call_id],
+         name: tc[:name],
+         input: parse_tool_arguments(tc[:arguments])
+       }
+     end
+
+     {role: "assistant", content: content}
+   end
+
+   def parse_tool_arguments(arguments)
+     return {} if arguments.nil? || arguments.empty?
+     arguments.is_a?(String) ? JSON.parse(arguments) : arguments
+   end
+
+   def extract_assistant_message(response)
+     content_blocks = response.content
+     raise Riffer::Error, "No content returned from Anthropic API" if content_blocks.nil? || content_blocks.empty?
+
+     text_content = ""
+     tool_calls = []
+
+     content_blocks.each do |block|
+       block_type = block.type.to_s
+       case block_type
+       when "text"
+         text_content = block.text
+       when "tool_use"
+         tool_calls << {
+           id: block.id,
+           call_id: block.id,
+           name: block.name,
+           arguments: block.input.to_json
+         }
+       end
+     end
+
+     if text_content.empty? && tool_calls.empty?
+       raise Riffer::Error, "No content returned from Anthropic API"
+     end
+
+     Riffer::Messages::Assistant.new(text_content, tool_calls: tool_calls)
+   end
+
+   def convert_tool_to_anthropic_format(tool)
+     {
+       name: tool.name,
+       description: tool.description,
+       input_schema: tool.parameters_schema
+     }
+   end
+ end
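
A usage sketch for the new provider, relying on the generate_text/stream_text interface documented on Riffer::Providers::Base below; the model identifier and the reader used on TextDelta are assumptions.

    provider = Riffer::Providers::Anthropic.new(api_key: ENV["ANTHROPIC_API_KEY"])

    reply = provider.generate_text(
      prompt: "Summarize RDoc conventions in one sentence.",
      model: "claude-sonnet-4-20250514"  # illustrative model id
    )
    reply.content     # => String
    reply.tool_calls  # => [] unless tools were requested

    provider.stream_text(prompt: "Say hi", model: "claude-sonnet-4-20250514").each do |event|
      # TextDelta events carry the incremental text (reader name assumed)
      print event.text if event.is_a?(Riffer::StreamEvents::TextDelta)
    end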

data/lib/riffer/providers/base.rb
@@ -1,17 +1,21 @@
  # frozen_string_literal: true

+ # Base class for all LLM providers in the Riffer framework.
+ #
+ # Subclasses must implement +perform_generate_text+ and +perform_stream_text+.
  class Riffer::Providers::Base
    include Riffer::Helpers::Dependencies
    include Riffer::Messages::Converter

    # Generates text using the provider.
    #
-   # @param prompt [String, nil] the user prompt (required when `messages` is not provided)
-   # @param system [String, nil] an optional system message
-   # @param messages [Array<Hash, Riffer::Messages::Base>, nil] optional messages array
-   # @param model [String, nil] optional model string to override the configured model
-   # @param options [Hash] additional options passed to the model invocation
-   # @return [Riffer::Messages::Assistant] the generated assistant message
+   # prompt:: String or nil - the user prompt (required when messages is not provided)
+   # system:: String or nil - an optional system message
+   # messages:: Array or nil - optional messages array
+   # model:: String or nil - optional model string to override the configured model
+   # options:: Hash - additional options passed to the model invocation
+   #
+   # Returns Riffer::Messages::Assistant - the generated assistant message.
    def generate_text(prompt: nil, system: nil, messages: nil, model: nil, **options)
      validate_input!(prompt: prompt, system: system, messages: messages)
      normalized_messages = normalize_messages(prompt: prompt, system: system, messages: messages)
@@ -21,12 +25,13 @@ class Riffer::Providers::Base

    # Streams text from the provider.
    #
-   # @param prompt [String, nil] the user prompt (required when `messages` is not provided)
-   # @param system [String, nil] an optional system message
-   # @param messages [Array<Hash, Riffer::Messages::Base>, nil] optional messages array
-   # @param model [String, nil] optional model string to override the configured model
-   # @param options [Hash] additional options passed to the model invocation
-   # @return [Enumerator] an enumerator yielding stream events or chunks (provider-specific)
+   # prompt:: String or nil - the user prompt (required when messages is not provided)
+   # system:: String or nil - an optional system message
+   # messages:: Array or nil - optional messages array
+   # model:: String or nil - optional model string to override the configured model
+   # options:: Hash - additional options passed to the model invocation
+   #
+   # Returns Enumerator - an enumerator yielding stream events.
    def stream_text(prompt: nil, system: nil, messages: nil, model: nil, **options)
      validate_input!(prompt: prompt, system: system, messages: messages)
      normalized_messages = normalize_messages(prompt: prompt, system: system, messages: messages)
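
For orientation, a sketch of a custom provider built on this base class; the perform_* signatures follow the Anthropic provider above, and the echo behaviour is purely illustrative.

    class EchoProvider < Riffer::Providers::Base
      private

      # messages arrive already normalized to Riffer::Messages objects
      def perform_generate_text(messages, model:, **options)
        Riffer::Messages::Assistant.new("echo: #{messages.last.content}")
      end

      def perform_stream_text(messages, model:, **options)
        Enumerator.new do |yielder|
          yielder << Riffer::StreamEvents::TextDone.new("echo: #{messages.last.content}")
        end
      end
    end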

data/lib/riffer/providers/open_ai.rb
@@ -1,8 +1,14 @@
  # frozen_string_literal: true

+ # OpenAI provider for GPT models.
+ #
+ # Requires the +openai+ gem to be installed.
  class Riffer::Providers::OpenAI < Riffer::Providers::Base
    # Initializes the OpenAI provider.
-   # @param options [Hash] optional client options. Use `:api_key` to override `Riffer.config.openai.api_key`.
+   #
+   # options:: Hash - optional client options
+   #
+   # Use +:api_key+ to override +Riffer.config.openai.api_key+.
    def initialize(**options)
      depends_on "openai"

data/lib/riffer/providers/repository.rb
@@ -1,14 +1,19 @@
+ # Registry for finding provider classes by identifier.
  class Riffer::Providers::Repository
+   # Mapping of provider identifiers to provider class lambdas.
    REPO = {
-     openai: -> { Riffer::Providers::OpenAI },
      amazon_bedrock: -> { Riffer::Providers::AmazonBedrock },
+     anthropic: -> { Riffer::Providers::Anthropic },
+     openai: -> { Riffer::Providers::OpenAI },
      test: -> { Riffer::Providers::Test }
    }.freeze

    class << self
-     # Finds a provider class by identifier
-     # @param identifier [String, Symbol] the identifier to search for
-     # @return [Class, nil] the provider class, or nil if not found
+     # Finds a provider class by identifier.
+     #
+     # identifier:: String or Symbol - the identifier to search for
+     #
+     # Returns Class or nil - the provider class, or nil if not found.
      def find(identifier)
        REPO.fetch(identifier.to_sym, nil)&.call
      end
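
The lookup behaviour documented above, in brief:

    Riffer::Providers::Repository.find(:anthropic)  # => Riffer::Providers::Anthropic
    Riffer::Providers::Repository.find("openai")    # => Riffer::Providers::OpenAI
    Riffer::Providers::Repository.find(:unknown)    # => nil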

data/lib/riffer/providers/test.rb
@@ -1,8 +1,19 @@
  # frozen_string_literal: true

+ # Test provider for mocking LLM responses in tests.
+ #
+ # No external gems required.
  class Riffer::Providers::Test < Riffer::Providers::Base
+   # Array of recorded method calls for assertions.
+   #
+   # Returns Array of Hash.
    attr_reader :calls

+   # Initializes the test provider.
+   #
+   # options:: Hash - optional configuration
+   #
+   # Use +:responses+ to pre-configure responses.
    def initialize(**options)
      @responses = options[:responses] || []
      @current_index = 0
@@ -10,14 +21,19 @@ class Riffer::Providers::Test < Riffer::Providers::Base
      @stubbed_responses = []
    end

-   # Stubs the next response from the provider
-   # Can be called multiple times to queue responses
-   # @param content [String] the response content
-   # @param tool_calls [Array<Hash>] optional tool calls to include
-   # @example
+   # Stubs the next response from the provider.
+   #
+   # Can be called multiple times to queue responses.
+   #
+   # content:: String - the response content
+   # tool_calls:: Array of Hash - optional tool calls to include
+   #
+   # Returns void.
+   #
    # provider.stub_response("Hello")
    # provider.stub_response("", tool_calls: [{name: "my_tool", arguments: '{"key":"value"}'}])
-   # provider.stub_response("Final response") # Queued for after tool execution
+   # provider.stub_response("Final response")
+   #
    def stub_response(content, tool_calls: [])
      formatted_tool_calls = tool_calls.map.with_index do |tc, idx|
        {
@@ -30,7 +46,9 @@ class Riffer::Providers::Test < Riffer::Providers::Base
      @stubbed_responses << {role: "assistant", content: content, tool_calls: formatted_tool_calls}
    end

-   # Clears all stubbed responses
+   # Clears all stubbed responses.
+   #
+   # Returns void.
    def clear_stubs
      @stubbed_responses = []
    end
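
A testing sketch using the stubbing API above; whether a model must be supplied and exactly what +calls+ records are assumptions here.

    provider = Riffer::Providers::Test.new
    provider.stub_response("Hello from the stub")

    reply = provider.generate_text(prompt: "Hi")
    reply.content    # => "Hello from the stub"
    provider.calls   # => recorded method calls, available for assertions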

data/lib/riffer/providers.rb
@@ -1,4 +1,10 @@
  # frozen_string_literal: true

+ # Namespace for LLM provider adapters in the Riffer framework.
+ #
+ # Providers connect Riffer to LLM services:
+ # - Riffer::Providers::OpenAI - OpenAI GPT models
+ # - Riffer::Providers::AmazonBedrock - AWS Bedrock models
+ # - Riffer::Providers::Test - Mock provider for testing
  module Riffer::Providers
  end