riffer 0.6.1 → 0.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. checksums.yaml +4 -4
  2. data/.agents/architecture.md +113 -0
  3. data/.agents/code-style.md +42 -0
  4. data/.agents/providers.md +46 -0
  5. data/.agents/rdoc.md +51 -0
  6. data/.agents/testing.md +56 -0
  7. data/.release-please-manifest.json +1 -1
  8. data/AGENTS.md +28 -0
  9. data/CHANGELOG.md +17 -0
  10. data/README.md +26 -36
  11. data/Rakefile +1 -1
  12. data/docs/01_OVERVIEW.md +106 -0
  13. data/docs/02_GETTING_STARTED.md +128 -0
  14. data/docs/03_AGENTS.md +226 -0
  15. data/docs/04_TOOLS.md +251 -0
  16. data/docs/05_MESSAGES.md +173 -0
  17. data/docs/06_STREAM_EVENTS.md +191 -0
  18. data/docs/07_CONFIGURATION.md +195 -0
  19. data/docs_providers/01_PROVIDERS.md +168 -0
  20. data/docs_providers/02_AMAZON_BEDROCK.md +196 -0
  21. data/docs_providers/03_ANTHROPIC.md +211 -0
  22. data/docs_providers/04_OPENAI.md +157 -0
  23. data/docs_providers/05_TEST_PROVIDER.md +163 -0
  24. data/docs_providers/06_CUSTOM_PROVIDERS.md +304 -0
  25. data/lib/riffer/agent.rb +220 -57
  26. data/lib/riffer/config.rb +20 -12
  27. data/lib/riffer/core.rb +7 -7
  28. data/lib/riffer/helpers/class_name_converter.rb +6 -3
  29. data/lib/riffer/helpers/dependencies.rb +18 -0
  30. data/lib/riffer/helpers/validations.rb +9 -0
  31. data/lib/riffer/messages/assistant.rb +23 -1
  32. data/lib/riffer/messages/base.rb +15 -0
  33. data/lib/riffer/messages/converter.rb +15 -5
  34. data/lib/riffer/messages/system.rb +8 -1
  35. data/lib/riffer/messages/tool.rb +58 -4
  36. data/lib/riffer/messages/user.rb +8 -1
  37. data/lib/riffer/messages.rb +7 -0
  38. data/lib/riffer/providers/amazon_bedrock.rb +128 -13
  39. data/lib/riffer/providers/anthropic.rb +209 -0
  40. data/lib/riffer/providers/base.rb +23 -18
  41. data/lib/riffer/providers/open_ai.rb +119 -39
  42. data/lib/riffer/providers/repository.rb +9 -4
  43. data/lib/riffer/providers/test.rb +78 -24
  44. data/lib/riffer/providers.rb +6 -0
  45. data/lib/riffer/stream_events/base.rb +13 -1
  46. data/lib/riffer/stream_events/reasoning_delta.rb +15 -1
  47. data/lib/riffer/stream_events/reasoning_done.rb +15 -1
  48. data/lib/riffer/stream_events/text_delta.rb +14 -1
  49. data/lib/riffer/stream_events/text_done.rb +14 -1
  50. data/lib/riffer/stream_events/tool_call_delta.rb +35 -0
  51. data/lib/riffer/stream_events/tool_call_done.rb +40 -0
  52. data/lib/riffer/stream_events.rb +9 -0
  53. data/lib/riffer/tool.rb +120 -0
  54. data/lib/riffer/tools/param.rb +68 -0
  55. data/lib/riffer/tools/params.rb +118 -0
  56. data/lib/riffer/tools.rb +9 -0
  57. data/lib/riffer/version.rb +1 -1
  58. data/lib/riffer.rb +23 -19
  59. metadata +41 -2
  60. data/CLAUDE.md +0 -73
@@ -1,19 +1,73 @@
1
1
  # frozen_string_literal: true
2
2
 
3
+ # Represents a tool execution result in a conversation.
4
+ #
5
+ # msg = Riffer::Messages::Tool.new(
6
+ # "The weather is sunny.",
7
+ # tool_call_id: "call_123",
8
+ # name: "weather_tool"
9
+ # )
10
+ # msg.role # => :tool
11
+ # msg.tool_call_id # => "call_123"
12
+ # msg.error? # => false
13
+ #
3
14
  class Riffer::Messages::Tool < Riffer::Messages::Base
4
- attr_reader :tool_call_id, :name
15
+ # The ID of the tool call this result responds to.
16
+ #
17
+ # Returns String.
18
+ attr_reader :tool_call_id
5
19
 
6
- def initialize(content, tool_call_id:, name:)
20
+ # The name of the tool that was called.
21
+ #
22
+ # Returns String.
23
+ attr_reader :name
24
+
25
+ # The error message if the tool execution failed.
26
+ #
27
+ # Returns String or nil.
28
+ attr_reader :error
29
+
30
+ # The type of error (:unknown_tool, :validation_error, :execution_error, :timeout_error).
31
+ #
32
+ # Returns Symbol or nil.
33
+ attr_reader :error_type
34
+
35
+ # Creates a new tool result message.
36
+ #
37
+ # content:: String - the tool execution result
38
+ # tool_call_id:: String - the ID of the tool call
39
+ # name:: String - the tool name
40
+ # error:: String or nil - optional error message
41
+ # error_type:: Symbol or nil - optional error type
42
+ def initialize(content, tool_call_id:, name:, error: nil, error_type: nil)
7
43
  super(content)
8
44
  @tool_call_id = tool_call_id
9
45
  @name = name
46
+ @error = error
47
+ @error_type = error_type
48
+ end
49
+
50
+ # Returns true if the tool execution resulted in an error.
51
+ #
52
+ # Returns Boolean.
53
+ def error?
54
+ !@error.nil?
10
55
  end
11
56
 
57
+ # Returns :tool.
12
58
  def role
13
- "tool"
59
+ :tool
14
60
  end
15
61
 
62
+ # Converts the message to a hash.
63
+ #
64
+ # Returns Hash with message data including error info if present.
16
65
  def to_h
17
- {role: role, content: content, tool_call_id: tool_call_id, name: name}
66
+ hash = {role: role, content: content, tool_call_id: tool_call_id, name: name}
67
+ if error?
68
+ hash[:error] = error
69
+ hash[:error_type] = error_type
70
+ end
71
+ hash
18
72
  end
19
73
  end
@@ -1,7 +1,14 @@
1
1
  # frozen_string_literal: true
2
2
 
3
+ # Represents a user message in a conversation.
4
+ #
5
+ # msg = Riffer::Messages::User.new("Hello!")
6
+ # msg.role # => :user
7
+ # msg.content # => "Hello!"
8
+ #
3
9
  class Riffer::Messages::User < Riffer::Messages::Base
10
+ # Returns :user.
4
11
  def role
5
- "user"
12
+ :user
6
13
  end
7
14
  end
@@ -1,4 +1,11 @@
1
1
  # frozen_string_literal: true
2
2
 
3
+ # Namespace for message types in the Riffer framework.
4
+ #
5
+ # Message objects represent the conversation between users and the assistant:
6
+ # - Riffer::Messages::System - System instructions
7
+ # - Riffer::Messages::User - User input
8
+ # - Riffer::Messages::Assistant - LLM responses
9
+ # - Riffer::Messages::Tool - Tool execution results
3
10
  module Riffer::Messages
4
11
  end
@@ -1,12 +1,18 @@
1
1
  # frozen_string_literal: true
2
2
 
3
+ require "json"
4
+
5
+ # Amazon Bedrock provider for Claude and other foundation models.
6
+ #
7
+ # Requires the +aws-sdk-bedrockruntime+ gem to be installed.
8
+ #
9
+ # See https://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/BedrockRuntime/Client.html
3
10
  class Riffer::Providers::AmazonBedrock < Riffer::Providers::Base
4
11
  # Initializes the Amazon Bedrock provider.
5
12
  #
6
- # @param options [Hash] options passed to Aws::BedrockRuntime::Client
7
- # @option options [String] :api_token Bearer token for API authentication (requires :region)
8
- # @option options [String] :region AWS region
9
- # @see https://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/BedrockRuntime/Client.html
13
+ # api_token:: String or nil - Bearer token for API authentication
14
+ # region:: String or nil - AWS region
15
+ # options:: Hash - additional options passed to Aws::BedrockRuntime::Client
10
16
  def initialize(api_token: nil, region: nil, **options)
11
17
  depends_on "aws-sdk-bedrockruntime"
12
18
 
@@ -27,37 +33,87 @@ class Riffer::Providers::AmazonBedrock < Riffer::Providers::Base
27
33
 
28
34
  private
29
35
 
30
- def perform_generate_text(messages, model:, reasoning: nil)
36
+ def perform_generate_text(messages, model:, **options)
31
37
  partitioned_messages = partition_messages(messages)
38
+ tools = options[:tools]
32
39
 
33
40
  params = {
34
41
  model_id: model,
35
42
  system: partitioned_messages[:system],
36
- messages: partitioned_messages[:conversation]
43
+ messages: partitioned_messages[:conversation],
44
+ **options.except(:tools)
37
45
  }
38
46
 
47
+ if tools && !tools.empty?
48
+ params[:tool_config] = {
49
+ tools: tools.map { |t| convert_tool_to_bedrock_format(t) }
50
+ }
51
+ end
52
+
39
53
  response = @client.converse(**params)
40
54
  extract_assistant_message(response)
41
55
  end
42
56
 
43
- def perform_stream_text(messages, model:, reasoning: nil)
57
+ def perform_stream_text(messages, model:, **options)
44
58
  Enumerator.new do |yielder|
45
59
  partitioned_messages = partition_messages(messages)
60
+ tools = options[:tools]
46
61
 
47
62
  params = {
48
63
  model_id: model,
49
64
  system: partitioned_messages[:system],
50
- messages: partitioned_messages[:conversation]
65
+ messages: partitioned_messages[:conversation],
66
+ **options.except(:tools)
51
67
  }
52
68
 
69
+ if tools && !tools.empty?
70
+ params[:tool_config] = {
71
+ tools: tools.map { |t| convert_tool_to_bedrock_format(t) }
72
+ }
73
+ end
74
+
53
75
  accumulated_text = ""
76
+ current_tool_use = nil
54
77
 
55
78
  @client.converse_stream(**params) do |stream|
79
+ stream.on_content_block_start_event do |event|
80
+ if event.start&.tool_use
81
+ tool_use = event.start.tool_use
82
+ current_tool_use = {
83
+ id: tool_use.tool_use_id,
84
+ name: tool_use.name,
85
+ arguments: ""
86
+ }
87
+ end
88
+ end
89
+
56
90
  stream.on_content_block_delta_event do |event|
57
91
  if event.delta&.text
58
92
  delta_text = event.delta.text
59
93
  accumulated_text += delta_text
60
94
  yielder << Riffer::StreamEvents::TextDelta.new(delta_text)
95
+ elsif event.delta&.tool_use
96
+ input_delta = event.delta.tool_use.input
97
+ if current_tool_use && input_delta
98
+ current_tool_use[:arguments] += input_delta
99
+ yielder << Riffer::StreamEvents::ToolCallDelta.new(
100
+ item_id: current_tool_use[:id],
101
+ name: current_tool_use[:name],
102
+ arguments_delta: input_delta
103
+ )
104
+ end
105
+ end
106
+ end
107
+
108
+ stream.on_content_block_stop_event do |_event|
109
+ if current_tool_use
110
+ yielder << Riffer::StreamEvents::ToolCallDone.new(
111
+ item_id: current_tool_use[:id],
112
+ call_id: current_tool_use[:id],
113
+ name: current_tool_use[:name],
114
+ arguments: current_tool_use[:arguments]
115
+ )
116
+ current_tool_use = nil
61
117
  end
62
118
  end
63
119
 
@@ -79,9 +135,17 @@ class Riffer::Providers::AmazonBedrock < Riffer::Providers::Base
79
135
  when Riffer::Messages::User
80
136
  conversation_messages << {role: "user", content: [{text: message.content}]}
81
137
  when Riffer::Messages::Assistant
82
- conversation_messages << {role: "assistant", content: [{text: message.content}]}
138
+ conversation_messages << convert_assistant_to_bedrock_format(message)
83
139
  when Riffer::Messages::Tool
84
- raise NotImplementedError, "Tool messages are not supported by Amazon Bedrock provider yet"
140
+ conversation_messages << {
141
+ role: "user",
142
+ content: [{
143
+ tool_result: {
144
+ tool_use_id: message.tool_call_id,
145
+ content: [{text: message.content}]
146
+ }
147
+ }]
148
+ }
85
149
  end
86
150
  end
87
151
 
@@ -91,6 +155,28 @@ class Riffer::Providers::AmazonBedrock < Riffer::Providers::Base
91
155
  }
92
156
  end
93
157
 
158
+ def convert_assistant_to_bedrock_format(message)
159
+ content = []
160
+ content << {text: message.content} if message.content && !message.content.empty?
161
+
162
+ message.tool_calls.each do |tc|
163
+ content << {
164
+ tool_use: {
165
+ tool_use_id: tc[:id] || tc[:call_id],
166
+ name: tc[:name],
167
+ input: parse_tool_arguments(tc[:arguments])
168
+ }
169
+ }
170
+ end
171
+
172
+ {role: "assistant", content: content}
173
+ end
174
+
175
+ def parse_tool_arguments(arguments)
176
+ return {} if arguments.nil? || arguments.empty?
177
+ arguments.is_a?(String) ? JSON.parse(arguments) : arguments
178
+ end
179
+
94
180
  def extract_assistant_message(response)
95
181
  output = response.output
96
182
  raise Riffer::Error, "No output returned from Bedrock API" if output.nil? || output.message.nil?
@@ -98,9 +184,38 @@ class Riffer::Providers::AmazonBedrock < Riffer::Providers::Base
98
184
  content_blocks = output.message.content
99
185
  raise Riffer::Error, "No content returned from Bedrock API" if content_blocks.nil? || content_blocks.empty?
100
186
 
101
- text_block = content_blocks.find { |block| block.respond_to?(:text) && block.text }
102
- raise Riffer::Error, "No text content returned from Bedrock API" if text_block.nil?
187
+ text_content = ""
188
+ tool_calls = []
189
+
190
+ content_blocks.each do |block|
191
+ if block.respond_to?(:text) && block.text
192
+ text_content = block.text
193
+ elsif block.respond_to?(:tool_use) && block.tool_use
194
+ tool_calls << {
195
+ id: block.tool_use.tool_use_id,
196
+ call_id: block.tool_use.tool_use_id,
197
+ name: block.tool_use.name,
198
+ arguments: block.tool_use.input.to_json
199
+ }
200
+ end
201
+ end
202
+
203
+ if text_content.empty? && tool_calls.empty?
204
+ raise Riffer::Error, "No content returned from Bedrock API"
205
+ end
206
+
207
+ Riffer::Messages::Assistant.new(text_content, tool_calls: tool_calls)
208
+ end
103
209
 
104
- Riffer::Messages::Assistant.new(text_block.text)
210
+ def convert_tool_to_bedrock_format(tool)
211
+ {
212
+ tool_spec: {
213
+ name: tool.name,
214
+ description: tool.description,
215
+ input_schema: {
216
+ json: tool.parameters_schema
217
+ }
218
+ }
219
+ }
105
220
  end
106
221
  end
@@ -0,0 +1,209 @@
1
+ # frozen_string_literal: true
2
+
3
+ require "json"
4
+
5
+ # Anthropic provider for Claude models via the Anthropic API.
6
+ #
7
+ # Requires the +anthropic+ gem to be installed.
8
+ #
9
+ # See https://github.com/anthropics/anthropic-sdk-ruby
10
+ class Riffer::Providers::Anthropic < Riffer::Providers::Base
11
+ # Initializes the Anthropic provider.
12
+ #
13
+ # api_key:: String or nil - Anthropic API key
14
+ # options:: Hash - additional options passed to Anthropic::Client
15
+ def initialize(api_key: nil, **options)
16
+ depends_on "anthropic"
17
+
18
+ api_key ||= Riffer.config.anthropic.api_key
19
+
20
+ @client = Anthropic::Client.new(api_key: api_key, **options)
21
+ end
22
+
23
+ private
24
+
25
+ def perform_generate_text(messages, model:, **options)
26
+ partitioned_messages = partition_messages(messages)
27
+ tools = options[:tools]
28
+
29
+ max_tokens = options.fetch(:max_tokens, 4096)
30
+
31
+ params = {
32
+ model: model,
33
+ messages: partitioned_messages[:conversation],
34
+ max_tokens: max_tokens,
35
+ **options.except(:tools, :max_tokens)
36
+ }
37
+
38
+ params[:system] = partitioned_messages[:system] if partitioned_messages[:system]
39
+
40
+ if tools && !tools.empty?
41
+ params[:tools] = tools.map { |t| convert_tool_to_anthropic_format(t) }
42
+ end
43
+
44
+ response = @client.messages.create(**params)
45
+ extract_assistant_message(response)
46
+ end
47
+
48
+ def perform_stream_text(messages, model:, **options)
49
+ Enumerator.new do |yielder|
50
+ partitioned_messages = partition_messages(messages)
51
+ tools = options[:tools]
52
+
53
+ max_tokens = options.fetch(:max_tokens, 4096)
54
+
55
+ params = {
56
+ model: model,
57
+ messages: partitioned_messages[:conversation],
58
+ max_tokens: max_tokens,
59
+ **options.except(:tools, :max_tokens)
60
+ }
61
+
62
+ params[:system] = partitioned_messages[:system] if partitioned_messages[:system]
63
+
64
+ if tools && !tools.empty?
65
+ params[:tools] = tools.map { |t| convert_tool_to_anthropic_format(t) }
66
+ end
67
+
68
+ accumulated_text = ""
69
+ accumulated_reasoning = ""
70
+ current_tool_use = nil
71
+
72
+ stream = @client.messages.stream(**params)
73
+ stream.each do |event|
74
+ case event
75
+ when Anthropic::Streaming::TextEvent
76
+ accumulated_text += event.text
77
+ yielder << Riffer::StreamEvents::TextDelta.new(event.text)
78
+
79
+ when Anthropic::Streaming::ThinkingEvent
80
+ accumulated_reasoning += event.thinking
81
+ yielder << Riffer::StreamEvents::ReasoningDelta.new(event.thinking)
82
+
83
+ when Anthropic::Streaming::InputJsonEvent
84
+ # Tool call JSON delta - we need to track the tool use block
85
+ if current_tool_use.nil?
86
+ # Find the current tool use block being built
87
+ current_tool_use = {id: nil, name: nil, arguments: ""}
88
+ end
89
+ current_tool_use[:arguments] += event.partial_json
90
+ yielder << Riffer::StreamEvents::ToolCallDelta.new(
91
+ item_id: current_tool_use[:id] || "pending",
92
+ name: current_tool_use[:name],
93
+ arguments_delta: event.partial_json
94
+ )
95
+
96
+ when Anthropic::Streaming::ContentBlockStopEvent
97
+ content_block = event.content_block
98
+ if content_block.respond_to?(:type)
99
+ block_type = content_block.type.to_s
100
+ if block_type == "tool_use"
101
+ # content_block.input is already a JSON string when streaming
102
+ arguments = content_block.input.is_a?(String) ? content_block.input : content_block.input.to_json
103
+ yielder << Riffer::StreamEvents::ToolCallDone.new(
104
+ item_id: content_block.id,
105
+ call_id: content_block.id,
106
+ name: content_block.name,
107
+ arguments: arguments
108
+ )
109
+ current_tool_use = nil
110
+ elsif block_type == "thinking" && !accumulated_reasoning.empty?
111
+ yielder << Riffer::StreamEvents::ReasoningDone.new(accumulated_reasoning)
112
+ end
113
+ end
114
+
115
+ when Anthropic::Streaming::MessageStopEvent
116
+ yielder << Riffer::StreamEvents::TextDone.new(accumulated_text)
117
+ end
118
+ end
119
+ end
120
+ end
121
+
122
+ def partition_messages(messages)
123
+ system_prompts = []
124
+ conversation_messages = []
125
+
126
+ messages.each do |message|
127
+ case message
128
+ when Riffer::Messages::System
129
+ system_prompts << {type: "text", text: message.content}
130
+ when Riffer::Messages::User
131
+ conversation_messages << {role: "user", content: message.content}
132
+ when Riffer::Messages::Assistant
133
+ conversation_messages << convert_assistant_to_anthropic_format(message)
134
+ when Riffer::Messages::Tool
135
+ conversation_messages << {
136
+ role: "user",
137
+ content: [{
138
+ type: "tool_result",
139
+ tool_use_id: message.tool_call_id,
140
+ content: message.content
141
+ }]
142
+ }
143
+ end
144
+ end
145
+
146
+ {
147
+ system: system_prompts.empty? ? nil : system_prompts,
148
+ conversation: conversation_messages
149
+ }
150
+ end
151
+
152
+ def convert_assistant_to_anthropic_format(message)
153
+ content = []
154
+ content << {type: "text", text: message.content} if message.content && !message.content.empty?
155
+
156
+ message.tool_calls.each do |tc|
157
+ content << {
158
+ type: "tool_use",
159
+ id: tc[:id] || tc[:call_id],
160
+ name: tc[:name],
161
+ input: parse_tool_arguments(tc[:arguments])
162
+ }
163
+ end
164
+
165
+ {role: "assistant", content: content}
166
+ end
167
+
168
+ def parse_tool_arguments(arguments)
169
+ return {} if arguments.nil? || arguments.empty?
170
+ arguments.is_a?(String) ? JSON.parse(arguments) : arguments
171
+ end
172
+
173
+ def extract_assistant_message(response)
174
+ content_blocks = response.content
175
+ raise Riffer::Error, "No content returned from Anthropic API" if content_blocks.nil? || content_blocks.empty?
176
+
177
+ text_content = ""
178
+ tool_calls = []
179
+
180
+ content_blocks.each do |block|
181
+ block_type = block.type.to_s
182
+ case block_type
183
+ when "text"
184
+ text_content = block.text
185
+ when "tool_use"
186
+ tool_calls << {
187
+ id: block.id,
188
+ call_id: block.id,
189
+ name: block.name,
190
+ arguments: block.input.to_json
191
+ }
192
+ end
193
+ end
194
+
195
+ if text_content.empty? && tool_calls.empty?
196
+ raise Riffer::Error, "No content returned from Anthropic API"
197
+ end
198
+
199
+ Riffer::Messages::Assistant.new(text_content, tool_calls: tool_calls)
200
+ end
201
+
202
+ def convert_tool_to_anthropic_format(tool)
203
+ {
204
+ name: tool.name,
205
+ description: tool.description,
206
+ input_schema: tool.parameters_schema
207
+ }
208
+ end
209
+ end
@@ -1,46 +1,51 @@
1
1
  # frozen_string_literal: true
2
2
 
3
+ # Base class for all LLM providers in the Riffer framework.
4
+ #
5
+ # Subclasses must implement +perform_generate_text+ and +perform_stream_text+.
3
6
  class Riffer::Providers::Base
4
7
  include Riffer::Helpers::Dependencies
5
8
  include Riffer::Messages::Converter
6
9
 
7
10
  # Generates text using the provider.
8
11
  #
9
- # @param prompt [String, nil] the user prompt (required when `messages` is not provided)
10
- # @param system [String, nil] an optional system message
11
- # @param messages [Array<Hash, Riffer::Messages::Base>, nil] optional messages array
12
- # @param model [String, nil] optional model string to override the configured model
13
- # @param reasoning [String, nil] optional reasoning level or instructions
14
- # @return [Riffer::Messages::Assistant] the generated assistant message
15
- def generate_text(prompt: nil, system: nil, messages: nil, model: nil, reasoning: nil)
12
+ # prompt:: String or nil - the user prompt (required when messages is not provided)
13
+ # system:: String or nil - an optional system message
14
+ # messages:: Array or nil - optional messages array
15
+ # model:: String or nil - optional model string to override the configured model
16
+ # options:: Hash - additional options passed to the model invocation
17
+ #
18
+ # Returns Riffer::Messages::Assistant - the generated assistant message.
19
+ def generate_text(prompt: nil, system: nil, messages: nil, model: nil, **options)
16
20
  validate_input!(prompt: prompt, system: system, messages: messages)
17
21
  normalized_messages = normalize_messages(prompt: prompt, system: system, messages: messages)
18
22
  validate_normalized_messages!(normalized_messages)
19
- perform_generate_text(normalized_messages, model: model, reasoning: reasoning)
23
+ perform_generate_text(normalized_messages, model: model, **options)
20
24
  end
21
25
 
22
26
  # Streams text from the provider.
23
27
  #
24
- # @param prompt [String, nil] the user prompt (required when `messages` is not provided)
25
- # @param system [String, nil] an optional system message
26
- # @param messages [Array<Hash, Riffer::Messages::Base>, nil] optional messages array
27
- # @param model [String, nil] optional model string to override the configured model
28
- # @param reasoning [String, nil] optional reasoning level or instructions
29
- # @return [Enumerator] an enumerator yielding stream events or chunks (provider-specific)
30
- def stream_text(prompt: nil, system: nil, messages: nil, model: nil, reasoning: nil)
28
+ # prompt:: String or nil - the user prompt (required when messages is not provided)
29
+ # system:: String or nil - an optional system message
30
+ # messages:: Array or nil - optional messages array
31
+ # model:: String or nil - optional model string to override the configured model
32
+ # options:: Hash - additional options passed to the model invocation
33
+ #
34
+ # Returns Enumerator - an enumerator yielding stream events.
35
+ def stream_text(prompt: nil, system: nil, messages: nil, model: nil, **options)
31
36
  validate_input!(prompt: prompt, system: system, messages: messages)
32
37
  normalized_messages = normalize_messages(prompt: prompt, system: system, messages: messages)
33
38
  validate_normalized_messages!(normalized_messages)
34
- perform_stream_text(normalized_messages, model: model, reasoning: reasoning)
39
+ perform_stream_text(normalized_messages, model: model, **options)
35
40
  end
36
41
 
37
42
  private
38
43
 
39
- def perform_generate_text(messages, model: nil, reasoning: nil)
44
+ def perform_generate_text(messages, model: nil, **options)
40
45
  raise NotImplementedError, "Subclasses must implement #perform_generate_text"
41
46
  end
42
47
 
43
- def perform_stream_text(messages, model: nil, reasoning: nil)
48
+ def perform_stream_text(messages, model: nil, **options)
44
49
  raise NotImplementedError, "Subclasses must implement #perform_stream_text"
45
50
  end
46
51