riffer 0.6.1 → 0.7.0
- checksums.yaml +4 -4
- data/.release-please-manifest.json +1 -1
- data/AGENTS.md +315 -0
- data/CHANGELOG.md +7 -0
- data/README.md +82 -1
- data/lib/riffer/agent.rb +135 -26
- data/lib/riffer/messages/tool.rb +14 -3
- data/lib/riffer/providers/amazon_bedrock.rb +120 -9
- data/lib/riffer/providers/base.rb +8 -8
- data/lib/riffer/providers/open_ai.rb +112 -38
- data/lib/riffer/providers/test.rb +60 -24
- data/lib/riffer/stream_events/tool_call_delta.rb +28 -0
- data/lib/riffer/stream_events/tool_call_done.rb +30 -0
- data/lib/riffer/tool.rb +88 -0
- data/lib/riffer/tools/param.rb +65 -0
- data/lib/riffer/tools/params.rb +112 -0
- data/lib/riffer/tools.rb +4 -0
- data/lib/riffer/version.rb +1 -1
- data/lib/riffer.rb +4 -0
- metadata +8 -2
- data/CLAUDE.md +0 -73
data/lib/riffer/messages/tool.rb
CHANGED
@@ -1,12 +1,18 @@
 # frozen_string_literal: true
 
 class Riffer::Messages::Tool < Riffer::Messages::Base
-  attr_reader :tool_call_id, :name
+  attr_reader :tool_call_id, :name, :error, :error_type
 
-  def initialize(content, tool_call_id:, name:)
+  def initialize(content, tool_call_id:, name:, error: nil, error_type: nil)
     super(content)
     @tool_call_id = tool_call_id
     @name = name
+    @error = error
+    @error_type = error_type
+  end
+
+  def error?
+    !@error.nil?
   end
 
   def role
@@ -14,6 +20,11 @@ class Riffer::Messages::Tool < Riffer::Messages::Base
   end
 
   def to_h
-    {role: role, content: content, tool_call_id: tool_call_id, name: name}
+    hash = {role: role, content: content, tool_call_id: tool_call_id, name: name}
+    if error?
+      hash[:error] = error
+      hash[:error_type] = error_type
+    end
+    hash
   end
 end
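The new error and error_type fields let a failed tool execution round-trip through the message history, while to_h keeps its old shape for successful calls. A minimal sketch of how a caller might build an error-carrying tool message (the tool name, id, and error values are hypothetical):

  # Hypothetical failure case: the tool raised before producing a result.
  message = Riffer::Messages::Tool.new(
    "Execution failed",
    tool_call_id: "call_123",    # hypothetical id
    name: "get_weather",         # hypothetical tool name
    error: "timeout after 5s",
    error_type: "Timeout::Error"
  )

  message.error? # => true
  message.to_h   # includes :error and :error_type only when error? is true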
data/lib/riffer/providers/amazon_bedrock.rb
CHANGED
@@ -1,5 +1,7 @@
 # frozen_string_literal: true
 
+require "json"
+
 class Riffer::Providers::AmazonBedrock < Riffer::Providers::Base
   # Initializes the Amazon Bedrock provider.
   #
@@ -27,37 +29,87 @@ class Riffer::Providers::AmazonBedrock < Riffer::Providers::Base
 
   private
 
-  def perform_generate_text(messages, model:,
+  def perform_generate_text(messages, model:, **options)
     partitioned_messages = partition_messages(messages)
+    tools = options[:tools]
 
     params = {
       model_id: model,
       system: partitioned_messages[:system],
-      messages: partitioned_messages[:conversation]
+      messages: partitioned_messages[:conversation],
+      **options.except(:tools)
     }
 
+    if tools && !tools.empty?
+      params[:tool_config] = {
+        tools: tools.map { |t| convert_tool_to_bedrock_format(t) }
+      }
+    end
+
     response = @client.converse(**params)
     extract_assistant_message(response)
   end
 
-  def perform_stream_text(messages, model:,
+  def perform_stream_text(messages, model:, **options)
     Enumerator.new do |yielder|
       partitioned_messages = partition_messages(messages)
+      tools = options[:tools]
 
       params = {
         model_id: model,
         system: partitioned_messages[:system],
-        messages: partitioned_messages[:conversation]
+        messages: partitioned_messages[:conversation],
+        **options.except(:tools)
       }
 
+      if tools && !tools.empty?
+        params[:tool_config] = {
+          tools: tools.map { |t| convert_tool_to_bedrock_format(t) }
+        }
+      end
+
       accumulated_text = ""
+      current_tool_use = nil
 
       @client.converse_stream(**params) do |stream|
+        stream.on_content_block_start_event do |event|
+          if event.start&.tool_use
+            tool_use = event.start.tool_use
+            current_tool_use = {
+              id: tool_use.tool_use_id,
+              name: tool_use.name,
+              arguments: ""
+            }
+          end
+        end
+
         stream.on_content_block_delta_event do |event|
           if event.delta&.text
             delta_text = event.delta.text
             accumulated_text += delta_text
             yielder << Riffer::StreamEvents::TextDelta.new(delta_text)
+          elsif event.delta&.tool_use
+            input_delta = event.delta.tool_use.input
+            if current_tool_use && input_delta
+              current_tool_use[:arguments] += input_delta
+              yielder << Riffer::StreamEvents::ToolCallDelta.new(
+                item_id: current_tool_use[:id],
+                name: current_tool_use[:name],
+                arguments_delta: input_delta
+              )
+            end
+          end
+        end
+
+        stream.on_content_block_stop_event do |_event|
+          if current_tool_use
+            yielder << Riffer::StreamEvents::ToolCallDone.new(
+              item_id: current_tool_use[:id],
+              call_id: current_tool_use[:id],
+              name: current_tool_use[:name],
+              arguments: current_tool_use[:arguments]
+            )
+            current_tool_use = nil
           end
         end
 
@@ -79,9 +131,17 @@ class Riffer::Providers::AmazonBedrock < Riffer::Providers::Base
       when Riffer::Messages::User
         conversation_messages << {role: "user", content: [{text: message.content}]}
       when Riffer::Messages::Assistant
-        conversation_messages <<
+        conversation_messages << convert_assistant_to_bedrock_format(message)
       when Riffer::Messages::Tool
-
+        conversation_messages << {
+          role: "user",
+          content: [{
+            tool_result: {
+              tool_use_id: message.tool_call_id,
+              content: [{text: message.content}]
+            }
+          }]
+        }
       end
     end
 
@@ -91,6 +151,28 @@ class Riffer::Providers::AmazonBedrock < Riffer::Providers::Base
     }
   end
 
+  def convert_assistant_to_bedrock_format(message)
+    content = []
+    content << {text: message.content} if message.content && !message.content.empty?
+
+    message.tool_calls.each do |tc|
+      content << {
+        tool_use: {
+          tool_use_id: tc[:id] || tc[:call_id],
+          name: tc[:name],
+          input: parse_tool_arguments(tc[:arguments])
+        }
+      }
+    end
+
+    {role: "assistant", content: content}
+  end
+
+  def parse_tool_arguments(arguments)
+    return {} if arguments.nil? || arguments.empty?
+    arguments.is_a?(String) ? JSON.parse(arguments) : arguments
+  end
+
   def extract_assistant_message(response)
     output = response.output
     raise Riffer::Error, "No output returned from Bedrock API" if output.nil? || output.message.nil?
@@ -98,9 +180,38 @@ class Riffer::Providers::AmazonBedrock < Riffer::Providers::Base
     content_blocks = output.message.content
     raise Riffer::Error, "No content returned from Bedrock API" if content_blocks.nil? || content_blocks.empty?
 
-
-
+    text_content = ""
+    tool_calls = []
+
+    content_blocks.each do |block|
+      if block.respond_to?(:text) && block.text
+        text_content = block.text
+      elsif block.respond_to?(:tool_use) && block.tool_use
+        tool_calls << {
+          id: block.tool_use.tool_use_id,
+          call_id: block.tool_use.tool_use_id,
+          name: block.tool_use.name,
+          arguments: block.tool_use.input.to_json
+        }
+      end
+    end
+
+    if text_content.empty? && tool_calls.empty?
+      raise Riffer::Error, "No content returned from Bedrock API"
+    end
+
+    Riffer::Messages::Assistant.new(text_content, tool_calls: tool_calls)
+  end
 
-
+  def convert_tool_to_bedrock_format(tool)
+    {
+      tool_spec: {
+        name: tool.name,
+        description: tool.description,
+        input_schema: {
+          json: tool.parameters_schema
+        }
+      }
+    }
   end
 end
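Both perform_generate_text and perform_stream_text now attach a tool_config to the Converse API call, with convert_tool_to_bedrock_format wrapping each tool in Bedrock's tool_spec envelope. A sketch of the params fragment this produces, assuming a tool whose parameters_schema returns a JSON Schema hash (the weather tool itself is hypothetical):

  # Hypothetical tool_config built from one tool via convert_tool_to_bedrock_format.
  tool_config = {
    tools: [
      {
        tool_spec: {
          name: "get_weather",                         # hypothetical
          description: "Looks up the current weather", # hypothetical
          input_schema: {
            json: {type: "object", properties: {city: {type: "string"}}, required: ["city"]}
          }
        }
      }
    ]
  }

On the way back, extract_assistant_message mirrors this: tool_use blocks become tool_calls hashes whose arguments are re-serialized to JSON strings, so downstream code sees the same shape regardless of provider.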
data/lib/riffer/providers/base.rb
CHANGED
@@ -10,13 +10,13 @@ class Riffer::Providers::Base
   # @param system [String, nil] an optional system message
   # @param messages [Array<Hash, Riffer::Messages::Base>, nil] optional messages array
   # @param model [String, nil] optional model string to override the configured model
-  # @param
+  # @param options [Hash] additional options passed to the model invocation
   # @return [Riffer::Messages::Assistant] the generated assistant message
-  def generate_text(prompt: nil, system: nil, messages: nil, model: nil,
+  def generate_text(prompt: nil, system: nil, messages: nil, model: nil, **options)
     validate_input!(prompt: prompt, system: system, messages: messages)
     normalized_messages = normalize_messages(prompt: prompt, system: system, messages: messages)
     validate_normalized_messages!(normalized_messages)
-    perform_generate_text(normalized_messages, model: model,
+    perform_generate_text(normalized_messages, model: model, **options)
   end
 
   # Streams text from the provider.
@@ -25,22 +25,22 @@ class Riffer::Providers::Base
   # @param system [String, nil] an optional system message
   # @param messages [Array<Hash, Riffer::Messages::Base>, nil] optional messages array
   # @param model [String, nil] optional model string to override the configured model
-  # @param
+  # @param options [Hash] additional options passed to the model invocation
   # @return [Enumerator] an enumerator yielding stream events or chunks (provider-specific)
-  def stream_text(prompt: nil, system: nil, messages: nil, model: nil,
+  def stream_text(prompt: nil, system: nil, messages: nil, model: nil, **options)
     validate_input!(prompt: prompt, system: system, messages: messages)
     normalized_messages = normalize_messages(prompt: prompt, system: system, messages: messages)
     validate_normalized_messages!(normalized_messages)
-    perform_stream_text(normalized_messages, model: model,
+    perform_stream_text(normalized_messages, model: model, **options)
   end
 
   private
 
-  def perform_generate_text(messages, model: nil,
+  def perform_generate_text(messages, model: nil, **options)
     raise NotImplementedError, "Subclasses must implement #perform_generate_text"
   end
 
-  def perform_stream_text(messages, model: nil,
+  def perform_stream_text(messages, model: nil, **options)
     raise NotImplementedError, "Subclasses must implement #perform_stream_text"
   end
 
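The public generate_text and stream_text signatures now accept arbitrary keywords and forward them untouched to the provider's perform_* implementation, which is how :tools and :reasoning reach the providers below. A sketch of a call under that contract (the model id and tool list are assumptions for illustration):

  provider = Riffer::Providers::OpenAI.new(api_key: ENV["OPENAI_API_KEY"])

  provider.generate_text(
    prompt: "What's the weather in Paris?",
    model: "gpt-4o",    # hypothetical model id
    reasoning: "low",   # consumed by the OpenAI provider
    tools: my_tools     # hypothetical Array of Riffer::Tool objects
  )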
data/lib/riffer/providers/open_ai.rb
CHANGED
@@ -3,83 +3,122 @@
 class Riffer::Providers::OpenAI < Riffer::Providers::Base
   # Initializes the OpenAI provider.
   # @param options [Hash] optional client options. Use `:api_key` to override `Riffer.config.openai.api_key`.
-  # @raise [Riffer::ArgumentError] if an API key is not provided either via `:api_key` or `Riffer.config.openai.api_key`.
   def initialize(**options)
     depends_on "openai"
 
     api_key = options.fetch(:api_key, Riffer.config.openai.api_key)
-    raise Riffer::ArgumentError, "OpenAI API key is required. Set it via Riffer.configure or pass :api_key option" if api_key.nil? || api_key.empty?
-
     @client = ::OpenAI::Client.new(api_key: api_key, **options.except(:api_key))
   end
 
   private
 
-  def perform_generate_text(messages, model:,
-    params = build_request_params(messages, model,
+  def perform_generate_text(messages, model:, **options)
+    params = build_request_params(messages, model, options)
     response = @client.responses.create(params)
 
-
-
-    if output.nil?
-      raise Riffer::Error, "No output returned from OpenAI API"
-    end
-
-    content = output.content.find { |c| c.type == :output_text }
-
-    if content.nil?
-      raise Riffer::Error, "No content returned from OpenAI API"
-    end
-
-    if content.type == :refusal
-      raise Riffer::Error, "Request was refused: #{content.refusal}"
-    end
-
-    if content.type != :output_text
-      raise Riffer::Error, "Unexpected content type: #{content.type}"
-    end
-
-    Riffer::Messages::Assistant.new(content.text)
+    extract_assistant_message(response.output)
   end
 
-  def perform_stream_text(messages, model:,
+  def perform_stream_text(messages, model:, **options)
     Enumerator.new do |yielder|
-      params = build_request_params(messages, model,
+      params = build_request_params(messages, model, options)
       stream = @client.responses.stream(params)
 
       process_stream_events(stream, yielder)
     end
   end
 
-  def build_request_params(messages, model,
-
-
+  def build_request_params(messages, model, options)
+    reasoning = options[:reasoning]
+    tools = options[:tools]
+
+    params = {
+      input: convert_messages_to_openai_format(messages),
       model: model,
       reasoning: reasoning && {
        effort: reasoning,
        summary: "auto"
-      }
+      },
+      **options.except(:reasoning, :tools)
     }
+
+    if tools && !tools.empty?
+      params[:tools] = tools.map { |t| convert_tool_to_openai_format(t) }
+    end
+
+    params.compact
   end
 
-  def
-    messages.
+  def convert_messages_to_openai_format(messages)
+    messages.flat_map do |message|
       case message
       when Riffer::Messages::System
         {role: "developer", content: message.content}
       when Riffer::Messages::User
         {role: "user", content: message.content}
       when Riffer::Messages::Assistant
-
+        convert_assistant_to_openai_format(message)
       when Riffer::Messages::Tool
-
+        {
+          type: "function_call_output",
+          call_id: message.tool_call_id,
+          output: message.content
+        }
+      end
+    end
+  end
+
+  def convert_assistant_to_openai_format(message)
+    if message.tool_calls.empty?
+      {role: "assistant", content: message.content}
+    else
+      items = []
+      items << {type: "message", role: "assistant", content: message.content} if message.content && !message.content.empty?
+      message.tool_calls.each do |tc|
+        items << {
+          type: "function_call",
+          id: tc[:id],
+          call_id: tc[:call_id] || tc[:id],
+          name: tc[:name],
+          arguments: tc[:arguments].is_a?(String) ? tc[:arguments] : tc[:arguments].to_json
+        }
+      end
+      items
+    end
+  end
+
+  def extract_assistant_message(output_items)
+    text_content = ""
+    tool_calls = []
+
+    output_items.each do |item|
+      case item.type
+      when :message
+        text_block = item.content&.find { |c| c.type == :output_text }
+        text_content = text_block&.text || "" if text_block
+      when :function_call
+        tool_calls << {
+          id: item.id,
+          call_id: item.call_id,
+          name: item.name,
+          arguments: item.arguments
+        }
       end
     end
+
+    if text_content.empty? && tool_calls.empty?
+      raise Riffer::Error, "No output returned from OpenAI API"
+    end
+
+    Riffer::Messages::Assistant.new(text_content, tool_calls: tool_calls)
   end
 
   def process_stream_events(stream, yielder)
+    tool_info = {}
+
     stream.each do |raw_event|
-
+      track_tool_info(raw_event, tool_info)
+      event = convert_event(raw_event, tool_info)
 
       next unless event
 
@@ -87,7 +126,17 @@ class Riffer::Providers::OpenAI < Riffer::Providers::Base
     end
   end
 
-  def
+  def track_tool_info(event, tool_info)
+    return unless event.type == :"response.output_item.added"
+    return unless event.item&.type == :function_call
+
+    tool_info[event.item.id] = {
+      name: event.item.name,
+      call_id: event.item.call_id
+    }
+  end
+
+  def convert_event(event, tool_info = {})
     case event.type
     when :"response.output_text.delta"
       Riffer::StreamEvents::TextDelta.new(event.delta)
@@ -97,6 +146,31 @@ class Riffer::Providers::OpenAI < Riffer::Providers::Base
       Riffer::StreamEvents::ReasoningDelta.new(event.delta)
     when :"response.reasoning_summary_text.done"
       Riffer::StreamEvents::ReasoningDone.new(event.text)
+    when :"response.function_call_arguments.delta"
+      tracked = tool_info[event.item_id] || {}
+      Riffer::StreamEvents::ToolCallDelta.new(
+        item_id: event.item_id,
+        name: tracked[:name],
+        arguments_delta: event.delta
+      )
+    when :"response.function_call_arguments.done"
+      tracked = tool_info[event.item_id] || {}
+      Riffer::StreamEvents::ToolCallDone.new(
+        item_id: event.item_id,
+        call_id: tracked[:call_id] || event.item_id,
+        name: tracked[:name],
+        arguments: event.arguments
+      )
     end
   end
+
+  def convert_tool_to_openai_format(tool)
+    {
+      type: "function",
+      name: tool.name,
+      description: tool.description,
+      parameters: tool.parameters_schema,
+      strict: true
+    }
+  end
 end
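Because the Responses API only carries the tool name and call_id on the response.output_item.added event, track_tool_info caches them by item id so later argument deltas can be emitted with a name attached. A sketch of consuming the resulting events, assuming a configured provider and a hypothetical my_tools list:

  require "json"

  buffers = Hash.new { |h, k| h[k] = +"" }
  completed = []

  provider.stream_text(prompt: "Weather in Paris?", model: "gpt-4o", tools: my_tools).each do |event|
    case event
    when Riffer::StreamEvents::ToolCallDelta
      buffers[event.item_id] << event.arguments_delta  # partial JSON fragment
    when Riffer::StreamEvents::ToolCallDone
      completed << {call_id: event.call_id, name: event.name, args: JSON.parse(event.arguments)}
    end
  end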
data/lib/riffer/providers/test.rb
CHANGED
@@ -7,21 +7,51 @@ class Riffer::Providers::Test < Riffer::Providers::Base
     @responses = options[:responses] || []
     @current_index = 0
     @calls = []
-    @
-    @stubbed_reasoning = nil
+    @stubbed_responses = []
   end
 
-
-
-
+  # Stubs the next response from the provider
+  # Can be called multiple times to queue responses
+  # @param content [String] the response content
+  # @param tool_calls [Array<Hash>] optional tool calls to include
+  # @example
+  #   provider.stub_response("Hello")
+  #   provider.stub_response("", tool_calls: [{name: "my_tool", arguments: '{"key":"value"}'}])
+  #   provider.stub_response("Final response") # Queued for after tool execution
+  def stub_response(content, tool_calls: [])
+    formatted_tool_calls = tool_calls.map.with_index do |tc, idx|
+      {
+        id: tc[:id] || "test_id_#{idx}",
+        call_id: tc[:call_id] || tc[:id] || "test_call_#{idx}",
+        name: tc[:name],
+        arguments: tc[:arguments].is_a?(String) ? tc[:arguments] : tc[:arguments].to_json
+      }
+    end
+    @stubbed_responses << {role: "assistant", content: content, tool_calls: formatted_tool_calls}
+  end
+
+  # Clears all stubbed responses
+  def clear_stubs
+    @stubbed_responses = []
   end
 
   private
 
-  def
-    @
-
-    @current_index
+  def next_response
+    if @stubbed_responses.any?
+      @stubbed_responses.shift
+    elsif @responses[@current_index]
+      response = @responses[@current_index]
+      @current_index += 1
+      response
+    else
+      {role: "assistant", content: "Test response"}
+    end
+  end
+
+  def perform_generate_text(messages, model: nil, **options)
+    @calls << {messages: messages.map(&:to_h), model: model, **options}
+    response = next_response
 
     if response.is_a?(Hash)
       Riffer::Messages::Assistant.new(response[:content], tool_calls: response[:tool_calls] || [])
@@ -30,26 +60,32 @@ class Riffer::Providers::Test < Riffer::Providers::Base
     end
   end
 
-  def perform_stream_text(messages, model: nil,
-    @calls << {messages: messages.map(&:to_h), model: model,
-    response =
-    @current_index += 1
+  def perform_stream_text(messages, model: nil, **options)
+    @calls << {messages: messages.map(&:to_h), model: model, **options}
+    response = next_response
     Enumerator.new do |yielder|
-
-
+      full_content = response[:content] || ""
+      tool_calls = response[:tool_calls] || []
 
-
-
+      unless full_content.empty?
+        content_parts = full_content.split(". ").map { |part| part + (part.end_with?(".") ? "" : ".") }
+        content_parts.each do |part|
+          yielder << Riffer::StreamEvents::TextDelta.new(part + " ")
         end
-
-      yielder << Riffer::StreamEvents::ReasoningDone.new(@stubbed_reasoning)
       end
 
-
-
-
-
-
+      tool_calls.each do |tc|
+        yielder << Riffer::StreamEvents::ToolCallDelta.new(
+          item_id: tc[:id],
+          name: tc[:name],
+          arguments_delta: tc[:arguments]
+        )
+        yielder << Riffer::StreamEvents::ToolCallDone.new(
+          item_id: tc[:id],
+          call_id: tc[:call_id],
+          name: tc[:name],
+          arguments: tc[:arguments]
+        )
       end
 
       yielder << Riffer::StreamEvents::TextDone.new(full_content)
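The Test provider now queues stubbed responses and replays tool calls through the same stream events as the real providers, which makes multi-turn tool loops testable offline. A sketch following the @example in the diff above (the tool name and arguments are hypothetical):

  provider = Riffer::Providers::Test.new

  provider.stub_response("", tool_calls: [{name: "my_tool", arguments: '{"key":"value"}'}])
  provider.stub_response("Final response") # consumed on the next call, after the tool runs

  first = provider.generate_text(prompt: "hi")
  first.tool_calls.first[:name]                 # => "my_tool"
  provider.generate_text(prompt: "hi").content  # => "Final response"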
data/lib/riffer/stream_events/tool_call_delta.rb
ADDED
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+# Riffer::StreamEvents::ToolCallDelta represents an incremental tool call chunk during streaming.
+#
+# Emitted when the LLM is building a tool call, containing partial argument data.
+#
+# @api public
+class Riffer::StreamEvents::ToolCallDelta < Riffer::StreamEvents::Base
+  attr_reader :item_id, :name, :arguments_delta
+
+  # Creates a new tool call delta event
+  # @param item_id [String] the tool call item identifier
+  # @param name [String, nil] the tool name (may only be present in first delta)
+  # @param arguments_delta [String] the incremental arguments JSON fragment
+  # @param role [String] the message role (defaults to "assistant")
+  def initialize(item_id:, arguments_delta:, name: nil, role: "assistant")
+    super(role: role)
+    @item_id = item_id
+    @name = name
+    @arguments_delta = arguments_delta
+  end
+
+  # Converts the event to a hash
+  # @return [Hash] the event data
+  def to_h
+    {role: @role, item_id: @item_id, name: @name, arguments_delta: @arguments_delta}.compact
+  end
+end
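Note the .compact in to_h above: a delta that arrives without a name (as every fragment after the first does in the OpenAI case) simply omits the :name key. A quick sketch with hypothetical values:

  delta = Riffer::StreamEvents::ToolCallDelta.new(item_id: "item_1", arguments_delta: '{"ci')
  delta.to_h # => {role: "assistant", item_id: "item_1", arguments_delta: "{\"ci"} (no :name key)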
data/lib/riffer/stream_events/tool_call_done.rb
ADDED
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+# Riffer::StreamEvents::ToolCallDone represents a completed tool call during streaming.
+#
+# Emitted when the LLM has finished building a tool call with complete arguments.
+#
+# @api public
+class Riffer::StreamEvents::ToolCallDone < Riffer::StreamEvents::Base
+  attr_reader :item_id, :call_id, :name, :arguments
+
+  # Creates a new tool call done event
+  # @param item_id [String] the tool call item identifier
+  # @param call_id [String] the call identifier for response matching
+  # @param name [String] the tool name
+  # @param arguments [String] the complete arguments JSON string
+  # @param role [String] the message role (defaults to "assistant")
+  def initialize(item_id:, call_id:, name:, arguments:, role: "assistant")
+    super(role: role)
+    @item_id = item_id
+    @call_id = call_id
+    @name = name
+    @arguments = arguments
+  end
+
+  # Converts the event to a hash
+  # @return [Hash] the event data
+  def to_h
+    {role: @role, item_id: @item_id, call_id: @call_id, name: @name, arguments: @arguments}
+  end
+end
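ToolCallDone carries everything needed to execute the tool and feed the result back as a Riffer::Messages::Tool, including call_id for response matching. A sketch of that handoff, assuming a name-indexed registry of callable tool objects (the registry and the #call interface are assumptions, not part of this diff):

  require "json"

  def run_tool(event, registry)
    tool = registry.fetch(event.name)                # hypothetical lookup by tool name
    result = tool.call(JSON.parse(event.arguments))  # assumes tools respond to #call
    Riffer::Messages::Tool.new(result.to_s, tool_call_id: event.call_id, name: event.name)
  rescue => e
    # Surfaces the failure via the new error fields on Messages::Tool.
    Riffer::Messages::Tool.new("Execution failed", tool_call_id: event.call_id,
      name: event.name, error: e.message, error_type: e.class.name)
  end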