riffer 0.6.1 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.agents/architecture.md +113 -0
- data/.agents/code-style.md +42 -0
- data/.agents/providers.md +46 -0
- data/.agents/rdoc.md +51 -0
- data/.agents/testing.md +56 -0
- data/.release-please-manifest.json +1 -1
- data/AGENTS.md +28 -0
- data/CHANGELOG.md +17 -0
- data/README.md +26 -36
- data/Rakefile +1 -1
- data/docs/01_OVERVIEW.md +106 -0
- data/docs/02_GETTING_STARTED.md +128 -0
- data/docs/03_AGENTS.md +226 -0
- data/docs/04_TOOLS.md +251 -0
- data/docs/05_MESSAGES.md +173 -0
- data/docs/06_STREAM_EVENTS.md +191 -0
- data/docs/07_CONFIGURATION.md +195 -0
- data/docs_providers/01_PROVIDERS.md +168 -0
- data/docs_providers/02_AMAZON_BEDROCK.md +196 -0
- data/docs_providers/03_ANTHROPIC.md +211 -0
- data/docs_providers/04_OPENAI.md +157 -0
- data/docs_providers/05_TEST_PROVIDER.md +163 -0
- data/docs_providers/06_CUSTOM_PROVIDERS.md +304 -0
- data/lib/riffer/agent.rb +220 -57
- data/lib/riffer/config.rb +20 -12
- data/lib/riffer/core.rb +7 -7
- data/lib/riffer/helpers/class_name_converter.rb +6 -3
- data/lib/riffer/helpers/dependencies.rb +18 -0
- data/lib/riffer/helpers/validations.rb +9 -0
- data/lib/riffer/messages/assistant.rb +23 -1
- data/lib/riffer/messages/base.rb +15 -0
- data/lib/riffer/messages/converter.rb +15 -5
- data/lib/riffer/messages/system.rb +8 -1
- data/lib/riffer/messages/tool.rb +58 -4
- data/lib/riffer/messages/user.rb +8 -1
- data/lib/riffer/messages.rb +7 -0
- data/lib/riffer/providers/amazon_bedrock.rb +128 -13
- data/lib/riffer/providers/anthropic.rb +209 -0
- data/lib/riffer/providers/base.rb +23 -18
- data/lib/riffer/providers/open_ai.rb +119 -39
- data/lib/riffer/providers/repository.rb +9 -4
- data/lib/riffer/providers/test.rb +78 -24
- data/lib/riffer/providers.rb +6 -0
- data/lib/riffer/stream_events/base.rb +13 -1
- data/lib/riffer/stream_events/reasoning_delta.rb +15 -1
- data/lib/riffer/stream_events/reasoning_done.rb +15 -1
- data/lib/riffer/stream_events/text_delta.rb +14 -1
- data/lib/riffer/stream_events/text_done.rb +14 -1
- data/lib/riffer/stream_events/tool_call_delta.rb +35 -0
- data/lib/riffer/stream_events/tool_call_done.rb +40 -0
- data/lib/riffer/stream_events.rb +9 -0
- data/lib/riffer/tool.rb +120 -0
- data/lib/riffer/tools/param.rb +68 -0
- data/lib/riffer/tools/params.rb +118 -0
- data/lib/riffer/tools.rb +9 -0
- data/lib/riffer/version.rb +1 -1
- data/lib/riffer.rb +23 -19
- metadata +41 -2
- data/CLAUDE.md +0 -73
data/lib/riffer/providers/open_ai.rb
CHANGED

@@ -1,85 +1,130 @@
 # frozen_string_literal: true

+# OpenAI provider for GPT models.
+#
+# Requires the +openai+ gem to be installed.
 class Riffer::Providers::OpenAI < Riffer::Providers::Base
   # Initializes the OpenAI provider.
-  #
-  #
+  #
+  # options:: Hash - optional client options
+  #
+  # Use +:api_key+ to override +Riffer.config.openai.api_key+.
   def initialize(**options)
     depends_on "openai"

     api_key = options.fetch(:api_key, Riffer.config.openai.api_key)
-    raise Riffer::ArgumentError, "OpenAI API key is required. Set it via Riffer.configure or pass :api_key option" if api_key.nil? || api_key.empty?
-
     @client = ::OpenAI::Client.new(api_key: api_key, **options.except(:api_key))
   end

   private

-  def perform_generate_text(messages, model:,
-    params = build_request_params(messages, model,
+  def perform_generate_text(messages, model:, **options)
+    params = build_request_params(messages, model, options)
     response = @client.responses.create(params)

-
-
-    if output.nil?
-      raise Riffer::Error, "No output returned from OpenAI API"
-    end
-
-    content = output.content.find { |c| c.type == :output_text }
-
-    if content.nil?
-      raise Riffer::Error, "No content returned from OpenAI API"
-    end
-
-    if content.type == :refusal
-      raise Riffer::Error, "Request was refused: #{content.refusal}"
-    end
-
-    if content.type != :output_text
-      raise Riffer::Error, "Unexpected content type: #{content.type}"
-    end
-
-    Riffer::Messages::Assistant.new(content.text)
+    extract_assistant_message(response.output)
   end

-  def perform_stream_text(messages, model:,
+  def perform_stream_text(messages, model:, **options)
     Enumerator.new do |yielder|
-      params = build_request_params(messages, model,
+      params = build_request_params(messages, model, options)
       stream = @client.responses.stream(params)

       process_stream_events(stream, yielder)
     end
   end

-  def build_request_params(messages, model,
-
-
+  def build_request_params(messages, model, options)
+    reasoning = options[:reasoning]
+    tools = options[:tools]
+
+    params = {
+      input: convert_messages_to_openai_format(messages),
       model: model,
       reasoning: reasoning && {
         effort: reasoning,
         summary: "auto"
-      }
+      },
+      **options.except(:reasoning, :tools)
     }
+
+    if tools && !tools.empty?
+      params[:tools] = tools.map { |t| convert_tool_to_openai_format(t) }
+    end
+
+    params.compact
   end

-  def
-    messages.
+  def convert_messages_to_openai_format(messages)
+    messages.flat_map do |message|
       case message
       when Riffer::Messages::System
         {role: "developer", content: message.content}
       when Riffer::Messages::User
         {role: "user", content: message.content}
       when Riffer::Messages::Assistant
-
+        convert_assistant_to_openai_format(message)
       when Riffer::Messages::Tool
-
+        {
+          type: "function_call_output",
+          call_id: message.tool_call_id,
+          output: message.content
+        }
+      end
+    end
+  end
+
+  def convert_assistant_to_openai_format(message)
+    if message.tool_calls.empty?
+      {role: "assistant", content: message.content}
+    else
+      items = []
+      items << {type: "message", role: "assistant", content: message.content} if message.content && !message.content.empty?
+      message.tool_calls.each do |tc|
+        items << {
+          type: "function_call",
+          id: tc[:id],
+          call_id: tc[:call_id] || tc[:id],
+          name: tc[:name],
+          arguments: tc[:arguments].is_a?(String) ? tc[:arguments] : tc[:arguments].to_json
+        }
+      end
+      items
+    end
+  end
+
+  def extract_assistant_message(output_items)
+    text_content = ""
+    tool_calls = []
+
+    output_items.each do |item|
+      case item.type
+      when :message
+        text_block = item.content&.find { |c| c.type == :output_text }
+        text_content = text_block&.text || "" if text_block
+      when :function_call
+        tool_calls << {
+          id: item.id,
+          call_id: item.call_id,
+          name: item.name,
+          arguments: item.arguments
+        }
       end
     end
+
+    if text_content.empty? && tool_calls.empty?
+      raise Riffer::Error, "No output returned from OpenAI API"
+    end
+
+    Riffer::Messages::Assistant.new(text_content, tool_calls: tool_calls)
   end

   def process_stream_events(stream, yielder)
+    tool_info = {}
+
     stream.each do |raw_event|
-
+      track_tool_info(raw_event, tool_info)
+      event = convert_event(raw_event, tool_info)

       next unless event

@@ -87,7 +132,17 @@ class Riffer::Providers::OpenAI < Riffer::Providers::Base
     end
   end

-  def
+  def track_tool_info(event, tool_info)
+    return unless event.type == :"response.output_item.added"
+    return unless event.item&.type == :function_call
+
+    tool_info[event.item.id] = {
+      name: event.item.name,
+      call_id: event.item.call_id
+    }
+  end
+
+  def convert_event(event, tool_info = {})
     case event.type
     when :"response.output_text.delta"
       Riffer::StreamEvents::TextDelta.new(event.delta)
@@ -97,6 +152,31 @@ class Riffer::Providers::OpenAI < Riffer::Providers::Base
       Riffer::StreamEvents::ReasoningDelta.new(event.delta)
     when :"response.reasoning_summary_text.done"
       Riffer::StreamEvents::ReasoningDone.new(event.text)
+    when :"response.function_call_arguments.delta"
+      tracked = tool_info[event.item_id] || {}
+      Riffer::StreamEvents::ToolCallDelta.new(
+        item_id: event.item_id,
+        name: tracked[:name],
+        arguments_delta: event.delta
+      )
+    when :"response.function_call_arguments.done"
+      tracked = tool_info[event.item_id] || {}
+      Riffer::StreamEvents::ToolCallDone.new(
+        item_id: event.item_id,
+        call_id: tracked[:call_id] || event.item_id,
+        name: tracked[:name],
+        arguments: event.arguments
+      )
     end
   end
+
+  def convert_tool_to_openai_format(tool)
+    {
+      type: "function",
+      name: tool.name,
+      description: tool.description,
+      parameters: tool.parameters_schema,
+      strict: true
+    }
+  end
 end
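For orientation, the message mapping introduced above in one place — every shape here is taken from convert_messages_to_openai_format and convert_assistant_to_openai_format in the diff; the sample values are illustrative:

    # Riffer message       => OpenAI Responses API input item
    # Messages::System     => {role: "developer", content: "Be terse."}   (note: "developer", not "system")
    # Messages::User       => {role: "user", content: "Hi"}
    # Messages::Tool       => {type: "function_call_output", call_id: "call_1", output: "42"}
    # Messages::Assistant  => {role: "assistant", content: "Hi!"} when it carries no tool calls;
    #                         otherwise a "message" item plus one "function_call" item per tool call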
data/lib/riffer/providers/repository.rb
CHANGED

@@ -1,14 +1,19 @@
+# Registry for finding provider classes by identifier.
 class Riffer::Providers::Repository
+  # Mapping of provider identifiers to provider class lambdas.
   REPO = {
-    openai: -> { Riffer::Providers::OpenAI },
     amazon_bedrock: -> { Riffer::Providers::AmazonBedrock },
+    anthropic: -> { Riffer::Providers::Anthropic },
+    openai: -> { Riffer::Providers::OpenAI },
     test: -> { Riffer::Providers::Test }
   }.freeze

   class << self
-    # Finds a provider class by identifier
-    #
-    #
+    # Finds a provider class by identifier.
+    #
+    # identifier:: String or Symbol - the identifier to search for
+    #
+    # Returns Class or nil - the provider class, or nil if not found.
     def find(identifier)
       REPO.fetch(identifier.to_sym, nil)&.call
     end
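Based on the find method above, lookups accept strings or symbols and return nil for unknown identifiers:

    Riffer::Providers::Repository.find(:anthropic) # => Riffer::Providers::Anthropic
    Riffer::Providers::Repository.find("openai")   # => Riffer::Providers::OpenAI
    Riffer::Providers::Repository.find(:unknown)   # => nil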
data/lib/riffer/providers/test.rb
CHANGED

@@ -1,27 +1,75 @@
 # frozen_string_literal: true

+# Test provider for mocking LLM responses in tests.
+#
+# No external gems required.
 class Riffer::Providers::Test < Riffer::Providers::Base
+  # Array of recorded method calls for assertions.
+  #
+  # Returns Array of Hash.
   attr_reader :calls

+  # Initializes the test provider.
+  #
+  # options:: Hash - optional configuration
+  #
+  # Use +:responses+ to pre-configure responses.
   def initialize(**options)
     @responses = options[:responses] || []
     @current_index = 0
     @calls = []
-    @
-    @stubbed_reasoning = nil
+    @stubbed_responses = []
   end

-
-
-
+  # Stubs the next response from the provider.
+  #
+  # Can be called multiple times to queue responses.
+  #
+  # content:: String - the response content
+  # tool_calls:: Array of Hash - optional tool calls to include
+  #
+  # Returns void.
+  #
+  #   provider.stub_response("Hello")
+  #   provider.stub_response("", tool_calls: [{name: "my_tool", arguments: '{"key":"value"}'}])
+  #   provider.stub_response("Final response")
+  #
+  def stub_response(content, tool_calls: [])
+    formatted_tool_calls = tool_calls.map.with_index do |tc, idx|
+      {
+        id: tc[:id] || "test_id_#{idx}",
+        call_id: tc[:call_id] || tc[:id] || "test_call_#{idx}",
+        name: tc[:name],
+        arguments: tc[:arguments].is_a?(String) ? tc[:arguments] : tc[:arguments].to_json
+      }
+    end
+    @stubbed_responses << {role: "assistant", content: content, tool_calls: formatted_tool_calls}
+  end
+
+  # Clears all stubbed responses.
+  #
+  # Returns void.
+  def clear_stubs
+    @stubbed_responses = []
   end

   private

-  def
-    @
-
-    @current_index
+  def next_response
+    if @stubbed_responses.any?
+      @stubbed_responses.shift
+    elsif @responses[@current_index]
+      response = @responses[@current_index]
+      @current_index += 1
+      response
+    else
+      {role: "assistant", content: "Test response"}
+    end
+  end
+
+  def perform_generate_text(messages, model: nil, **options)
+    @calls << {messages: messages.map(&:to_h), model: model, **options}
+    response = next_response

     if response.is_a?(Hash)
       Riffer::Messages::Assistant.new(response[:content], tool_calls: response[:tool_calls] || [])
@@ -30,26 +78,32 @@ class Riffer::Providers::Test < Riffer::Providers::Base
     end
   end

-  def perform_stream_text(messages, model: nil,
-    @calls << {messages: messages.map(&:to_h), model: model,
-    response =
-    @current_index += 1
+  def perform_stream_text(messages, model: nil, **options)
+    @calls << {messages: messages.map(&:to_h), model: model, **options}
+    response = next_response
     Enumerator.new do |yielder|
-
-
+      full_content = response[:content] || ""
+      tool_calls = response[:tool_calls] || []

-
-
+      unless full_content.empty?
+        content_parts = full_content.split(". ").map { |part| part + (part.end_with?(".") ? "" : ".") }
+        content_parts.each do |part|
+          yielder << Riffer::StreamEvents::TextDelta.new(part + " ")
         end
-
-      yielder << Riffer::StreamEvents::ReasoningDone.new(@stubbed_reasoning)
       end

-
-
-
-
-
+      tool_calls.each do |tc|
+        yielder << Riffer::StreamEvents::ToolCallDelta.new(
+          item_id: tc[:id],
+          name: tc[:name],
+          arguments_delta: tc[:arguments]
+        )
+        yielder << Riffer::StreamEvents::ToolCallDone.new(
+          item_id: tc[:id],
+          call_id: tc[:call_id],
+          name: tc[:name],
+          arguments: tc[:arguments]
+        )
+      end

       yielder << Riffer::StreamEvents::TextDone.new(full_content)
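Putting the new stubbing API together — this mirrors the RDoc examples embedded in the diff above:

    provider = Riffer::Providers::Test.new
    provider.stub_response("Hello")
    provider.stub_response("", tool_calls: [{name: "my_tool", arguments: '{"key":"value"}'}])
    provider.stub_response("Final response")

    # Stubs are consumed first-in, first-out by next_response. Once they and any
    # :responses passed to the constructor are exhausted, the provider falls back
    # to {role: "assistant", content: "Test response"}. Every generate/stream call
    # is recorded in provider.calls for assertions.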
data/lib/riffer/providers.rb
CHANGED

@@ -1,4 +1,10 @@
 # frozen_string_literal: true

+# Namespace for LLM provider adapters in the Riffer framework.
+#
+# Providers connect Riffer to LLM services:
+# - Riffer::Providers::OpenAI - OpenAI GPT models
+# - Riffer::Providers::AmazonBedrock - AWS Bedrock models
+# - Riffer::Providers::Test - Mock provider for testing
 module Riffer::Providers
 end
data/lib/riffer/stream_events/base.rb
CHANGED

@@ -1,12 +1,24 @@
 # frozen_string_literal: true

+# Base class for all streaming events in the Riffer framework.
+#
+# Subclasses must implement the +to_h+ method.
 class Riffer::StreamEvents::Base
+  # The message role (typically :assistant).
+  #
+  # Returns Symbol.
   attr_reader :role

-
+  # Creates a new stream event.
+  #
+  # role:: Symbol - the message role (defaults to :assistant)
+  def initialize(role: :assistant)
     @role = role
   end

+  # Converts the event to a hash.
+  #
+  # Raises NotImplementedError if not implemented by subclass.
   def to_h
     raise NotImplementedError, "Subclasses must implement #to_h"
   end
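A subclass only has to call super with a role and override to_h — a minimal sketch, with a hypothetical event class:

    # Hypothetical subclass for illustration; not part of the gem.
    class Riffer::StreamEvents::Ping < Riffer::StreamEvents::Base
      def to_h
        {role: @role} # without this override, Base#to_h raises NotImplementedError
      end
    end

    Riffer::StreamEvents::Ping.new.to_h # => {role: :assistant}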
data/lib/riffer/stream_events/reasoning_delta.rb
CHANGED

@@ -1,13 +1,27 @@
 # frozen_string_literal: true

+# Represents an incremental reasoning chunk during streaming.
+#
+# Emitted when the LLM produces reasoning/thinking content incrementally.
+# Only available with providers that support reasoning (e.g., OpenAI with reasoning option).
 class Riffer::StreamEvents::ReasoningDelta < Riffer::StreamEvents::Base
+  # The incremental reasoning content.
+  #
+  # Returns String.
   attr_reader :content

-
+  # Creates a new reasoning delta event.
+  #
+  # content:: String - the incremental reasoning content
+  # role:: Symbol - the message role (defaults to :assistant)
+  def initialize(content, role: :assistant)
     super(role: role)
     @content = content
   end

+  # Converts the event to a hash.
+  #
+  # Returns Hash with +:role+ and +:content+ keys.
   def to_h
     {role: @role, content: @content}
   end
data/lib/riffer/stream_events/reasoning_done.rb
CHANGED

@@ -1,13 +1,27 @@
 # frozen_string_literal: true

+# Represents completion of reasoning during streaming.
+#
+# Emitted when the LLM has finished producing reasoning/thinking content.
+# Only available with providers that support reasoning (e.g., OpenAI with reasoning option).
 class Riffer::StreamEvents::ReasoningDone < Riffer::StreamEvents::Base
+  # The complete reasoning content.
+  #
+  # Returns String.
   attr_reader :content

-
+  # Creates a new reasoning done event.
+  #
+  # content:: String - the complete reasoning content
+  # role:: Symbol - the message role (defaults to :assistant)
+  def initialize(content, role: :assistant)
     super(role: role)
     @content = content
   end

+  # Converts the event to a hash.
+  #
+  # Returns Hash with +:role+ and +:content+ keys.
   def to_h
     {role: @role, content: @content}
   end
data/lib/riffer/stream_events/text_delta.rb
CHANGED

@@ -1,13 +1,26 @@
 # frozen_string_literal: true

+# Represents an incremental text chunk during streaming.
+#
+# Emitted when the LLM produces text content incrementally.
 class Riffer::StreamEvents::TextDelta < Riffer::StreamEvents::Base
+  # The incremental text content.
+  #
+  # Returns String.
   attr_reader :content

-
+  # Creates a new text delta event.
+  #
+  # content:: String - the incremental text content
+  # role:: Symbol - the message role (defaults to :assistant)
+  def initialize(content, role: :assistant)
     super(role: role)
     @content = content
   end

+  # Converts the event to a hash.
+  #
+  # Returns Hash with +:role+ and +:content+ keys.
   def to_h
     {role: @role, content: @content}
   end
data/lib/riffer/stream_events/text_done.rb
CHANGED

@@ -1,13 +1,26 @@
 # frozen_string_literal: true

+# Represents completion of text generation during streaming.
+#
+# Emitted when the LLM has finished producing text content.
 class Riffer::StreamEvents::TextDone < Riffer::StreamEvents::Base
+  # The complete text content.
+  #
+  # Returns String.
   attr_reader :content

-
+  # Creates a new text done event.
+  #
+  # content:: String - the complete text content
+  # role:: Symbol - the message role (defaults to :assistant)
+  def initialize(content, role: :assistant)
     super(role: role)
     @content = content
   end

+  # Converts the event to a hash.
+  #
+  # Returns Hash with +:role+ and +:content+ keys.
   def to_h
     {role: @role, content: @content}
   end
data/lib/riffer/stream_events/tool_call_delta.rb
ADDED

@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+# Riffer::StreamEvents::ToolCallDelta represents an incremental tool call chunk during streaming.
+#
+# Emitted when the LLM is building a tool call, containing partial argument data.
+class Riffer::StreamEvents::ToolCallDelta < Riffer::StreamEvents::Base
+  # The tool call item identifier.
+  attr_reader :item_id
+
+  # The tool name (may only be present in first delta).
+  attr_reader :name
+
+  # The incremental arguments JSON fragment.
+  attr_reader :arguments_delta
+
+  # Creates a new tool call delta event.
+  #
+  # item_id:: String - the tool call item identifier
+  # name:: String or nil - the tool name (may only be present in first delta)
+  # arguments_delta:: String - the incremental arguments JSON fragment
+  # role:: Symbol - the message role (defaults to :assistant)
+  def initialize(item_id:, arguments_delta:, name: nil, role: :assistant)
+    super(role: role)
+    @item_id = item_id
+    @name = name
+    @arguments_delta = arguments_delta
+  end
+
+  # Converts the event to a hash.
+  #
+  # Returns Hash - the event data.
+  def to_h
+    {role: @role, item_id: @item_id, name: @name, arguments_delta: @arguments_delta}.compact
+  end
+end
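Note the .compact in to_h: a delta built without a name simply omits the :name key rather than carrying nil:

    event = Riffer::StreamEvents::ToolCallDelta.new(item_id: "fc_123", arguments_delta: '{"loc')
    event.to_h # => {role: :assistant, item_id: "fc_123", arguments_delta: "{\"loc"}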
data/lib/riffer/stream_events/tool_call_done.rb
ADDED

@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+# Riffer::StreamEvents::ToolCallDone represents a completed tool call during streaming.
+#
+# Emitted when the LLM has finished building a tool call with complete arguments.
+class Riffer::StreamEvents::ToolCallDone < Riffer::StreamEvents::Base
+  # The tool call item identifier.
+  attr_reader :item_id
+
+  # The call identifier for response matching.
+  attr_reader :call_id
+
+  # The tool name.
+  attr_reader :name
+
+  # The complete arguments JSON string.
+  attr_reader :arguments
+
+  # Creates a new tool call done event.
+  #
+  # item_id:: String - the tool call item identifier
+  # call_id:: String - the call identifier for response matching
+  # name:: String - the tool name
+  # arguments:: String - the complete arguments JSON string
+  # role:: Symbol - the message role (defaults to :assistant)
+  def initialize(item_id:, call_id:, name:, arguments:, role: :assistant)
+    super(role: role)
+    @item_id = item_id
+    @call_id = call_id
+    @name = name
+    @arguments = arguments
+  end
+
+  # Converts the event to a hash.
+  #
+  # Returns Hash - the event data.
+  def to_h
+    {role: @role, item_id: @item_id, call_id: @call_id, name: @name, arguments: @arguments}
+  end
+end
data/lib/riffer/stream_events.rb
CHANGED

@@ -1,4 +1,13 @@
 # frozen_string_literal: true

+# Namespace for streaming event types in the Riffer framework.
+#
+# When streaming responses, these events are yielded to represent incremental updates:
+# - Riffer::StreamEvents::TextDelta - Incremental text content
+# - Riffer::StreamEvents::TextDone - Complete text content
+# - Riffer::StreamEvents::ToolCallDelta - Incremental tool call arguments
+# - Riffer::StreamEvents::ToolCallDone - Complete tool call
+# - Riffer::StreamEvents::ReasoningDelta - Incremental reasoning content
+# - Riffer::StreamEvents::ReasoningDone - Complete reasoning content
 module Riffer::StreamEvents
 end
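Tying the event types together, a sketch of a consumer loop — events here is whatever enumerator a provider's perform_stream_text returns (see the OpenAI and Test providers above), and handle_tool_call is a hypothetical helper:

    events.each do |event|
      case event
      when Riffer::StreamEvents::TextDelta
        print event.content
      when Riffer::StreamEvents::ToolCallDone
        handle_tool_call(event.name, event.arguments) # arguments is the complete JSON string
      when Riffer::StreamEvents::TextDone
        puts # the full text is also available as event.content
      end
    end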