activeagent 0.5.0rc3 → 0.6.0rc1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/active_agent/action_prompt/base.rb +33 -17
- data/lib/active_agent/action_prompt/prompt.rb +4 -2
- data/lib/active_agent/base.rb +2 -1
- data/lib/active_agent/configuration.rb +36 -0
- data/lib/active_agent/generation_provider/anthropic_provider.rb +75 -65
- data/lib/active_agent/generation_provider/base.rb +18 -5
- data/lib/active_agent/generation_provider/error_handling.rb +166 -0
- data/lib/active_agent/generation_provider/log_subscriber.rb +92 -0
- data/lib/active_agent/generation_provider/message_formatting.rb +107 -0
- data/lib/active_agent/generation_provider/open_ai_provider.rb +47 -80
- data/lib/active_agent/generation_provider/open_router_provider.rb +330 -2
- data/lib/active_agent/generation_provider/parameter_builder.rb +119 -0
- data/lib/active_agent/generation_provider/response.rb +3 -1
- data/lib/active_agent/generation_provider/stream_processing.rb +58 -0
- data/lib/active_agent/generation_provider/tool_management.rb +142 -0
- data/lib/active_agent/generation_provider.rb +1 -1
- data/lib/active_agent/log_subscriber.rb +6 -6
- data/lib/active_agent/parameterized.rb +6 -5
- data/lib/active_agent/sanitizers.rb +40 -0
- data/lib/active_agent/version.rb +1 -1
- data/lib/active_agent.rb +9 -6
- data/lib/generators/erb/agent_generator.rb +3 -0
- data/lib/generators/erb/templates/instructions.text.erb.tt +1 -0
- metadata +38 -29
data/lib/active_agent/generation_provider/log_subscriber.rb
@@ -0,0 +1,92 @@
+# frozen_string_literal: true
+
+require "active_support/log_subscriber"
+
+module ActiveAgent
+  module GenerationProvider
+    # = Generation Provider \LogSubscriber
+    #
+    # Implements the ActiveSupport::LogSubscriber for logging notifications when
+    # generation providers make API calls and handle responses.
+    class LogSubscriber < ActiveSupport::LogSubscriber
+      # A generation request was made
+      def generate(event)
+        info do
+          provider = event.payload[:provider]
+          model = event.payload[:model]
+
+          if exception = event.payload[:exception_object]
+            "Failed generation with #{provider} model=#{model} error_class=#{exception.class} error_message=#{exception.message.inspect}"
+          else
+            "Generated response with #{provider} model=#{model} (#{event.duration.round(1)}ms)"
+          end
+        end
+
+        debug { event.payload[:prompt] } if event.payload[:prompt]
+      end
+      subscribe_log_level :generate, :debug
+
+      # Streaming chunk received
+      def stream_chunk(event)
+        debug do
+          provider = event.payload[:provider]
+          chunk_size = event.payload[:chunk_size]
+          "#{provider}: received stream chunk (#{chunk_size} bytes)"
+        end
+      end
+      subscribe_log_level :stream_chunk, :debug
+
+      # Tool/function call executed
+      def tool_call(event)
+        info do
+          tool_name = event.payload[:tool_name]
+          tool_id = event.payload[:tool_id]
+
+          if exception = event.payload[:exception_object]
+            "Failed tool call #{tool_name} id=#{tool_id} error=#{exception.class}"
+          else
+            "Executed tool call #{tool_name} id=#{tool_id} (#{event.duration.round(1)}ms)"
+          end
+        end
+      end
+      subscribe_log_level :tool_call, :debug
+
+      # Retry attempt
+      def retry(event)
+        warn do
+          provider = event.payload[:provider]
+          attempt = event.payload[:attempt]
+          max_attempts = event.payload[:max_attempts]
+          error_class = event.payload[:error_class]
+
+          "#{provider}: Retry attempt #{attempt}/#{max_attempts} after #{error_class}"
+        end
+      end
+      subscribe_log_level :retry, :warn
+
+      # Error occurred
+      def error(event)
+        error do
+          provider = event.payload[:provider]
+          error_class = event.payload[:error_class]
+          error_message = event.payload[:error_message]
+
+          "#{provider}: Error #{error_class} - #{error_message}"
+        end
+      end
+      subscribe_log_level :error, :error
+
+      # Use the logger configured for ActiveAgent::Base if available
+      def logger
+        if defined?(ActiveAgent::Base) && ActiveAgent::Base.respond_to?(:logger)
+          ActiveAgent::Base.logger
+        else
+          super
+        end
+      end
+    end
+  end
+end
+
+# Attach to active_agent.generation_provider namespace
+ActiveAgent::GenerationProvider::LogSubscriber.attach_to :"active_agent.generation_provider"
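The subscriber above follows the standard ActiveSupport::LogSubscriber convention: each public handler method listens for events named "<method>.active_agent.generation_provider". As a rough, hypothetical illustration (the payload keys come from the handlers above; the provider and model values are invented), an instrumented call like the following would be picked up by LogSubscriber#generate:

require "active_support/notifications"

# Hypothetical instrumentation; a real provider would emit this internally.
ActiveSupport::Notifications.instrument(
  "generate.active_agent.generation_provider",
  provider: "OpenAI",
  model: "gpt-4o-mini",
  prompt: "Summarize this ticket"  # logged at the debug level when present
) do
  # The provider API call would go here; the event duration is measured automatically.
end
# => info log along the lines of: Generated response with OpenAI model=gpt-4o-mini (142.3ms)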
data/lib/active_agent/generation_provider/message_formatting.rb
@@ -0,0 +1,107 @@
+# frozen_string_literal: true
+
+module ActiveAgent
+  module GenerationProvider
+    module MessageFormatting
+      extend ActiveSupport::Concern
+
+      def provider_messages(messages)
+        messages.map do |message|
+          format_message(message)
+        end
+      end
+
+      protected
+
+      def format_message(message)
+        base_message = {
+          role: convert_role(message.role),
+          content: format_content(message)
+        }
+
+        add_tool_fields(base_message, message)
+        add_metadata_fields(base_message, message)
+
+        base_message.compact
+      end
+
+      def convert_role(role)
+        # Default role conversion - override in provider for specific mappings
+        role.to_s
+      end
+
+      def format_content(message)
+        # Handle multimodal content
+        case message.content_type
+        when "image_url"
+          format_image_content(message)
+        when "multipart/mixed", "array"
+          format_multimodal_content(message)
+        else
+          message.content
+        end
+      end
+
+      def format_image_content(message)
+        # Default implementation - override in provider
+        message.content
+      end
+
+      def format_multimodal_content(message)
+        # Default implementation for multimodal content
+        if message.content.is_a?(Array)
+          message.content.map do |item|
+            format_content_item(item)
+          end
+        else
+          message.content
+        end
+      end
+
+      def format_content_item(item)
+        # Format individual content items in multimodal messages
+        # Override in provider for specific formatting
+        item
+      end
+
+      def add_tool_fields(base_message, message)
+        # Add tool-specific fields based on role
+        case message.role.to_s
+        when "assistant"
+          if message.action_requested && message.requested_actions.any?
+            base_message[:tool_calls] = format_tool_calls(message.requested_actions)
+          elsif message.raw_actions.present? && message.raw_actions.is_a?(Array)
+            base_message[:tool_calls] = message.raw_actions
+          end
+        when "tool"
+          base_message[:tool_call_id] = message.action_id if message.action_id
+          base_message[:name] = message.action_name if message.action_name
+        end
+      end
+
+      def add_metadata_fields(base_message, message)
+        # Override to add provider-specific metadata
+        # For example: message IDs, timestamps, etc.
+      end
+
+      def format_tool_calls(actions)
+        # Default implementation - override in provider for specific format
+        actions.map do |action|
+          format_single_tool_call(action)
+        end
+      end
+
+      def format_single_tool_call(action)
+        # Default tool call format (OpenAI style)
+        {
+          type: "function",
+          function: {
+            name: action.name,
+            arguments: action.params.is_a?(String) ? action.params : action.params.to_json
+          },
+          id: action.id
+        }
+      end
+    end
+  end
+end
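MessageFormatting is built as a set of override points: a provider includes the concern and replaces convert_role, format_image_content, format_single_tool_call, or add_metadata_fields as needed. A minimal, hypothetical provider class (not part of the gem, shown only as a sketch and assuming the activeagent gem is installed and loaded) might customize it like this:

require "active_agent"

# Hypothetical provider, for illustration only.
class ExampleProvider
  include ActiveAgent::GenerationProvider::MessageFormatting

  protected

  # Map the framework's "tool" role onto a provider-specific role name.
  def convert_role(role)
    role.to_s == "tool" ? "function" : role.to_s
  end

  # Emit tool calls in a flatter, provider-specific shape instead of the
  # OpenAI-style default defined in the module above.
  def format_single_tool_call(action)
    { id: action.id, name: action.name, input: action.params }
  end
end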
data/lib/active_agent/generation_provider/open_ai_provider.rb
@@ -1,5 +1,5 @@
 begin
-  gem "ruby-openai", "~> 8.
+  gem "ruby-openai", "~> 8.2.0"
  require "openai"
 rescue LoadError
  raise LoadError, "The 'ruby-openai' gem is required for OpenAIProvider. Please add it to your Gemfile and run `bundle install`."
@@ -9,10 +9,16 @@ require "active_agent/action_prompt/action"
 require_relative "base"
 require_relative "response"
 require_relative "responses_adapter"
+require_relative "stream_processing"
+require_relative "message_formatting"
+require_relative "tool_management"

 module ActiveAgent
   module GenerationProvider
     class OpenAIProvider < Base
+      include StreamProcessing
+      include MessageFormatting
+      include ToolManagement
       def initialize(config)
         super
         @host = config["host"] || nil
@@ -27,84 +33,58 @@ module ActiveAgent
       def generate(prompt)
         @prompt = prompt

-
-
-
-
+        with_error_handling do
+          if @prompt.multimodal? || @prompt.content_type == "multipart/mixed"
+            responses_prompt(parameters: responses_parameters)
+          else
+            chat_prompt(parameters: prompt_parameters)
+          end
         end
-      rescue => e
-        error_message = e.respond_to?(:message) ? e.message : e.to_s
-        raise GenerationProviderError, error_message
       end

       def embed(prompt)
         @prompt = prompt

-
-
-
-        raise GenerationProviderError, error_message
+        with_error_handling do
+          embeddings_prompt(parameters: embeddings_parameters)
+        end
       end

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-          yield message, new_content if block_given?
-        end
-        elsif chunk.dig("choices", 0, "delta", "tool_calls") && chunk.dig("choices", 0, "delta", "role")
-          message = handle_message(chunk.dig("choices", 0, "delta"))
-          prompt.messages << message
-          @response = ActiveAgent::GenerationProvider::Response.new(prompt:, message:)
-        end
+      protected
+
+      # Override from StreamProcessing module
+      def process_stream_chunk(chunk, message, agent_stream)
+        new_content = chunk.dig("choices", 0, "delta", "content")
+        if new_content && !new_content.blank?
+          message.generation_id = chunk.dig("id")
+          message.content += new_content
+          # Call agent_stream directly without the block to avoid double execution
+          agent_stream&.call(message, new_content, false, prompt.action_name)
+        elsif chunk.dig("choices", 0, "delta", "tool_calls") && chunk.dig("choices", 0, "delta", "role")
+          message = handle_message(chunk.dig("choices", 0, "delta"))
+          prompt.messages << message
+          @response = ActiveAgent::GenerationProvider::Response.new(prompt:, message:)
+        end

-
-
-        end
+        if chunk.dig("choices", 0, "finish_reason")
+          finalize_stream(message, agent_stream)
         end
       end

-
-
-
-
-
-
-          tools: tools.presence
-        }.compact
+      # Override from MessageFormatting module to handle OpenAI image format
+      def format_image_content(message)
+        [ {
+          type: "image_url",
+          image_url: { url: message.content }
+        } ]
       end

-
-
-
-
-
-
-          tool_calls: message.raw_actions.present? ? message.raw_actions[:tool_calls] : (message.requested_actions.map { |action| { type: "function", name: action.name, arguments: action.params.to_json } } if message.action_requested),
-          generation_id: message.generation_id,
-          content: message.content,
-          type: message.content_type,
-          charset: message.charset
-        }.compact
-
-        if message.content_type == "image_url" || message.content[0..4] == "data:"
-          provider_message[:type] = "image_url"
-          provider_message[:image_url] = { url: message.content }
-        end
-        provider_message
-      end
-      end
+      private
+
+      # Now using modules, but we can override build_provider_parameters for OpenAI-specific needs
+      # The prompt_parameters method comes from ParameterBuilder module
+      # The format_tools method comes from ToolManagement module
+      # The provider_messages method comes from MessageFormatting module

       def chat_response(response)
         return @response if prompt.options[:stream]
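Both generate and embed now delegate error handling to with_error_handling, which comes from the newly added error_handling.rb (166 lines, not shown in this diff). The gem's actual ErrorHandling module is certainly richer than this, but based on the rescue clause removed above, the basic pattern it replaces looks roughly like the following sketch (module name and structure are assumed):

# Sketch only: the wrap-and-reraise behavior that the removed
# "rescue => e ... raise GenerationProviderError" code performed inline.
module ErrorHandlingSketch
  # Stand-in for the gem's GenerationProviderError class.
  GenerationProviderError = Class.new(StandardError)

  def with_error_handling
    yield
  rescue StandardError => e
    error_message = e.respond_to?(:message) ? e.message : e.to_s
    raise GenerationProviderError, error_message
  end
end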
@@ -118,7 +98,7 @@ module ActiveAgent
       end

       def responses_response(response)
-        message_json = response
+        message_json = response["output"].find { |output_item| output_item["type"] == "message" }
         message_json["id"] = response.dig("id") if message_json["id"].blank?

         message = ActiveAgent::ActionPrompt::Message.new(
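The responses_response change reflects the shape of the OpenAI Responses API payload: output is an array of typed items (reasoning items, messages, tool calls), so the message item has to be located rather than assumed. An abridged, assumed example of such a payload and the lookup (field values are invented):

# Abridged, assumed payload shape for illustration only.
response = {
  "id" => "resp_123",
  "output" => [
    { "type" => "reasoning", "id" => "rs_1" },
    { "type" => "message", "id" => "msg_1",
      "content" => [ { "type" => "output_text", "text" => "Hello!" } ] }
  ]
}

message_json = response["output"].find { |output_item| output_item["type"] == "message" }
message_json["id"] # => "msg_1"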
@@ -144,20 +124,7 @@ module ActiveAgent
         )
       end

-
-        return [] if tool_calls.nil? || tool_calls.empty?
-
-        tool_calls.map do |tool_call|
-          next if tool_call["function"].nil? || tool_call["function"]["name"].blank?
-          args = tool_call["function"]["arguments"].blank? ? nil : JSON.parse(tool_call["function"]["arguments"], { symbolize_names: true })
-
-          ActiveAgent::ActionPrompt::Action.new(
-            id: tool_call["id"],
-            name: tool_call.dig("function", "name"),
-            params: args
-          )
-        end.compact
-      end
+      # handle_actions is now provided by ToolManagement module

       def chat_prompt(parameters: prompt_parameters)
         parameters[:stream] = provider_stream if prompt.options[:stream] || config["stream"]
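The removed handle_actions now lives in the ToolManagement module (142 added lines, not shown here). Judging from the removed implementation, it turns the provider's tool_calls array into ActiveAgent::ActionPrompt::Action objects. The following is a simplified, self-contained re-expression of that removed parsing (the payload and the plain-hash result are illustrative only, not the module's actual API):

require "json"

# Hypothetical tool_calls payload in the OpenAI chat format.
tool_calls = [
  { "id" => "call_1",
    "function" => { "name" => "search", "arguments" => "{\"query\":\"weather\"}" } }
]

# Simplified re-expression of the removed parsing, without ActiveSupport
# helpers or the gem's Action class.
actions = tool_calls.map do |tool_call|
  next if tool_call["function"].nil? || tool_call["function"]["name"].to_s.empty?
  args = JSON.parse(tool_call["function"]["arguments"], symbolize_names: true)
  { id: tool_call["id"], name: tool_call.dig("function", "name"), params: args }
end.compact
# => [{ id: "call_1", name: "search", params: { query: "weather" } }]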