robot_lab 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.envrc +1 -0
- data/.github/workflows/deploy-github-pages.yml +52 -0
- data/.github/workflows/deploy-yard-docs.yml +52 -0
- data/CHANGELOG.md +55 -0
- data/COMMITS.md +196 -0
- data/LICENSE.txt +21 -0
- data/README.md +332 -0
- data/Rakefile +67 -0
- data/docs/api/adapters/anthropic.md +121 -0
- data/docs/api/adapters/gemini.md +133 -0
- data/docs/api/adapters/index.md +104 -0
- data/docs/api/adapters/openai.md +134 -0
- data/docs/api/core/index.md +113 -0
- data/docs/api/core/memory.md +314 -0
- data/docs/api/core/network.md +291 -0
- data/docs/api/core/robot.md +273 -0
- data/docs/api/core/state.md +273 -0
- data/docs/api/core/tool.md +353 -0
- data/docs/api/history/active-record-adapter.md +195 -0
- data/docs/api/history/config.md +191 -0
- data/docs/api/history/index.md +132 -0
- data/docs/api/history/thread-manager.md +144 -0
- data/docs/api/index.md +82 -0
- data/docs/api/mcp/client.md +221 -0
- data/docs/api/mcp/index.md +111 -0
- data/docs/api/mcp/server.md +225 -0
- data/docs/api/mcp/transports.md +264 -0
- data/docs/api/messages/index.md +67 -0
- data/docs/api/messages/text-message.md +102 -0
- data/docs/api/messages/tool-call-message.md +144 -0
- data/docs/api/messages/tool-result-message.md +154 -0
- data/docs/api/messages/user-message.md +171 -0
- data/docs/api/streaming/context.md +174 -0
- data/docs/api/streaming/events.md +237 -0
- data/docs/api/streaming/index.md +108 -0
- data/docs/architecture/core-concepts.md +243 -0
- data/docs/architecture/index.md +138 -0
- data/docs/architecture/message-flow.md +320 -0
- data/docs/architecture/network-orchestration.md +216 -0
- data/docs/architecture/robot-execution.md +243 -0
- data/docs/architecture/state-management.md +323 -0
- data/docs/assets/css/custom.css +56 -0
- data/docs/assets/images/robot_lab.jpg +0 -0
- data/docs/concepts.md +216 -0
- data/docs/examples/basic-chat.md +193 -0
- data/docs/examples/index.md +129 -0
- data/docs/examples/mcp-server.md +290 -0
- data/docs/examples/multi-robot-network.md +312 -0
- data/docs/examples/rails-application.md +420 -0
- data/docs/examples/tool-usage.md +310 -0
- data/docs/getting-started/configuration.md +230 -0
- data/docs/getting-started/index.md +56 -0
- data/docs/getting-started/installation.md +179 -0
- data/docs/getting-started/quick-start.md +203 -0
- data/docs/guides/building-robots.md +376 -0
- data/docs/guides/creating-networks.md +366 -0
- data/docs/guides/history.md +359 -0
- data/docs/guides/index.md +68 -0
- data/docs/guides/mcp-integration.md +356 -0
- data/docs/guides/memory.md +309 -0
- data/docs/guides/rails-integration.md +432 -0
- data/docs/guides/streaming.md +314 -0
- data/docs/guides/using-tools.md +394 -0
- data/docs/index.md +160 -0
- data/examples/01_simple_robot.rb +38 -0
- data/examples/02_tools.rb +106 -0
- data/examples/03_network.rb +103 -0
- data/examples/04_mcp.rb +219 -0
- data/examples/05_streaming.rb +124 -0
- data/examples/06_prompt_templates.rb +324 -0
- data/examples/07_network_memory.rb +329 -0
- data/examples/prompts/assistant/system.txt.erb +2 -0
- data/examples/prompts/assistant/user.txt.erb +1 -0
- data/examples/prompts/billing/system.txt.erb +7 -0
- data/examples/prompts/billing/user.txt.erb +1 -0
- data/examples/prompts/classifier/system.txt.erb +4 -0
- data/examples/prompts/classifier/user.txt.erb +1 -0
- data/examples/prompts/entity_extractor/system.txt.erb +11 -0
- data/examples/prompts/entity_extractor/user.txt.erb +3 -0
- data/examples/prompts/escalation/system.txt.erb +35 -0
- data/examples/prompts/escalation/user.txt.erb +34 -0
- data/examples/prompts/general/system.txt.erb +4 -0
- data/examples/prompts/general/user.txt.erb +1 -0
- data/examples/prompts/github_assistant/system.txt.erb +6 -0
- data/examples/prompts/github_assistant/user.txt.erb +1 -0
- data/examples/prompts/helper/system.txt.erb +1 -0
- data/examples/prompts/helper/user.txt.erb +1 -0
- data/examples/prompts/keyword_extractor/system.txt.erb +8 -0
- data/examples/prompts/keyword_extractor/user.txt.erb +3 -0
- data/examples/prompts/order_support/system.txt.erb +27 -0
- data/examples/prompts/order_support/user.txt.erb +22 -0
- data/examples/prompts/product_support/system.txt.erb +30 -0
- data/examples/prompts/product_support/user.txt.erb +32 -0
- data/examples/prompts/sentiment_analyzer/system.txt.erb +9 -0
- data/examples/prompts/sentiment_analyzer/user.txt.erb +3 -0
- data/examples/prompts/synthesizer/system.txt.erb +14 -0
- data/examples/prompts/synthesizer/user.txt.erb +15 -0
- data/examples/prompts/technical/system.txt.erb +7 -0
- data/examples/prompts/technical/user.txt.erb +1 -0
- data/examples/prompts/triage/system.txt.erb +16 -0
- data/examples/prompts/triage/user.txt.erb +17 -0
- data/lib/generators/robot_lab/install_generator.rb +78 -0
- data/lib/generators/robot_lab/robot_generator.rb +55 -0
- data/lib/generators/robot_lab/templates/initializer.rb.tt +41 -0
- data/lib/generators/robot_lab/templates/migration.rb.tt +32 -0
- data/lib/generators/robot_lab/templates/result_model.rb.tt +52 -0
- data/lib/generators/robot_lab/templates/robot.rb.tt +46 -0
- data/lib/generators/robot_lab/templates/robot_test.rb.tt +32 -0
- data/lib/generators/robot_lab/templates/routing_robot.rb.tt +53 -0
- data/lib/generators/robot_lab/templates/thread_model.rb.tt +40 -0
- data/lib/robot_lab/adapters/anthropic.rb +163 -0
- data/lib/robot_lab/adapters/base.rb +85 -0
- data/lib/robot_lab/adapters/gemini.rb +193 -0
- data/lib/robot_lab/adapters/openai.rb +159 -0
- data/lib/robot_lab/adapters/registry.rb +81 -0
- data/lib/robot_lab/configuration.rb +143 -0
- data/lib/robot_lab/error.rb +32 -0
- data/lib/robot_lab/errors.rb +70 -0
- data/lib/robot_lab/history/active_record_adapter.rb +146 -0
- data/lib/robot_lab/history/config.rb +115 -0
- data/lib/robot_lab/history/thread_manager.rb +93 -0
- data/lib/robot_lab/mcp/client.rb +210 -0
- data/lib/robot_lab/mcp/server.rb +84 -0
- data/lib/robot_lab/mcp/transports/base.rb +56 -0
- data/lib/robot_lab/mcp/transports/sse.rb +117 -0
- data/lib/robot_lab/mcp/transports/stdio.rb +133 -0
- data/lib/robot_lab/mcp/transports/streamable_http.rb +139 -0
- data/lib/robot_lab/mcp/transports/websocket.rb +108 -0
- data/lib/robot_lab/memory.rb +882 -0
- data/lib/robot_lab/memory_change.rb +123 -0
- data/lib/robot_lab/message.rb +357 -0
- data/lib/robot_lab/network.rb +350 -0
- data/lib/robot_lab/rails/engine.rb +29 -0
- data/lib/robot_lab/rails/railtie.rb +42 -0
- data/lib/robot_lab/robot.rb +560 -0
- data/lib/robot_lab/robot_result.rb +205 -0
- data/lib/robot_lab/robotic_model.rb +324 -0
- data/lib/robot_lab/state_proxy.rb +188 -0
- data/lib/robot_lab/streaming/context.rb +144 -0
- data/lib/robot_lab/streaming/events.rb +95 -0
- data/lib/robot_lab/streaming/sequence_counter.rb +48 -0
- data/lib/robot_lab/task.rb +117 -0
- data/lib/robot_lab/tool.rb +223 -0
- data/lib/robot_lab/tool_config.rb +112 -0
- data/lib/robot_lab/tool_manifest.rb +234 -0
- data/lib/robot_lab/user_message.rb +118 -0
- data/lib/robot_lab/version.rb +5 -0
- data/lib/robot_lab/waiter.rb +73 -0
- data/lib/robot_lab.rb +195 -0
- data/mkdocs.yml +214 -0
- data/sig/robot_lab.rbs +4 -0
- metadata +442 -0
|
@@ -0,0 +1,205 @@
|
|
|
1
|
+
# frozen_string_literal: true

module RobotLab
  # Stores the result of a single robot execution.
  #
  # RobotResult captures the LLM output, tool call results, and metadata
  # from running a robot. Results are stored in State for conversation
  # history and can be serialized for persistence.
  #
  # @example
  #   result = RobotResult.new(
  #     robot_name: "helper",
  #     output: [TextMessage.new(role: :assistant, content: "Hello!")],
  #     tool_calls: []
  #   )
  #   result.checksum # => "a1b2c3d4..."
  #
  class RobotResult
    # @!attribute [r] robot_name
    #   @return [String] the name of the robot that produced this result
    # @!attribute [r] output
    #   @return [Array<Message>] the output messages from the robot
    # @!attribute [r] tool_calls
    #   @return [Array<ToolResultMessage>] the tool call results
    # @!attribute [r] created_at
    #   @return [Time] when the result was created
    # @!attribute [r] id
    #   @return [String] unique identifier for this result
    # @!attribute [r] stop_reason
    #   @return [String, nil] reason execution stopped
    attr_reader :robot_name, :output, :tool_calls, :created_at, :id, :stop_reason

    # Debug-only fields; excluded from {#export} but included in {#to_h}.
    #
    # @!attribute [rw] prompt
    #   @return [Array<Message>, nil] the prompt messages used (debug)
    # @!attribute [rw] history
    #   @return [Array<Message>, nil] the history used (debug)
    # @!attribute [rw] raw
    #   @return [Object, nil] the raw LLM response (debug)
    attr_accessor :prompt, :history, :raw

    # Creates a new RobotResult, normalizing output/tool_calls (Hashes are
    # revived into message objects) and defaulting timestamp and id.
    #
    # @param robot_name [String] the name of the robot
    # @param output [Array<Message, Hash>] the output messages
    # @param tool_calls [Array<ToolResultMessage, Hash>] tool call results
    # @param created_at [Time, nil] creation timestamp (defaults to now)
    # @param id [String, nil] unique ID (defaults to UUID)
    # @param prompt [Array<Message>, nil] prompt messages (debug)
    # @param history [Array<Message>, nil] history messages (debug)
    # @param raw [Object, nil] raw LLM response (debug)
    # @param stop_reason [String, nil] reason for stopping
    def initialize(
      robot_name:,
      output:,
      tool_calls: [],
      created_at: nil,
      id: nil,
      prompt: nil,
      history: nil,
      raw: nil,
      stop_reason: nil
    )
      @robot_name = robot_name
      @output = normalize_messages(output)
      @tool_calls = normalize_tool_results(tool_calls)
      @created_at = created_at || Time.now
      @id = id || SecureRandom.uuid
      @prompt = prompt
      @history = history
      @raw = raw
      @stop_reason = stop_reason
    end

    # Reconstructs a result from serialized data (e.g. loaded from persistence).
    #
    # @param hash [Hash] serialized result data (string or symbol keys)
    # @return [RobotResult]
    def self.from_hash(hash)
      attrs = hash.transform_keys(&:to_sym)
      revive = ->(m) { Message.from_hash(m) }
      timestamp = attrs[:created_at]

      new(
        robot_name: attrs[:robot_name],
        output: (attrs[:output] || []).map(&revive),
        tool_calls: (attrs[:tool_calls] || []).map(&revive),
        created_at: timestamp && Time.parse(timestamp.to_s),
        id: attrs[:id],
        prompt: attrs[:prompt]&.map(&revive),
        history: attrs[:history]&.map(&revive),
        raw: attrs[:raw],
        stop_reason: attrs[:stop_reason]
      )
    end

    # Generates a deduplication checksum: SHA256 over the serialized output,
    # tool calls, and creation time (epoch seconds). Useful for detecting
    # duplicate results in persistence.
    #
    # @return [String] hex digest of the result content
    def checksum
      fingerprint = {
        output: output.map(&:to_h),
        tool_calls: tool_calls.map(&:to_h),
        created_at: created_at.to_i
      }.to_json
      Digest::SHA256.hexdigest(fingerprint)
    end

    # Exports the result for serialization/persistence.
    # Debug fields (prompt, history, raw) are deliberately excluded.
    #
    # @return [Hash] serializable result data (nil values removed)
    def export
      {
        robot_name: robot_name,
        output: output.map(&:to_h),
        tool_calls: tool_calls.map(&:to_h),
        created_at: created_at.iso8601,
        id: id,
        checksum: checksum,
        stop_reason: stop_reason
      }.compact
    end

    # Full hash representation, including the debug fields.
    #
    # @return [Hash] full result data (nil values removed)
    def to_h
      debug_fields = {
        prompt: prompt&.map(&:to_h),
        history: history&.map(&:to_h),
        raw: raw
      }
      export.merge(debug_fields).compact
    end

    # JSON serialization using the export format (no debug fields).
    #
    # @param args [Array] arguments passed through to Hash#to_json
    # @return [String] JSON representation
    def to_json(*args)
      export.to_json(*args)
    end

    # Content of the last text message in the output, if any.
    #
    # @return [String, nil]
    def last_text_content
      final_text = output.reverse_each.find(&:text?)
      final_text&.content
    end

    # Whether the result contains tool calls (in output or captured results).
    #
    # @return [Boolean]
    def has_tool_calls?
      return true if output.any?(&:tool_call?)

      tool_calls.any?
    end

    # Whether execution stopped naturally (not due to a tool call).
    #
    # @return [Boolean]
    def stopped?
      final = output.last
      final&.stopped? || (!has_tool_calls? && final&.stop_reason.nil?)
    end

    private

    # Coerces each entry to a Message; Hashes are revived via Message.from_hash.
    def normalize_messages(messages)
      Array(messages).map do |entry|
        if entry.is_a?(Message)
          entry
        elsif entry.is_a?(Hash)
          Message.from_hash(entry)
        else
          raise ArgumentError, "Invalid message: must be Message or Hash"
        end
      end
    end

    # Coerces each entry to a ToolResultMessage (or generic Message for
    # Hashes without a "tool_result" type marker).
    def normalize_tool_results(results)
      Array(results).map do |entry|
        if entry.is_a?(ToolResultMessage)
          entry
        elsif entry.is_a?(Hash)
          if entry[:type] == "tool_result"
            ToolResultMessage.new(**entry.slice(:tool, :content, :stop_reason))
          else
            Message.from_hash(entry)
          end
        else
          raise ArgumentError, "Invalid tool result: must be ToolResultMessage or Hash"
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,324 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module RobotLab
|
|
4
|
+
# Thread-local storage for capturing tool executions during RubyLLM auto-execution.
#
# Execution records are stashed in Thread.current so they can be retrieved
# after an LLM inference call completes; each thread only ever sees its own
# records.
class ToolExecutionCapture
  class << self
    # Returns the captured tool executions for the current thread,
    # lazily initializing the per-thread storage on first access.
    #
    # @return [Array<Hash>] array of execution records
    def captured
      Thread.current[:robot_lab_tool_executions] ||= []
    end

    # Resets the current thread's capture buffer.
    #
    # @return [Array] empty array
    def clear!
      Thread.current[:robot_lab_tool_executions] = []
    end

    # Appends one tool execution record to the current thread's buffer.
    #
    # @param tool_name [String] name of the executed tool
    # @param tool_id [String] unique identifier for this execution
    # @param input [Hash] input parameters passed to the tool
    # @param output [Object] the tool's return value
    # @return [Array<Hash>] the updated captured array
    def record(tool_name:, tool_id:, input:, output:)
      entry = { tool_name: tool_name, tool_id: tool_id, input: input, output: output }
      captured << entry
    end
  end
end
|
|
40
|
+
|
|
41
|
+
# Wrapper around ruby_llm for LLM inference
#
# RoboticModel provides a unified interface for LLM calls, handling:
# - Message format conversion via adapters
# - Tool registration and execution (RubyLLM auto-executes tools; the
#   results are recovered afterwards via ToolExecutionCapture)
# - Streaming support (block or callback)
#
# @example Basic usage
#   model = RoboticModel.new("claude-sonnet-4", provider: :anthropic)
#   messages = [TextMessage.new(role: :user, content: "Hello!")]
#   response = model.infer(messages, [])
#
# @example With tools
#   model.infer(messages, [weather_tool], tool_choice: "auto")
#
class RoboticModel
  # @!attribute [r] model_id
  #   @return [String] the LLM model identifier
  # @!attribute [r] provider
  #   @return [Symbol] the LLM provider (:anthropic, :openai, :gemini, etc.)
  # @!attribute [r] adapter
  #   @return [Adapters::Base] the adapter for message conversion
  attr_reader :model_id, :provider, :adapter

  # Creates a new RoboticModel instance.
  #
  # @param model_id [String] the model identifier
  # @param provider [Symbol, nil] the provider (auto-detected from the
  #   model name prefix when not specified; see #detect_provider)
  def initialize(model_id, provider: nil)
    @model_id = model_id
    @provider = provider || detect_provider(model_id)
    @adapter = Adapters::Registry.for(@provider)
  end

  # Perform inference with messages and optional tools
  #
  # @param messages [Array<Message>] Conversation messages; the final
  #   conversation entry is treated as the outgoing user message
  # @param tools [Array<Tool>] Available tools
  # @param tool_choice [String, Symbol] Tool selection mode
  #   NOTE(review): tool_choice is accepted but never read in this method
  #   body — confirm whether it should be forwarded to RubyLLM.
  # @param streaming [Proc, nil] Streaming callback (alternative to a block)
  # @return [InferenceResponse]
  #
  def infer(messages, tools = [], tool_choice: "auto", streaming: nil, &block)
    chat = create_chat

    # Register tools if any (each wrapped as a dynamic RubyLLM::Tool subclass)
    if tools.any?
      ruby_llm_tools = create_ruby_llm_tools(tools)
      chat = chat.with_tools(*ruby_llm_tools)
    end

    # Add system message if present
    system_content = @adapter.extract_system_message(messages)
    chat = chat.with_instructions(system_content) if system_content

    # Build conversation (excluding the last user message since ask() will add it)
    conversation = @adapter.conversation_messages(messages)
    conversation[0...-1].each do |msg|
      add_message_to_chat(chat, msg)
    end

    # Make the request (ask adds the user message); empty string when the
    # conversation is empty
    user_content = conversation.last&.content || ""

    # Clear tool execution capture before making the request so only this
    # request's auto-executed tool runs are collected
    ToolExecutionCapture.clear!

    response = if block_given? || streaming
      chat.ask(user_content, &(block || streaming))
    else
      chat.ask(user_content)
    end

    # Parse response into our message objects
    output = @adapter.parse_response(response)

    # Build captured tool results from auto-executed tools
    captured_tool_results = build_captured_tool_results(tools)

    InferenceResponse.new(
      output: output,
      raw: response,
      model: model_id,
      provider: provider,
      captured_tool_results: captured_tool_results
    )
  end

  # Quick ask without building a full message array
  #
  # @param prompt [String] User prompt
  # @param system [String, nil] System prompt (optional)
  # @param tools [Array<Tool>] Available tools
  # @return [InferenceResponse]
  #
  def ask(prompt, system: nil, tools: [], &block)
    messages = []
    messages << TextMessage.new(role: "system", content: system) if system
    messages << TextMessage.new(role: "user", content: prompt)

    infer(messages, tools, &block)
  end

  private

  # Builds a fresh RubyLLM chat session for this model/provider pair.
  def create_chat
    RubyLLM.chat(model: model_id, provider: provider)
  end

  # Wraps each RobotLab tool in a dynamically built RubyLLM::Tool subclass.
  def create_ruby_llm_tools(tools)
    tools.map do |tool|
      # Create a dynamic RubyLLM::Tool subclass
      create_tool_class(tool)
    end
  end

  # Builds a RubyLLM::Tool subclass for one tool: mirrors its JSON schema
  # as `param` declarations and records every execution in
  # ToolExecutionCapture so results survive RubyLLM's auto-execution.
  def create_tool_class(tool)
    # Captured by the closures below (class body and define_method blocks)
    tool_definition = tool
    tool_name = tool.name

    klass = Class.new(RubyLLM::Tool) do
      description tool_definition.description || ""

      # Add parameters from the tool's JSON schema
      schema = tool_definition.to_json_schema
      if schema[:parameters] && schema[:parameters][:properties]
        schema[:parameters][:properties].each do |prop_name, prop_def|
          required = schema[:parameters][:required]&.include?(prop_name.to_s)
          param prop_name.to_sym,
                type: prop_def[:type] || "string",
                desc: prop_def[:description],
                required: required
        end
      end

      define_method(:execute) do |**kwargs|
        # This is called by ruby_llm when the tool is invoked.
        # Call the handler directly (bypassing Tool#call which requires context);
        # handlers should use the **_context pattern to accept but ignore context.
        output = tool_definition.handler.call(kwargs, robot: nil, network: nil, step: nil)

        # Record the execution for later retrieval
        ToolExecutionCapture.record(
          tool_name: tool_name,
          tool_id: SecureRandom.uuid,
          input: kwargs,
          output: output
        )

        output
      end
    end

    # Set the class name so RubyLLM can identify the tool
    # (RubyLLM converts class names to snake_case for tool identification)
    class_name = tool_name.split("_").map(&:capitalize).join
    klass.define_singleton_method(:name) { class_name }

    # Also define instance method for name (used by some RubyLLM code paths)
    klass.define_method(:name) { tool_name }

    # Store reference to our tool for later execution
    klass.define_singleton_method(:robot_lab_tool) { tool_definition }
    klass
  end

  # Converts ToolExecutionCapture records into ToolResultMessage objects.
  #
  # NOTE(review): assumes ToolMessage is defined elsewhere in the library —
  # verify, since sibling code only references ToolResultMessage. The tools
  # lookup result is intentionally unused (underscore prefix).
  def build_captured_tool_results(tools)
    ToolExecutionCapture.captured.map do |capture|
      _tool = tools.find { |t| t.name == capture[:tool_name] }
      tool_message = ToolMessage.new(
        id: capture[:tool_id],
        name: capture[:tool_name],
        input: capture[:input]
      )
      ToolResultMessage.new(
        tool: tool_message,
        content: { data: capture[:output] }
      )
    end
  end

  # Replays a single prior message into the chat session. Only user and
  # assistant TextMessages are forwarded; other message kinds are dropped.
  def add_message_to_chat(chat, msg)
    case msg
    when TextMessage
      if msg.user?
        chat.add_message(role: :user, content: msg.content)
      elsif msg.assistant?
        chat.add_message(role: :assistant, content: msg.content)
      end
    when ToolResultMessage
      # Tool results are handled by ruby_llm internally
    end
  end

  # Infers the provider from the model name prefix, falling back to the
  # globally configured default provider.
  def detect_provider(model_id)
    case model_id.to_s.downcase
    when /^claude/, /^anthropic/
      :anthropic
    when /^gpt/, /^o1/, /^o3/, /^chatgpt/
      :openai
    when /^gemini/
      :gemini
    when /^llama/, /^mistral/, /^mixtral/
      :ollama
    else
      RobotLab.configuration.default_provider
    end
  end
end
|
|
251
|
+
|
|
252
|
+
# Response from LLM inference.
#
# Bundles the parsed output messages, the raw provider response, and any
# tool results that were auto-executed during the call.
class InferenceResponse
  # @!attribute [r] output
  #   @return [Array<Message>] parsed output messages
  # @!attribute [r] raw
  #   @return [Object] the raw response from RubyLLM
  # @!attribute [r] model
  #   @return [String] the model that generated the response
  # @!attribute [r] provider
  #   @return [Symbol] the provider that handled the request
  # @!attribute [r] captured_tool_results
  #   @return [Array<ToolResultMessage>] tool executions that were auto-executed
  attr_reader :output, :raw, :model, :provider, :captured_tool_results

  # Creates a new InferenceResponse instance.
  #
  # @param output [Array<Message>] parsed output messages
  # @param raw [Object] raw response from RubyLLM
  # @param model [String] model identifier
  # @param provider [Symbol] provider identifier
  # @param captured_tool_results [Array<ToolResultMessage>] captured results
  def initialize(output:, raw:, model:, provider:, captured_tool_results: [])
    @output = output
    @raw = raw
    @model = model
    @provider = provider
    @captured_tool_results = captured_tool_results
  end

  # Stop reason reported by the final output message, if any.
  #
  # @return [String, nil]
  def stop_reason
    final = output.last
    final&.stop_reason
  end

  # Whether inference ended naturally (stop reason "stop").
  #
  # @return [Boolean]
  def stopped?
    stop_reason == "stop"
  end

  # Whether the model is requesting tool execution.
  #
  # @return [Boolean]
  def wants_tools?
    return true if stop_reason == "tool"

    output.any?(&:tool_call?)
  end

  # All tool calls present in the output messages.
  #
  # @return [Array<ToolMessage>]
  def tool_calls
    output.flat_map { |message| message.tool_call? ? message.tools : [] }
  end

  # Concatenated text content of all text messages.
  #
  # @return [String]
  def text_content
    output.filter_map { |message| message.content if message.text? }.join
  end
end
|
|
324
|
+
end
|
|
@@ -0,0 +1,188 @@
|
|
|
1
|
+
# frozen_string_literal: true

module RobotLab
  # Hash-like wrapper around state data that tracks mutations.
  #
  # StateProxy keeps an internal symbol-keyed hash and mediates every read
  # and write, giving callers indifferent (string/symbol) key access,
  # method-style accessors, and an optional change-notification hook.
  #
  # @example
  #   data = { count: 0, name: "test" }
  #   proxy = StateProxy.new(data)
  #   proxy[:count] = 1
  #   proxy.count # => 1
  #   proxy[:name] # => "test"
  #   proxy.to_h # => { count: 1, name: "test" }
  #
  class StateProxy
    # Creates a new StateProxy.
    #
    # @param data [Hash] the initial data (keys are symbolized)
    # @param on_change [Proc, nil] called with (key, old_value, new_value)
    #   whenever a write actually changes a value
    def initialize(data = {}, on_change: nil)
      @data = data.transform_keys(&:to_sym)
      @on_change = on_change
    end

    # Reads a value; string and symbol keys are equivalent.
    #
    # @param key [Symbol, String]
    # @return [Object]
    def [](key)
      @data[key.to_sym]
    end

    # Writes a value, firing the on_change hook only when the value differs.
    #
    # @param key [Symbol, String]
    # @param value [Object]
    # @return [Object] the stored value
    def []=(key, value)
      sym_key = key.to_sym
      previous = @data[sym_key]
      @data[sym_key] = value
      @on_change&.call(sym_key, previous, value) if previous != value
      value
    end

    # @param key [Symbol, String]
    # @return [Boolean] whether the key is present
    def key?(key)
      @data.key?(key.to_sym)
    end
    # @!method has_key?(key)
    #   Alias for {#key?}.
    alias has_key? key?

    # @!method include?(key)
    #   Alias for {#key?}.
    alias include? key?

    # @return [Array<Symbol>] all keys
    def keys
      @data.keys
    end

    # @return [Array] all values
    def values
      @data.values
    end

    # Iterates over key-value pairs.
    #
    # @yield [Symbol, Object]
    def each(&block)
      @data.each(&block)
    end

    # Removes a key.
    #
    # @param key [Symbol, String]
    # @return [Object] the deleted value (nil if absent)
    def delete(key)
      @data.delete(key.to_sym)
    end

    # Merges another hash in via {#[]=} so change tracking still fires.
    #
    # @param other [Hash]
    # @return [self]
    def merge!(other)
      other.each { |key, value| self[key] = value }
      self
    end

    # @return [Hash] a shallow copy of the underlying data
    def to_h
      @data.dup
    end
    alias to_hash to_h

    # Deep-copies the data (nested hashes and arrays included) into a new
    # proxy that shares the same on_change hook.
    #
    # @return [StateProxy]
    def dup
      StateProxy.new(deep_dup(@data), on_change: @on_change)
    end

    # @return [Boolean] whether there is no data
    def empty?
      @data.empty?
    end

    # @return [Integer] number of keys
    def size
      @data.size
    end
    alias length size

    # Makes method-style readers/writers visible to respond_to?.
    def respond_to_missing?(method_name, include_private = false)
      @data.key?(method_name.to_s.chomp("=").to_sym) || super
    end

    # Routes method-style access to hash access.
    #
    # @example
    #   proxy.name # Same as proxy[:name]
    #   proxy.name = "x" # Same as proxy[:name] = "x"
    def method_missing(method_name, *args, &block)
      name = method_name.to_s
      if name.end_with?("=")
        self[name.chomp("=").to_sym] = args.first
      elsif @data.key?(method_name.to_sym)
        self[method_name]
      else
        super
      end
    end

    def inspect
      "#<RobotLab::StateProxy #{@data.inspect}>"
    end

    private

    # Recursively duplicates hashes and arrays; other objects are dup'd
    # when possible and returned as-is when duplication raises.
    def deep_dup(obj)
      if obj.is_a?(Hash)
        obj.transform_values { |v| deep_dup(v) }
      elsif obj.is_a?(Array)
        obj.map { |element| deep_dup(element) }
      else
        begin
          obj.dup
        rescue StandardError
          obj
        end
      end
    end
  end
end
|