robot_lab 0.0.4 → 0.0.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +76 -0
  3. data/README.md +64 -6
  4. data/Rakefile +2 -1
  5. data/docs/api/core/index.md +41 -46
  6. data/docs/api/core/memory.md +200 -154
  7. data/docs/api/core/network.md +13 -3
  8. data/docs/api/core/robot.md +38 -26
  9. data/docs/api/core/state.md +55 -73
  10. data/docs/api/index.md +7 -28
  11. data/docs/api/messages/index.md +35 -20
  12. data/docs/api/messages/text-message.md +67 -21
  13. data/docs/api/messages/tool-call-message.md +80 -41
  14. data/docs/api/messages/tool-result-message.md +119 -50
  15. data/docs/api/messages/user-message.md +48 -24
  16. data/docs/architecture/core-concepts.md +10 -15
  17. data/docs/concepts.md +5 -7
  18. data/docs/examples/index.md +2 -2
  19. data/docs/getting-started/configuration.md +80 -0
  20. data/docs/guides/building-robots.md +10 -9
  21. data/docs/guides/creating-networks.md +49 -0
  22. data/docs/guides/index.md +0 -5
  23. data/docs/guides/rails-integration.md +244 -162
  24. data/docs/guides/streaming.md +118 -138
  25. data/docs/index.md +0 -8
  26. data/examples/03_network.rb +10 -7
  27. data/examples/08_llm_config.rb +40 -11
  28. data/examples/09_chaining.rb +45 -6
  29. data/examples/11_network_introspection.rb +30 -7
  30. data/examples/12_message_bus.rb +1 -1
  31. data/examples/14_rusty_circuit/heckler.rb +14 -8
  32. data/examples/14_rusty_circuit/open_mic.rb +5 -3
  33. data/examples/14_rusty_circuit/scout.rb +14 -31
  34. data/examples/15_memory_network_and_bus/editorial_pipeline.rb +1 -1
  35. data/examples/16_writers_room/display.rb +158 -0
  36. data/examples/16_writers_room/output/.gitignore +4 -0
  37. data/examples/16_writers_room/output/README.md +69 -0
  38. data/examples/16_writers_room/output/opus_001.md +263 -0
  39. data/examples/16_writers_room/output/opus_001_notes.log +470 -0
  40. data/examples/16_writers_room/output/opus_002.md +245 -0
  41. data/examples/16_writers_room/output/opus_002_notes.log +546 -0
  42. data/examples/16_writers_room/output/opus_002_screenplay.md +7989 -0
  43. data/examples/16_writers_room/output/opus_002_screenplay_notes.md +993 -0
  44. data/examples/16_writers_room/prompts/screenplay_writer.md +66 -0
  45. data/examples/16_writers_room/prompts/writer.md +37 -0
  46. data/examples/16_writers_room/room.rb +186 -0
  47. data/examples/16_writers_room/tools.rb +173 -0
  48. data/examples/16_writers_room/writer.rb +121 -0
  49. data/examples/16_writers_room/writers_room.rb +256 -0
  50. data/lib/generators/robot_lab/templates/initializer.rb.tt +0 -13
  51. data/lib/robot_lab/memory.rb +8 -32
  52. data/lib/robot_lab/network.rb +13 -20
  53. data/lib/robot_lab/robot/bus_messaging.rb +239 -0
  54. data/lib/robot_lab/robot/mcp_management.rb +88 -0
  55. data/lib/robot_lab/robot/template_rendering.rb +130 -0
  56. data/lib/robot_lab/robot.rb +56 -420
  57. data/lib/robot_lab/run_config.rb +184 -0
  58. data/lib/robot_lab/state_proxy.rb +2 -12
  59. data/lib/robot_lab/task.rb +8 -1
  60. data/lib/robot_lab/utils.rb +39 -0
  61. data/lib/robot_lab/version.rb +1 -1
  62. data/lib/robot_lab.rb +29 -8
  63. data/mkdocs.yml +0 -11
  64. metadata +21 -20
  65. data/docs/api/adapters/anthropic.md +0 -121
  66. data/docs/api/adapters/gemini.md +0 -133
  67. data/docs/api/adapters/index.md +0 -104
  68. data/docs/api/adapters/openai.md +0 -134
  69. data/docs/api/history/active-record-adapter.md +0 -275
  70. data/docs/api/history/config.md +0 -284
  71. data/docs/api/history/index.md +0 -128
  72. data/docs/api/history/thread-manager.md +0 -194
  73. data/docs/guides/history.md +0 -359
  74. data/lib/robot_lab/adapters/anthropic.rb +0 -163
  75. data/lib/robot_lab/adapters/base.rb +0 -85
  76. data/lib/robot_lab/adapters/gemini.rb +0 -193
  77. data/lib/robot_lab/adapters/openai.rb +0 -160
  78. data/lib/robot_lab/adapters/registry.rb +0 -81
  79. data/lib/robot_lab/errors.rb +0 -70
  80. data/lib/robot_lab/history/active_record_adapter.rb +0 -146
  81. data/lib/robot_lab/history/config.rb +0 -115
  82. data/lib/robot_lab/history/thread_manager.rb +0 -93
  83. data/lib/robot_lab/robotic_model.rb +0 -324
@@ -1,115 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module RobotLab
4
- module History
5
- # Configuration for conversation history persistence
6
- #
7
- # Defines callbacks for creating threads, retrieving history,
8
- # and appending messages/results.
9
- #
10
- # @example
11
- # config = History::Config.new(
12
- # create_thread: ->(state:, input:, **) {
13
- # { session_id: SecureRandom.uuid }
14
- # },
15
- # get: ->(session_id:, **) {
16
- # database.find_results(session_id)
17
- # },
18
- # append_results: ->(session_id:, new_results:, **) {
19
- # database.insert_results(session_id, new_results)
20
- # }
21
- # )
22
- #
23
- class Config
24
- # @!attribute [rw] create_thread
25
- # @return [Proc, nil] callback to create a new conversation thread
26
- # @!attribute [rw] get
27
- # @return [Proc, nil] callback to retrieve history for a thread
28
- # @!attribute [rw] append_user_message
29
- # @return [Proc, nil] callback to append user messages
30
- # @!attribute [rw] append_results
31
- # @return [Proc, nil] callback to append robot results
32
- attr_accessor :create_thread, :get, :append_user_message, :append_results
33
-
34
- # Initialize history configuration
35
- #
36
- # @param create_thread [Proc] Callback to create a new thread
37
- # @param get [Proc] Callback to retrieve history for a thread
38
- # @param append_user_message [Proc] Callback to append user messages
39
- # @param append_results [Proc] Callback to append robot results
40
- #
41
- def initialize(create_thread: nil, get: nil, append_user_message: nil, append_results: nil)
42
- @create_thread = create_thread
43
- @get = get
44
- @append_user_message = append_user_message
45
- @append_results = append_results
46
- end
47
-
48
- # Check if history persistence is configured
49
- #
50
- # @return [Boolean]
51
- #
52
- def configured?
53
- @create_thread && @get
54
- end
55
-
56
- # Create a new conversation thread
57
- #
58
- # @param state [State] Current state
59
- # @param input [String, UserMessage] Initial input
60
- # @param kwargs [Hash] Additional arguments
61
- # @return [Hash] Must include :session_id
62
- #
63
- def create_thread!(state:, input:, **kwargs)
64
- raise HistoryError, "create_thread callback not configured" unless @create_thread
65
-
66
- result = @create_thread.call(state: state, input: input, **kwargs)
67
-
68
- unless result.is_a?(Hash) && result[:session_id]
69
- raise HistoryError, "create_thread must return a hash with :session_id"
70
- end
71
-
72
- result
73
- end
74
-
75
- # Retrieve history for a thread
76
- #
77
- # @param session_id [String] Thread identifier
78
- # @param kwargs [Hash] Additional arguments
79
- # @return [Array<RobotResult>] History of results
80
- #
81
- def get!(session_id:, **kwargs)
82
- raise HistoryError, "get callback not configured" unless @get
83
-
84
- @get.call(session_id: session_id, **kwargs)
85
- end
86
-
87
- # Append a user message to the thread
88
- #
89
- # @param session_id [String] Thread identifier
90
- # @param message [UserMessage] Message to append
91
- # @param kwargs [Hash] Additional arguments
92
- #
93
- def append_user_message!(session_id:, message:, **kwargs)
94
- return unless @append_user_message
95
-
96
- @append_user_message.call(session_id: session_id, message: message, **kwargs)
97
- end
98
-
99
- # Append robot results to the thread
100
- #
101
- # @param session_id [String] Thread identifier
102
- # @param new_results [Array<RobotResult>] Results to append
103
- # @param kwargs [Hash] Additional arguments
104
- #
105
- def append_results!(session_id:, new_results:, **kwargs)
106
- return unless @append_results
107
-
108
- @append_results.call(session_id: session_id, new_results: new_results, **kwargs)
109
- end
110
- end
111
-
112
- # Error raised when history operations fail
113
- class HistoryError < RobotLab::Error; end
114
- end
115
- end
@@ -1,93 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module RobotLab
4
- module History
5
- # Manages conversation thread lifecycle
6
- #
7
- # Handles thread creation, history retrieval, and result persistence
8
- # using the configured history adapter.
9
- #
10
- # @example
11
- # manager = ThreadManager.new(config)
12
- # session_id = manager.create_thread(state: state, input: "Hello")
13
- # history = manager.get_history(session_id)
14
- #
15
- class ThreadManager
16
- # @!attribute [r] config
17
- # @return [Config] the history configuration
18
- attr_reader :config
19
-
20
- # Initialize thread manager
21
- #
22
- # @param config [Config] History configuration
23
- #
24
- def initialize(config)
25
- @config = config
26
- end
27
-
28
- # Create a new conversation thread
29
- #
30
- # @param state [State] Current state
31
- # @param input [String, UserMessage] Initial input
32
- # @return [String] Thread ID
33
- #
34
- def create_thread(state:, input:)
35
- result = @config.create_thread!(state: state, input: input)
36
- result[:session_id]
37
- end
38
-
39
- # Get history for a thread
40
- #
41
- # @param session_id [String] Thread identifier
42
- # @return [Array<RobotResult>] History of results
43
- #
44
- def get_history(session_id)
45
- @config.get!(session_id: session_id)
46
- end
47
-
48
- # Append user message to thread
49
- #
50
- # @param session_id [String] Thread identifier
51
- # @param message [UserMessage] Message to append
52
- #
53
- def append_user_message(session_id:, message:)
54
- @config.append_user_message!(session_id: session_id, message: message)
55
- end
56
-
57
- # Append results to thread
58
- #
59
- # @param session_id [String] Thread identifier
60
- # @param results [Array<RobotResult>] Results to append
61
- #
62
- def append_results(session_id:, results:)
63
- @config.append_results!(session_id: session_id, new_results: results)
64
- end
65
-
66
- # Load state from thread history
67
- #
68
- # @param session_id [String] Thread identifier
69
- # @param state [State, Memory] State/Memory to populate
70
- # @return [State, Memory] State/Memory with loaded history
71
- #
72
- def load_state(session_id:, state:)
73
- results = get_history(session_id)
74
-
75
- state.session_id = session_id
76
- results.each { |r| state.append_result(r) }
77
-
78
- state
79
- end
80
-
81
- # Save state results to thread
82
- #
83
- # @param session_id [String] Thread identifier
84
- # @param state [State] State with results to save
85
- # @param since_index [Integer] Save results from this index
86
- #
87
- def save_state(session_id:, state:, since_index: 0)
88
- new_results = state.results[since_index..]
89
- append_results(session_id: session_id, results: new_results) if new_results.any?
90
- end
91
- end
92
- end
93
- end
@@ -1,324 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module RobotLab
4
- # Thread-local storage for capturing tool executions during RubyLLM auto-execution.
5
- #
6
- # Stores tool execution records in thread-local storage so they can be
7
- # retrieved after an LLM inference call completes.
8
- #
9
- class ToolExecutionCapture
10
- # Returns the captured tool executions for the current thread.
11
- #
12
- # @return [Array<Hash>] array of execution records
13
- def self.captured
14
- Thread.current[:robot_lab_tool_executions] ||= []
15
- end
16
-
17
- # Clears the captured tool executions for the current thread.
18
- #
19
- # @return [Array] empty array
20
- def self.clear!
21
- Thread.current[:robot_lab_tool_executions] = []
22
- end
23
-
24
- # Records a tool execution.
25
- #
26
- # @param tool_name [String] name of the executed tool
27
- # @param tool_id [String] unique identifier for this execution
28
- # @param input [Hash] input parameters passed to the tool
29
- # @param output [Object] the tool's return value
30
- # @return [Array<Hash>] the updated captured array
31
- def self.record(tool_name:, tool_id:, input:, output:)
32
- captured << {
33
- tool_name: tool_name,
34
- tool_id: tool_id,
35
- input: input,
36
- output: output
37
- }
38
- end
39
- end
40
-
41
- # Wrapper around ruby_llm for LLM inference
42
- #
43
- # RoboticModel provides a unified interface for LLM calls, handling:
44
- # - Message format conversion via adapters
45
- # - Tool registration and execution
46
- # - Streaming support
47
- #
48
- # @example Basic usage
49
- # model = RoboticModel.new("claude-sonnet-4", provider: :anthropic)
50
- # messages = [TextMessage.new(role: :user, content: "Hello!")]
51
- # response = model.infer(messages, [])
52
- #
53
- # @example With tools
54
- # model.infer(messages, [weather_tool], tool_choice: "auto")
55
- #
56
- class RoboticModel
57
- # @!attribute [r] model_id
58
- # @return [String] the LLM model identifier
59
- # @!attribute [r] provider
60
- # @return [Symbol] the LLM provider (:anthropic, :openai, :gemini, etc.)
61
- # @!attribute [r] adapter
62
- # @return [Adapters::Base] the adapter for message conversion
63
- attr_reader :model_id, :provider, :adapter
64
-
65
- # Creates a new RoboticModel instance.
66
- #
67
- # @param model_id [String] the model identifier
68
- # @param provider [Symbol, nil] the provider (auto-detected if not specified)
69
- def initialize(model_id, provider: nil)
70
- @model_id = model_id
71
- @provider = provider || detect_provider(model_id)
72
- @adapter = Adapters::Registry.for(@provider)
73
- end
74
-
75
- # Perform inference with messages and optional tools
76
- #
77
- # @param messages [Array<Message>] Conversation messages
78
- # @param tools [Array<Tool>] Available tools
79
- # @param tool_choice [String, Symbol] Tool selection mode
80
- # @param streaming [Proc, nil] Streaming callback
81
- # @return [InferenceResponse]
82
- #
83
- def infer(messages, tools = [], tool_choice: "auto", streaming: nil, &block)
84
- chat = create_chat
85
-
86
- # Register tools if any
87
- if tools.any?
88
- ruby_llm_tools = create_ruby_llm_tools(tools)
89
- chat = chat.with_tools(*ruby_llm_tools)
90
- end
91
-
92
- # Add system message if present
93
- system_content = @adapter.extract_system_message(messages)
94
- chat = chat.with_instructions(system_content) if system_content
95
-
96
- # Build conversation (excluding the last user message since ask() will add it)
97
- conversation = @adapter.conversation_messages(messages)
98
- conversation[0...-1].each do |msg|
99
- add_message_to_chat(chat, msg)
100
- end
101
-
102
- # Make the request (ask adds the user message)
103
- user_content = conversation.last&.content || ""
104
-
105
- # Clear tool execution capture before making the request
106
- ToolExecutionCapture.clear!
107
-
108
- response = if block_given? || streaming
109
- chat.ask(user_content, &(block || streaming))
110
- else
111
- chat.ask(user_content)
112
- end
113
-
114
- # Parse response
115
- output = @adapter.parse_response(response)
116
-
117
- # Build captured tool results from auto-executed tools
118
- captured_tool_results = build_captured_tool_results(tools)
119
-
120
- InferenceResponse.new(
121
- output: output,
122
- raw: response,
123
- model: model_id,
124
- provider: provider,
125
- captured_tool_results: captured_tool_results
126
- )
127
- end
128
-
129
- # Quick ask without full message array
130
- #
131
- # @param prompt [String] User prompt
132
- # @param system [String, nil] System prompt
133
- # @param tools [Array<Tool>] Available tools
134
- # @return [InferenceResponse]
135
- #
136
- def ask(prompt, system: nil, tools: [], &block)
137
- messages = []
138
- messages << TextMessage.new(role: "system", content: system) if system
139
- messages << TextMessage.new(role: "user", content: prompt)
140
-
141
- infer(messages, tools, &block)
142
- end
143
-
144
- private
145
-
146
- def create_chat
147
- RubyLLM.chat(model: model_id, provider: provider)
148
- end
149
-
150
- def create_ruby_llm_tools(tools)
151
- tools.map do |tool|
152
- # Create a dynamic RubyLLM::Tool subclass
153
- create_tool_class(tool)
154
- end
155
- end
156
-
157
- def create_tool_class(tool)
158
- # Build a RubyLLM::Tool subclass dynamically
159
- tool_definition = tool
160
- tool_name = tool.name
161
-
162
- klass = Class.new(RubyLLM::Tool) do
163
- description tool_definition.description || ""
164
-
165
- # Add parameters from schema
166
- schema = tool_definition.to_json_schema
167
- if schema[:parameters] && schema[:parameters][:properties]
168
- schema[:parameters][:properties].each do |prop_name, prop_def|
169
- required = schema[:parameters][:required]&.include?(prop_name.to_s)
170
- param prop_name.to_sym,
171
- type: prop_def[:type] || "string",
172
- desc: prop_def[:description],
173
- required: required
174
- end
175
- end
176
-
177
- define_method(:execute) do |**kwargs|
178
- # This is called by ruby_llm when the tool is invoked
179
- # Call the handler directly (bypassing Tool#call which requires context)
180
- # Handlers should use **_context pattern to accept but ignore context
181
- output = tool_definition.handler.call(kwargs, robot: nil, network: nil, step: nil)
182
-
183
- # Record the execution for later retrieval
184
- ToolExecutionCapture.record(
185
- tool_name: tool_name,
186
- tool_id: SecureRandom.uuid,
187
- input: kwargs,
188
- output: output
189
- )
190
-
191
- output
192
- end
193
- end
194
-
195
- # Set the class name so RubyLLM can identify the tool
196
- # RubyLLM converts class names to snake_case for tool identification
197
- class_name = tool_name.split("_").map(&:capitalize).join
198
- klass.define_singleton_method(:name) { class_name }
199
-
200
- # Also define instance method for name (used by some RubyLLM code paths)
201
- klass.define_method(:name) { tool_name }
202
-
203
- # Store reference to our tool for later execution
204
- klass.define_singleton_method(:robot_lab_tool) { tool_definition }
205
- klass
206
- end
207
-
208
- def build_captured_tool_results(tools)
209
- ToolExecutionCapture.captured.map do |capture|
210
- _tool = tools.find { |t| t.name == capture[:tool_name] }
211
- tool_message = ToolMessage.new(
212
- id: capture[:tool_id],
213
- name: capture[:tool_name],
214
- input: capture[:input]
215
- )
216
- ToolResultMessage.new(
217
- tool: tool_message,
218
- content: { data: capture[:output] }
219
- )
220
- end
221
- end
222
-
223
- def add_message_to_chat(chat, msg)
224
- case msg
225
- when TextMessage
226
- if msg.user?
227
- chat.add_message(role: :user, content: msg.content)
228
- elsif msg.assistant?
229
- chat.add_message(role: :assistant, content: msg.content)
230
- end
231
- when ToolResultMessage
232
- # Tool results are handled by ruby_llm internally
233
- end
234
- end
235
-
236
- def detect_provider(model_id)
237
- case model_id.to_s.downcase
238
- when /^claude/, /^anthropic/
239
- :anthropic
240
- when /^gpt/, /^o1/, /^o3/, /^chatgpt/
241
- :openai
242
- when /^gemini/
243
- :gemini
244
- when /^llama/, /^mistral/, /^mixtral/
245
- :ollama
246
- else
247
- RobotLab.config.ruby_llm.provider
248
- end
249
- end
250
- end
251
-
252
- # Response from LLM inference.
253
- #
254
- # Contains the parsed output, raw response, and any captured tool results.
255
- #
256
- class InferenceResponse
257
- # @!attribute [r] output
258
- # @return [Array<Message>] parsed output messages
259
- # @!attribute [r] raw
260
- # @return [Object] the raw response from RubyLLM
261
- # @!attribute [r] model
262
- # @return [String] the model that generated the response
263
- # @!attribute [r] provider
264
- # @return [Symbol] the provider that handled the request
265
- # @!attribute [r] captured_tool_results
266
- # @return [Array<ToolResultMessage>] tool executions that were auto-executed
267
- attr_reader :output, :raw, :model, :provider, :captured_tool_results
268
-
269
- # Creates a new InferenceResponse instance.
270
- #
271
- # @param output [Array<Message>] parsed output messages
272
- # @param raw [Object] raw response from RubyLLM
273
- # @param model [String] model identifier
274
- # @param provider [Symbol] provider identifier
275
- # @param captured_tool_results [Array<ToolResultMessage>] captured results
276
- def initialize(output:, raw:, model:, provider:, captured_tool_results: [])
277
- @output = output
278
- @raw = raw
279
- @model = model
280
- @provider = provider
281
- @captured_tool_results = captured_tool_results
282
- end
283
-
284
- # Get the stop reason from the last output message
285
- #
286
- # @return [String, nil]
287
- #
288
- def stop_reason
289
- output.last&.stop_reason
290
- end
291
-
292
- # Check if inference stopped naturally
293
- #
294
- # @return [Boolean]
295
- #
296
- def stopped?
297
- stop_reason == "stop"
298
- end
299
-
300
- # Check if inference wants to call tools
301
- #
302
- # @return [Boolean]
303
- #
304
- def wants_tools?
305
- stop_reason == "tool" || output.any?(&:tool_call?)
306
- end
307
-
308
- # Get all tool calls from the response
309
- #
310
- # @return [Array<ToolMessage>]
311
- #
312
- def tool_calls
313
- output.select(&:tool_call?).flat_map(&:tools)
314
- end
315
-
316
- # Get the text content
317
- #
318
- # @return [String, nil]
319
- #
320
- def text_content
321
- output.select(&:text?).map(&:content).join
322
- end
323
- end
324
- end