robot_lab 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (153)
  1. checksums.yaml +7 -0
  2. data/.envrc +1 -0
  3. data/.github/workflows/deploy-github-pages.yml +52 -0
  4. data/.github/workflows/deploy-yard-docs.yml +52 -0
  5. data/CHANGELOG.md +55 -0
  6. data/COMMITS.md +196 -0
  7. data/LICENSE.txt +21 -0
  8. data/README.md +332 -0
  9. data/Rakefile +67 -0
  10. data/docs/api/adapters/anthropic.md +121 -0
  11. data/docs/api/adapters/gemini.md +133 -0
  12. data/docs/api/adapters/index.md +104 -0
  13. data/docs/api/adapters/openai.md +134 -0
  14. data/docs/api/core/index.md +113 -0
  15. data/docs/api/core/memory.md +314 -0
  16. data/docs/api/core/network.md +291 -0
  17. data/docs/api/core/robot.md +273 -0
  18. data/docs/api/core/state.md +273 -0
  19. data/docs/api/core/tool.md +353 -0
  20. data/docs/api/history/active-record-adapter.md +195 -0
  21. data/docs/api/history/config.md +191 -0
  22. data/docs/api/history/index.md +132 -0
  23. data/docs/api/history/thread-manager.md +144 -0
  24. data/docs/api/index.md +82 -0
  25. data/docs/api/mcp/client.md +221 -0
  26. data/docs/api/mcp/index.md +111 -0
  27. data/docs/api/mcp/server.md +225 -0
  28. data/docs/api/mcp/transports.md +264 -0
  29. data/docs/api/messages/index.md +67 -0
  30. data/docs/api/messages/text-message.md +102 -0
  31. data/docs/api/messages/tool-call-message.md +144 -0
  32. data/docs/api/messages/tool-result-message.md +154 -0
  33. data/docs/api/messages/user-message.md +171 -0
  34. data/docs/api/streaming/context.md +174 -0
  35. data/docs/api/streaming/events.md +237 -0
  36. data/docs/api/streaming/index.md +108 -0
  37. data/docs/architecture/core-concepts.md +243 -0
  38. data/docs/architecture/index.md +138 -0
  39. data/docs/architecture/message-flow.md +320 -0
  40. data/docs/architecture/network-orchestration.md +216 -0
  41. data/docs/architecture/robot-execution.md +243 -0
  42. data/docs/architecture/state-management.md +323 -0
  43. data/docs/assets/css/custom.css +56 -0
  44. data/docs/assets/images/robot_lab.jpg +0 -0
  45. data/docs/concepts.md +216 -0
  46. data/docs/examples/basic-chat.md +193 -0
  47. data/docs/examples/index.md +129 -0
  48. data/docs/examples/mcp-server.md +290 -0
  49. data/docs/examples/multi-robot-network.md +312 -0
  50. data/docs/examples/rails-application.md +420 -0
  51. data/docs/examples/tool-usage.md +310 -0
  52. data/docs/getting-started/configuration.md +230 -0
  53. data/docs/getting-started/index.md +56 -0
  54. data/docs/getting-started/installation.md +179 -0
  55. data/docs/getting-started/quick-start.md +203 -0
  56. data/docs/guides/building-robots.md +376 -0
  57. data/docs/guides/creating-networks.md +366 -0
  58. data/docs/guides/history.md +359 -0
  59. data/docs/guides/index.md +68 -0
  60. data/docs/guides/mcp-integration.md +356 -0
  61. data/docs/guides/memory.md +309 -0
  62. data/docs/guides/rails-integration.md +432 -0
  63. data/docs/guides/streaming.md +314 -0
  64. data/docs/guides/using-tools.md +394 -0
  65. data/docs/index.md +160 -0
  66. data/examples/01_simple_robot.rb +38 -0
  67. data/examples/02_tools.rb +106 -0
  68. data/examples/03_network.rb +103 -0
  69. data/examples/04_mcp.rb +219 -0
  70. data/examples/05_streaming.rb +124 -0
  71. data/examples/06_prompt_templates.rb +324 -0
  72. data/examples/07_network_memory.rb +329 -0
  73. data/examples/prompts/assistant/system.txt.erb +2 -0
  74. data/examples/prompts/assistant/user.txt.erb +1 -0
  75. data/examples/prompts/billing/system.txt.erb +7 -0
  76. data/examples/prompts/billing/user.txt.erb +1 -0
  77. data/examples/prompts/classifier/system.txt.erb +4 -0
  78. data/examples/prompts/classifier/user.txt.erb +1 -0
  79. data/examples/prompts/entity_extractor/system.txt.erb +11 -0
  80. data/examples/prompts/entity_extractor/user.txt.erb +3 -0
  81. data/examples/prompts/escalation/system.txt.erb +35 -0
  82. data/examples/prompts/escalation/user.txt.erb +34 -0
  83. data/examples/prompts/general/system.txt.erb +4 -0
  84. data/examples/prompts/general/user.txt.erb +1 -0
  85. data/examples/prompts/github_assistant/system.txt.erb +6 -0
  86. data/examples/prompts/github_assistant/user.txt.erb +1 -0
  87. data/examples/prompts/helper/system.txt.erb +1 -0
  88. data/examples/prompts/helper/user.txt.erb +1 -0
  89. data/examples/prompts/keyword_extractor/system.txt.erb +8 -0
  90. data/examples/prompts/keyword_extractor/user.txt.erb +3 -0
  91. data/examples/prompts/order_support/system.txt.erb +27 -0
  92. data/examples/prompts/order_support/user.txt.erb +22 -0
  93. data/examples/prompts/product_support/system.txt.erb +30 -0
  94. data/examples/prompts/product_support/user.txt.erb +32 -0
  95. data/examples/prompts/sentiment_analyzer/system.txt.erb +9 -0
  96. data/examples/prompts/sentiment_analyzer/user.txt.erb +3 -0
  97. data/examples/prompts/synthesizer/system.txt.erb +14 -0
  98. data/examples/prompts/synthesizer/user.txt.erb +15 -0
  99. data/examples/prompts/technical/system.txt.erb +7 -0
  100. data/examples/prompts/technical/user.txt.erb +1 -0
  101. data/examples/prompts/triage/system.txt.erb +16 -0
  102. data/examples/prompts/triage/user.txt.erb +17 -0
  103. data/lib/generators/robot_lab/install_generator.rb +78 -0
  104. data/lib/generators/robot_lab/robot_generator.rb +55 -0
  105. data/lib/generators/robot_lab/templates/initializer.rb.tt +41 -0
  106. data/lib/generators/robot_lab/templates/migration.rb.tt +32 -0
  107. data/lib/generators/robot_lab/templates/result_model.rb.tt +52 -0
  108. data/lib/generators/robot_lab/templates/robot.rb.tt +46 -0
  109. data/lib/generators/robot_lab/templates/robot_test.rb.tt +32 -0
  110. data/lib/generators/robot_lab/templates/routing_robot.rb.tt +53 -0
  111. data/lib/generators/robot_lab/templates/thread_model.rb.tt +40 -0
  112. data/lib/robot_lab/adapters/anthropic.rb +163 -0
  113. data/lib/robot_lab/adapters/base.rb +85 -0
  114. data/lib/robot_lab/adapters/gemini.rb +193 -0
  115. data/lib/robot_lab/adapters/openai.rb +159 -0
  116. data/lib/robot_lab/adapters/registry.rb +81 -0
  117. data/lib/robot_lab/configuration.rb +143 -0
  118. data/lib/robot_lab/error.rb +32 -0
  119. data/lib/robot_lab/errors.rb +70 -0
  120. data/lib/robot_lab/history/active_record_adapter.rb +146 -0
  121. data/lib/robot_lab/history/config.rb +115 -0
  122. data/lib/robot_lab/history/thread_manager.rb +93 -0
  123. data/lib/robot_lab/mcp/client.rb +210 -0
  124. data/lib/robot_lab/mcp/server.rb +84 -0
  125. data/lib/robot_lab/mcp/transports/base.rb +56 -0
  126. data/lib/robot_lab/mcp/transports/sse.rb +117 -0
  127. data/lib/robot_lab/mcp/transports/stdio.rb +133 -0
  128. data/lib/robot_lab/mcp/transports/streamable_http.rb +139 -0
  129. data/lib/robot_lab/mcp/transports/websocket.rb +108 -0
  130. data/lib/robot_lab/memory.rb +882 -0
  131. data/lib/robot_lab/memory_change.rb +123 -0
  132. data/lib/robot_lab/message.rb +357 -0
  133. data/lib/robot_lab/network.rb +350 -0
  134. data/lib/robot_lab/rails/engine.rb +29 -0
  135. data/lib/robot_lab/rails/railtie.rb +42 -0
  136. data/lib/robot_lab/robot.rb +560 -0
  137. data/lib/robot_lab/robot_result.rb +205 -0
  138. data/lib/robot_lab/robotic_model.rb +324 -0
  139. data/lib/robot_lab/state_proxy.rb +188 -0
  140. data/lib/robot_lab/streaming/context.rb +144 -0
  141. data/lib/robot_lab/streaming/events.rb +95 -0
  142. data/lib/robot_lab/streaming/sequence_counter.rb +48 -0
  143. data/lib/robot_lab/task.rb +117 -0
  144. data/lib/robot_lab/tool.rb +223 -0
  145. data/lib/robot_lab/tool_config.rb +112 -0
  146. data/lib/robot_lab/tool_manifest.rb +234 -0
  147. data/lib/robot_lab/user_message.rb +118 -0
  148. data/lib/robot_lab/version.rb +5 -0
  149. data/lib/robot_lab/waiter.rb +73 -0
  150. data/lib/robot_lab.rb +195 -0
  151. data/mkdocs.yml +214 -0
  152. data/sig/robot_lab.rbs +4 -0
  153. metadata +442 -0
# frozen_string_literal: true

module RobotLab
  module Adapters
    # Base adapter interface for LLM providers.
    #
    # An adapter translates between RobotLab's internal message objects and a
    # provider's wire format. Each provider (Anthropic, OpenAI, Gemini) has its
    # own API conventions; subclasses normalize those differences.
    #
    # @abstract Subclass and implement {#format_messages} and {#parse_response}
    class Base
      # @!attribute [r] provider
      #   @return [Symbol] the provider name
      attr_reader :provider

      # Creates a new adapter instance.
      #
      # @param provider [Symbol] the provider name
      def initialize(provider)
        @provider = provider
      end

      # Format internal messages for the provider's API.
      #
      # @param messages [Array<Message>] Internal message format
      # @return [Array<Hash>] Provider-specific message format
      # @raise [NotImplementedError] unless overridden by a subclass
      def format_messages(messages)
        raise NotImplementedError, "#{self.class}#format_messages must be implemented"
      end

      # Parse a provider response into internal message format.
      #
      # @param response [Object] Provider-specific response
      # @return [Array<Message>] Internal message format
      # @raise [NotImplementedError] unless overridden by a subclass
      def parse_response(response)
        raise NotImplementedError, "#{self.class}#parse_response must be implemented"
      end

      # Format tools for the provider's function-calling API.
      #
      # Default behavior: each tool's own JSON schema, unchanged.
      #
      # @param tools [Array<Tool>] Internal tool definitions
      # @return [Array<Hash>] Provider-specific tool format
      def format_tools(tools)
        tools.map { |tool| tool.to_json_schema }
      end

      # Format a tool-choice directive for the provider.
      #
      # Defaults to the OpenAI-style convention; subclasses override as needed.
      #
      # @param choice [String, Symbol] "auto", "any", or a specific tool name
      # @return [Object] Provider-specific tool choice
      def format_tool_choice(choice)
        name = choice.to_s
        return "auto" if name == "auto"
        return "required" if name == "any"

        { type: "function", function: { name: name } }
      end

      # Extract the system message content from a messages array.
      #
      # @param messages [Array<Message>]
      # @return [String, nil] content of the first system message, if any
      def extract_system_message(messages)
        messages.find(&:system?)&.content
      end

      # Filter out system messages, keeping only the conversation turns.
      #
      # @param messages [Array<Message>]
      # @return [Array<Message>]
      def conversation_messages(messages)
        messages.reject { |message| message.system? }
      end
    end
  end
end
# frozen_string_literal: true

module RobotLab
  module Adapters
    # Adapter for Google Gemini models
    #
    # Handles Gemini-specific API conventions:
    # - Role mapping (assistant -> model)
    # - Contents/parts array structure
    # - functionCall/functionResponse format
    #
    class Gemini < Base
      # Creates a new Gemini adapter instance.
      def initialize
        super(:gemini)
      end

      # Format messages for Gemini API
      #
      # Gemini uses "model" role instead of "assistant" and structures
      # content as parts arrays. System messages are excluded here because
      # Gemini carries them separately as system_instruction.
      #
      # @param messages [Array<Message>]
      # @return [Array<Hash>]
      def format_messages(messages)
        conversation_messages(messages).map { |msg| format_single_message(msg) }
      end

      # Parse Gemini response into internal messages
      #
      # Emits a TextMessage for any text content, then a ToolCallMessage
      # when the response carries function calls.
      #
      # @param response [RubyLLM::Response]
      # @return [Array<Message>]
      def parse_response(response)
        messages = []

        # Handle text content
        if response.content && !response.content.empty?
          messages << TextMessage.new(
            role: "assistant",
            content: response.content,
            stop_reason: response.tool_calls&.any? ? "tool" : "stop"
          )
        end

        # Handle function calls
        if response.tool_calls&.any?
          tool_messages = response.tool_calls.map do |id, tool_call|
            ToolMessage.new(
              id: id,
              name: tool_call.name,
              input: parse_tool_arguments(tool_call.arguments)
            )
          end

          messages << ToolCallMessage.new(
            role: "assistant",
            tools: tool_messages,
            stop_reason: "tool"
          )
        end

        messages
      end

      # Format tools for Gemini function declarations
      #
      # Gemini doesn't support additionalProperties in schemas, so each
      # tool's parameter schema is scrubbed recursively first.
      #
      # @param tools [Array<Tool>]
      # @return [Array<Hash>]
      def format_tools(tools)
        tools.map do |tool|
          schema = tool.to_json_schema
          params = clean_schema_for_gemini(schema[:parameters] || { type: "object", properties: {} })
          {
            name: schema[:name],
            description: schema[:description],
            parameters: params
          }
        end
      end

      # Gemini tool choice format
      #
      # @param choice [String, Symbol] "auto", "any", "none", or a tool name
      # @return [Hash]
      def format_tool_choice(choice)
        case choice.to_s
        when "auto" then { mode: "AUTO" }
        when "any" then { mode: "ANY" }
        when "none" then { mode: "NONE" }
        else { mode: "ANY", allowed_function_names: [choice.to_s] }
        end
      end

      private

      # Convert one internal message into a Gemini contents entry.
      def format_single_message(msg)
        role = gemini_role(msg.role)

        case msg
        when TextMessage
          {
            role: role,
            parts: [{ text: msg.content }]
          }
        when ToolCallMessage
          {
            role: "model",
            parts: msg.tools.map do |tool|
              {
                functionCall: {
                  name: tool.name,
                  args: tool.input
                }
              }
            end
          }
        when ToolResultMessage
          # Gemini expects function results to come back on the "user" turn.
          {
            role: "user",
            parts: [
              {
                functionResponse: {
                  name: msg.tool.name,
                  response: format_tool_result_content(msg.content)
                }
              }
            ]
          }
        else
          { role: role, parts: [{ text: msg.content.to_s }] }
        end
      end

      # Map internal roles onto Gemini's role vocabulary.
      def gemini_role(role)
        case role.to_s
        when "assistant" then "model"
        when "system" then "user" # Gemini handles system as system_instruction
        else role.to_s
        end
      end

      # Gemini functionResponse payloads must be objects, so scalars are
      # wrapped under a :result key.
      def format_tool_result_content(content)
        case content
        when Hash
          content
        when String
          { result: content }
        else
          { result: content.to_s }
        end
      end

      # Normalize tool-call arguments into a symbol-keyed Hash.
      # Unparseable JSON strings are preserved under :raw rather than lost.
      def parse_tool_arguments(arguments)
        case arguments
        when String
          begin
            JSON.parse(arguments, symbolize_names: true)
          rescue JSON::ParserError
            { raw: arguments }
          end
        when Hash
          arguments.transform_keys(&:to_sym)
        else
          arguments || {}
        end
      end

      # Recursively remove additionalProperties, which Gemini's schema
      # dialect rejects.
      #
      # Fix: the previous version only recursed through the symbol-keyed
      # :properties map, so string-keyed schemas and nested array :items
      # schemas retained additionalProperties and were rejected by Gemini.
      def clean_schema_for_gemini(schema)
        return schema unless schema.is_a?(Hash)

        cleaned = schema.dup
        cleaned.delete(:additionalProperties)
        cleaned.delete("additionalProperties")

        [:properties, "properties"].each do |key|
          next unless cleaned[key].is_a?(Hash)

          cleaned[key] = cleaned[key].transform_values do |prop|
            clean_schema_for_gemini(prop)
          end
        end

        [:items, "items"].each do |key|
          cleaned[key] = clean_schema_for_gemini(cleaned[key]) if cleaned[key].is_a?(Hash)
        end

        cleaned
      end
    end
  end
end
# frozen_string_literal: true

module RobotLab
  module Adapters
    # Adapter for OpenAI GPT models
    #
    # Handles OpenAI-specific API conventions:
    # - Function calling format
    # - Strict mode for structured outputs
    # - finish_reason to stop_reason mapping
    #
    class OpenAI < Base
      # Creates a new OpenAI adapter instance.
      def initialize
        super(:openai)
      end

      # Format messages for the OpenAI API.
      #
      # @param messages [Array<Message>]
      # @return [Array<Hash>]
      def format_messages(messages)
        messages.map { |message| format_single_message(message) }
      end

      # Parse an OpenAI response into internal messages.
      #
      # Produces a TextMessage for any text content, followed by a
      # ToolCallMessage when the response requested tool calls.
      #
      # @param response [RubyLLM::Response]
      # @return [Array<Message>]
      def parse_response(response)
        parsed = []
        calls_present = response.tool_calls&.any?

        text = response.content
        if text && !text.empty?
          parsed << TextMessage.new(
            role: "assistant",
            content: text,
            stop_reason: calls_present ? "tool" : "stop"
          )
        end

        if calls_present
          invocations = response.tool_calls.map do |call_id, call|
            ToolMessage.new(
              id: call_id,
              name: call.name,
              input: parse_tool_arguments(call.arguments)
            )
          end

          parsed << ToolCallMessage.new(
            role: "assistant",
            tools: invocations,
            stop_reason: "tool"
          )
        end

        parsed
      end

      # Format tools for OpenAI function calling.
      #
      # Strict mode defaults to true when a tool does not specify it.
      #
      # @param tools [Array<Tool>]
      # @return [Array<Hash>]
      def format_tools(tools)
        tools.map do |tool|
          schema = tool.to_json_schema
          function = {
            name: schema[:name],
            description: schema[:description],
            parameters: schema[:parameters] || { type: "object", properties: {} },
            strict: tool.strict.nil? ? true : tool.strict
          }.compact
          { type: "function", function: function }
        end
      end

      # OpenAI tool choice format.
      #
      # @param choice [String, Symbol] "auto", "any", "none", or a tool name
      # @return [String, Hash]
      def format_tool_choice(choice)
        directive = choice.to_s
        case directive
        when "auto", "none" then directive
        when "any" then "required"
        else { type: "function", function: { name: directive } }
        end
      end

      private

      # Convert one internal message into an OpenAI chat message hash.
      def format_single_message(msg)
        if msg.is_a?(TextMessage)
          { role: msg.role, content: msg.content }
        elsif msg.is_a?(ToolCallMessage)
          calls = msg.tools.map do |tool|
            {
              id: tool.id,
              type: "function",
              function: {
                name: tool.name,
                arguments: JSON.generate(tool.input)
              }
            }
          end
          { role: "assistant", content: nil, tool_calls: calls }
        elsif msg.is_a?(ToolResultMessage)
          {
            role: "tool",
            tool_call_id: msg.tool.id,
            content: format_tool_result_content(msg.content)
          }
        else
          { role: msg.role, content: msg.content.to_s }
        end
      end

      # OpenAI tool results are plain strings; hashes are JSON-encoded.
      def format_tool_result_content(content)
        return JSON.generate(content) if content.is_a?(Hash)
        return content if content.is_a?(String)

        content.to_s
      end

      # Normalize tool-call arguments into a symbol-keyed Hash.
      # Strips the markdown code-fence wrapping OpenAI sometimes emits;
      # unparseable strings are preserved under :raw.
      def parse_tool_arguments(arguments)
        if arguments.is_a?(String)
          # Handle OpenAI's backtick wrapping quirk
          cleaned = arguments.gsub(/\A```(?:json)?\n?/, "").gsub(/\n?```\z/, "")
          begin
            JSON.parse(cleaned, symbolize_names: true)
          rescue JSON::ParserError
            { raw: arguments }
          end
        elsif arguments.is_a?(Hash)
          arguments.transform_keys(&:to_sym)
        else
          arguments || {}
        end
      end
    end
  end
end
# frozen_string_literal: true

module RobotLab
  module Adapters
    # Registry for looking up provider adapters
    #
    # Maps provider symbols to their adapter classes.
    #
    # @example
    #   adapter = Registry.for(:anthropic)
    #   adapter.format_messages(messages)
    #
    module Registry
      # Built-in defaults. Frozen; runtime registrations go into the
      # mutable registry below (the previous implementation wrote into this
      # frozen hash from #register, which raised FrozenError).
      #
      # @return [Hash<Symbol, Class>] mapping of provider symbols to adapter classes
      ADAPTERS = {
        anthropic: Anthropic,
        openai: OpenAI,
        gemini: Gemini,
        # Azure uses OpenAI adapter
        azure_openai: OpenAI,
        # Grok uses OpenAI adapter
        grok: OpenAI,
        # Ollama uses OpenAI adapter
        ollama: OpenAI,
        # OpenRouter uses OpenAI adapter
        openrouter: OpenAI,
        # Bedrock uses Anthropic adapter
        bedrock: Anthropic,
        # VertexAI uses Gemini adapter
        vertexai: Gemini
      }.freeze

      class << self
        # Get adapter for a provider
        #
        # @param provider [Symbol, String] Provider name
        # @return [Base] Adapter instance
        # @raise [ArgumentError] If provider not found
        def for(provider)
          adapter_class = registry[normalize(provider)]

          unless adapter_class
            raise ArgumentError, "Unknown provider: #{provider}. " \
                                 "Available providers: #{available.join(', ')}"
          end

          adapter_class.new
        end

        # List available providers, including custom registrations
        #
        # @return [Array<Symbol>]
        def available
          registry.keys
        end

        # Check if provider is supported
        #
        # @param provider [Symbol, String]
        # @return [Boolean]
        def supports?(provider)
          registry.key?(normalize(provider))
        end

        # Register a custom adapter
        #
        # @param provider [Symbol] Provider name
        # @param adapter_class [Class] Adapter class
        def register(provider, adapter_class)
          registry[provider.to_sym] = adapter_class
        end

        private

        # Mutable lookup table seeded from the frozen defaults.
        def registry
          @registry ||= ADAPTERS.dup
        end

        # Accept "Azure-OpenAI", :azure_openai, etc. interchangeably.
        def normalize(provider)
          provider.to_s.downcase.tr("-", "_").to_sym
        end
      end
    end
  end
end
# frozen_string_literal: true

module RobotLab
  # Global configuration for RobotLab
  #
  # @example
  #   RobotLab.configure do |config|
  #     config.default_provider = :anthropic
  #     config.default_model = "claude-sonnet-4"
  #     config.template_path = "app/prompts"
  #     config.anthropic_api_key = ENV["ANTHROPIC_API_KEY"]
  #
  #     # Global MCP servers available to all networks and robots
  #     config.mcp = [
  #       { name: "github", transport: { type: "stdio", command: "github-mcp" } }
  #     ]
  #
  #     # Global tools whitelist (only these tools are available)
  #     config.tools = %w[search_code create_issue]
  #   end
  #
  class Configuration
    # @!attribute [rw] default_provider
    #   @return [Symbol] the default LLM provider (defaults to :anthropic)
    # @!attribute [rw] default_model
    #   @return [String] the default model to use (defaults to "claude-sonnet-4")
    # @!attribute [rw] max_iterations
    #   @return [Integer] maximum robot iterations per network run (defaults to 10)
    # @!attribute [rw] max_tool_iterations
    #   @return [Integer] maximum tool iterations per robot run (defaults to 10)
    # @!attribute [rw] streaming_enabled
    #   @return [Boolean] whether streaming is enabled by default (defaults to true)
    # @!attribute [rw] logger
    #   @return [Logger] the logger instance
    # @!attribute [rw] mcp
    #   @return [Symbol, Array] global MCP server configuration (:none, :inherit, or array)
    # @!attribute [rw] tools
    #   @return [Symbol, Array] global tools whitelist (:none, :inherit, or array)
    attr_accessor :default_provider,
                  :default_model,
                  :max_iterations,
                  :max_tool_iterations,
                  :streaming_enabled,
                  :logger,
                  :mcp,
                  :tools

    # API-key writers (anthropic_api_key=, openai_api_key=, gemini_api_key=,
    # bedrock_api_key=, openrouter_api_key=). Each forwards the key straight
    # to the matching RubyLLM setting; nothing is stored on this object.
    #
    # @!method anthropic_api_key=(key)
    #   @param key [String] the Anthropic API key
    #   @return [void]
    # @!method openai_api_key=(key)
    #   @param key [String] the OpenAI API key
    #   @return [void]
    # @!method gemini_api_key=(key)
    #   @param key [String] the Google Gemini API key
    #   @return [void]
    # @!method bedrock_api_key=(key)
    #   @param key [String] the AWS Bedrock API key
    #   @return [void]
    # @!method openrouter_api_key=(key)
    #   @param key [String] the OpenRouter API key
    #   @return [void]
    %i[anthropic openai gemini bedrock openrouter].each do |provider|
      define_method(:"#{provider}_api_key=") do |key|
        RubyLLM.configure { |c| c.public_send(:"#{provider}_api_key=", key) }
      end
    end

    # Creates a new Configuration with default values.
    def initialize
      @default_provider = :anthropic
      @default_model = "claude-sonnet-4"
      @max_iterations = 10
      @max_tool_iterations = 10
      @streaming_enabled = true
      @logger = default_logger
      @template_path = nil
      @mcp = :none
      @tools = :none
    end

    # Set the template path and configure ruby_llm-template
    #
    # @param path [String] Path to the templates directory
    def template_path=(path)
      @template_path = path
      configure_template_library if path
    end

    # Returns the template path.
    #
    # @return [String] the configured template path or default
    def template_path
      @template_path || default_template_path
    end

    private

    # Lazily loads ruby_llm-template and points it at the configured path.
    def configure_template_library
      require "ruby_llm/template"
      RubyLLM::Template.configure do |config|
        config.template_directory = @template_path
      end
    end

    # Rails apps default to app/prompts; everything else to ./prompts.
    def default_template_path
      return "prompts" unless defined?(Rails) && Rails.root

      Rails.root.join("app", "prompts").to_s
    end

    # Reuse Rails.logger when available; otherwise log to stdout at INFO.
    def default_logger
      return Rails.logger if defined?(Rails) && Rails.respond_to?(:logger)

      require "logger"
      Logger.new($stdout, level: Logger::INFO)
    end
  end
end
# frozen_string_literal: true

module RobotLab
  # Root of the RobotLab exception hierarchy.
  #
  # Rescue this class to catch any error raised by the library.
  class Error < StandardError
  end

  # Raised when configuration is invalid or missing required values.
  #
  # @example
  #   raise ConfigurationError, "API key not set"
  class ConfigurationError < Error
  end

  # Raised when a requested tool is not found in the manifest.
  #
  # @example
  #   raise ToolNotFoundError, "Tool 'unknown_tool' not found"
  class ToolNotFoundError < Error
  end

  # Raised when LLM inference fails.
  #
  # @example
  #   raise InferenceError, "API request failed: 429 Too Many Requests"
  class InferenceError < Error
  end

  # Raised when MCP communication fails.
  #
  # @example
  #   raise MCPError, "Connection to MCP server refused"
  class MCPError < Error
  end
end