language-operator 0.0.1 → 0.1.31

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (120)
  1. checksums.yaml +4 -4
  2. data/.rubocop.yml +125 -0
  3. data/CHANGELOG.md +88 -0
  4. data/Gemfile +8 -0
  5. data/Gemfile.lock +284 -0
  6. data/LICENSE +229 -21
  7. data/Makefile +82 -0
  8. data/README.md +3 -11
  9. data/Rakefile +63 -0
  10. data/bin/aictl +7 -0
  11. data/completions/_aictl +232 -0
  12. data/completions/aictl.bash +121 -0
  13. data/completions/aictl.fish +114 -0
  14. data/docs/architecture/agent-runtime.md +585 -0
  15. data/docs/dsl/SCHEMA_VERSION.md +250 -0
  16. data/docs/dsl/agent-reference.md +604 -0
  17. data/docs/dsl/best-practices.md +1078 -0
  18. data/docs/dsl/chat-endpoints.md +895 -0
  19. data/docs/dsl/constraints.md +671 -0
  20. data/docs/dsl/mcp-integration.md +1177 -0
  21. data/docs/dsl/webhooks.md +932 -0
  22. data/docs/dsl/workflows.md +744 -0
  23. data/lib/language_operator/agent/base.rb +110 -0
  24. data/lib/language_operator/agent/executor.rb +440 -0
  25. data/lib/language_operator/agent/instrumentation.rb +54 -0
  26. data/lib/language_operator/agent/metrics_tracker.rb +183 -0
  27. data/lib/language_operator/agent/safety/ast_validator.rb +272 -0
  28. data/lib/language_operator/agent/safety/audit_logger.rb +104 -0
  29. data/lib/language_operator/agent/safety/budget_tracker.rb +175 -0
  30. data/lib/language_operator/agent/safety/content_filter.rb +93 -0
  31. data/lib/language_operator/agent/safety/manager.rb +207 -0
  32. data/lib/language_operator/agent/safety/rate_limiter.rb +150 -0
  33. data/lib/language_operator/agent/safety/safe_executor.rb +127 -0
  34. data/lib/language_operator/agent/scheduler.rb +183 -0
  35. data/lib/language_operator/agent/telemetry.rb +116 -0
  36. data/lib/language_operator/agent/web_server.rb +610 -0
  37. data/lib/language_operator/agent/webhook_authenticator.rb +226 -0
  38. data/lib/language_operator/agent.rb +149 -0
  39. data/lib/language_operator/cli/commands/agent.rb +1205 -0
  40. data/lib/language_operator/cli/commands/cluster.rb +371 -0
  41. data/lib/language_operator/cli/commands/install.rb +404 -0
  42. data/lib/language_operator/cli/commands/model.rb +266 -0
  43. data/lib/language_operator/cli/commands/persona.rb +393 -0
  44. data/lib/language_operator/cli/commands/quickstart.rb +22 -0
  45. data/lib/language_operator/cli/commands/status.rb +143 -0
  46. data/lib/language_operator/cli/commands/system.rb +772 -0
  47. data/lib/language_operator/cli/commands/tool.rb +537 -0
  48. data/lib/language_operator/cli/commands/use.rb +47 -0
  49. data/lib/language_operator/cli/errors/handler.rb +180 -0
  50. data/lib/language_operator/cli/errors/suggestions.rb +176 -0
  51. data/lib/language_operator/cli/formatters/code_formatter.rb +77 -0
  52. data/lib/language_operator/cli/formatters/log_formatter.rb +288 -0
  53. data/lib/language_operator/cli/formatters/progress_formatter.rb +49 -0
  54. data/lib/language_operator/cli/formatters/status_formatter.rb +37 -0
  55. data/lib/language_operator/cli/formatters/table_formatter.rb +163 -0
  56. data/lib/language_operator/cli/formatters/value_formatter.rb +113 -0
  57. data/lib/language_operator/cli/helpers/cluster_context.rb +62 -0
  58. data/lib/language_operator/cli/helpers/cluster_validator.rb +101 -0
  59. data/lib/language_operator/cli/helpers/editor_helper.rb +58 -0
  60. data/lib/language_operator/cli/helpers/kubeconfig_validator.rb +167 -0
  61. data/lib/language_operator/cli/helpers/pastel_helper.rb +24 -0
  62. data/lib/language_operator/cli/helpers/resource_dependency_checker.rb +74 -0
  63. data/lib/language_operator/cli/helpers/schedule_builder.rb +108 -0
  64. data/lib/language_operator/cli/helpers/user_prompts.rb +69 -0
  65. data/lib/language_operator/cli/main.rb +236 -0
  66. data/lib/language_operator/cli/templates/tools/generic.yaml +66 -0
  67. data/lib/language_operator/cli/wizards/agent_wizard.rb +246 -0
  68. data/lib/language_operator/cli/wizards/quickstart_wizard.rb +588 -0
  69. data/lib/language_operator/client/base.rb +214 -0
  70. data/lib/language_operator/client/config.rb +136 -0
  71. data/lib/language_operator/client/cost_calculator.rb +37 -0
  72. data/lib/language_operator/client/mcp_connector.rb +123 -0
  73. data/lib/language_operator/client.rb +19 -0
  74. data/lib/language_operator/config/cluster_config.rb +101 -0
  75. data/lib/language_operator/config/tool_patterns.yaml +57 -0
  76. data/lib/language_operator/config/tool_registry.rb +96 -0
  77. data/lib/language_operator/config.rb +138 -0
  78. data/lib/language_operator/dsl/adapter.rb +124 -0
  79. data/lib/language_operator/dsl/agent_context.rb +90 -0
  80. data/lib/language_operator/dsl/agent_definition.rb +427 -0
  81. data/lib/language_operator/dsl/chat_endpoint_definition.rb +115 -0
  82. data/lib/language_operator/dsl/config.rb +119 -0
  83. data/lib/language_operator/dsl/context.rb +50 -0
  84. data/lib/language_operator/dsl/execution_context.rb +47 -0
  85. data/lib/language_operator/dsl/helpers.rb +109 -0
  86. data/lib/language_operator/dsl/http.rb +184 -0
  87. data/lib/language_operator/dsl/mcp_server_definition.rb +73 -0
  88. data/lib/language_operator/dsl/parameter_definition.rb +124 -0
  89. data/lib/language_operator/dsl/registry.rb +36 -0
  90. data/lib/language_operator/dsl/schema.rb +1102 -0
  91. data/lib/language_operator/dsl/shell.rb +125 -0
  92. data/lib/language_operator/dsl/tool_definition.rb +112 -0
  93. data/lib/language_operator/dsl/webhook_authentication.rb +114 -0
  94. data/lib/language_operator/dsl/webhook_definition.rb +106 -0
  95. data/lib/language_operator/dsl/workflow_definition.rb +259 -0
  96. data/lib/language_operator/dsl.rb +161 -0
  97. data/lib/language_operator/errors.rb +60 -0
  98. data/lib/language_operator/kubernetes/client.rb +279 -0
  99. data/lib/language_operator/kubernetes/resource_builder.rb +194 -0
  100. data/lib/language_operator/loggable.rb +47 -0
  101. data/lib/language_operator/logger.rb +141 -0
  102. data/lib/language_operator/retry.rb +123 -0
  103. data/lib/language_operator/retryable.rb +132 -0
  104. data/lib/language_operator/templates/README.md +23 -0
  105. data/lib/language_operator/templates/examples/agent_synthesis.tmpl +115 -0
  106. data/lib/language_operator/templates/examples/persona_distillation.tmpl +19 -0
  107. data/lib/language_operator/templates/schema/.gitkeep +0 -0
  108. data/lib/language_operator/templates/schema/CHANGELOG.md +93 -0
  109. data/lib/language_operator/templates/schema/agent_dsl_openapi.yaml +306 -0
  110. data/lib/language_operator/templates/schema/agent_dsl_schema.json +452 -0
  111. data/lib/language_operator/tool_loader.rb +242 -0
  112. data/lib/language_operator/validators.rb +170 -0
  113. data/lib/language_operator/version.rb +1 -1
  114. data/lib/language_operator.rb +65 -3
  115. data/requirements/tasks/challenge.md +9 -0
  116. data/requirements/tasks/iterate.md +36 -0
  117. data/requirements/tasks/optimize.md +21 -0
  118. data/requirements/tasks/tag.md +5 -0
  119. data/test_agent_dsl.rb +108 -0
  120. metadata +507 -20
data/lib/language_operator/client/base.rb
@@ -0,0 +1,214 @@
+ # frozen_string_literal: true
+
+ require 'ruby_llm'
+ require 'ruby_llm/mcp'
+ require 'json'
+ require_relative 'config'
+ require_relative 'cost_calculator'
+ require_relative 'mcp_connector'
+ require_relative '../logger'
+ require_relative '../loggable'
+ require_relative '../retryable'
+ require_relative '../agent/instrumentation'
+
+ module LanguageOperator
+   module Client
+     # Core MCP client that connects to multiple servers and manages LLM chat
+     #
+     # This class handles all the backend logic for connecting to MCP servers,
+     # configuring the LLM, and managing chat sessions. It's designed to be
+     # UI-agnostic and reusable across different interfaces (CLI, web, headless).
+     #
+     # @example Basic usage
+     #   config = Config.load('config.yaml')
+     #   client = Base.new(config)
+     #   client.connect!
+     #   response = client.send_message("What tools are available?")
+     #
+     # @example Streaming responses
+     #   client.stream_message("Search for Ruby news") do |chunk|
+     #     print chunk
+     #   end
+     class Base
+       include LanguageOperator::Loggable
+       include LanguageOperator::Retryable
+       include LanguageOperator::Agent::Instrumentation
+       include CostCalculator
+       include MCPConnector
+
+       attr_reader :config, :clients, :chat
+
+       # Initialize the client with configuration
+       #
+       # @param config [Hash, String] Configuration hash or path to YAML file
+       def initialize(config)
+         @config = config.is_a?(String) ? Config.load(config) : config
+         @clients = []
+         @chat = nil
+         @debug = @config['debug'] || false
+
+         logger.debug('Client initialized',
+                      debug: @debug,
+                      llm_provider: @config.dig('llm', 'provider'),
+                      llm_model: @config.dig('llm', 'model'))
+       end
+
+       # Connect to all enabled MCP servers and configure LLM
+       #
+       # @return [Hash] Connection results with status and tool counts
+       # @raise [RuntimeError] If LLM configuration fails
+       def connect!
+         configure_llm
+         connect_mcp_servers
+       end
+
+       # Send a message and get the full response
+       #
+       # @param message [String] User message
+       # @return [String] Assistant response
+       # @raise [StandardError] If message fails
+       def send_message(message)
+         raise 'Not connected. Call #connect! first.' unless @chat
+
+         model = @config.dig('llm', 'model')
+         provider = @config.dig('llm', 'provider')
+
+         with_span('agent.llm.request', attributes: {
+                     'llm.model' => model,
+                     'llm.provider' => provider,
+                     'llm.message_count' => @chat.respond_to?(:messages) ? @chat.messages.length : nil
+                   }) do |span|
+           result = @chat.ask(message)
+
+           # Add token usage and cost attributes if available
+           if result.respond_to?(:input_tokens)
+             input_tokens = result.input_tokens || 0
+             output_tokens = result.output_tokens || 0
+             cost = calculate_cost(model, input_tokens, output_tokens)
+
+             span.set_attribute('llm.input_tokens', input_tokens)
+             span.set_attribute('llm.output_tokens', output_tokens)
+             span.set_attribute('llm.cost_usd', cost.round(6)) if cost
+           end
+
+           result
+         end
+       end
+
+       # Stream a message and yield each chunk
+       #
+       # @param message [String] User message
+       # @yield [String] Each chunk of the response
+       # @raise [StandardError] If streaming fails
+       def stream_message(message, &block)
+         raise 'Not connected. Call #connect! first.' unless @chat
+
+         # NOTE: RubyLLM may not support streaming yet, so we'll call ask and yield the full response
+         response = @chat.ask(message)
+
+         # Convert response to string if it's a RubyLLM::Message object
+         response_text = response.respond_to?(:content) ? response.content : response.to_s
+
+         block.call(response_text) if block_given?
+         response_text
+       end
+
+       # Get all available tools from connected servers
+       #
+       # @return [Array] Array of tool objects
+       def tools
+         @clients.flat_map(&:tools)
+       end
+
+       # Get information about connected servers
+       #
+       # @return [Array<Hash>] Server information (name, url, tool_count)
+       def servers_info
+         @clients.map do |client|
+           {
+             name: client.name,
+             tool_count: client.tools.length,
+             tools: client.tools.map(&:name)
+           }
+         end
+       end
+
+       # Clear chat history while keeping MCP connections
+       #
+       # @return [void]
+       def clear_history!
+         llm_config = @config['llm']
+         chat_params = build_chat_params(llm_config)
+         @chat = RubyLLM.chat(**chat_params)
+
+         all_tools = tools
+         @chat.with_tools(*all_tools) unless all_tools.empty?
+       end
+
+       # Check if the client is connected
+       #
+       # @return [Boolean] True if connected to at least one server
+       def connected?
+         !@clients.empty? && !@chat.nil?
+       end
+
+       # Get debug mode status
+       #
+       # @return [Boolean] True if debug mode is enabled
+       def debug?
+         @debug
+       end
+
+       private
+
+       def logger_component
+         'Client'
+       end
+
+       # Configure RubyLLM with provider settings
+       #
+       # @raise [RuntimeError] If provider is unknown
+       def configure_llm
+         llm_config = @config['llm']
+         provider = llm_config['provider']
+         model = llm_config['model']
+         timeout = llm_config['timeout'] || 300
+
+         logger.info('Configuring LLM',
+                     provider: provider,
+                     model: model,
+                     timeout: timeout)
+
+         logger.debug('Using custom endpoint', endpoint: llm_config['endpoint']) if provider == 'openai_compatible' && llm_config['endpoint']
+
+         RubyLLM.configure do |config|
+           case provider
+           when 'openai'
+             config.openai_api_key = llm_config['api_key']
+           when 'openai_compatible'
+             config.openai_api_key = llm_config['api_key'] || 'not-needed'
+             config.openai_api_base = llm_config['endpoint']
+           when 'anthropic'
+             config.anthropic_api_key = llm_config['api_key']
+           else
+             logger.error('Unknown LLM provider', provider: provider)
+             raise "Unknown provider: #{provider}"
+           end
+
+           # Set timeout for LLM inference (default 300 seconds for slow local models)
+           # RubyLLM uses request_timeout to control HTTP request timeouts
+           config.request_timeout = timeout if config.respond_to?(:request_timeout=)
+         end
+
+         # Configure MCP timeout separately (MCP has its own timeout setting)
+         # MCP request_timeout is in milliseconds, default is 300000ms (5 minutes)
+         RubyLLM::MCP.configure do |config|
+           mcp_timeout_ms = timeout * 1000
+           config.request_timeout = mcp_timeout_ms if config.respond_to?(:request_timeout=)
+         end
+
+         logger.info('LLM configuration complete')
+       end
+     end
+   end
+ end
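Taken together, connect! configures RubyLLM for the chosen provider and only then wires MCP tools into the chat session, so send_message raises "Not connected" until connect! has run. A minimal usage sketch follows; the endpoint URL, model name, and timeout are illustrative assumptions, not gem defaults:

    require 'language_operator/client' # loads Client::Base and Client::Config (see the client.rb hunk further down)

    # Hypothetical configuration hash; the keys mirror what Base and Config read above.
    config = {
      'llm' => {
        'provider' => 'openai_compatible',        # exercised by the openai_compatible branch of configure_llm
        'model'    => 'llama-3.1-8b-instruct',    # assumed local model name
        'endpoint' => 'http://localhost:8080/v1', # assumed local inference endpoint
        'timeout'  => 120
      },
      'mcp_servers' => [],                        # no tools; chat-only session
      'debug'       => true
    }

    client = LanguageOperator::Client::Base.new(config)
    client.connect!                               # configure_llm, then connect_mcp_servers (none configured here)
    puts client.send_message('Summarize what you can do.')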
data/lib/language_operator/client/config.rb
@@ -0,0 +1,136 @@
+ # frozen_string_literal: true
+
+ require 'yaml'
+
+ module LanguageOperator
+   module Client
+     # Configuration management for Langop MCP Client
+     #
+     # Handles loading configuration from YAML files or environment variables,
+     # with automatic provider detection and sensible defaults.
+     #
+     # @example Load from YAML file
+     #   config = Config.load('/path/to/config.yaml')
+     #
+     # @example Load from environment variables
+     #   config = Config.from_env
+     #
+     # @example Load with fallback
+     #   config = Config.load_with_fallback('/path/to/config.yaml')
+     class Config
+       # Load configuration from a YAML file
+       #
+       # @param path [String] Path to YAML configuration file
+       # @return [Hash] Configuration hash
+       # @raise [Errno::ENOENT] If file doesn't exist
+       def self.load(path)
+         YAML.load_file(path)
+       end
+
+       # Load configuration from environment variables
+       #
+       # @return [Hash] Configuration hash built from ENV
+       def self.from_env
+         {
+           'llm' => {
+             'provider' => detect_provider_from_env,
+             'model' => ENV.fetch('LLM_MODEL') { default_model_from_env },
+             'endpoint' => parse_model_endpoint_from_env,
+             'api_key' => ENV.fetch('OPENAI_API_KEY') { ENV.fetch('ANTHROPIC_API_KEY', 'dummy-key-for-local-proxy') }
+           },
+           'mcp_servers' => parse_mcp_servers_from_env,
+           'debug' => ENV['DEBUG'] == 'true'
+         }
+       end
+
+       # Parse model endpoint from environment variable
+       #
+       # Supports both MODEL_ENDPOINTS (comma-separated, uses first) and OPENAI_ENDPOINT
+       #
+       # @return [String, nil] Model endpoint URL
+       def self.parse_model_endpoint_from_env
+         # Support MODEL_ENDPOINTS (operator sets this)
+         endpoints_env = ENV.fetch('MODEL_ENDPOINTS', nil)
+         if endpoints_env && !endpoints_env.empty?
+           # Take the first endpoint from comma-separated list
+           endpoints_env.split(',').first.strip
+         else
+           # Fallback to legacy OPENAI_ENDPOINT
+           ENV.fetch('OPENAI_ENDPOINT', nil)
+         end
+       end
+
+       # Load configuration with automatic fallback to environment variables
+       #
+       # @param path [String] Path to YAML configuration file
+       # @return [Hash] Configuration hash
+       def self.load_with_fallback(path)
+         return from_env unless File.exist?(path)
+
+         load(path)
+       rescue StandardError => e
+         warn "⚠️ Error loading config from #{path}: #{e.message}"
+         warn 'Using environment variable fallback mode...'
+         from_env
+       end
+
+       # Detect LLM provider from environment variables
+       #
+       # @return [String] Provider name (openai_compatible, openai, or anthropic)
+       # @raise [RuntimeError] If no API key or endpoint is found
+       def self.detect_provider_from_env
+         if ENV['OPENAI_ENDPOINT'] || ENV['MODEL_ENDPOINTS']
+           'openai_compatible'
+         elsif ENV['OPENAI_API_KEY']
+           'openai'
+         elsif ENV['ANTHROPIC_API_KEY']
+           'anthropic'
+         else
+           raise 'No API key or endpoint found. Set OPENAI_ENDPOINT or MODEL_ENDPOINTS for local LLM, ' \
+                 'or OPENAI_API_KEY/ANTHROPIC_API_KEY for cloud providers.'
+         end
+       end
+
+       # Get default model for detected provider
+       #
+       # @return [String] Default model name
+       def self.default_model_from_env
+         {
+           'openai' => 'gpt-4',
+           'openai_compatible' => 'gpt-3.5-turbo',
+           'anthropic' => 'claude-3-5-sonnet-20241022'
+         }[detect_provider_from_env]
+       end
+
+       # Parse MCP servers from environment variables
+       #
+       # Supports MCP_SERVERS env var as comma-separated URLs or single MCP_URL
+       #
+       # @return [Array<Hash>] Array of MCP server configurations
+       def self.parse_mcp_servers_from_env
+         # Support both MCP_SERVERS (comma-separated) and legacy MCP_URL
+         servers_env = ENV.fetch('MCP_SERVERS', nil)
+         if servers_env && !servers_env.empty?
+           # Parse comma-separated URLs
+           servers_env.split(',').map.with_index do |url, index|
+             {
+               'name' => "default-tools-#{index}",
+               'url' => url.strip,
+               'transport' => 'streamable',
+               'enabled' => true
+             }
+           end
+         elsif ENV['MCP_URL']
+           [{
+             'name' => 'default-tools',
+             'url' => ENV['MCP_URL'],
+             'transport' => 'streamable',
+             'enabled' => true
+           }]
+         else
+           []
+         end
+       end
+     end
+   end
+ end
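In container deployments the same hash shape can come entirely from the environment. A sketch of how the detection rules above resolve; the endpoint and server URLs are placeholders, not values the gem ships with:

    require 'language_operator/client/config'

    # MODEL_ENDPOINTS takes precedence over OPENAI_ENDPOINT and selects the openai_compatible provider.
    ENV['MODEL_ENDPOINTS'] = 'http://llm-proxy:8000/v1,http://llm-backup:8000/v1'  # placeholder URLs
    ENV['MCP_SERVERS']     = 'http://web-tool:8080/mcp,http://email-tool:8080/mcp' # placeholder URLs

    config = LanguageOperator::Client::Config.from_env
    config.dig('llm', 'provider')               # => "openai_compatible"
    config.dig('llm', 'endpoint')               # => "http://llm-proxy:8000/v1" (first entry wins)
    config.dig('llm', 'model')                  # => "gpt-3.5-turbo" unless LLM_MODEL overrides it
    config['mcp_servers'].map { |s| s['name'] } # => ["default-tools-0", "default-tools-1"]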
data/lib/language_operator/client/cost_calculator.rb
@@ -0,0 +1,37 @@
+ # frozen_string_literal: true
+
+ module LanguageOperator
+   module Client
+     # Calculates LLM API costs based on token usage and model pricing
+     module CostCalculator
+       # Model pricing per 1M tokens (input, output) in USD
+       MODEL_PRICING = {
+         # OpenAI models
+         'gpt-4' => [30.0, 60.0],
+         'gpt-4-turbo' => [10.0, 30.0],
+         'gpt-4o' => [5.0, 15.0],
+         'gpt-3.5-turbo' => [0.5, 1.5],
+         # Anthropic models
+         'claude-3-5-sonnet-20241022' => [3.0, 15.0],
+         'claude-3-opus-20240229' => [15.0, 75.0],
+         'claude-3-sonnet-20240229' => [3.0, 15.0],
+         'claude-3-haiku-20240307' => [0.25, 1.25]
+       }.freeze
+
+       # Calculate cost based on model and token usage
+       #
+       # @param model [String] Model name
+       # @param input_tokens [Integer] Number of input tokens
+       # @param output_tokens [Integer] Number of output tokens
+       # @return [Float, nil] Cost in USD, or nil if model pricing not found
+       def calculate_cost(model, input_tokens, output_tokens)
+         pricing = MODEL_PRICING[model]
+         return nil unless pricing
+
+         input_cost = (input_tokens / 1_000_000.0) * pricing[0]
+         output_cost = (output_tokens / 1_000_000.0) * pricing[1]
+         input_cost + output_cost
+       end
+     end
+   end
+ end
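Pricing is expressed per million tokens, so the cost of a call is a straight proration, and models absent from the table return nil rather than a guessed figure. A worked example using the gpt-4o row (5.0 / 15.0 USD per 1M tokens) from the table above:

    require 'language_operator/client/cost_calculator'

    include LanguageOperator::Client::CostCalculator

    # (12_000 / 1_000_000.0) * 5.0 + (3_000 / 1_000_000.0) * 15.0 = 0.06 + 0.045
    calculate_cost('gpt-4o', 12_000, 3_000)   # => 0.105
    calculate_cost('my-local-model', 500, 50) # => nil (no pricing entry for unknown models)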
data/lib/language_operator/client/mcp_connector.rb
@@ -0,0 +1,123 @@
+ # frozen_string_literal: true
+
+ module LanguageOperator
+   module Client
+     # Handles MCP server connection logic
+     module MCPConnector
+       private
+
+       # Connect to all enabled MCP servers
+       #
+       # @return [void]
+       def connect_mcp_servers
+         enabled_servers = @config['mcp_servers'].select { |s| s['enabled'] }
+
+         all_tools = []
+
+         if enabled_servers.empty?
+           logger.info('No MCP servers configured, agent will run without tools')
+         else
+           logger.info('Connecting to MCP servers', count: enabled_servers.length)
+
+           enabled_servers.each do |server_config|
+             client = connect_with_retry(server_config)
+             next unless client
+
+             @clients << client
+             tool_count = client.tools.length
+             all_tools.concat(client.tools)
+
+             logger.info('MCP server connected',
+                         server: server_config['name'],
+                         tool_count: tool_count,
+                         tools: client.tools.map(&:name))
+           rescue StandardError => e
+             logger.error('Error connecting to MCP server',
+                          server: server_config['name'],
+                          error: e.message)
+             if @debug
+               logger.debug('Connection error backtrace',
+                            server: server_config['name'],
+                            backtrace: e.backtrace.join("\n"))
+             end
+           end
+
+           logger.info('MCP connection summary',
+                       connected_servers: @clients.length,
+                       total_tools: all_tools.length)
+         end
+
+         # Create chat with all collected tools (even if empty)
+         llm_config = @config['llm']
+         chat_params = build_chat_params(llm_config)
+         @chat = RubyLLM.chat(**chat_params)
+
+         @chat.with_tools(*all_tools) unless all_tools.empty?
+
+         logger.info('Chat session initialized', with_tools: !all_tools.empty?)
+       end
+
+       # Connect to MCP server with exponential backoff retry logic
+       #
+       # @param server_config [Hash] Server configuration
+       # @return [RubyLLM::MCP::Client, nil] Client if successful, nil if all retries failed
+       def connect_with_retry(server_config)
+         logger.debug('Attempting to connect to MCP server',
+                      server: server_config['name'],
+                      transport: server_config['transport'],
+                      url: server_config['url'])
+
+         with_retry_or_nil(
+           max_attempts: 4, # 1 initial attempt + 3 retries
+           base_delay: 1.0,
+           max_delay: 30.0,
+           on_retry: lambda { |error, attempt, delay|
+             logger.warn('MCP server connection failed, retrying',
+                         server: server_config['name'],
+                         attempt: attempt,
+                         max_attempts: 4,
+                         error: error.message,
+                         retry_delay: delay)
+           },
+           on_failure: lambda { |error, attempts|
+             logger.error('MCP server connection failed after all retries',
+                          server: server_config['name'],
+                          attempts: attempts,
+                          error: error.message)
+             if @debug
+               logger.debug('Final connection error backtrace',
+                            server: server_config['name'],
+                            backtrace: error.backtrace.join("\n"))
+             end
+           }
+         ) do
+           client = RubyLLM::MCP.client(
+             name: server_config['name'],
+             transport_type: server_config['transport'].to_sym,
+             config: {
+               url: server_config['url']
+             }
+           )
+
+           logger.info('Successfully connected to MCP server',
+                       server: server_config['name'])
+           client
+         end
+       end
+
+       # Build chat parameters based on LLM config
+       #
+       # @param llm_config [Hash] LLM configuration
+       # @return [Hash] Chat parameters for RubyLLM.chat
+       def build_chat_params(llm_config)
+         chat_params = { model: llm_config['model'] }
+         if llm_config['provider'] == 'openai_compatible'
+           chat_params[:provider] = :openai
+           chat_params[:assume_model_exists] = true
+         end
+
+         chat_params
+       end
+     end
+   end
+ end
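Each server gets up to four attempts (one initial try plus three retries) with backoff bounded between 1 and 30 seconds, and a server that never comes up is skipped rather than aborting the whole connection pass. A sketch of one entry in config['mcp_servers'] as consumed by connect_mcp_servers; the name and URL are assumptions for illustration:

    # Illustrative mcp_servers entry; the keys match what the connector reads above.
    {
      'name'      => 'web',                                  # label used in log lines and retries
      'url'       => 'http://web-tool.default.svc:8080/mcp', # assumed in-cluster service URL
      'transport' => 'streamable',                           # converted to :streamable for RubyLLM::MCP.client
      'enabled'   => true                                    # entries with enabled: false are filtered out
    }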
data/lib/language_operator/client.rb
@@ -0,0 +1,19 @@
+ # frozen_string_literal: true
+
+ require_relative 'client/base'
+ require_relative 'client/config'
+
+ module LanguageOperator
+   # MCP Client for connecting to and using MCP servers
+   #
+   # Provides a high-level interface for connecting to MCP servers,
+   # querying available tools, and sending messages to language models
+   # with tool calling capabilities.
+   #
+   # @example Basic usage
+   #   client = LanguageOperator::Client::Base.new(config)
+   #   client.connect!
+   #   response = client.send_message("What can you do?")
+   module Client
+   end
+ end
data/lib/language_operator/config/cluster_config.rb
@@ -0,0 +1,101 @@
+ # frozen_string_literal: true
+
+ require 'yaml'
+ require 'fileutils'
+
+ module LanguageOperator
+   module Config
+     # Manages cluster configuration in ~/.aictl/config.yaml
+     class ClusterConfig
+       CONFIG_DIR = File.expand_path('~/.aictl')
+       CONFIG_PATH = File.join(CONFIG_DIR, 'config.yaml')
+
+       class << self
+         def load
+           return default_config unless File.exist?(CONFIG_PATH)
+
+           YAML.load_file(CONFIG_PATH) || default_config
+         rescue StandardError => e
+           warn "Warning: Failed to load config from #{CONFIG_PATH}: #{e.message}"
+           default_config
+         end
+
+         def save(config)
+           FileUtils.mkdir_p(CONFIG_DIR)
+           File.write(CONFIG_PATH, YAML.dump(config))
+         end
+
+         def current_cluster
+           config = load
+           config['current-cluster']
+         end
+
+         def set_current_cluster(name)
+           config = load
+           raise ArgumentError, "Cluster '#{name}' does not exist" unless cluster_exists?(name)
+
+           config['current-cluster'] = name
+           save(config)
+         end
+
+         def add_cluster(name, namespace, kubeconfig, context)
+           config = load
+           config['clusters'] ||= []
+
+           # Remove existing cluster with same name
+           config['clusters'].reject! { |c| c['name'] == name }
+
+           # Add new cluster
+           config['clusters'] << {
+             'name' => name,
+             'namespace' => namespace,
+             'kubeconfig' => kubeconfig,
+             'context' => context,
+             'created' => Time.now.utc.iso8601
+           }
+
+           save(config)
+         end
+
+         def remove_cluster(name)
+           config = load
+           config['clusters']&.reject! { |c| c['name'] == name }
+
+           # Clear current-cluster if it was the removed one
+           config['current-cluster'] = nil if config['current-cluster'] == name
+
+           save(config)
+         end
+
+         def get_cluster(name)
+           config = load
+           cluster = config['clusters']&.find { |c| c['name'] == name }
+           # Convert string keys to symbol keys for easier access
+           cluster&.transform_keys(&:to_sym)
+         end
+
+         def list_clusters
+           config = load
+           clusters = config['clusters'] || []
+           # Convert string keys to symbol keys for easier access
+           clusters.map { |c| c.transform_keys(&:to_sym) }
+         end
+
+         def cluster_exists?(name)
+           !get_cluster(name).nil?
+         end
+
+         private
+
+         def default_config
+           {
+             'apiVersion' => 'aictl.langop.io/v1',
+             'kind' => 'Config',
+             'current-cluster' => nil,
+             'clusters' => []
+           }
+         end
+       end
+     end
+   end
+ end
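ClusterConfig is a thin class-level wrapper around ~/.aictl/config.yaml, so every mutator re-reads and rewrites the whole file. A sketch of the round trip; the cluster name, namespace, and kubeconfig path below are examples only:

    require 'time' # Time#iso8601, used by add_cluster, comes from the stdlib 'time' extension
    require 'language_operator/config/cluster_config'

    cfg = LanguageOperator::Config::ClusterConfig

    cfg.add_cluster('staging', 'langop-system', File.expand_path('~/.kube/config'), 'kind-staging')
    cfg.set_current_cluster('staging')

    cfg.current_cluster                    # => "staging"
    cfg.get_cluster('staging')[:namespace] # => "langop-system" (keys are symbolized on read)
    cfg.remove_cluster('staging')          # also clears current-cluster, since it pointed at the removed entry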
data/lib/language_operator/config/tool_patterns.yaml
@@ -0,0 +1,57 @@
+ # Tool Registry Mapping
+ # Maps tool keywords to container images and default configurations
+
+ # Built-in tools (developed in this project)
+ web:
+   image: ghcr.io/language-operator/web-tool:latest
+   displayName: Web Tool
+   description: Search the web and fetch web pages using DuckDuckGo
+   deploymentMode: service
+   port: 8080
+   type: mcp
+   egress:
+     - description: Allow HTTPS to DuckDuckGo
+       dns:
+         - "*.duckduckgo.com"
+         - "duckduckgo.com"
+       ports:
+         - port: 443
+           protocol: TCP
+     - description: Allow HTTPS to any web destination
+       dns:
+         - "*"
+       ports:
+         - port: 443
+           protocol: TCP
+         - port: 80
+           protocol: TCP
+
+ email:
+   image: ghcr.io/language-operator/email-tool:latest
+   displayName: Email Tool
+   description: Send and receive emails via SMTP/IMAP
+   deploymentMode: service
+   port: 8080
+   type: mcp
+   authRequired: true
+   egress:
+     - description: Allow SMTP/IMAP connections
+       dns:
+         - "*"
+       ports:
+         - port: 587
+           protocol: TCP
+         - port: 465
+           protocol: TCP
+         - port: 993
+           protocol: TCP
+
+ # Aliases for convenience
+ web-search:
+   alias: web
+
+ web-fetch:
+   alias: web
+
+ http:
+   alias: web
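The alias entries (web-search, web-fetch, http) let several CLI keywords resolve to the canonical web tool, so image, port, and egress policy only have to be maintained once per tool. A minimal sketch of alias resolution; the resolve helper and the relative path are illustrative assumptions, not the gem's actual ToolRegistry API:

    require 'yaml'

    # Path assumed relative to a checkout of the gem source.
    registry = YAML.load_file('lib/language_operator/config/tool_patterns.yaml')

    # Follow a single level of aliasing, as used by the entries above.
    def resolve(registry, keyword)
      entry = registry[keyword]
      entry && entry['alias'] ? registry[entry['alias']] : entry
    end

    resolve(registry, 'web-search')['image'] # => "ghcr.io/language-operator/web-tool:latest"
    resolve(registry, 'email')['port']       # => 8080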