aia 0.8.6 → 0.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
lib/extensions/ruby_llm/chat.rb ADDED
@@ -0,0 +1,197 @@
+ # lib/extensions/ruby_llm/chat.rb
+
+ module RubyLLM
+   class Chat
+     class << self
+       # Sets up Model Context Protocol (MCP) tools
+       #
+       # @param client [Object] MCP client instance to use
+       # @param call_tool_method [Symbol] Method name to use for tool execution
+       # @param tools [Array<Hash>] Array of MCP tool definitions
+       #
+       # @return [self] Returns self for method chaining
+       #
+       def with_mcp(client:, call_tool_method:, tools:)
+         # Validate all required parameters are present
+         if client.nil?
+           RubyLLM.logger.error "MCP setup failed: client must be provided"
+           return clear_mcp_state
+         end
+
+         if call_tool_method.nil?
+           RubyLLM.logger.error "MCP setup failed: call_tool_method must be provided"
+           return clear_mcp_state
+         end
+
+         if tools.nil?
+           RubyLLM.logger.error "MCP setup failed: tools must be provided"
+           return clear_mcp_state
+         end
+
+         # Validate call_tool_method type
+         unless call_tool_method.is_a?(Symbol) || call_tool_method.is_a?(String)
+           RubyLLM.logger.error "MCP setup failed: call_tool_method must be a Symbol or String, got #{call_tool_method.class}"
+           return clear_mcp_state
+         end
+
+         # Validate client responds to the method
+         unless client.respond_to?(call_tool_method)
+           RubyLLM.logger.error "MCP setup failed: client instance does not respond to call_tool_method #{call_tool_method}"
+           return clear_mcp_state
+         end
+
+         # Set MCP configuration
+         @mcp_client = client
+         @mcp_call_tool = call_tool_method.to_sym
+         @mcp_tools = tools
+
+         self
+       end
+
+       # Get the MCP client instance if configured
+       # @return [MCPClient::Client, nil] The MCP client instance or nil if not configured
+       def mcp_client
+         @mcp_client
+       end
+
+       # Get the method name to use for tool execution if configured
+       # @return [Symbol, nil] The method name or nil if not configured
+       def mcp_call_tool
+         @mcp_call_tool
+       end
+
+       # Get the MCP tool definitions if configured
+       # @return [Array<Hash>] The MCP tool definitions or an empty array if not configured
+       def mcp_tools
+         @mcp_tools || []
+       end
+
+       private
+
+       # Clear all MCP state and return self
+       # @return [self]
+       def clear_mcp_state
+         @mcp_client = nil
+         @mcp_call_tool = nil
+         @mcp_tools = []
+         self
+       end
+     end
+
+     # Prepend a module to add MCP tool support
+     module MCPSupport
+       def initialize(...)
+         super
+         add_mcp_tools
+       end
+
+       private
+
+       def add_mcp_tools
+         self.class.mcp_tools.each do |tool_def|
+           debug_me{[ :tool_def ]}
+           tool_name = tool_def.dig(:function, :name).to_sym
+           next if @tools.key?(tool_name) # Skip if a local or MCP tool with the same name already exists
+
+           @tools[tool_name] = MCPToolWrapper.new(tool_def)
+         end
+       end
+     end
+
+     # Add MCP support to the Chat class
+     prepend MCPSupport
+   end
+
+   # Wraps an MCP tool definition to match the RubyLLM::Tool interface
+   class MCPToolWrapper
+     def initialize(mcp_tool)
+       @mcp_tool = mcp_tool
+     end
+
+     def name
+       @mcp_tool.dig(:function, :name)
+     end
+
+     def description
+       @mcp_tool.dig(:function, :description)
+     end
+
+     # Simple parameter class that implements the interface expected by RubyLLM::Providers::OpenAI::Tools#param_schema
+     class Parameter
+       attr_reader :type, :description, :required
+
+       def initialize(type, description, required)
+         @type = type || 'string'
+         @description = description
+         @required = required
+       end
+     end
+
+     def parameters
+       @parameters ||= begin
+         props = @mcp_tool.dig(:function, :parameters, "properties") || {}
+         required_params = @mcp_tool.dig(:function, :parameters, "required") || []
+
+         # Create Parameter objects with the expected interface
+         # The parameter name is the key in the properties hash
+         result = {}
+         props.each do |param_name, param_def|
+           result[param_name.to_sym] = Parameter.new(
+             param_def["type"],
+             param_def["description"],
+             required_params.include?(param_name)
+           )
+         end
+         result
+       end
+     end
+
+     def call(args)
+       # Log the tool call with arguments
+       RubyLLM.logger.debug "Tool #{name} called with: #{args.inspect}"
+
+       # Verify MCP client is configured properly
+       unless Chat.mcp_client && Chat.mcp_call_tool
+         error = { error: "MCP client not properly configured" }
+         RubyLLM.logger.error error[:error]
+         return error
+       end
+
+       # Handle tool calls that require non-string parameters
+       normalized_args = {}
+       args.each do |key, value|
+         # Convert string numbers to actual numbers when needed
+         if value.is_a?(String) && value.match?(/\A-?\d+(\.\d+)?\z/)
+           param_type = @mcp_tool.dig(:function, :parameters, "properties", key.to_s, "type")
+           if param_type == "number" || param_type == "integer"
+             normalized_args[key] = value.include?('.') ? value.to_f : value.to_i
+             next
+           end
+         end
+         normalized_args[key] = value
+       end
+
+       # Execute the tool via the MCP client with a timeout
+       timeout = 10 # seconds
+       result = nil
+
+       begin
+         Timeout.timeout(timeout) do
+           result = Chat.mcp_client.send(Chat.mcp_call_tool, name, normalized_args)
+         end
+       rescue Timeout::Error
+         error = { error: "MCP tool execution timed out after #{timeout} seconds" }
+         RubyLLM.logger.error error[:error]
+         return error
+       rescue StandardError => e
+         error = { error: "MCP tool execution failed: #{e.message}" }
+         RubyLLM.logger.error error[:error]
+         return error
+       end
+
+       # Log the result
+       RubyLLM.logger.debug "Tool #{name} returned: #{result.inspect}"
+       result
+     end
+   end
+ end
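
The extension above assumes each MCP tool definition is a Hash in the OpenAI function-calling shape that the code `dig`s into: Symbol keys down to `:function` and `:parameters`, then String keys for the JSON-schema `"properties"` and `"required"` entries. The sketch below is not part of the diff; it only illustrates that assumed shape and what `MCPToolWrapper` does with it. The `get_weather` tool and its fields are hypothetical.

```ruby
# Hypothetical tool definition in the shape MCPToolWrapper digs into.
weather_tool = {
  function: {
    name:        "get_weather",                        # becomes the tool's registered name
    description: "Return current weather for a city",
    parameters:  {
      "properties" => {
        "city" => { "type" => "string",  "description" => "City name" },
        "days" => { "type" => "integer", "description" => "Forecast window in days" }
      },
      "required" => ["city"]
    }
  }
}

wrapper = RubyLLM::MCPToolWrapper.new(weather_tool)
wrapper.name                          #=> "get_weather"
wrapper.parameters.keys               #=> [:city, :days]  (Parameter objects keyed by name)
wrapper.parameters[:city].required    #=> true

# Inside #call, a String argument such as "3" for the integer-typed "days"
# parameter is converted to 3 before the configured MCP client is invoked.
```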
mcp_servers/README.md ADDED
@@ -0,0 +1,90 @@
+ # MCP Servers
+
+ The Model Context Protocol (MCP) enables tools from different server implementations to be defined in a common, consistent way, allowing LLMs that support callback functions (aka tools) to access data that enhances the context of a prompt.
+
+ You can find additional MCP servers at https://mcpindex.net
+
+ ## Overview
+
+ This directory contains configuration files for various MCP servers that provide different capabilities to LLMs:
+
+ | Server | Purpose | Configuration File |
+ |--------|---------|--------------------|
+ | Filesystem | Provides access to the local filesystem | `filesystem.json` |
+ | iMCP | macOS-specific MCP server for system integration | `imcp.json` |
+ | Playwright | Enables web automation and scraping capabilities | `playwright_server_definition.json` |
+
+ ## Configuration Details
+
+ ### Filesystem Server
+
+ The filesystem server allows LLMs to read and interact with the local filesystem:
+
+ TODO: fix this JSON file to use a generic directory; maybe $HOME
+
+ ```json
+ {
+   "type": "stdio",
+   "command": [
+     "npx",
+     "-y",
+     "@modelcontextprotocol/server-filesystem",
+     "/Users/dewayne/sandbox/git_repos/madbomber/aia/develop"
+   ]
+ }
+ ```
+
+ The server is configured to access files within the project's development directory.
+
+ ### iMCP Server
+
+ See: https://github.com/loopwork-ai/iMCP
+
+ The iMCP server provides macOS-specific functionality:
+
+ ```json
+ {
+   "mcpServers" : {
+     "iMCP" : {
+       "command" : "/Applications/iMCP.app/Contents/MacOS/imcp-server"
+     }
+   }
+ }
+ ```
+
+ ### Playwright Server
+
+ The Playwright server enables web automation and browser interaction capabilities:
+
+ ```json
+ {
+   "mcpServers": {
+     "playwright": {
+       "url": "http://localhost:8931/sse",
+       "headers": {},
+       "comment": "Local Playwright MCP Server running on port 8931"
+     }
+   }
+ }
+ ```
+
+ ## Usage
+
+ These MCP servers can be used by LLM applications to extend their capabilities beyond text generation. The configuration files in this directory define how to connect to each server and what capabilities it provides.
+
+ ## Getting Started
+
+ Use the `--mcp` option with `aia` to specify which MCP servers to use. Repeat the option to select more than one server. For example:
+
+ ```bash
+ aia prompt_id --mcp filesystem.json --mcp imcp.json --mcp playwright_server_definition.json
+ # or
+ aia --chat --mcp filesystem.json --mcp imcp.json --mcp playwright_server_definition.json
+ # or
+ aia prompt_id --chat --mcp filesystem.json --mcp imcp.json --mcp playwright_server_definition.json
+ ```
+
+ ## Additional Resources
+
+ - [Model Context Protocol Documentation](https://github.com/anthropics/anthropic-cookbook/tree/main/model_context_protocol)
+ - [MCP Server Implementation Guidelines](https://modelcontextprotocol.github.io/)
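
As a rough sketch of how server definitions like these meet the `with_mcp` extension shown earlier: aia's real wiring goes through the `ruby-mcp-client` dependency and the `--mcp` option above, which is not shown in this diff. The `StubMCPClient` below is purely hypothetical; `with_mcp` only requires an object that responds to the configured `call_tool_method`, and the `read_file` tool definition is an illustrative placeholder.

```ruby
require 'ruby_llm'
# Assumes lib/extensions/ruby_llm/chat.rb from this release has been loaded.

# Hypothetical stand-in for an MCP client; any object responding to the
# configured call_tool_method would work here.
class StubMCPClient
  def call_tool(tool_name, args)
    { content: "stub result for #{tool_name} with #{args.inspect}" }
  end
end

tools = [
  { function: { name:        "read_file",
                description: "Read a file made available by the filesystem MCP server",
                parameters:  { "properties" => { "path" => { "type" => "string" } },
                               "required"   => ["path"] } } }
]

RubyLLM::Chat.with_mcp(client:           StubMCPClient.new,
                       call_tool_method: :call_tool,
                       tools:            tools)

# Chat instances created afterwards register the MCP tools through the
# prepended MCPSupport#initialize, alongside any locally defined tools.
# (A real run also needs RubyLLM provider/model configuration.)
chat = RubyLLM::Chat.new
```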
mcp_servers/filesystem.json ADDED
@@ -0,0 +1,9 @@
+ {
+   "type": "stdio",
+   "command": [
+     "npx",
+     "-y",
+     "@modelcontextprotocol/server-filesystem",
+     "/Users/dewayne/sandbox/git_repos/madbomber/aia/develop"
+   ]
+ }
mcp_servers/imcp.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "mcpServers" : {
+     "iMCP" : {
+       "command" : "/Applications/iMCP.app/Contents/MacOS/imcp-server"
+     }
+   }
+ }
mcp_servers/launcher.json ADDED
@@ -0,0 +1,11 @@
+ {
+   "mcpServers": {
+     "launcher": {
+       "command": "npx",
+       "args": [
+         "y",
+         "@joshuarileydev/mac-apps-launcher-mcp-server"
+       ]
+     }
+   }
+ }
mcp_servers/playwright_server_definition.json ADDED
@@ -0,0 +1,9 @@
+ {
+   "mcpServers": {
+     "playwright": {
+       "url": "http://localhost:8931/sse",
+       "headers": {},
+       "comment": "Local Playwright MCP Server running on port 8931"
+     }
+   }
+ }
mcp_servers/timeserver.json ADDED
@@ -0,0 +1,8 @@
+ {
+   "mcpServers": {
+     "timeserver": {
+       "command": "python",
+       "args": ["-m", "mcp_simple_timeserver"]
+     }
+   }
+ }
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: aia
  version: !ruby/object:Gem::Version
- version: 0.8.6
+ version: 0.9.0
  platform: ruby
  authors:
  - Dewayne VanHoozer
@@ -10,7 +10,7 @@ cert_chain: []
  date: 1980-01-02 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
- name: ai_client
+ name: amazing_print
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
@@ -24,7 +24,7 @@ dependencies:
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
- name: amazing_print
+ name: faraday
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
@@ -38,7 +38,7 @@ dependencies:
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
- name: faraday
+ name: os
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
@@ -52,47 +52,47 @@ dependencies:
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
- name: os
+ name: prompt_manager
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: '0'
+ version: 0.5.2
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: '0'
+ version: 0.5.2
  - !ruby/object:Gem::Dependency
- name: prompt_manager
+ name: ruby_llm
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 0.5.2
+ version: 1.2.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 0.5.2
+ version: 1.2.0
  - !ruby/object:Gem::Dependency
- name: ruby_llm
+ name: ruby-mcp-client
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 1.2.0
+ version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 1.2.0
+ version: '0'
  - !ruby/object:Gem::Dependency
  name: reline
  requirement: !ruby/object:Gem::Requirement
@@ -294,6 +294,7 @@ files:
  - ".envrc"
  - ".version"
  - CHANGELOG.md
+ - COMMITS.md
  - LICENSE
  - README.md
  - Rakefile
@@ -303,7 +304,6 @@ files:
  - examples/headlines
  - justfile
  - lib/aia.rb
- - lib/aia/ai_client_adapter.rb
  - lib/aia/aia_completion.bash
  - lib/aia/aia_completion.fish
  - lib/aia/aia_completion.zsh
@@ -321,7 +321,14 @@ files:
  - lib/aia/utility.rb
  - lib/aia/version.rb
  - lib/extensions/openstruct_merge.rb
+ - lib/extensions/ruby_llm/chat.rb
  - main.just
+ - mcp_servers/README.md
+ - mcp_servers/filesystem.json
+ - mcp_servers/imcp.json
+ - mcp_servers/launcher.json
+ - mcp_servers/playwright_server_definition.json
+ - mcp_servers/timeserver.json
  homepage: https://github.com/MadBomber/aia
  licenses:
  - MIT
lib/aia/ai_client_adapter.rb DELETED
@@ -1,210 +0,0 @@
- # lib/aia/ai_client_adapter.rb
- #
-
-
- require 'ai_client'
- require 'tty-spinner'
-
-
- module AIA
-   class AIClientAdapter
-     def initialize
-       @model = AIA.config.model
-
-       model_info = extract_model_parts(@model)
-       @client = AiClient.new(
-         model_info[:model],
-         provider: model_info[:provider]
-       )
-     end
-
-
-     def chat(prompt)
-       if @model.downcase.include?('dall-e') || @model.downcase.include?('image-generation')
-         text_to_image(prompt)
-       elsif @model.downcase.include?('vision') || @model.downcase.include?('image')
-         image_to_text(prompt)
-       elsif @model.downcase.include?('tts') || @model.downcase.include?('speech')
-         text_to_audio(prompt)
-       elsif @model.downcase.include?('whisper') || @model.downcase.include?('transcription')
-         audio_to_text(prompt)
-       else
-         text_to_text(prompt)
-       end
-     end
-
-
-
-     def transcribe(audio_file)
-       @client.transcribe(audio_file)
-     end
-
-
-
-     def speak(text)
-       output_file = "#{Time.now.to_i}.mp3"
-
-       begin
-         # Try with options
-         @client.speak(text, output_file, {
-           model: AIA.config.speech_model,
-           voice: AIA.config.voice
-         })
-       rescue ArgumentError
-         @client.speak(text)
-       end
-
-       system("#{AIA.config.speak_command} #{output_file}") if File.exist?(output_file) && system("which #{AIA.config.speak_command} > /dev/null 2>&1")
-     end
-
-     def method_missing(method, *args, &block)
-       if @client.respond_to?(method)
-         @client.public_send(method, *args, &block)
-       else
-         super
-       end
-     end
-
-     def respond_to_missing?(method, include_private = false)
-       @client.respond_to?(method) || super
-     end
-
-     private
-
-
-
-     def extract_model_parts(model_string)
-       parts = model_string.split('/')
-       parts.map!(&:strip)
-
-       if parts.length > 1
-         provider = parts[0]
-         model = parts[1]
-       else
-         provider = nil # AiClient will figure it out from the model name
-         model = parts[0]
-       end
-
-
-
-       { provider: provider, model: model }
-     end
-
-
-
-     def extract_text_prompt(prompt)
-
-       if prompt.is_a?(String)
-         prompt
-       elsif prompt.is_a?(Hash) && prompt[:text]
-         prompt[:text]
-       elsif prompt.is_a?(Hash) && prompt[:content]
-         prompt[:content]
-       else
-         prompt.to_s
-       end
-     end
-
-
-
-     def text_to_text(prompt)
-       text_prompt = extract_text_prompt(prompt)
-       @client.chat(text_prompt)
-     end
-
-
-
-     def text_to_image(prompt)
-       text_prompt = extract_text_prompt(prompt)
-
-
-       output_file = "#{Time.now.to_i}.png"
-
-       begin
-         begin
-           @client.generate_image(text_prompt, output_file, {
-             size: AIA.config.image_size,
-             quality: AIA.config.image_quality,
-             style: AIA.config.image_style
-           })
-         rescue ArgumentError
-           @client.generate_image(text_prompt)
-         end
-
-         "Image generated and saved to: #{output_file}"
-       rescue => e
-         "Error generating image: #{e.message}"
-       end
-     end
-
-
-
-     def image_to_text(prompt)
-       image_path = extract_image_path(prompt)
-       text_prompt = extract_text_prompt(prompt)
-
-       if image_path && File.exist?(image_path)
-         begin
-           @client.chat("#{text_prompt}\n[Analyzing image: #{image_path}]")
-         rescue => e
-           "Error analyzing image: #{e.message}"
-         end
-       else
-         text_to_text(prompt)
-       end
-     end
-
-
-
-     def text_to_audio(prompt)
-       text_prompt = extract_text_prompt(prompt)
-
-       output_file = "#{Time.now.to_i}.mp3"
-
-       begin
-         begin
-           @client.speak(text_prompt, output_file, {
-             model: AIA.config.speech_model,
-             voice: AIA.config.voice
-           })
-         rescue ArgumentError
-           @client.speak(text_prompt)
-         end
-
-         system("#{AIA.config.speak_command} #{output_file}") if File.exist?(output_file) && system("which #{AIA.config.speak_command} > /dev/null 2>&1")
-
-         "Audio generated and saved to: #{output_file}"
-       rescue => e
-         "Error generating audio: #{e.message}"
-       end
-     end
-
-
-
-     def audio_to_text(prompt)
-       if prompt.is_a?(String) && File.exist?(prompt) &&
-          prompt.downcase.end_with?('.mp3', '.wav', '.m4a', '.flac')
-         begin
-           @client.transcribe(prompt)
-         rescue => e
-           "Error transcribing audio: #{e.message}"
-         end
-       else
-         # Fall back to regular chat if no valid audio file is found
-         text_to_text(prompt)
-       end
-     end
-
-
-
-     def extract_image_path(prompt)
-       if prompt.is_a?(String)
-         prompt.scan(/\b[\w\/\.\-]+\.(jpg|jpeg|png|gif|webp)\b/i).first&.first
-       elsif prompt.is_a?(Hash)
-         prompt[:image] || prompt[:image_path]
-       else
-         nil
-       end
-     end
-   end
- end