aia 0.8.5 → 0.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/lib/aia/session.rb CHANGED
@@ -22,6 +22,7 @@ module AIA
 
     def initialize(prompt_handler)
       @prompt_handler = prompt_handler
+      @chat_prompt_id = nil # Initialize to nil
 
       # Special handling for chat mode with context files but no prompt ID
       if AIA.chat? && AIA.config.prompt_id.empty? && AIA.config.context_files && !AIA.config.context_files.empty?
@@ -35,8 +36,8 @@ module AIA
         @history_manager = HistoryManager.new(prompt: prompt_instance)
       end
 
-      @context_manager = ContextManager.new(system_prompt: AIA.config.system_prompt) # Add this line
-      @ui_presenter = UIPresenter.new
+      @context_manager = ContextManager.new(system_prompt: AIA.config.system_prompt)
+      @ui_presenter = UIPresenter.new
       @directive_processor = DirectiveProcessor.new
       @chat_processor = ChatProcessorService.new(@ui_presenter, @directive_processor)
 
@@ -111,7 +112,6 @@ module AIA
       end
 
       prompt.save
-
       # Substitute variables and get final prompt text
       prompt_text = prompt.to_s
 
@@ -151,126 +151,157 @@ module AIA
     end
 
     # Starts the interactive chat session.
+    # NOTE: there could have been an initial prompt sent into this session
+    # via a prompt_id on the command line, piped in text, or context files.
     def start_chat(skip_context_files: false)
-      # Consider if display_chat_header is needed if robot+separator already shown
-      # For now, let's keep it, maybe add an indicator message
      puts "\nEntering interactive chat mode..."
      @ui_presenter.display_chat_header
 
-      Reline::HISTORY.clear # Keep Reline history for user input editing, separate from chat context
-
-      # Load context files if any and not skipping
-      if !skip_context_files && AIA.config.context_files && !AIA.config.context_files.empty?
-        context_content = AIA.config.context_files.map do |file|
-          File.read(file) rescue "Error reading file: #{file}"
-        end.join("\n\n")
-
-        if !context_content.empty?
-          # Add context files content to context
-          @context_manager.add_to_context(role: 'user', content: context_content)
-
-          # Process the context
-          operation_type = @chat_processor.determine_operation_type(AIA.config.model)
-          @ui_presenter.display_thinking_animation
-          response = @chat_processor.process_prompt(@context_manager.get_context, operation_type)
-
-          # Add AI response to context
-          @context_manager.add_to_context(role: 'assistant', content: response)
+      # Generate chat prompt ID
+      now = Time.now
+      @chat_prompt_id = "chat_#{now.strftime('%Y%m%d_%H%M%S')}"
 
-          # Output the response
-          @chat_processor.output_response(response)
-          @chat_processor.speak(response)
-          @ui_presenter.display_separator
+      # Create the temporary prompt
+      begin
+        # Create the unique? prompt ID in the file storage system with its initial text
+        PromptManager::Prompt.create(
+          id: @chat_prompt_id,
+          text: "Today's date is #{now.strftime('%Y-%m-%d')} and the current time is #{now.strftime('%H:%M:%S')}"
+        )
+
+        # Capture self for the handlers
+        session_instance = self
+
+        # Set up cleanup handlers only after prompt is created
+        at_exit { session_instance.send(:cleanup_chat_prompt) }
+        Signal.trap('INT') {
+          session_instance.send(:cleanup_chat_prompt)
+          exit
+        }
+
+        # Access this chat session's prompt object in order to do the dynamic things
+        # in follow up prompts that can be done in the batch mode like shell substitution. etc.
+        @chat_prompt = PromptManager::Prompt.new(
+          id: @chat_prompt_id,
+          directives_processor: @directive_processor,
+          erb_flag: AIA.config.erb,
+          envar_flag: AIA.config.shell,
+          external_binding: binding,
+        )
+
+        Reline::HISTORY.clear
+
+        # Load context files if any and not skipping
+        if !skip_context_files && AIA.config.context_files && !AIA.config.context_files.empty?
+          context = AIA.config.context_files.map do |file|
+            File.read(file) rescue "Error reading file: #{file}"
+          end.join("\n\n")
+
+          if !context.empty?
+            # Add context files content to context
+            @context_manager.add_to_context(role: 'user', content: context)
+
+            # Process the context
+            operation_type = @chat_processor.determine_operation_type(AIA.config.model)
+            @ui_presenter.display_thinking_animation
+            response = @chat_processor.process_prompt(@context_manager.get_context, operation_type)
+
+            # Add AI response to context
+            @context_manager.add_to_context(role: 'assistant', content: response)
+
+            # Output the response
+            @chat_processor.output_response(response)
+            @chat_processor.speak(response)
+            @ui_presenter.display_separator
+          end
         end
-      end
 
-      # Check for piped input (STDIN not a TTY and has data)
-      if !STDIN.tty?
-        # Save the original STDIN
-        orig_stdin = STDIN.dup
+        # Handle piped input
+        if !STDIN.tty?
+          original_stdin = STDIN.dup
+          piped_input = STDIN.read.strip
+          STDIN.reopen('/dev/tty')
 
-        # Read the piped input
-        piped_input = STDIN.read.strip
+          if !piped_input.empty?
+            @chat_prompt.text = piped_input
+            processed_input = @chat_prompt.to_s
 
-        # Reopen STDIN to the terminal
-        STDIN.reopen('/dev/tty')
+            @context_manager.add_to_context(role: 'user', content: processed_input)
 
-        if !piped_input.empty?
-          # Add piped input to context
-          @context_manager.add_to_context(role: 'user', content: piped_input)
+            operation_type = @chat_processor.determine_operation_type(AIA.config.model)
+            @ui_presenter.display_thinking_animation
+            response = @chat_processor.process_prompt(@context_manager.get_context, operation_type)
 
-          # Process the piped input
-          operation_type = @chat_processor.determine_operation_type(AIA.config.model)
-          @ui_presenter.display_thinking_animation
-          response = @chat_processor.process_prompt(@context_manager.get_context, operation_type)
-
-          # Add AI response to context
-          @context_manager.add_to_context(role: 'assistant', content: response)
+            @context_manager.add_to_context(role: 'assistant', content: response)
+            @chat_processor.output_response(response)
+            @chat_processor.speak(response) if AIA.speak?
+            @ui_presenter.display_separator
+          end
 
-          # Output the response
-          @chat_processor.output_response(response)
-          @chat_processor.speak(response)
-          @ui_presenter.display_separator
+          STDIN.reopen(original_stdin)
         end
-      end
-
-      loop do
-        # Get user input
-        prompt = @ui_presenter.ask_question
 
+        # Main chat loop
+        loop do
+          follow_up_prompt = @ui_presenter.ask_question
 
+          break if follow_up_prompt.nil? || follow_up_prompt.strip.downcase == 'exit' || follow_up_prompt.strip.empty?
 
-        break if prompt.nil? || prompt.strip.downcase == 'exit' || prompt.strip.empty?
-
-        if AIA.config.out_file
-          File.open(AIA.config.out_file, 'a') do |file|
-            file.puts "\nYou: #{prompt}"
+          if AIA.config.out_file
+            File.open(AIA.config.out_file, 'a') do |file|
+              file.puts "\nYou: #{follow_up_prompt}"
+            end
          end
-        end
 
-        if @directive_processor.directive?(prompt)
-          directive_output = @directive_processor.process(prompt, @context_manager) # Pass context_manager
-
-          # Add check for specific directives like //clear that might modify context
-          if prompt.strip.start_with?('//clear')
-            # Context is likely cleared within directive_processor.process now
-            # or add @context_manager.clear_context here if not handled internally
-            @ui_presenter.display_info("Chat context cleared.")
-            next # Skip API call after clearing
-          elsif directive_output.nil? || directive_output.strip.empty?
-            next # Skip API call if directive produced no output and wasn't //clear
-          else
-            puts "\n#{directive_output}\n"
-            # Optionally add directive output to context or handle as needed
-            # Example: Add a summary to context
-            # @context_manager.add_to_context(role: 'assistant', content: "Directive executed. Output:\n#{directive_output}")
-            # For now, just use a placeholder prompt modification:
-            prompt = "I executed this directive: #{prompt}\nHere's the output: #{directive_output}\nLet's continue our conversation."
-            # Fall through to add this modified prompt to context and send to AI
+          if @directive_processor.directive?(follow_up_prompt)
+            directive_output = @directive_processor.process(follow_up_prompt, @context_manager)
+
+            if follow_up_prompt.strip.start_with?('//clear')
+              @ui_presenter.display_info("Chat context cleared.")
+              next
+            elsif directive_output.nil? || directive_output.strip.empty?
+              next
+            else
+              puts "\n#{directive_output}\n"
+              follow_up_prompt = "I executed this directive: #{follow_up_prompt}\nHere's the output: #{directive_output}\nLet's continue our conversation."
+            end
          end
-        end
-
-        # Use ContextManager instead of HistoryManager
-        @context_manager.add_to_context(role: 'user', content: prompt)
 
-        # Use ContextManager to get the conversation
-        conversation = @context_manager.get_context # System prompt handled internally
+          @chat_prompt.text = follow_up_prompt
+          processed_prompt = @chat_prompt.to_s
 
-        operation_type = @chat_processor.determine_operation_type(AIA.config.model)
-        @ui_presenter.display_thinking_animation
-        response = @chat_processor.process_prompt(conversation, operation_type)
+          @context_manager.add_to_context(role: 'user', content: processed_prompt)
+          conversation = @context_manager.get_context
 
-        @ui_presenter.display_ai_response(response)
+          operation_type = @chat_processor.determine_operation_type(AIA.config.model)
+          @ui_presenter.display_thinking_animation
+          response = @chat_processor.process_prompt(conversation, operation_type)
 
-        # Use ContextManager instead of HistoryManager
-        @context_manager.add_to_context(role: 'assistant', content: response)
+          @ui_presenter.display_ai_response(response)
+          @context_manager.add_to_context(role: 'assistant', content: response)
+          @chat_processor.speak(response)
 
-        @chat_processor.speak(response)
+          @ui_presenter.display_separator
+        end
 
-        @ui_presenter.display_separator
+      ensure
+        @ui_presenter.display_chat_end
       end
+    end
 
-      @ui_presenter.display_chat_end
+    private
+
+    def cleanup_chat_prompt
+      if @chat_prompt_id
+        puts "[DEBUG] Cleaning up chat prompt: #{@chat_prompt_id}" if AIA.debug?
+        begin
+          @chat_prompt.delete
+          @chat_prompt_id = nil # Prevent repeated attempts if error occurs elsewhere
+        rescue => e
+          STDERR.puts "[ERROR] Failed to delete chat prompt #{@chat_prompt_id}: #{e.class} - #{e.message}"
+          STDERR.puts e.backtrace.join("\n")
+        end
      end
    end
  end
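
The core change to `Session` above is the temporary chat prompt: each interactive session now materializes a `PromptManager::Prompt` named `chat_YYYYMMDD_HHMMSS` so that follow-up input gets the same directive/ERB/shell processing as batch prompts, then deletes it on normal exit or Ctrl-C via `cleanup_chat_prompt`. A minimal sketch of that create-then-trap-then-cleanup pattern, reduced to plain Ruby with a temp file standing in for the prompt (class and method names here are illustrative, not aia's API):

```ruby
require 'tmpdir'

class ScratchSession
  def start
    # Create the resource first; only then register cleanup handlers,
    # mirroring the ordering in Session#start_chat.
    @path = File.join(Dir.tmpdir, "chat_#{Time.now.strftime('%Y%m%d_%H%M%S')}.txt")
    File.write(@path, 'scratch prompt text')

    session = self                        # capture self for the handlers
    at_exit { session.send(:cleanup) }    # normal exit
    Signal.trap('INT') do                 # Ctrl-C
      session.send(:cleanup)
      exit
    end

    # ... interactive loop runs here ...
  end

  private

  # Idempotent: nil-ing @path prevents a second delete attempt,
  # just as the session nils @chat_prompt_id after a successful delete.
  def cleanup
    return unless @path
    File.delete(@path) if File.exist?(@path)
    @path = nil
  rescue => e
    warn "cleanup failed: #{e.class} - #{e.message}"
  end
end
```

Note that `exit` inside the INT trap still fires the `at_exit` hook, so the cleanup must tolerate being called twice, which is exactly why both sketches guard on the nil-ed identifier.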
@@ -38,10 +38,19 @@ module AIA
     def format_chat_response(response, output = $stdout)
       indent = ' '
 
+      # Convert RubyLLM::Message to string if necessary
+      response_text = if response.is_a?(RubyLLM::Message)
+                        response.content.to_s
+                      elsif response.respond_to?(:to_s)
+                        response.to_s
+                      else
+                        response
+                      end
+
       in_code_block = false
       language = ''
 
-      response.each_line do |line|
+      response_text.each_line do |line|
         line = line.chomp
 
         # Check for code block delimiters
data/lib/aia/utility.rb CHANGED
@@ -11,7 +11,7 @@ module AIA
       (\\____/)  AI Assistant
        (_oo_)     #{AIA.config.model}
         (O)       is Online
-       __||__    \\)
+       __||__    \\)  using #{AIA.config.adapter}
      [/______\\]  /
      / \\__AI__/ \\/
     /    /__\\
data/lib/aia.rb CHANGED
@@ -4,8 +4,12 @@
 # The AIA module serves as the namespace for the AIA application, which
 # provides an interface for interacting with AI models and managing prompts.
 
-require 'ai_client'
+require 'ruby_llm'
+require_relative 'extensions/ruby_llm/chat'
+
 require 'prompt_manager'
+require 'mcp_client'
+
 require 'debug_me'
 include DebugMe
 $DEBUG_ME = false
@@ -17,7 +21,7 @@ require_relative 'aia/version'
 require_relative 'aia/config'
 require_relative 'aia/shell_command_executor'
 require_relative 'aia/prompt_handler'
-require_relative 'aia/ai_client_adapter'
+require_relative 'aia/ruby_llm_adapter'
 require_relative 'aia/directive_processor'
 require_relative 'aia/history_manager'
 require_relative 'aia/ui_presenter'
@@ -76,7 +80,14 @@ module AIA
     end
 
     prompt_handler = PromptHandler.new
-    @config.client = AIClientAdapter.new
+
+    # Initialize the appropriate client adapter based on configuration
+    @config.client = if @config.adapter == 'ruby_llm'
+                       RubyLLMAdapter.new
+                     else
+                       AIClientAdapter.new
+                     end
+
     session = Session.new(prompt_handler)
 
     session.start
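
The `aia.rb` change above turns the client into a seam: `@config.adapter` decides which adapter class backs `@config.client`, and the rest of the app talks to whichever object it gets. A hedged sketch of the same selection expressed as a lookup table (the `ADAPTERS` registry and `build_client` helper are illustrative, not aia's actual code):

```ruby
# Map the config value to an adapter class; fall back to the legacy
# adapter, matching the else-branch of the conditional in the diff.
ADAPTERS = {
  'ruby_llm' => RubyLLMAdapter,
}.freeze

def build_client(adapter_name)
  (ADAPTERS[adapter_name] || AIClientAdapter).new
end

# @config.client = build_client(@config.adapter)
```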
@@ -0,0 +1,197 @@
+# lib/extensions/ruby_llm/chat.rb
+
+module RubyLLM
+  class Chat
+    class << self
+      # Sets up Model Context Protocol (MCP) tools
+      #
+      # @param client [Object] MCP client instance to use
+      # @param call_tool_method [Symbol] Method name to use for tool execution
+      # @param tools [Array<Hash>] Array of MCP tool definitions
+      #
+      # @return [self] Returns self for method chaining
+      #
+      def with_mcp(client:, call_tool_method:, tools:)
+        # Validate all required parameters are present
+        if client.nil?
+          RubyLLM.logger.error "MCP setup failed: client must be provided"
+          return clear_mcp_state
+        end
+
+        if call_tool_method.nil?
+          RubyLLM.logger.error "MCP setup failed: call_tool_method must be provided"
+          return clear_mcp_state
+        end
+
+        if tools.nil?
+          RubyLLM.logger.error "MCP setup failed: tools must be provided"
+          return clear_mcp_state
+        end
+
+        # Validate call_tool_method type
+        unless call_tool_method.is_a?(Symbol) || call_tool_method.is_a?(String)
+          RubyLLM.logger.error "MCP setup failed: call_tool_method must be a Symbol or String, got #{call_tool_method.class}"
+          return clear_mcp_state
+        end
+
+        # Validate client responds to the method
+        unless client.respond_to?(call_tool_method)
+          RubyLLM.logger.error "MCP setup failed: client instance does not respond to call_tool_method #{call_tool_method}"
+          return clear_mcp_state
+        end
+
+        # Set MCP configuration
+        @mcp_client = client
+        @mcp_call_tool = call_tool_method.to_sym
+        @mcp_tools = tools
+
+        self
+      end
+
+      # Get the MCP client instance if configured
+      # @return [MCPClient::Client, nil] The MCP client instance or nil if not configured
+      def mcp_client
+        @mcp_client
+      end
+
+      # Get the method name to use for tool execution if configured
+      # @return [Symbol, nil] The method name or nil if not configured
+      def mcp_call_tool
+        @mcp_call_tool
+      end
+
+      # Get the MCP tool definitions if configured
+      # @return [Array<Hash>] The MCP tool definitions or empty array if not configured
+      def mcp_tools
+        @mcp_tools || []
+      end
+
+      private
+
+      # Clear all MCP state and return self
+      # @return [self]
+      def clear_mcp_state
+        @mcp_client = nil
+        @mcp_call_tool = nil
+        @mcp_tools = []
+        self
+      end
+    end
+
+    # Prepend a module to add MCP tool support
+    module MCPSupport
+      def initialize(...)
+        super
+        add_mcp_tools
+      end
+
+      private
+
+      def add_mcp_tools
+        self.class.mcp_tools.each do |tool_def|
+          debug_me{[ :tool_def ]}
+          tool_name = tool_def.dig(:function, :name).to_sym
+          next if @tools.key?(tool_name) # Skip if local or MCP tool exists with same name
+
+          @tools[tool_name] = MCPToolWrapper.new(tool_def)
+        end
+      end
+    end
+
+    # Add MCP support to the Chat class
+    prepend MCPSupport
+  end
+
+  # Wraps an MCP tool definition to match the RubyLLM::Tool interface
+  class MCPToolWrapper
+    def initialize(mcp_tool)
+      @mcp_tool = mcp_tool
+    end
+
+    def name
+      @mcp_tool.dig(:function, :name)
+    end
+
+    def description
+      @mcp_tool.dig(:function, :description)
+    end
+
+    # Simple parameter class that implements the interface expected by RubyLLM::Providers::OpenAI::Tools#param_schema
+    class Parameter
+      attr_reader :type, :description, :required
+
+      def initialize(type, description, required)
+        @type = type || 'string'
+        @description = description
+        @required = required
+      end
+    end
+
+    def parameters
+      @parameters ||= begin
+        props = @mcp_tool.dig(:function, :parameters, "properties") || {}
+        required_params = @mcp_tool.dig(:function, :parameters, "required") || []
+
+        # Create Parameter objects with the expected interface
+        # The parameter name is the key in the properties hash
+        result = {}
+        props.each do |param_name, param_def|
+          result[param_name.to_sym] = Parameter.new(
+            param_def["type"],
+            param_def["description"],
+            required_params.include?(param_name)
+          )
+        end
+        result
+      end
+    end
+
+    def call(args)
+      # Log the tool call with arguments
+      RubyLLM.logger.debug "Tool #{name} called with: #{args.inspect}"
+
+      # Verify MCP client is configured properly
+      unless Chat.mcp_client && Chat.mcp_call_tool
+        error = { error: "MCP client not properly configured" }
+        RubyLLM.logger.error error[:error]
+        return error
+      end
+
+      # Handle tool calls that require non-string parameters
+      normalized_args = {}
+      args.each do |key, value|
+        # Convert string numbers to actual numbers when needed
+        if value.is_a?(String) && value.match?(/\A-?\d+(\.\d+)?\z/)
+          param_type = @mcp_tool.dig(:function, :parameters, "properties", key.to_s, "type")
+          if param_type == "number" || param_type == "integer"
+            normalized_args[key] = value.include?('.') ? value.to_f : value.to_i
+            next
+          end
+        end
+        normalized_args[key] = value
+      end
+
+      # Execute the tool via the MCP client with a timeout
+      timeout = 10 # seconds
+      result = nil
+
+      begin
+        Timeout.timeout(timeout) do
+          result = Chat.mcp_client.send(Chat.mcp_call_tool, name, normalized_args)
+        end
+      rescue Timeout::Error
+        error = { error: "MCP tool execution timed out after #{timeout} seconds" }
+        RubyLLM.logger.error error[:error]
+        return error
+      rescue StandardError => e
+        error = { error: "MCP tool execution failed: #{e.message}" }
+        RubyLLM.logger.error error[:error]
+        return error
+      end
+
+      # Log the result
+      RubyLLM.logger.debug "Tool #{name} returned: #{result.inspect}"
+      result
+    end
+  end
+end
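
Putting the extension above together, here is a hedged sketch of how a caller might wire an MCP client into the patched `Chat` class. The tool-definition shape follows what `MCPToolWrapper` digs out (a `:function` key holding the name, description, and a JSON-Schema-style `"parameters"` hash with string keys); `my_mcp_client` and the `:call_tool` method name are assumptions for illustration, not a documented aia or ruby_llm API:

```ruby
require 'ruby_llm'
require_relative 'extensions/ruby_llm/chat'

# Hypothetical tool definition in the shape MCPToolWrapper expects:
# symbol keys down to :parameters, string keys inside the schema.
weather_tool = {
  function: {
    name:        'get_weather',
    description: 'Return the current weather for a city',
    parameters:  {
      "properties" => {
        "city" => { "type" => "string",  "description" => "City name" },
        "days" => { "type" => "integer", "description" => "Forecast days" }
      },
      "required" => ["city"]
    }
  }
}

# Any object that responds to the named method can serve as the client;
# with_mcp verifies respond_to?(call_tool_method) before accepting it.
RubyLLM::Chat.with_mcp(
  client:           my_mcp_client, # assumed to already exist in scope
  call_tool_method: :call_tool,    # Symbol or String
  tools:            [weather_tool]
)

chat = RubyLLM.chat # MCPSupport#initialize then registers the MCP tools
```

When the model invokes `get_weather`, the wrapper forwards it as `my_mcp_client.call_tool('get_weather', args)`, coercing numeric strings such as `"3"` to integers where the schema says `integer`, and returning an `{ error: ... }` hash on timeout (10 seconds) or failure rather than raising.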
@@ -0,0 +1,90 @@
+# MCP Servers
+
+The Model Context Protocol (MCP) enables tools from different server implementations to be defined in a common, consistent way, allowing LLMs that support callback functions (aka tools) to access data that enhances the context of a prompt.
+
+You can find additional MCP servers at https://mcpindex.net
+
+## Overview
+
+This directory contains configuration files for various MCP servers that provide different capabilities to LLMs:
+
+| Server | Purpose | Configuration File |
+|--------|---------|--------------------|
+| Filesystem | Provides access to the local filesystem | `filesystem.json` |
+| iMCP | macOS-specific MCP server for system integration | `imcp.json` |
+| Playwright | Enables web automation and scraping capabilities | `playwright_server_definition.json` |
+
+## Configuration Details
+
+### Filesystem Server
+
+The filesystem server allows LLMs to read and interact with the local filesystem:
+
+TODO: fix this JSON file to use a generic directory; maybe $HOME
+
+```json
+{
+  "type": "stdio",
+  "command": [
+    "npx",
+    "-y",
+    "@modelcontextprotocol/server-filesystem",
+    "/Users/dewayne/sandbox/git_repos/madbomber/aia/develop"
+  ]
+}
+```
+
+The server is configured to access files within the project's development directory.
+
+### iMCP Server
+
+See: https://github.com/loopwork-ai/iMCP
+
+The iMCP server provides macOS-specific functionality:
+
+```json
+{
+  "mcpServers" : {
+    "iMCP" : {
+      "command" : "/Applications/iMCP.app/Contents/MacOS/imcp-server"
+    }
+  }
+}
+```
+
+### Playwright Server
+
+The Playwright server enables web automation and browser interaction capabilities:
+
+```json
+{
+  "mcpServers": {
+    "playwright": {
+      "url": "http://localhost:8931/sse",
+      "headers": {},
+      "comment": "Local Playwright MCP Server running on port 8931"
+    }
+  }
+}
+```
+
+## Usage
+
+These MCP servers can be used by LLM applications to extend their capabilities beyond text generation. The configuration files in this directory define how to connect to each server and what capabilities it provides.
+
+## Getting Started
+
+Use the `--mcp` option with `aia` to specify which MCP servers to use; repeat the option to select more than one server. For example:
+
+```bash
+aia prompt_id --mcp filesystem.json --mcp imcp.json --mcp playwright_server_definition.json
+# or
+aia --chat --mcp filesystem.json --mcp imcp.json --mcp playwright_server_definition.json
+# or
+aia prompt_id --chat --mcp filesystem.json --mcp imcp.json --mcp playwright_server_definition.json
+```
+
+## Additional Resources
+
+- [Model Context Protocol Documentation](https://github.com/anthropics/anthropic-cookbook/tree/main/model_context_protocol)
+- [MCP Server Implementation Guidelines](https://modelcontextprotocol.github.io/)
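
For orientation, a short sketch of what these definition files contain from Ruby's side; the field names match the JSON above, though exactly how aia's `--mcp` handling consumes the parsed hashes is not shown in this diff:

```ruby
require 'json'

# stdio-style definition (filesystem.json): a command to spawn and talk
# to over stdin/stdout.
fs = JSON.parse(File.read('filesystem.json'))
fs['type']    # => "stdio"
fs['command'] # => ["npx", "-y", "@modelcontextprotocol/server-filesystem", "..."]

# URL-style definition (playwright_server_definition.json): an SSE endpoint
# for an already-running server instead of a command to launch.
pw = JSON.parse(File.read('playwright_server_definition.json'))
pw.dig('mcpServers', 'playwright', 'url') # => "http://localhost:8931/sse"
```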
@@ -0,0 +1,9 @@
+{
+  "type": "stdio",
+  "command": [
+    "npx",
+    "-y",
+    "@modelcontextprotocol/server-filesystem",
+    "/Users/dewayne/sandbox/git_repos/madbomber/aia/develop"
+  ]
+}
@@ -0,0 +1,7 @@
+{
+  "mcpServers" : {
+    "iMCP" : {
+      "command" : "/Applications/iMCP.app/Contents/MacOS/imcp-server"
+    }
+  }
+}
@@ -0,0 +1,11 @@
+{
+  "mcpServers": {
+    "launcher": {
+      "command": "npx",
+      "args": [
+        "y",
+        "@joshuarileydev/mac-apps-launcher-mcp-server"
+      ]
+    }
+  }
+}
@@ -0,0 +1,9 @@
+{
+  "mcpServers": {
+    "playwright": {
+      "url": "http://localhost:8931/sse",
+      "headers": {},
+      "comment": "Local Playwright MCP Server running on port 8931"
+    }
+  }
+}