aia 0.9.1 → 0.9.3rc1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -57,7 +57,7 @@ module AIA
  RubyLLM.chat.clear_history
  end
  rescue => e
- AIA.debug_me(tag: '== context_manager clear_context error =='){[ :e, e.message, e.backtrace ]}
+ STDERR.puts "ERROR: context_manager clear_context error #{e.message}"
  end
  end

@@ -57,7 +57,7 @@ module AIA
  else
  a_string.to_s
  end
-
+
  content.strip.start_with?(PromptManager::Prompt::DIRECTIVE_SIGNAL)
  end

@@ -162,6 +162,7 @@ module AIA
  end
  ''
  end
+ alias_method :workflow, :pipeline

  desc "Inserts the contents of a file Example: //include path/to/file"
  def include(args, context_manager=nil)
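
A side note on the alias added above: alias_method simply registers //workflow as a second name for the existing //pipeline handler. A toy Ruby illustration (this is not aia's directive class; the class name, method body, and argument signature here are made up for demonstration):

  class Directives
    # Hypothetical stand-in for the real pipeline directive handler
    def pipeline(args, context_manager = nil)
      "pipeline(#{args})"
    end
    alias_method :workflow, :pipeline
  end

  Directives.new.workflow('step_one,step_two')  #=> "pipeline(step_one,step_two)"
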
@@ -240,6 +241,10 @@ module AIA

  desc "Clears the conversation history (aka context) same as //config clear = true"
  def clear(args, context_manager=nil)
+ # TODO: review the robot's code in the Session class for when the
+ # //clear directive is used in a follow up prompt. That processing
+ # should be moved here so that it is also available in batch
+ # sessions.
  if context_manager.nil?
  return "Error: Context manager not available for //clear directive."
  end
data/lib/aia/ruby_llm_adapter.rb CHANGED
@@ -1,55 +1,136 @@
  # lib/aia/ruby_llm_adapter.rb

  require 'ruby_llm'
- require 'mcp_client'
+
+ class RubyLLM::Modalities
+ def supports?(query_mode)
+ parts = query_mode
+ .to_s
+ .downcase
+ .split(/2|-to-| to |_to_/)
+ .map(&:strip)
+
+ if 2 == parts.size
+ input.include?(parts[0]) && output.include?(parts[1])
+ elsif 1 == parts.size
+ input.include?(parts[0]) || output.include?(parts[0])
+ else
+ false
+ end
+ end
+ end

  module AIA
  class RubyLLMAdapter
- def initialize
+ attr_reader :tools

- debug_me('=== RubyLLMAdapter ===')
+ def initialize
+ @provider, @model = extract_model_parts.values

- @model = AIA.config.model
- model_info = extract_model_parts(@model)
+ configure_rubyllm
+ refresh_local_model_registry
+ setup_chat_with_tools
+ end

- # Configure RubyLLM with available API keys
+ def configure_rubyllm
+ # TODO: Add some of these configuration items to AIA.config
  RubyLLM.configure do |config|
- config.openai_api_key = ENV.fetch('OPENAI_API_KEY', nil)
- config.anthropic_api_key = ENV.fetch('ANTHROPIC_API_KEY', nil)
- config.gemini_api_key = ENV.fetch('GEMINI_API_KEY', nil)
- config.deepseek_api_key = ENV.fetch('DEEPSEEK_API_KEY', nil)
-
- # Bedrock configuration
- config.bedrock_api_key = ENV.fetch('AWS_ACCESS_KEY_ID', nil)
- config.bedrock_secret_key = ENV.fetch('AWS_SECRET_ACCESS_KEY', nil)
- config.bedrock_region = ENV.fetch('AWS_REGION', nil)
- config.bedrock_session_token = ENV.fetch('AWS_SESSION_TOKEN', nil)
+ config.openai_api_key = ENV.fetch('OPENAI_API_KEY', nil)
+ config.openai_organization_id = ENV.fetch('OPENAI_ORGANIZATION_ID', nil)
+ config.openai_project_id = ENV.fetch('OPENAI_PROJECT_ID', nil)
+
+ config.anthropic_api_key = ENV.fetch('ANTHROPIC_API_KEY', nil)
+ config.gemini_api_key = ENV.fetch('GEMINI_API_KEY', nil)
+ config.deepseek_api_key = ENV.fetch('DEEPSEEK_API_KEY', nil)
+ config.openrouter_api_key = ENV.fetch('OPENROUTER_API_KEY', nil)
+
+ config.bedrock_api_key = ENV.fetch('BEDROCK_ACCESS_KEY_ID', nil)
+ config.bedrock_secret_key = ENV.fetch('BEDROCK_SECRET_ACCESS_KEY', nil)
+ config.bedrock_region = ENV.fetch('BEDROCK_REGION', nil)
+ config.bedrock_session_token = ENV.fetch('BEDROCK_SESSION_TOKEN', nil)
+
+ config.ollama_api_base = ENV.fetch('OLLAMA_API_BASE', nil)
+
+ # --- Custom OpenAI Endpoint ---
+ # Use this for Azure OpenAI, proxies, or self-hosted models via OpenAI-compatible APIs.
+ config.openai_api_base = ENV.fetch('OPENAI_API_BASE', nil) # e.g., "https://your-azure.openai.azure.com"
+
+ # --- Default Models ---
+ # Used by RubyLLM.chat, RubyLLM.embed, RubyLLM.paint if no model is specified.
+ # config.default_model = 'gpt-4.1-nano' # Default: 'gpt-4.1-nano'
+ # config.default_embedding_model = 'text-embedding-3-small' # Default: 'text-embedding-3-small'
+ # config.default_image_model = 'dall-e-3' # Default: 'dall-e-3'
+
+ # --- Connection Settings ---
+ # config.request_timeout = 120 # Request timeout in seconds (default: 120)
+ # config.max_retries = 3 # Max retries on transient network errors (default: 3)
+ # config.retry_interval = 0.1 # Initial delay in seconds (default: 0.1)
+ # config.retry_backoff_factor = 2 # Multiplier for subsequent retries (default: 2)
+ # config.retry_interval_randomness = 0.5 # Jitter factor (default: 0.5)
+
+ # --- Logging Settings ---
+ # config.log_file = '/logs/ruby_llm.log'
+ config.log_level = :fatal # debug level can also be set to debug by setting RUBYLLM_DEBUG envar to true
  end
+ end

- debug_me{[ :model_info ]}
+ def refresh_local_model_registry
+ if AIA.config.refresh.nil? ||
+ Integer(AIA.config.refresh).zero? ||
+ Date.today > (AIA.config.last_refresh + Integer(AIA.config.refresh))
+ RubyLLM.models.refresh!
+ AIA.config.last_refresh = Date.today
+ if AIA.config.config_file
+ AIA::Config.dump_config(AIA.config, AIA.config.config_file)
+ end
+ end
+ end

- mcp_client, mcp_tools = generate_mcp_tools(model_info[:provider])
+ def setup_chat_with_tools
+ begin
+ @chat = RubyLLM.chat(model: @model)
+ rescue => e
+ STDERR.puts "ERROR: #{e.message}"
+ exit 1
+ end

- debug_me{[ :mcp_tools ]}
+ if !AIA.config.tool_paths.empty? && !@chat.model.supports?(:function_calling)
+ STDERR.puts "ERROR: The model #{@model} does not support tools"
+ exit 1
+ end

- if mcp_tools && !mcp_tools.empty?
- RubyLLM::Chat.with_mcp(client: mcp_client, call_tool_method: :call_tool, tools: mcp_tools)
+ @tools = ObjectSpace.each_object(Class).select do |klass|
+ klass < RubyLLM::Tool
  end

- @chat = RubyLLM.chat(model: model_info[:model])
+ unless tools.empty?
+ @chat.with_tools(*tools)
+ AIA.config.tools = tools.map(&:name).join(', ')
+ end
  end

+ # TODO: Need to rethink this dispatcher pattern w/r/t RubyLLM's capabilities
+ # This code was originally designed for AiClient
+ #
  def chat(prompt)
- if @model.downcase.include?('dall-e') || @model.downcase.include?('image-generation')
- text_to_image(prompt)
- elsif @model.downcase.include?('vision') || @model.downcase.include?('image')
+ modes = @chat.model.modalities
+
+ # TODO: Need to consider how to handle multi-mode models
+ if modes.supports? :text_to_text
+ text_to_text(prompt)
+
+ elsif modes.supports? :image_to_text
  image_to_text(prompt)
- elsif @model.downcase.include?('tts') || @model.downcase.include?('speech')
+ elsif modes.supports? :text_to_image
+ text_to_image(prompt)
+
+ elsif modes.supports? :text_to_audio
  text_to_audio(prompt)
- elsif @model.downcase.include?('whisper') || @model.downcase.include?('transcription')
+ elsif modes.supports? :audio_to_text
  audio_to_text(prompt)
+
  else
- text_to_text(prompt)
+ # TODO: what else can be done?
  end
  end
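
The RubyLLM::Modalities monkey patch at the top of this file is what drives the new chat dispatcher above: a query mode such as :text_to_text or "image2text" is split on the separators 2, -to-, ' to ', and _to_, then checked against the model's input and output modality lists. A minimal stand-alone sketch of that parsing (the Struct here is a stand-in, not RubyLLM's Modalities class, and the modality lists are hypothetical):

  Modalities = Struct.new(:input, :output) do
    def supports?(query_mode)
      parts = query_mode.to_s.downcase.split(/2|-to-| to |_to_/).map(&:strip)
      if 2 == parts.size
        input.include?(parts[0]) && output.include?(parts[1])   # both sides must match
      elsif 1 == parts.size
        input.include?(parts[0]) || output.include?(parts[0])   # single mode matches either side
      else
        false
      end
    end
  end

  modes = Modalities.new(%w[text image], %w[text])
  modes.supports?(:text_to_text)   #=> true   ("text" in and "text" out)
  modes.supports?('image2text')    #=> true   (the "2" separator also matches)
  modes.supports?(:text_to_audio)  #=> false  ("audio" is not an output modality)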
@@ -73,74 +154,56 @@ module AIA
  end
  end

- def method_missing(method, *args, &block)
- debug_me(tag: '== missing ==', levels: 25){[ :method, :args ]}
- if @chat.respond_to?(method)
- @chat.public_send(method, *args, &block)
- else
- super
- end
- end
-
  # Clear the chat context/history
  # Needed for the //clear directive
  def clear_context
- AIA.debug_me(tag: '== AGGRESSIVELY clearing LLM context ==') do
+ begin
+ # Option 1: Directly clear the messages array in the current chat object
+ if @chat.instance_variable_defined?(:@messages)
+ old_messages = @chat.instance_variable_get(:@messages)
+ # Force a completely empty array, not just attempting to clear it
+ @chat.instance_variable_set(:@messages, [])
+ end
+
+ # Option 2: Force RubyLLM to create a new chat instance at the global level
+ # This ensures any shared state is reset
+ @provider, @model = extract_model_parts.values
+ RubyLLM.instance_variable_set(:@chat, nil) if RubyLLM.instance_variable_defined?(:@chat)
+
+ # Option 3: Create a completely fresh chat instance for this adapter
+ @chat = nil # First nil it to help garbage collection
+
  begin
- # Option 1: Directly clear the messages array in the current chat object
- if @chat.instance_variable_defined?(:@messages)
- AIA.debug_me("Directly clearing @messages array")
- old_messages = @chat.instance_variable_get(:@messages)
- AIA.debug_me{[:old_messages, old_messages.length]}
- # Force a completely empty array, not just attempting to clear it
- @chat.instance_variable_set(:@messages, [])
- end
-
- # Option 2: Force RubyLLM to create a new chat instance at the global level
- # This ensures any shared state is reset
- AIA.debug_me("Force global RubyLLM chat reset")
- model_info = extract_model_parts(@model)
- RubyLLM.instance_variable_set(:@chat, nil) if RubyLLM.instance_variable_defined?(:@chat)
-
- # Option 3: Create a completely fresh chat instance for this adapter
- @chat = nil # First nil it to help garbage collection
- @chat = RubyLLM.chat(model: model_info[:model])
- AIA.debug_me("Created fresh RubyLLM::Chat instance")
-
- # Option 4: Call official clear_history method if it exists
- if @chat.respond_to?(:clear_history)
- AIA.debug_me("Calling clear_history method")
- @chat.clear_history
- end
-
- # Option 5: If chat has messages, force set it to empty again as a final check
- if @chat.instance_variable_defined?(:@messages) && !@chat.instance_variable_get(:@messages).empty?
- AIA.debug_me("FINAL CHECK: @messages still not empty, forcing empty")
- @chat.instance_variable_set(:@messages, [])
- end
-
- # Reset any MCP tools configuration
- begin
- mcp_client, mcp_tools = generate_mcp_tools(model_info[:provider])
- if mcp_tools && !mcp_tools.empty?
- AIA.debug_me("Reconfiguring MCP tools")
- RubyLLM::Chat.with_mcp(client: mcp_client, call_tool_method: :call_tool, tools: mcp_tools)
- end
- rescue => mcp_error
- AIA.debug_me{[:mcp_error, mcp_error.message]}
- end
-
- # Final verification
- new_messages = @chat.instance_variable_defined?(:@messages) ? @chat.instance_variable_get(:@messages) : []
- AIA.debug_me{[:new_messages, new_messages.length]}
-
- return "Chat context successfully cleared."
+ @chat = RubyLLM.chat(model: @model)
  rescue => e
- AIA.debug_me{
- [ :e, e.message, e.backtrace ]
- }
- return "Error clearing chat context: #{e.message}"
+ STDERR.puts "ERROR: #{e.message}"
+ exit 1
+ end
+
+ # Option 4: Call official clear_history method if it exists
+ if @chat.respond_to?(:clear_history)
+ @chat.clear_history
  end
+
+ # Option 5: If chat has messages, force set it to empty again as a final check
+ if @chat.instance_variable_defined?(:@messages) && !@chat.instance_variable_get(:@messages).empty?
+ @chat.instance_variable_set(:@messages, [])
+ end
+
+ # Final verification
+ new_messages = @chat.instance_variable_defined?(:@messages) ? @chat.instance_variable_get(:@messages) : []
+
+ return "Chat context successfully cleared."
+ rescue => e
+ return "Error clearing chat context: #{e.message}"
+ end
+ end
+
+ def method_missing(method, *args, &block)
+ if @chat.respond_to?(method)
+ @chat.public_send(method, *args, &block)
+ else
+ super
  end
  end

@@ -150,49 +213,19 @@ module AIA

  private

- # Generate an array of MCP tools, filtered and formatted for the correct provider.
- # @param config [OpenStruct] the config object containing mcp_servers, allowed_tools, and model
- # @return [Array<Hash>, nil] the filtered and formatted MCP tools or nil if no tools
- def generate_mcp_tools(provider)
- return [nil, nil] unless AIA.config.mcp_servers && !AIA.config.mcp_servers.empty?
-
- debug_me('=== generate_mcp_tools ===')
-
- # AIA.config.mcp_servers is now a path to the combined JSON file
- mcp_client = MCPClient.create_client(server_definition_file: AIA.config.mcp_servers)
- debug_me
- all_tools = mcp_client.list_tools(cache: false).map(&:name)
- debug_me
- allowed = AIA.config.allowed_tools
- debug_me
- filtered_tools = allowed.nil? ? all_tools : all_tools & allowed
- debug_me{[ :filtered_tools ]}
-
- debug_me{[ :provider ]}
-
- mcp_tools = if :anthropic == provider.to_sym
- debug_me
- mcp_client.to_anthropic_tools(tool_names: filtered_tools)
- else
- debug_me
- mcp_client.to_openai_tools(tool_names: filtered_tools)
- end
- [mcp_client, mcp_tools]
- rescue => e
- STDERR.puts "ERROR: Failed to generate MCP tools: #{e.message}"
- nil
- end
-
- def extract_model_parts(model_string)
- parts = model_string.split('/')
+ def extract_model_parts
+ parts = AIA.config.model.split('/')
  parts.map!(&:strip)

- if parts.length > 1
- provider = parts[0]
- model = parts[1]
+ if 2 == parts.length
+ provider = parts[0]
+ model = parts[1]
+ elsif 1 == parts.length
+ provider = nil # RubyLLM will figure it out from the model name
+ model = parts[0]
  else
- provider = nil # RubyLLM will figure it out from the model name
- model = parts[0]
+ STDERR.puts "ERROR: malformed model name: #{AIA.config.model}"
+ exit 1
  end

  { provider: provider, model: model }
@@ -212,7 +245,7 @@ module AIA

  def text_to_text(prompt)
  text_prompt = extract_text_prompt(prompt)
- @chat.ask(text_prompt)
+ @chat.ask(text_prompt).content
  end

  def text_to_image(prompt)
@@ -221,9 +254,9 @@ module AIA

  begin
  RubyLLM.paint(text_prompt, output_path: output_file,
- size: AIA.config.image_size,
- quality: AIA.config.image_quality,
- style: AIA.config.image_style)
+ size: AIA.config.image_size,
+ quality: AIA.config.image_quality,
+ style: AIA.config.image_style)
  "Image generated and saved to: #{output_file}"
  rescue => e
  "Error generating image: #{e.message}"
@@ -231,12 +264,12 @@ module AIA
  end

  def image_to_text(prompt)
- image_path = extract_image_path(prompt)
+ image_path = extract_image_path(prompt)
  text_prompt = extract_text_prompt(prompt)

  if image_path && File.exist?(image_path)
  begin
- @chat.ask(text_prompt, with: { image: image_path })
+ @chat.ask(text_prompt, with: { image: image_path }).content
  rescue => e
  "Error analyzing image: #{e.message}"
  end
@@ -260,11 +293,16 @@ module AIA
  end
  end

+ # TODO: what if its a multi-mode model and a text prompt is provided with
+ # the audio file?
  def audio_to_text(prompt)
+ text = extract_text_prompt(prompt)
+ text = 'Transcribe this audio' if text.nil? || text.empty?
+
  if prompt.is_a?(String) && File.exist?(prompt) &&
  prompt.downcase.end_with?('.mp3', '.wav', '.m4a', '.flac')
  begin
- @chat.ask("Transcribe this audio", with: { audio: prompt })
+ @chat.ask(text, with: { audio: prompt }).content
  rescue => e
  "Error transcribing audio: #{e.message}"
  end
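
For reference, the provider/model split done by the new extract_model_parts above comes down to splitting the configured model string on '/'. A rough stand-alone sketch (the real method reads AIA.config.model and prints an error and exits on malformed names instead of raising; the model strings below are only examples):

  def extract_model_parts(model_string)
    parts = model_string.split('/').map(&:strip)
    case parts.length
    when 2 then { provider: parts[0], model: parts[1] }
    when 1 then { provider: nil, model: parts[0] }   # provider inferred by RubyLLM
    else raise ArgumentError, "malformed model name: #{model_string}"
    end
  end

  extract_model_parts('openai/gpt-4o')   #=> { provider: "openai", model: "gpt-4o" }
  extract_model_parts('gpt-4o')          #=> { provider: nil, model: "gpt-4o" }
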
data/lib/aia/session.rb CHANGED
@@ -184,8 +184,8 @@ module AIA
  @chat_prompt = PromptManager::Prompt.new(
  id: @chat_prompt_id,
  directives_processor: @directive_processor,
- erb_flag: AIA.config.erb,
- envar_flag: AIA.config.shell,
+ erb_flag: true,
+ envar_flag: true,
  external_binding: binding,
  )

@@ -259,24 +259,23 @@ module AIA
  if follow_up_prompt.strip.start_with?('//clear')
  # The directive processor has called context_manager.clear_context
  # but we need a more aggressive approach to fully clear all context
-
+
  # First, clear the context manager's context
  @context_manager.clear_context(keep_system_prompt: true)
-
+
  # Second, try clearing the client's context
  if AIA.config.client && AIA.config.client.respond_to?(:clear_context)
  AIA.config.client.clear_context
  end
-
+
  # Third, completely reinitialize the client to ensure fresh state
  # This is the most aggressive approach to ensure no context remains
  begin
  AIA.config.client = AIA::RubyLLMAdapter.new
- AIA.debug_me("Completely reinitialized client for //clear directive")
  rescue => e
- AIA.debug_me("Error reinitializing client: #{e.message}")
+ STDERR.puts "Error reinitializing client: #{e.message}"
  end
-
+
  @ui_presenter.display_info("Chat context cleared.")
  next
  elsif directive_output.nil? || directive_output.strip.empty?
data/lib/aia/utility.rb CHANGED
@@ -1,23 +1,33 @@
  # lib/aia/utility.rb

+ require 'word_wrapper' # Pure ruby word wrapping
+
  module AIA
  class Utility
  class << self
  # Displays the AIA robot ASCII art
+ # Yes, its slightly frivolous but it does contain some
+ # useful configuration information.
  def robot
+ indent = 18
+ spaces = " "*indent
+ width = TTY::Screen.width - indent - 2
+
  puts <<-ROBOT

  , ,
- (\\____/) AI Assistant
+ (\\____/) AI Assistant (v#{AIA::VERSION}) is Online
  (_oo_) #{AIA.config.model}
- (O) is Online
- __||__ \\) using #{AIA.config.adapter}
- [/______\\] /
- / \\__AI__/ \\/
+ (O) using #{AIA.config.adapter} (v#{RubyLLM::VERSION})
+ __||__ \\) model db was last refreshed on
+ [/______\\] / #{AIA.config.last_refresh}
+ / \\__AI__/ \\/ #{AIA.config.tool_paths.empty? ? 'I forgot my toolbox' : 'I brought some tools'}
  / /__\\
- (\\ /____\\
-
+ (\\ /____\\ #{AIA.config.tool_paths.empty? ? '' : 'My Toolbox contains:'}
  ROBOT
+ if AIA.config.tools
+ puts WordWrapper::MinimumRaggedness.new(width, AIA.config.tools).wrap.split("\n").map{|s| spaces+s+"\n"}.join
+ end
  end
  end
  end
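
The toolbox listing above wraps AIA.config.tools with the word_wrapper gem and then indents every wrapped line so it lines up under the robot. A small sketch of that wrap-and-indent step (the tool names and the fixed width below are made-up examples; the real code derives the width from TTY::Screen.width - indent - 2):

  require 'word_wrapper'   # pure Ruby word wrapping, as required above

  indent = 18
  width  = 60
  tools  = 'RubyTool, ShellTool, WebSearchTool, CalculatorTool'   # hypothetical names

  wrapped = WordWrapper::MinimumRaggedness.new(width, tools).wrap
  puts wrapped.split("\n").map { |line| (' ' * indent) + line + "\n" }.join
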
data/lib/aia.rb CHANGED
@@ -5,10 +5,7 @@
  # provides an interface for interacting with AI models and managing prompts.

  require 'ruby_llm'
- require_relative 'extensions/ruby_llm/chat'
-
  require 'prompt_manager'
- require 'mcp_client'

  require 'debug_me'
  include DebugMe
@@ -82,12 +79,21 @@ module AIA
  prompt_handler = PromptHandler.new

  # Initialize the appropriate client adapter based on configuration
- @config.client = if @config.adapter == 'ruby_llm'
+ @config.client = if 'ruby_llm' == @config.adapter
  RubyLLMAdapter.new
  else
- AIClientAdapter.new
+ # TODO: ?? some other LLM API wrapper
+ STDERR.puts "ERROR: There is no adapter for #{@config.adapter}"
+ exit 1
  end

+ # There are two kinds of sessions: batch and chat
+ # A chat session is started when the --chat CLI option is used
+ # BUT its also possible to start a chat session with an initial prompt AND
+ # within that initial prompt there can be a workflow (aka pipeline)
+ # defined. If that is the case, then the chat session will not start
+ # until the initial prompt has completed its workflow.
+
  session = Session.new(prompt_handler)

  session.start