aia 0.9.22 → 0.9.24

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/lib/aia/session.rb CHANGED
@@ -9,7 +9,6 @@ require "fileutils"
  require "amazing_print"
  require_relative "directive_processor"
  require_relative "history_manager"
- require_relative "context_manager"
  require_relative "ui_presenter"
  require_relative "chat_processor_service"
  require_relative "prompt_handler"
@@ -45,28 +44,9 @@ module AIA
  end
 
  def initialize_components
- # For multi-model: create separate context manager per model (ADR-002 revised + ADR-005)
- # For single-model: maintain backward compatibility with single context manager
- if AIA.config.model.is_a?(Array) && AIA.config.model.size > 1
- @context_managers = {}
- AIA.config.model.each do |model_spec|
- # Handle both old string format and new hash format (ADR-005)
- internal_id = if model_spec.is_a?(Hash)
- model_spec[:internal_id]
- else
- model_spec
- end
-
- @context_managers[internal_id] = ContextManager.new(
- system_prompt: AIA.config.system_prompt
- )
- end
- @context_manager = nil # Signal we're using per-model managers
- else
- @context_manager = ContextManager.new(system_prompt: AIA.config.system_prompt)
- @context_managers = nil
- end
-
+ # RubyLLM's Chat instances maintain conversation history internally
+ # via @messages array. No separate context manager needed.
+ # Checkpoint/restore directives access Chat.@messages directly via AIA.client.chats
  @ui_presenter = UIPresenter.new
  @directive_processor = DirectiveProcessor.new
  @chat_processor = ChatProcessorService.new(@ui_presenter, @directive_processor)
@@ -225,23 +205,21 @@ module AIA
  end
 
  # Send prompt to AI and handle the response
+ # RubyLLM's Chat automatically adds user messages and responses to its internal @messages
  def send_prompt_and_get_response(prompt_text)
- # Add prompt to conversation context
- @context_manager.add_to_context(role: "user", content: prompt_text)
-
- # Process the prompt
+ # Process the prompt - RubyLLM Chat maintains conversation history internally
  @ui_presenter.display_thinking_animation
- response = @chat_processor.process_prompt(@context_manager.get_context)
+ response_data = @chat_processor.process_prompt(prompt_text)
 
- # Add AI response to context
- @context_manager.add_to_context(role: "assistant", content: response)
+ # Handle response format (may include metrics)
+ content = response_data.is_a?(Hash) ? response_data[:content] : response_data
 
  # Output the response
- @chat_processor.output_response(response)
+ @chat_processor.output_response(content)
 
  # Process any directives in the response
- if @directive_processor.directive?(response)
- directive_result = @directive_processor.process(response, @context_manager)
+ if @directive_processor.directive?(content)
+ directive_result = @directive_processor.process(content, nil)
  puts "\nDirective output: #{directive_result}" if directive_result && !directive_result.strip.empty?
  end
  end
@@ -315,19 +293,16 @@ module AIA
 
  return if context.empty?
 
- # Add context files content to context
- @context_manager.add_to_context(role: "user", content: context)
-
- # Process the context
+ # Process the context - RubyLLM Chat maintains conversation history internally
  @ui_presenter.display_thinking_animation
- response = @chat_processor.process_prompt(@context_manager.get_context)
+ response_data = @chat_processor.process_prompt(context)
 
- # Add AI response to context
- @context_manager.add_to_context(role: "assistant", content: response)
+ # Handle response format (may include metrics)
+ content = response_data.is_a?(Hash) ? response_data[:content] : response_data
 
  # Output the response
- @chat_processor.output_response(response)
- @chat_processor.speak(response)
+ @chat_processor.output_response(content)
+ @chat_processor.speak(content)
  @ui_presenter.display_separator
  end
 
@@ -347,14 +322,15 @@ module AIA
  @chat_prompt.text = piped_input
  processed_input = @chat_prompt.to_s
 
- @context_manager.add_to_context(role: "user", content: processed_input)
-
+ # Process the piped input - RubyLLM Chat maintains conversation history internally
  @ui_presenter.display_thinking_animation
- response = @chat_processor.process_prompt(@context_manager.get_context)
+ response_data = @chat_processor.process_prompt(processed_input)
+
+ # Handle response format (may include metrics)
+ content = response_data.is_a?(Hash) ? response_data[:content] : response_data
 
- @context_manager.add_to_context(role: "assistant", content: response)
- @chat_processor.output_response(response)
- @chat_processor.speak(response) if AIA.speak?
+ @chat_processor.output_response(content)
+ @chat_processor.speak(content) if AIA.speak?
  @ui_presenter.display_separator
 
  STDIN.reopen(original_stdin)
@@ -389,29 +365,10 @@ module AIA
  @chat_prompt.text = follow_up_prompt
  processed_prompt = @chat_prompt.to_s
 
- # Handle per-model contexts (ADR-002 revised)
- if @context_managers
- # Multi-model: add user prompt to each model's context
- @context_managers.each_value do |ctx_mgr|
- ctx_mgr.add_to_context(role: "user", content: processed_prompt)
- end
-
- # Get per-model conversations
- conversations = {}
- @context_managers.each do |model_name, ctx_mgr|
- conversations[model_name] = ctx_mgr.get_context
- end
-
- @ui_presenter.display_thinking_animation
- response_data = @chat_processor.process_prompt(conversations)
- else
- # Single-model: use original logic
- @context_manager.add_to_context(role: "user", content: processed_prompt)
- conversation = @context_manager.get_context
-
- @ui_presenter.display_thinking_animation
- response_data = @chat_processor.process_prompt(conversation)
- end
+ # Process the prompt - RubyLLM Chat maintains conversation history internally
+ # via @messages array. Each model's Chat instance tracks its own conversation.
+ @ui_presenter.display_thinking_animation
+ response_data = @chat_processor.process_prompt(processed_prompt)
 
  # Handle new response format with metrics
  if response_data.is_a?(Hash)
@@ -437,21 +394,6 @@ module AIA
  end
  end
 
- # Add responses to context (ADR-002 revised)
- if @context_managers
- # Multi-model: parse combined response and add each model's response to its own context
- parsed_responses = parse_multi_model_response(content)
- parsed_responses.each do |model_name, model_response|
- @context_managers[model_name]&.add_to_context(
- role: "assistant",
- content: model_response
- )
- end
- else
- # Single-model: add response to single context
- @context_manager.add_to_context(role: "assistant", content: content)
- end
-
  @chat_processor.speak(content)
 
  @ui_presenter.display_separator
@@ -459,71 +401,21 @@ module AIA
  end
 
  def process_chat_directive(follow_up_prompt)
- # For multi-model, use first context manager for directives (ADR-002 revised)
- # TODO: Consider if directives should affect all contexts or just one
- context_for_directive = @context_managers ? @context_managers.values.first : @context_manager
- directive_output = @directive_processor.process(follow_up_prompt, context_for_directive)
-
- return handle_clear_directive if follow_up_prompt.strip.start_with?("//clear")
- return handle_checkpoint_directive(directive_output) if follow_up_prompt.strip.start_with?("//checkpoint")
- return handle_restore_directive(directive_output) if follow_up_prompt.strip.start_with?("//restore")
- return handle_empty_directive_output if directive_output.nil? || directive_output.strip.empty?
-
- handle_successful_directive(follow_up_prompt, directive_output)
- end
-
- def handle_clear_directive
- # Clear context manager(s) - ADR-002 revised
- if @context_managers
- # Multi-model: clear all context managers
- @context_managers.each_value { |ctx_mgr| ctx_mgr.clear_context(keep_system_prompt: true) }
- else
- # Single-model: clear single context manager
- @context_manager.clear_context(keep_system_prompt: true)
- end
-
- # Try clearing the client's context
- if AIA.config.client && AIA.config.client.respond_to?(:clear_context)
- begin
- AIA.config.client.clear_context
- rescue => e
- STDERR.puts "Warning: Error clearing client context: #{e.message}"
- # Continue anyway - the context manager has been cleared which is the main goal
- end
+ # Directives now access RubyLLM's Chat.@messages directly via AIA.client
+ # The second parameter is no longer used by checkpoint/restore/clear/review
+ directive_output = @directive_processor.process(follow_up_prompt, nil)
+
+ # Checkpoint-related directives (clear, checkpoint, restore, review) handle
+ # everything internally via the Checkpoint module, which operates directly
+ # on RubyLLM's Chat.@messages - no additional handling needed here.
+ if follow_up_prompt.strip.start_with?("//clear", "//checkpoint", "//restore", "//review", "//context")
+ @ui_presenter.display_info(directive_output) unless directive_output.nil? || directive_output.strip.empty?
+ return nil
  end
 
- # Note: We intentionally do NOT reinitialize the client here
- # as that could cause termination if model initialization fails
-
- @ui_presenter.display_info("Chat context cleared.")
- nil
- end
-
- def handle_checkpoint_directive(directive_output)
- @ui_presenter.display_info(directive_output)
- nil
- end
-
- def handle_restore_directive(directive_output)
- # If the restore was successful, we also need to refresh the client's context - ADR-002 revised
- if directive_output.start_with?("Context restored")
- # Clear the client's context without reinitializing the entire adapter
- if AIA.config.client && AIA.config.client.respond_to?(:clear_context)
- begin
- AIA.config.client.clear_context
- rescue => e
- STDERR.puts "Warning: Error clearing client context after restore: #{e.message}"
- # Continue anyway - the context manager has been restored which is the main goal
- end
- end
-
- # Note: For multi-model, only the first context manager was used for restore
- # This is a limitation of the current directive system
- # TODO: Consider supporting restore for all context managers
- end
+ return handle_empty_directive_output if directive_output.nil? || directive_output.strip.empty?
 
- @ui_presenter.display_info(directive_output)
- nil
+ handle_successful_directive(follow_up_prompt, directive_output)
  end
 
  def handle_empty_directive_output
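
Every call site above now unwraps the adapter's response the same way: process_prompt may return either a plain string or a hash carrying the content plus metrics. A minimal sketch of that unwrapping pattern, taken from the hunks above (the :content key appears in the diff; any sibling metrics keys are assumptions for illustration):

  def extract_content(response_data)
    # Hash responses carry the text under :content; plain strings pass through.
    response_data.is_a?(Hash) ? response_data[:content] : response_data
  end

  extract_content("plain reply")                       #=> "plain reply"
  extract_content({ content: "reply", metrics: {} })   #=> "reply"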
data/lib/aia/topic_context.rb ADDED
@@ -0,0 +1,125 @@
+ # lib/aia/topic_context.rb
+ # Just thinking about the problem ...
+ # maybe a directive like //topic [topic]
+ # sets manually (when present) or dynamically when not present
+ # and //topics - will list current topics
+ # thinking about the //checkpoint and //restore directives
+ #
+ module AIA
+ class TopicContext
+ attr_reader :context_size
+
+ # Initialize topic context manager
+ # @param context_size [Integer] max allowed bytes per topic
+ def initialize(context_size = 128_000)
+ @storage = Hash.new { |h, k| h[k] = [] } # auto-initialize empty array
+ @context_size = context_size
+ @total_chars = 0
+ @mutex = Mutex.new # ensure thread safety
+ end
+
+ # Store a request/response pair under the given topic (or auto-generate one)
+ # @param request [String]
+ # @param response [String]
+ # @param topic [String, nil]
+ # @return [String] topic name used
+ def store_conversation(request, response, topic = nil)
+ raise ArgumentError, "request and response must be strings" unless request.is_a?(String) && response.is_a?(String)
+
+ topic ||= generate_topic(request)
+ size = request.bytesize + response.bytesize
+
+ @mutex.synchronize do
+ # Add the new context
+ @storage[topic] << { request:, response:, size:, time: Time.now }
+
+ # Update the global total
+ @total_chars += size
+
+ # Trim old entries if we exceeded the per-topic limit
+ trim_topic(topic)
+ end
+
+ topic
+ end
+
+ # Return an array of contexts for the given topic
+ # @param topic [String]
+ # @return [Array<Hash>]
+ def get_conversation(topic)
+ @mutex.synchronize { @storage[topic] || [] }
+ end
+
+ # All topic names
+ # @return [Array<String>]
+ def topics
+ @mutex.synchronize { @storage.keys }
+ end
+
+ # Hash of topic => array_of_contexts
+ # @return [Hash<String, Array<Hash>>]
+ def all_conversations
+ @mutex.synchronize { @storage.dup }
+ end
+
+ # Total number of characters stored across all topics
+ # @return [Integer]
+ def total_chars
+ @mutex.synchronize { @total_chars }
+ end
+
+ # Empty the storage and reset counters
+ def clear
+ @mutex.synchronize do
+ @storage.clear
+ @total_chars = 0
+ end
+ end
+
+ # Get memory usage statistics for a topic
+ # @param topic [String]
+ # @return [Hash{Symbol => Integer}]
+ def topic_stats(topic)
+ @mutex.synchronize do
+ return {} unless @storage.key?(topic)
+
+ {
+ count: @storage[topic].length,
+ size: topic_total_size(topic),
+ avg_size: topic_total_size(topic).fdiv(@storage[topic].length),
+ }
+ end
+ end
+
+ private
+
+ # Topic extractor with better heuristic - uses first meaningful 3 words
+ # @param request [String]
+ # @return [String]
+ def generate_topic(request)
+ cleaned = request.downcase.gsub(/[^a-z0-9\s]/, "")
+ words = cleaned.split
+ return "general" if words.empty?
+
+ words.first(3).join("_")
+ end
+
+ # Remove oldest contexts from the topic until size <= @context_size
+ # @param topic [String]
+ def trim_topic(topic)
+ return unless @storage.key?(topic) && @storage[topic].size > 1
+
+ while topic_total_size(topic) > @context_size
+ removed = @storage[topic].shift # oldest context
+ @total_chars -= removed[:size] # adjust global counter
+ end
+ end
+
+ # Helper to compute the sum of sizes for a topic
+ # @param topic [String]
+ # @return [Integer]
+ def topic_total_size(topic)
+ @storage[topic].sum { |ctx| ctx[:size] }
+ end
+ end
+ end
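
The new TopicContext file is explicitly exploratory (its header comments say it is "just thinking about the problem"), but the class as written is complete. A short usage sketch against the API shown above (the require path follows the file's own header comment):

  require "aia/topic_context"

  tc = AIA::TopicContext.new(1_000)   # small byte limit to exercise trimming
  topic = tc.store_conversation("How do Ruby mutexes work?", "A Mutex ensures...")
  topic                 #=> "how_do_ruby"  (first three cleaned words of the request)
  tc.topics             #=> ["how_do_ruby"]
  tc.topic_stats(topic) #=> { count: 1, size: ..., avg_size: ... }

Note that trim_topic is skipped entirely when a topic holds only one entry, so a single oversized request/response pair is retained even when it exceeds context_size.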
data/lib/aia/utility.rb CHANGED
@@ -13,6 +13,15 @@ module AIA
  AIA.config&.tool_paths && !AIA.config.tool_paths.empty?
  end
 
+ def mcp_servers?
+ AIA.config&.mcp_servers && !AIA.config.mcp_servers.empty?
+ end
+
+ def mcp_server_names
+ return [] unless mcp_servers?
+ AIA.config.mcp_servers.map { |s| s[:name] || s["name"] }.compact
+ end
+
  def supports_tools?
  AIA.config&.client&.model&.supports_functions? || false
  end
@@ -46,6 +55,8 @@ module AIA
  'unknown-model'
  end
 
+ mcp_line = mcp_servers? ? "MCP: #{mcp_server_names.join(', ')}" : ''
+
  puts <<-ROBOT
 
  , ,
@@ -55,7 +66,7 @@ module AIA
  __||__ \\) model db was last refreshed on
  [/______\\] / #{AIA.config&.last_refresh || 'unknown'}
  / \\__AI__/ \\/ #{user_tools? ? 'I will also use your tools' : (tools? ? 'You can share my tools' : 'I did not bring any tools')}
- / /__\\
+ / /__\\ #{mcp_line}
  (\\ /____\\ #{user_tools? && tools? ? 'My Toolbox contains:' : ''}
  ROBOT
  if user_tools? && tools?
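
The new mcp_server_names reads each server's name with either a symbol or a string key, which matters because MCP server definitions may come from Ruby config (symbol keys) or from parsed JSON/YAML (string keys). A sketch of that lookup in isolation (the sample server hashes are illustrative assumptions):

  servers = [
    { name: "github" },          # symbol key
    { "name" => "filesystem" },  # string key
    { url: "stdio://..." }       # unnamed entry, dropped by .compact
  ]
  servers.map { |s| s[:name] || s["name"] }.compact
  #=> ["github", "filesystem"]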
@@ -21,7 +21,11 @@ class OpenStruct
  end
 
  # Sets value in result OpenStruct, handling nested OpenStruct and Hash objects
+ # Skip nil values to avoid overwriting existing values with nil
  def self.set_value(result, key, value)
+ # Skip nil values - don't overwrite existing values with nil
+ return if value.nil?
+
  if value.is_a?(OpenStruct) || value.is_a?(Hash)
  current_value = result[key]
  current_value = {} if current_value.nil?
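
The nil guard added to set_value keeps a sparse config layer from erasing values set by an earlier layer during an OpenStruct merge. A minimal re-creation of the guard's effect (the gem's real set_value also deep-merges nested OpenStruct/Hash values, omitted here):

  require "ostruct"

  def set_value(result, key, value)
    return if value.nil?   # nils never clobber existing values
    result[key] = value
  end

  config = OpenStruct.new(model: "gpt-4o", temperature: 0.7)
  set_value(config, :model, nil)        # skipped
  set_value(config, :temperature, 0.2)  # applied
  config.model        #=> "gpt-4o"
  config.temperature  #=> 0.2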
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: aia
  version: !ruby/object:Gem::Version
- version: 0.9.22
+ version: 0.9.24
  platform: ruby
  authors:
  - Dewayne VanHoozer
@@ -426,8 +426,8 @@ files:
  - lib/aia/config/defaults.rb
  - lib/aia/config/file_loader.rb
  - lib/aia/config/validator.rb
- - lib/aia/context_manager.rb
  - lib/aia/directive_processor.rb
+ - lib/aia/directives/checkpoint.rb
  - lib/aia/directives/configuration.rb
  - lib/aia/directives/execution.rb
  - lib/aia/directives/models.rb
@@ -439,6 +439,7 @@ files:
  - lib/aia/prompt_handler.rb
  - lib/aia/ruby_llm_adapter.rb
  - lib/aia/session.rb
+ - lib/aia/topic_context.rb
  - lib/aia/ui_presenter.rb
  - lib/aia/utility.rb
  - lib/aia/version.rb
@@ -477,7 +478,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.7.2
+ rubygems_version: 4.0.1
  specification_version: 4
  summary: Multi-model AI CLI with dynamic prompts, consensus responses, shell & Ruby
  integration, and seamless chat workflows.
data/lib/aia/context_manager.rb REMOVED
@@ -1,134 +0,0 @@
- # lib/aia/context_manager.rb
-
- module AIA
- # Manages the conversation context for chat sessions.
- class ContextManager
- attr_reader :context, :checkpoints
-
- # Initializes the ContextManager with an optional system prompt.
- def initialize(system_prompt: nil)
- @context = []
- @checkpoints = {}
- @checkpoint_counter = 0
- add_system_prompt(system_prompt) if system_prompt && !system_prompt.strip.empty?
- end
-
- # Adds a message to the conversation context.
- #
- # @param role [String] The role of the message sender ('user' or 'assistant').
- # @param content [String] The content of the message.
- def add_to_context(role:, content:)
- @context << { role: role, content: content }
- end
-
- # Returns the current conversation context.
- # Optionally adds the system prompt if it wasn't added during initialization
- # or needs to be re-added after clearing.
- #
- # @param system_prompt [String, nil] The system prompt to potentially prepend.
- # @return [Array<Hash>] The conversation context array.
- def get_context(system_prompt: nil)
- # Add or replace system prompt if provided and not empty
- if system_prompt && !system_prompt.strip.empty?
- add_system_prompt(system_prompt)
- end
- @context
- end
-
- # Clears the conversation context, optionally keeping the system prompt.
- #
- # @param keep_system_prompt [Boolean] Whether to retain the initial system prompt.
- def clear_context(keep_system_prompt: true)
- if keep_system_prompt && !@context.empty? && @context.first[:role] == 'system'
- @context = [@context.first]
- else
- @context = []
- end
-
- # Clear all checkpoints when clearing context
- @checkpoints.clear
- @checkpoint_counter = 0
-
- # Attempt to clear the LLM client's context as well
- begin
- if AIA.config.client && AIA.config.client.respond_to?(:clear_context)
- AIA.config.client.clear_context
- end
-
- if AIA.config.respond_to?(:llm) && AIA.config.llm && AIA.config.llm.respond_to?(:clear_context)
- AIA.config.llm.clear_context
- end
-
- if defined?(RubyLLM) && RubyLLM.respond_to?(:chat) && RubyLLM.chat.respond_to?(:clear_history)
- RubyLLM.chat.clear_history
- end
- rescue => e
- STDERR.puts "ERROR: context_manager clear_context error #{e.message}"
- end
- end
-
- # Creates a checkpoint of the current context with an optional name.
- #
- # @param name [String, nil] The name of the checkpoint. If nil, uses an incrementing integer.
- # @return [String] The name of the created checkpoint.
- def create_checkpoint(name: nil)
- if name.nil?
- @checkpoint_counter += 1
- name = @checkpoint_counter.to_s
- end
-
- # Store a deep copy of the current context and its position
- @checkpoints[name] = {
- context: @context.map(&:dup),
- position: @context.size
- }
- @last_checkpoint_name = name
- name
- end
-
- # Restores the context to a previously saved checkpoint.
- #
- # @param name [String, nil] The name of the checkpoint to restore. If nil, uses the last checkpoint.
- # @return [Boolean] True if restore was successful, false otherwise.
- def restore_checkpoint(name: nil)
- name = @last_checkpoint_name if name.nil?
-
- return false if name.nil? || !@checkpoints.key?(name)
-
- # Restore the context from the checkpoint
- checkpoint_data = @checkpoints[name]
- @context = checkpoint_data[:context].map(&:dup)
- true
- end
-
- # Returns the list of available checkpoint names.
- #
- # @return [Array<String>] The names of all checkpoints.
- def checkpoint_names
- @checkpoints.keys
- end
-
- # Returns checkpoint information mapped to context positions.
- #
- # @return [Hash<Integer, Array<String>>] Position to checkpoint names mapping.
- def checkpoint_positions
- positions = {}
- @checkpoints.each do |name, data|
- position = data[:position]
- positions[position] ||= []
- positions[position] << name
- end
- positions
- end
-
- private
-
- # Adds or replaces the system prompt at the beginning of the context.
- def add_system_prompt(system_prompt)
- # Remove existing system prompt if present
- @context.shift if !@context.empty? && @context.first[:role] == 'system'
- # Add the new system prompt at the beginning
- @context.unshift({ role: 'system', content: system_prompt })
- end
- end
- end
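
For reference, the checkpoint workflow that this deleted ContextManager provided (and that the new directives/checkpoint.rb module replaces by operating on RubyLLM's Chat.@messages) looked like this, sketched from the removed source above:

  cm = AIA::ContextManager.new(system_prompt: "You are helpful.")
  cm.add_to_context(role: "user", content: "Hello")
  name = cm.create_checkpoint               #=> "1" (auto-incremented counter)
  cm.add_to_context(role: "assistant", content: "Hi there!")
  cm.restore_checkpoint(name: name)         #=> true; rolls @context back to 2 messages
  cm.checkpoint_positions                   #=> { 2 => ["1"] }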