aia 0.9.18 → 0.9.20

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/lib/aia/session.rb CHANGED
@@ -45,7 +45,28 @@ module AIA
     end

     def initialize_components
-      @context_manager = ContextManager.new(system_prompt: AIA.config.system_prompt)
+      # For multi-model: create separate context manager per model (ADR-002 revised + ADR-005)
+      # For single-model: maintain backward compatibility with single context manager
+      if AIA.config.model.is_a?(Array) && AIA.config.model.size > 1
+        @context_managers = {}
+        AIA.config.model.each do |model_spec|
+          # Handle both old string format and new hash format (ADR-005)
+          internal_id = if model_spec.is_a?(Hash)
+                          model_spec[:internal_id]
+                        else
+                          model_spec
+                        end
+
+          @context_managers[internal_id] = ContextManager.new(
+            system_prompt: AIA.config.system_prompt
+          )
+        end
+        @context_manager = nil # Signal we're using per-model managers
+      else
+        @context_manager = ContextManager.new(system_prompt: AIA.config.system_prompt)
+        @context_managers = nil
+      end
+
       @ui_presenter = UIPresenter.new
       @directive_processor = DirectiveProcessor.new
       @chat_processor = ChatProcessorService.new(@ui_presenter, @directive_processor)
@@ -368,11 +389,29 @@ module AIA
       @chat_prompt.text = follow_up_prompt
       processed_prompt = @chat_prompt.to_s

-      @context_manager.add_to_context(role: "user", content: processed_prompt)
-      conversation = @context_manager.get_context
+      # Handle per-model contexts (ADR-002 revised)
+      if @context_managers
+        # Multi-model: add user prompt to each model's context
+        @context_managers.each_value do |ctx_mgr|
+          ctx_mgr.add_to_context(role: "user", content: processed_prompt)
+        end
+
+        # Get per-model conversations
+        conversations = {}
+        @context_managers.each do |model_name, ctx_mgr|
+          conversations[model_name] = ctx_mgr.get_context
+        end

-      @ui_presenter.display_thinking_animation
-      response_data = @chat_processor.process_prompt(conversation)
+        @ui_presenter.display_thinking_animation
+        response_data = @chat_processor.process_prompt(conversations)
+      else
+        # Single-model: use original logic
+        @context_manager.add_to_context(role: "user", content: processed_prompt)
+        conversation = @context_manager.get_context
+
+        @ui_presenter.display_thinking_animation
+        response_data = @chat_processor.process_prompt(conversation)
+      end

       # Handle new response format with metrics
       if response_data.is_a?(Hash)
@@ -386,7 +425,7 @@ module AIA
       end

       @ui_presenter.display_ai_response(content)
-
+
       # Display metrics if enabled and available (chat mode only)
       if AIA.config.show_metrics
         if multi_metrics
@@ -397,8 +436,22 @@ module AIA
           @ui_presenter.display_token_metrics(metrics)
         end
       end
-
-      @context_manager.add_to_context(role: "assistant", content: content)
+
+      # Add responses to context (ADR-002 revised)
+      if @context_managers
+        # Multi-model: parse combined response and add each model's response to its own context
+        parsed_responses = parse_multi_model_response(content)
+        parsed_responses.each do |model_name, model_response|
+          @context_managers[model_name]&.add_to_context(
+            role: "assistant",
+            content: model_response
+          )
+        end
+      else
+        # Single-model: add response to single context
+        @context_manager.add_to_context(role: "assistant", content: content)
+      end
+
       @chat_processor.speak(content)

       @ui_presenter.display_separator
@@ -406,7 +459,10 @@ module AIA
     end

     def process_chat_directive(follow_up_prompt)
-      directive_output = @directive_processor.process(follow_up_prompt, @context_manager)
+      # For multi-model, use first context manager for directives (ADR-002 revised)
+      # TODO: Consider if directives should affect all contexts or just one
+      context_for_directive = @context_managers ? @context_managers.values.first : @context_manager
+      directive_output = @directive_processor.process(follow_up_prompt, context_for_directive)

       return handle_clear_directive if follow_up_prompt.strip.start_with?("//clear")
       return handle_checkpoint_directive(directive_output) if follow_up_prompt.strip.start_with?("//checkpoint")
@@ -417,13 +473,16 @@ module AIA
     end

     def handle_clear_directive
-      # The directive processor has called context_manager.clear_context
-      # but we need to also clear the LLM client's context
-
-      # First, clear the context manager's context
-      @context_manager.clear_context(keep_system_prompt: true)
+      # Clear context manager(s) - ADR-002 revised
+      if @context_managers
+        # Multi-model: clear all context managers
+        @context_managers.each_value { |ctx_mgr| ctx_mgr.clear_context(keep_system_prompt: true) }
+      else
+        # Single-model: clear single context manager
+        @context_manager.clear_context(keep_system_prompt: true)
+      end

-      # Second, try clearing the client's context
+      # Try clearing the client's context
       if AIA.config.client && AIA.config.client.respond_to?(:clear_context)
         begin
           AIA.config.client.clear_context
@@ -446,10 +505,9 @@ module AIA
     end

     def handle_restore_directive(directive_output)
-      # If the restore was successful, we also need to refresh the client's context
+      # If the restore was successful, we also need to refresh the client's context - ADR-002 revised
       if directive_output.start_with?("Context restored")
         # Clear the client's context without reinitializing the entire adapter
-        # This avoids the risk of exiting if model initialization fails
         if AIA.config.client && AIA.config.client.respond_to?(:clear_context)
           begin
             AIA.config.client.clear_context
@@ -459,17 +517,9 @@ module AIA
           end
         end

-        # Rebuild the conversation in the LLM client from the restored context
-        # This ensures the LLM's internal state matches what we restored
-        if AIA.config.client && @context_manager
-          begin
-            restored_context = @context_manager.get_context
-            # The client's context has been cleared, so we can safely continue
-            # The next interaction will use the restored context from context_manager
-          rescue => e
-            STDERR.puts "Warning: Error syncing restored context: #{e.message}"
-          end
-        end
+        # Note: For multi-model, only the first context manager was used for restore
+        # This is a limitation of the current directive system
+        # TODO: Consider supporting restore for all context managers
       end

       @ui_presenter.display_info(directive_output)
@@ -485,6 +535,48 @@ module AIA
       "I executed this directive: #{follow_up_prompt}\nHere's the output: #{directive_output}\nLet's continue our conversation."
     end

+    # Parse multi-model response into per-model responses (ADR-002 revised + ADR-005)
+    # Input: "from: lms/model #2 (role)\nHabari!\n\nfrom: ollama/model\nKaixo!"
+    # Output: {"lms/model#2" => "Habari!", "ollama/model" => "Kaixo!"}
+    def parse_multi_model_response(combined_response)
+      return {} if combined_response.nil? || combined_response.empty?
+
+      responses = {}
+      current_model = nil
+      current_content = []
+
+      combined_response.each_line do |line|
+        if line =~ /^from:\s+(.+)$/
+          # Save previous model's response
+          if current_model
+            responses[current_model] = current_content.join.strip
+          end
+
+          # Extract internal_id from display name (ADR-005)
+          # Display format: "model_name #N (role)" or "model_name (role)" or "model_name #N" or "model_name"
+          display_name = $1.strip
+
+          # Remove role part: " (role_name)"
+          internal_id = display_name.sub(/\s+\([^)]+\)\s*$/, '')
+
+          # Remove space before instance number: "model #2" -> "model#2"
+          internal_id = internal_id.sub(/\s+#/, '#')
+
+          current_model = internal_id
+          current_content = []
+        elsif current_model
+          current_content << line
+        end
+      end
+
+      # Save last model's response
+      if current_model
+        responses[current_model] = current_content.join.strip
+      end
+
+      responses
+    end
+
     def cleanup_chat_prompt
       if @chat_prompt_id
         puts "[DEBUG] Cleaning up chat prompt: #{@chat_prompt_id}" if AIA.debug?
data/lib/aia/utility.rb CHANGED
@@ -28,11 +28,29 @@ module AIA

       mcp_version = defined?(RubyLLM::MCP::VERSION) ? " MCP v" + RubyLLM::MCP::VERSION : ''

+      # Extract model names from config (handles hash format from ADR-005)
+      model_display = if AIA.config&.model
+                        models = AIA.config.model
+                        if models.is_a?(String)
+                          models
+                        elsif models.is_a?(Array)
+                          if models.first.is_a?(Hash)
+                            models.map { |spec| spec[:model] }.join(', ')
+                          else
+                            models.join(', ')
+                          end
+                        else
+                          models.to_s
+                        end
+                      else
+                        'unknown-model'
+                      end
+
       puts <<-ROBOT

         ,      ,
         (\\____/)  AI Assistant (v#{AIA::VERSION}) is Online
-         (_oo_)    #{AIA.config&.model || 'unknown-model'}#{supports_tools? ? ' (supports tools)' : ''}
+         (_oo_)    #{model_display}#{supports_tools? ? ' (supports tools)' : ''}
           (O)       using #{AIA.config&.adapter || 'unknown-adapter'} (v#{RubyLLM::VERSION}#{mcp_version})
         __||__    \\)   model db was last refreshed on
      [/______\\]  /     #{AIA.config&.last_refresh || 'unknown'}
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: aia
 version: !ruby/object:Gem::Version
-  version: 0.9.18
+  version: 0.9.20
 platform: ruby
 authors:
 - Dewayne VanHoozer