aia 0.9.24 → 0.10.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. checksums.yaml +4 -4
  2. data/.version +1 -1
  3. data/CHANGELOG.md +84 -3
  4. data/README.md +179 -59
  5. data/bin/aia +6 -0
  6. data/docs/cli-reference.md +145 -72
  7. data/docs/configuration.md +156 -19
  8. data/docs/examples/tools/index.md +2 -2
  9. data/docs/faq.md +11 -11
  10. data/docs/guides/available-models.md +11 -11
  11. data/docs/guides/basic-usage.md +18 -17
  12. data/docs/guides/chat.md +57 -11
  13. data/docs/guides/executable-prompts.md +15 -15
  14. data/docs/guides/first-prompt.md +2 -2
  15. data/docs/guides/getting-started.md +6 -6
  16. data/docs/guides/image-generation.md +24 -24
  17. data/docs/guides/local-models.md +2 -2
  18. data/docs/guides/models.md +96 -18
  19. data/docs/guides/tools.md +4 -4
  20. data/docs/installation.md +2 -2
  21. data/docs/prompt_management.md +11 -11
  22. data/docs/security.md +3 -3
  23. data/docs/workflows-and-pipelines.md +1 -1
  24. data/examples/README.md +6 -6
  25. data/examples/headlines +3 -3
  26. data/lib/aia/aia_completion.bash +2 -2
  27. data/lib/aia/aia_completion.fish +4 -4
  28. data/lib/aia/aia_completion.zsh +2 -2
  29. data/lib/aia/chat_processor_service.rb +31 -21
  30. data/lib/aia/config/cli_parser.rb +403 -403
  31. data/lib/aia/config/config_section.rb +87 -0
  32. data/lib/aia/config/defaults.yml +219 -0
  33. data/lib/aia/config/defaults_loader.rb +147 -0
  34. data/lib/aia/config/mcp_parser.rb +151 -0
  35. data/lib/aia/config/model_spec.rb +67 -0
  36. data/lib/aia/config/validator.rb +185 -136
  37. data/lib/aia/config.rb +336 -17
  38. data/lib/aia/directive_processor.rb +14 -6
  39. data/lib/aia/directives/configuration.rb +24 -10
  40. data/lib/aia/directives/models.rb +3 -4
  41. data/lib/aia/directives/utility.rb +3 -2
  42. data/lib/aia/directives/web_and_file.rb +50 -47
  43. data/lib/aia/logger.rb +328 -0
  44. data/lib/aia/prompt_handler.rb +18 -22
  45. data/lib/aia/ruby_llm_adapter.rb +572 -69
  46. data/lib/aia/session.rb +9 -8
  47. data/lib/aia/ui_presenter.rb +20 -16
  48. data/lib/aia/utility.rb +50 -18
  49. data/lib/aia.rb +91 -66
  50. data/lib/extensions/ruby_llm/modalities.rb +2 -0
  51. data/mcp_servers/apple-mcp.json +8 -0
  52. data/mcp_servers/mcp_server_chart.json +11 -0
  53. data/mcp_servers/playwright_one.json +8 -0
  54. data/mcp_servers/playwright_two.json +8 -0
  55. data/mcp_servers/tavily_mcp_server.json +8 -0
  56. metadata +83 -25
  57. data/lib/aia/config/base.rb +0 -308
  58. data/lib/aia/config/defaults.rb +0 -91
  59. data/lib/aia/config/file_loader.rb +0 -163
  60. data/mcp_servers/imcp.json +0 -7
  61. data/mcp_servers/launcher.json +0 -11
  62. data/mcp_servers/timeserver.json +0 -8
@@ -4,7 +4,7 @@ module AIA
4
4
  class ChatProcessorService
5
5
  def initialize(ui_presenter, directive_processor = nil)
6
6
  @ui_presenter = ui_presenter
7
- @speaker = AIA.speak? ? AiClient.new(AIA.config.speech_model) : nil
7
+ @speaker = AIA.speak? ? AiClient.new(AIA.config.audio.speech_model) : nil
8
8
  @directive_processor = directive_processor
9
9
  end
10
10
 
@@ -12,10 +12,10 @@ module AIA
12
12
  def speak(text)
13
13
  return unless AIA.speak?
14
14
 
15
- @speaker ||= AiClient.new(AIA.config.speech_model) if AIA.config.speech_model
15
+ @speaker ||= AiClient.new(AIA.config.audio.speech_model) if AIA.config.audio.speech_model
16
16
 
17
17
  if @speaker
18
- `#{AIA.config.speak_command} #{@speaker.speak(text).path}`
18
+ `#{AIA.config.audio.speak_command} #{@speaker.speak(text).path}`
19
19
  else
20
20
  puts "Warning: Unable to speak. Speech model not configured properly."
21
21
  end
@@ -29,15 +29,15 @@ module AIA
29
29
  end
30
30
 
31
31
  # Debug output to understand what we're receiving
32
- puts "[DEBUG ChatProcessor] Result class: #{result.class}" if AIA.config.debug
33
- puts "[DEBUG ChatProcessor] Result inspect: #{result.inspect[0..500]}..." if AIA.config.debug
32
+ logger.debug("Result received", result_class: result.class.name)
33
+ logger.debug("Result details", inspect: result.inspect[0..500])
34
34
 
35
35
  # Preserve token information if available for metrics
36
36
  if result.is_a?(String)
37
- puts "[DEBUG ChatProcessor] Processing as String" if AIA.config.debug
37
+ logger.debug("Processing result", type: "String")
38
38
  { content: result, metrics: nil }
39
39
  elsif result.respond_to?(:multi_model?) && result.multi_model?
40
- puts "[DEBUG ChatProcessor] Processing as multi-model response" if AIA.config.debug
40
+ logger.debug("Processing result", type: "multi-model response")
41
41
  # Handle multi-model response with metrics
42
42
  {
43
43
  content: result.content,
@@ -45,7 +45,7 @@ module AIA
45
45
  multi_metrics: result.metrics_list
46
46
  }
47
47
  elsif result.respond_to?(:content)
48
- puts "[DEBUG ChatProcessor] Processing as standard response with content method" if AIA.config.debug
48
+ logger.debug("Processing result", type: "standard response with content method")
49
49
  # Standard response object with content method
50
50
  {
51
51
  content: result.content,
@@ -56,7 +56,7 @@ module AIA
56
56
  }
57
57
  }
58
58
  else
59
- puts "[DEBUG ChatProcessor] Processing as fallback (unexpected type)" if AIA.config.debug
59
+ logger.debug("Processing result", type: "fallback (unexpected type)")
60
60
  # Fallback for unexpected response types
61
61
  { content: result.to_s, metrics: nil }
62
62
  end
@@ -71,15 +71,15 @@ module AIA
71
71
  # Handle per-model conversations (Hash) or single conversation (Array) - ADR-002 revised
72
72
  if conversation_or_conversations.is_a?(Hash)
73
73
  # Multi-model with per-model contexts: pass Hash directly to adapter
74
- puts "[DEBUG ChatProcessor] Sending per-model conversations to client" if AIA.config.debug
74
+ logger.debug("Sending per-model conversations to client")
75
75
  result = AIA.client.chat(conversation_or_conversations)
76
76
  else
77
77
  # Single conversation for single model
78
- puts "[DEBUG ChatProcessor] Sending conversation to client: #{conversation_or_conversations.inspect[0..500]}..." if AIA.config.debug
78
+ logger.debug("Sending conversation to client", conversation: conversation_or_conversations.inspect[0..500])
79
79
  result = AIA.client.chat(conversation_or_conversations)
80
80
  end
81
81
 
82
- puts "[DEBUG ChatProcessor] Client returned: #{result.class} - #{result.inspect[0..500]}..." if AIA.config.debug
82
+ logger.debug("Client returned", result_class: result.class.name, result: result.inspect[0..500])
83
83
  result
84
84
  end
85
85
 
@@ -88,11 +88,17 @@ module AIA
88
88
  # With multiple models, we don't need to change the model in the same way
89
89
  # The RubyLLMAdapter now handles multiple models internally
90
90
  # This method is kept for backward compatibility but may not be needed
91
- return if AIA.config.model.is_a?(Array)
91
+ models = AIA.config.models
92
+ return if models.is_a?(Array) && models.size > 1
92
93
 
93
- client_model = AIA.client.model.id # RubyLLM::Model instance
94
+ return unless AIA.client.respond_to?(:model) && AIA.client.model.respond_to?(:id)
95
+ client_model = AIA.client.model.id
94
96
 
95
- unless AIA.config.model.downcase.include?(client_model.downcase)
97
+ # Get the first model name for comparison
98
+ first_model = models.first
99
+ model_name = first_model.respond_to?(:name) ? first_model.name : first_model.to_s
100
+
101
+ unless model_name.downcase.include?(client_model.downcase)
96
102
  AIA.client = AIA.client.class.new
97
103
  end
98
104
  end
@@ -101,13 +107,15 @@ module AIA
101
107
  def output_response(response)
102
108
  speak(response)
103
109
 
110
+ out_file = AIA.config.output.file
111
+
104
112
  # Output to STDOUT or file based on out_file configuration
105
- if AIA.config.out_file.nil? || 'STDOUT' == AIA.config.out_file.upcase
113
+ if out_file.nil? || 'STDOUT' == out_file.upcase
106
114
  print "\nAI:\n "
107
115
  puts response
108
116
  else
109
117
  mode = AIA.append? ? 'a' : 'w'
110
- File.open(AIA.config.out_file, mode) do |file|
118
+ File.open(out_file, mode) do |file|
111
119
  file.puts "\nAI: "
112
120
  # Handle multi-line responses by adding proper indentation
113
121
  response_lines = response.to_s.split("\n")
@@ -117,8 +125,9 @@ module AIA
117
125
  end
118
126
  end
119
127
 
120
- if AIA.config.log_file
121
- File.open(AIA.config.log_file, 'a') do |f|
128
+ history_file = AIA.config.output.history_file
129
+ if history_file
130
+ File.open(history_file, 'a') do |f|
122
131
  f.puts "=== #{Time.now} ==="
123
132
  f.puts "Prompt: #{AIA.config.prompt_id}"
124
133
  f.puts "Response: #{response}"
@@ -140,10 +149,11 @@ module AIA
140
149
  def determine_operation_type
141
150
  # With multiple models, determine operation type from the first model
142
151
  # or provide a generic description
143
- if AIA.config.model.is_a?(Array) && AIA.config.model.size > 1
152
+ models = AIA.config.models
153
+ if models.is_a?(Array) && models.size > 1
144
154
  "MULTI-MODEL PROCESSING"
145
155
  else
146
- mode = AIA.config.client.model.modalities
156
+ mode = AIA.client.model.modalities
147
157
  mode.input.join(',') + " TO " + mode.output.join(',')
148
158
  end
149
159
  end